From 2e8f3cbafe8882cac21dda20b99340330efeee5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Sun, 25 Sep 2016 14:44:00 +0200 Subject: [PATCH 001/100] Initial commit. Very early stages of algorithm development. --- gensim/models/__init__.py | 1 + gensim/models/atvb.py | 259 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 260 insertions(+) create mode 100644 gensim/models/atvb.py diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index d15fac3a3c..8c58059ae6 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -16,6 +16,7 @@ from .ldamulticore import LdaMulticore from .phrases import Phrases from .normmodel import NormModel +from .atvb import AtVb from . import wrappers diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py new file mode 100644 index 0000000000..10cf4deb61 --- /dev/null +++ b/gensim/models/atvb.py @@ -0,0 +1,259 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2011 Radim Rehurek +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" + +""" + +import logging +import numpy +import numbers + +from gensim import utils +from gensim.models.ldamodel import dirichlet_expectation +from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. +from six.moves import xrange + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger('gensim.models.atmodel') + +def get_random_state(seed): + """ Turn seed into a np.random.RandomState instance. 
+ Method originally from maciejkula/glove-python, and written by @joshloyal + """ + if seed is None or seed is numpy.random: + return numpy.random.mtrand._rand + if isinstance(seed, (numbers.Integral, numpy.integer)): + return numpy.random.RandomState(seed) + if isinstance(seed, numpy.random.RandomState): + return seed + raise ValueError('%r cannot be used to seed a numpy.random.RandomState' + ' instance' % seed) + + +class AtVb: + """ + Train the author-topic model using variational Bayes. + """ + # TODO: inherit interfaces.TransformationABC. + + def __init__(self, corpus=None, num_topics=100, id2word=None, + author2doc=None, doc2author=None, threshold=0.001, + iterations=10, alpha=None, eta=None, + eval_every=10): + + if alpha is None: + alpha = 50 / num_topics + if eta is None: + eta = 0.01 + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + self.corpus = corpus + self.iterations = iterations + self.num_topics = num_topics + self.threshold = threshold + self.alpha = alpha + self.eta = eta + self.author2doc = author2doc + self.doc2author = doc2author + self.num_docs = len(corpus) + self.num_authors = len(doc2author) + self.eval_every = eval_every + + self.random_state = get_random_state(random_state) + + if corpus is not None and author2doc is not None and doc2author is not None: + self.inference(corpus, author2doc, doc2author) + + def inference(corpus=None, author2doc=None, doc2author=None): + if corpus is None: + 
corpus = self.corpus + + # Initial value of gamma and lambda. + # NOTE: parameters of gamma distribution same as in `ldamodel`. + var_gamma_init = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + var_lambda_init = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + + var_gamma = numpy.zeros((self.num_authors, self.num_topics)) + for a in xrange(self.num_authors): + var_gamma[a, :] = var_gamma_init + + var_lambda = numpy.zeros((self.num_authors, self.num_topics)) + for k in xrange(self.num_topics): + var_lambda[k, :] = var_lambda_init + + # Initialize mu. + # mu is 1/|A_d| if a is in A_d, zero otherwise. + mu = numpy.zeros((self.num_docs, self.num_terms, self.num_authors)) + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + for v in ids: + authors_d = doc2author[d] # List of author IDs for document d. + for a in authors_d: + mu[d, v, a] = 1 / len(authors_d) + + # TODO: consider how to vectorize opterations as much as + # possible. + # TODO: check vector and matrix dimensions, and ensure that + # things are multiplied along the correct dimensions. + # TODO: rename variational parameters to "var_[parameter name]". + + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + likelihood = eval_likelihood(docs=corpus, Elogtheta, Elogbeta) + for iteration in xrange(self.iterations): + # Update phi. + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = doc2author[d] # List of author IDs for document d. + + expElogbetad = expElogbeta[:, ids] + + for v in ids: + for k in xrange(self.num_topics): + # Average Elogtheta over authors a in document d. 
+ avgElogtheta = 0.0 + for a in authors_d: + avgElogtheta += var_mu[d, v, a] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute phi. + # TODO: avoid computing phi if possible. + var_phi[d, v, k] = expavgElogtheta * expElogbetad.T[k, v] # FIXME: may have an alignment issue here. + # Normalize phi. + (log_var_phi, _) = log_normalize(var_phi[d, v, k]) + var_phi[d, v, k] = numpy.exp(log_var_phi) + + # Update mu. + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = doc2author[d] # List of author IDs for document d. + + # Prior probability of observing author a in document d is one + # over the number of authors in document d. + author_prior_prob = 1.0 / len(authors_d) + for v in ids: + for a in authors_d: + # Average Elogtheta over topics k. + avgElogtheta = 0.0 + for k in xrange(self.num_topics): + avgElogtheta += var_phi[d, v, k] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute mu. + # TODO: avoid computing mu if possible. + var_mu[d, v, a] = author_prior_prob * avgexpElogtheta[a, k] # FIXME: may have an alignment issue here. + # Normalize mu. + (log_var_mu, _) = log_normalize(var_mu[d, v, a]) + var_mu[d, v, a] = numpy.exp(log_var_mu) + + # Update gamma. + for a in xrange(self.num_authors): + for k in xrange(self.num_topics): + docs_a = author2doc[a] + var_gamma[a, k] = 0.0 + var_gamma[a, k] += self.alpha + for d in docs_a: + doc = corpus[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + for v in ids: + var_gamma[a, k] += cts[v] * var_mu[d, v, a] * var_phi[d, v, k] + + # Update Elogtheta, since gamma has been updated. + Elogtheta = dirichlet_expectation(var_gamma) + + # Update lambda. 
+ for k in xrange(self.num_topics): + for v in xrange(self.num_terms): + var_lambda[k, v] = 0.0 + var_lambda[k, v] += self.eta + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + for v in ids: + var_lambda += cts[v] * var_phi[d, v, k] + + # Update Elogbeta, since lambda has been updated. + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + # Evaluate likelihood. + if (iteration + 1) % self.eval_every == 0: + prev_likelihood = likelihood + likelihood = eval_likelihood(docs=corpus, Elogtheta, Elogbeta) + if numpy.abs(likelihood - prev_likelihood) / prev_likelihood < self.threshold: + break + # End of update loop (iterations). + + return var_gamma, var_lambda + + def eval_likelihood(doc_ids=None, Elogtheta, Elogbeta): + """ + Compute the conditional liklihood of a set of documents, + + p(D | theta, beta, A). + + theta and beta are estimated by exponentiating the expectations of + log theta and log beta. + """ + + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + + + if doc_ids is None: + docs = corpus + else: + docs = [corpus[d] for d in doc_ids] + + likelihood = 0.0 + for d, doc in enumerate(docs): + authors_d = self.doc2author[d] + likelihood_d = 0.0 + for v in ids: + for k in self.num_topics: + for a in authors_d: + likelihood_d += cnt[v] * numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + author_prior_prob = 1.0 / len(authors_d) + likelihood_d *= author_prior_prob + likelihood += likelihood_d + + # TODO: can I do this? + # bound += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) + + + + + + + From a21059e8d2a50ff86b9a643335e0f56f54b26e54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Sun, 25 Sep 2016 15:38:41 +0200 Subject: [PATCH 002/100] Fixed some errors. 
--- gensim/models/atvb.py | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 10cf4deb61..2d8dadba0d 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -5,7 +5,7 @@ # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html """ - +Author-topic model. """ import logging @@ -28,17 +28,17 @@ logger = logging.getLogger('gensim.models.atmodel') def get_random_state(seed): - """ Turn seed into a np.random.RandomState instance. + """ Turn seed into a np.random.RandomState instance. Method originally from maciejkula/glove-python, and written by @joshloyal - """ - if seed is None or seed is numpy.random: - return numpy.random.mtrand._rand - if isinstance(seed, (numbers.Integral, numpy.integer)): - return numpy.random.RandomState(seed) - if isinstance(seed, numpy.random.RandomState): + """ + if seed is None or seed is numpy.random: + return numpy.random.mtrand._rand + if isinstance(seed, (numbers.Integral, numpy.integer)): + return numpy.random.RandomState(seed) + if isinstance(seed, numpy.random.RandomState): return seed - raise ValueError('%r cannot be used to seed a numpy.random.RandomState' - ' instance' % seed) + raise ValueError('%r cannot be used to seed a numpy.random.RandomState' + ' instance' % seed) class AtVb: @@ -48,9 +48,9 @@ class AtVb: # TODO: inherit interfaces.TransformationABC. 
def __init__(self, corpus=None, num_topics=100, id2word=None, - author2doc=None, doc2author=None, threshold=0.001, - iterations=10, alpha=None, eta=None, - eval_every=10): + author2doc=None, doc2author=None, threshold=0.001, + iterations=10, alpha=None, eta=None, + eval_every=10): if alpha is None: alpha = 50 / num_topics @@ -61,7 +61,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') - if self.id2word is None: + if self.id2word is None: logger.warning("no word id mapping provided; initializing from corpus, assuming identity") self.id2word = utils.dict_from_corpus(corpus) self.num_terms = len(self.id2word) @@ -71,7 +71,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.num_terms = 0 if self.num_terms == 0: - raise ValueError("cannot compute LDA over an empty collection (no terms)") + raise ValueError("cannot compute LDA over an empty collection (no terms)") self.corpus = corpus self.iterations = iterations @@ -90,16 +90,16 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, if corpus is not None and author2doc is not None and doc2author is not None: self.inference(corpus, author2doc, doc2author) - def inference(corpus=None, author2doc=None, doc2author=None): + def inference(self, corpus=None, author2doc=None, doc2author=None): if corpus is None: corpus = self.corpus # Initial value of gamma and lambda. # NOTE: parameters of gamma distribution same as in `ldamodel`. var_gamma_init = self.random_state.gamma(100., 1. / 100., - (self.num_authors, self.num_topics)) + (self.num_authors, self.num_topics)) var_lambda_init = self.random_state.gamma(100., 1. 
/ 100., - (self.num_topics, self.num_terms)) + (self.num_topics, self.num_terms)) var_gamma = numpy.zeros((self.num_authors, self.num_topics)) for a in xrange(self.num_authors): @@ -128,7 +128,7 @@ def inference(corpus=None, author2doc=None, doc2author=None): Elogtheta = dirichlet_expectation(var_gamma) Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - likelihood = eval_likelihood(docs=corpus, Elogtheta, Elogbeta) + likelihood = eval_likelihood(Elogtheta, Elogbeta) for iteration in xrange(self.iterations): # Update phi. for d, doc in enumerate(corpus): @@ -211,14 +211,14 @@ def inference(corpus=None, author2doc=None, doc2author=None): # Evaluate likelihood. if (iteration + 1) % self.eval_every == 0: prev_likelihood = likelihood - likelihood = eval_likelihood(docs=corpus, Elogtheta, Elogbeta) + likelihood = eval_likelihood(Elogtheta, Elogbeta) if numpy.abs(likelihood - prev_likelihood) / prev_likelihood < self.threshold: break # End of update loop (iterations). return var_gamma, var_lambda - def eval_likelihood(doc_ids=None, Elogtheta, Elogbeta): + def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): """ Compute the conditional liklihood of a set of documents, From a9bddaac6b4957e32c2200a472c9edaaca2b2539 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 27 Sep 2016 14:18:29 +0200 Subject: [PATCH 003/100] Added online algorithm, removed batch algorithm. 
--- gensim/models/__init__.py | 2 +- gensim/models/atvb.py | 259 ---------------------------------- gensim/models/onlineatvb.py | 273 ++++++++++++++++++++++++++++++++++++ 3 files changed, 274 insertions(+), 260 deletions(-) delete mode 100644 gensim/models/atvb.py create mode 100644 gensim/models/onlineatvb.py diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index 8c58059ae6..9e094c63ce 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -16,7 +16,7 @@ from .ldamulticore import LdaMulticore from .phrases import Phrases from .normmodel import NormModel -from .atvb import AtVb +from .onlineatvb import OnlineAtVb from . import wrappers diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py deleted file mode 100644 index 2d8dadba0d..0000000000 --- a/gensim/models/atvb.py +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (C) 2011 Radim Rehurek -# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html - -""" -Author-topic model. -""" - -import logging -import numpy -import numbers - -from gensim import utils -from gensim.models.ldamodel import dirichlet_expectation -from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. -from six.moves import xrange - -# log(sum(exp(x))) that tries to avoid overflow -try: - # try importing from here if older scipy is installed - from scipy.maxentropy import logsumexp -except ImportError: - # maxentropy has been removed in recent releases, logsumexp now in misc - from scipy.misc import logsumexp - -logger = logging.getLogger('gensim.models.atmodel') - -def get_random_state(seed): - """ Turn seed into a np.random.RandomState instance. 
- Method originally from maciejkula/glove-python, and written by @joshloyal - """ - if seed is None or seed is numpy.random: - return numpy.random.mtrand._rand - if isinstance(seed, (numbers.Integral, numpy.integer)): - return numpy.random.RandomState(seed) - if isinstance(seed, numpy.random.RandomState): - return seed - raise ValueError('%r cannot be used to seed a numpy.random.RandomState' - ' instance' % seed) - - -class AtVb: - """ - Train the author-topic model using variational Bayes. - """ - # TODO: inherit interfaces.TransformationABC. - - def __init__(self, corpus=None, num_topics=100, id2word=None, - author2doc=None, doc2author=None, threshold=0.001, - iterations=10, alpha=None, eta=None, - eval_every=10): - - if alpha is None: - alpha = 50 / num_topics - if eta is None: - eta = 0.01 - - self.id2word = id2word - if corpus is None and self.id2word is None: - raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') - - if self.id2word is None: - logger.warning("no word id mapping provided; initializing from corpus, assuming identity") - self.id2word = utils.dict_from_corpus(corpus) - self.num_terms = len(self.id2word) - elif len(self.id2word) > 0: - self.num_terms = 1 + max(self.id2word.keys()) - else: - self.num_terms = 0 - - if self.num_terms == 0: - raise ValueError("cannot compute LDA over an empty collection (no terms)") - - self.corpus = corpus - self.iterations = iterations - self.num_topics = num_topics - self.threshold = threshold - self.alpha = alpha - self.eta = eta - self.author2doc = author2doc - self.doc2author = doc2author - self.num_docs = len(corpus) - self.num_authors = len(doc2author) - self.eval_every = eval_every - - self.random_state = get_random_state(random_state) - - if corpus is not None and author2doc is not None and doc2author is not None: - self.inference(corpus, author2doc, doc2author) - - def inference(self, corpus=None, author2doc=None, doc2author=None): - if corpus is 
None: - corpus = self.corpus - - # Initial value of gamma and lambda. - # NOTE: parameters of gamma distribution same as in `ldamodel`. - var_gamma_init = self.random_state.gamma(100., 1. / 100., - (self.num_authors, self.num_topics)) - var_lambda_init = self.random_state.gamma(100., 1. / 100., - (self.num_topics, self.num_terms)) - - var_gamma = numpy.zeros((self.num_authors, self.num_topics)) - for a in xrange(self.num_authors): - var_gamma[a, :] = var_gamma_init - - var_lambda = numpy.zeros((self.num_authors, self.num_topics)) - for k in xrange(self.num_topics): - var_lambda[k, :] = var_lambda_init - - # Initialize mu. - # mu is 1/|A_d| if a is in A_d, zero otherwise. - mu = numpy.zeros((self.num_docs, self.num_terms, self.num_authors)) - for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - for v in ids: - authors_d = doc2author[d] # List of author IDs for document d. - for a in authors_d: - mu[d, v, a] = 1 / len(authors_d) - - # TODO: consider how to vectorize opterations as much as - # possible. - # TODO: check vector and matrix dimensions, and ensure that - # things are multiplied along the correct dimensions. - # TODO: rename variational parameters to "var_[parameter name]". - - Elogtheta = dirichlet_expectation(var_gamma) - Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) - likelihood = eval_likelihood(Elogtheta, Elogbeta) - for iteration in xrange(self.iterations): - # Update phi. - for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - authors_d = doc2author[d] # List of author IDs for document d. - - expElogbetad = expElogbeta[:, ids] - - for v in ids: - for k in xrange(self.num_topics): - # Average Elogtheta over authors a in document d. 
- avgElogtheta = 0.0 - for a in authors_d: - avgElogtheta += var_mu[d, v, a] * Elogtheta[a, k] - expavgElogtheta = numpy.exp(avgElogtheta) - - # Compute phi. - # TODO: avoid computing phi if possible. - var_phi[d, v, k] = expavgElogtheta * expElogbetad.T[k, v] # FIXME: may have an alignment issue here. - # Normalize phi. - (log_var_phi, _) = log_normalize(var_phi[d, v, k]) - var_phi[d, v, k] = numpy.exp(log_var_phi) - - # Update mu. - for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - authors_d = doc2author[d] # List of author IDs for document d. - - # Prior probability of observing author a in document d is one - # over the number of authors in document d. - author_prior_prob = 1.0 / len(authors_d) - for v in ids: - for a in authors_d: - # Average Elogtheta over topics k. - avgElogtheta = 0.0 - for k in xrange(self.num_topics): - avgElogtheta += var_phi[d, v, k] * Elogtheta[a, k] - expavgElogtheta = numpy.exp(avgElogtheta) - - # Compute mu. - # TODO: avoid computing mu if possible. - var_mu[d, v, a] = author_prior_prob * avgexpElogtheta[a, k] # FIXME: may have an alignment issue here. - # Normalize mu. - (log_var_mu, _) = log_normalize(var_mu[d, v, a]) - var_mu[d, v, a] = numpy.exp(log_var_mu) - - # Update gamma. - for a in xrange(self.num_authors): - for k in xrange(self.num_topics): - docs_a = author2doc[a] - var_gamma[a, k] = 0.0 - var_gamma[a, k] += self.alpha - for d in docs_a: - doc = corpus[d] - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - for v in ids: - var_gamma[a, k] += cts[v] * var_mu[d, v, a] * var_phi[d, v, k] - - # Update Elogtheta, since gamma has been updated. - Elogtheta = dirichlet_expectation(var_gamma) - - # Update lambda. 
- for k in xrange(self.num_topics): - for v in xrange(self.num_terms): - var_lambda[k, v] = 0.0 - var_lambda[k, v] += self.eta - for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - for v in ids: - var_lambda += cts[v] * var_phi[d, v, k] - - # Update Elogbeta, since lambda has been updated. - Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) - - # Evaluate likelihood. - if (iteration + 1) % self.eval_every == 0: - prev_likelihood = likelihood - likelihood = eval_likelihood(Elogtheta, Elogbeta) - if numpy.abs(likelihood - prev_likelihood) / prev_likelihood < self.threshold: - break - # End of update loop (iterations). - - return var_gamma, var_lambda - - def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): - """ - Compute the conditional liklihood of a set of documents, - - p(D | theta, beta, A). - - theta and beta are estimated by exponentiating the expectations of - log theta and log beta. - """ - - # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. - - - if doc_ids is None: - docs = corpus - else: - docs = [corpus[d] for d in doc_ids] - - likelihood = 0.0 - for d, doc in enumerate(docs): - authors_d = self.doc2author[d] - likelihood_d = 0.0 - for v in ids: - for k in self.num_topics: - for a in authors_d: - likelihood_d += cnt[v] * numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) - author_prior_prob = 1.0 / len(authors_d) - likelihood_d *= author_prior_prob - likelihood += likelihood_d - - # TODO: can I do this? 
- # bound += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) - - - - - - - diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py new file mode 100644 index 0000000000..f78ce762e2 --- /dev/null +++ b/gensim/models/onlineatvb.py @@ -0,0 +1,273 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2011 Radim Rehurek +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Author-topic model. +""" + +import pdb +from pdb import set_trace as st + +import logging +import numpy +import numbers + +from gensim import utils +from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. +from six.moves import xrange + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger(__name__) + + +class OnlineAtVb: + """ + Train the author-topic model using online variational Bayes. + """ + # TODO: inherit interfaces.TransformationABC. + + def __init__(self, corpus=None, num_topics=100, id2word=None, + author2doc=None, doc2author=None, threshold=0.001, + iterations=10, alpha=None, eta=None, decay=0.5, offset=1.0, + eval_every=1, random_state=None): + + # TODO: require only author2doc OR doc2author, and construct the missing one automatically. 
+ + if alpha is None: + alpha = 50 / num_topics + if eta is None: + eta = 0.01 + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + self.corpus = corpus + self.iterations = iterations + self.num_topics = num_topics + self.threshold = threshold + self.alpha = alpha + self.eta = eta + self.decay = decay + self.offset = offset + self.author2doc = author2doc + self.doc2author = doc2author + self.num_docs = len(corpus) + self.num_authors = len(author2doc) + self.eval_every = eval_every + self.random_state = random_state + + # TODO: find a way out of this nonsense. + self.authorid2idx = dict(zip(list(author2doc.keys()), xrange(self.num_authors))) + self.authoridx2id = dict(zip(xrange(self.num_authors), list(author2doc.keys()))) + + self.random_state = get_random_state(random_state) + + if corpus is not None and author2doc is not None and doc2author is not None: + self.inference(corpus, author2doc, doc2author) + + def rho(self, iteration): + return pow(self.offset + iteration, -self.decay) + + def inference(self, corpus=None, author2doc=None, doc2author=None): + if corpus is None: + corpus = self.corpus.copy() + + # Initial values of gamma and lambda. + # NOTE: parameters of gamma distribution same as in `ldamodel`. + init_gamma = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + init_lambda = self.random_state.gamma(100., 1. 
/ 100., + (self.num_topics, self.num_terms)) + + converged = 0 + + # TODO: consider making phi and mu sparse. + var_phi = numpy.zeros((self.num_terms, self.num_topics)) + var_mu = numpy.zeros((self.num_terms, self.num_authors)) + + var_gamma = init_gamma.copy() + var_lambda = init_lambda.copy() + tilde_gamma = init_gamma.copy() + tilde_lambda = init_lambda.copy() + + # Initialize dirichlet expectations. + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + st() + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = doc2author[d] # List of author IDs for document d. + + # Initialize mu. + # mu is 1/|A_d| if a is in A_d, zero otherwise. + # NOTE: I could do random initialization instead. + for v in ids: + for aid in authors_d: + a = self.authorid2idx[aid] + var_mu[v, a] = 1 / len(authors_d) + + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) + + lastgamma = tilde_gamma.copy() + lastlambda = tilde_lambda.copy() + + # Update phi. + for v in ids: + for k in xrange(self.num_topics): + # Average Elogtheta over authors a in document d. + avgElogtheta = 0.0 + for ad in authors_d: + a = self.authorid2idx[aid] + avgElogtheta += var_mu[v, a] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute phi. + # TODO: avoid computing phi if possible. + var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] # FIXME: may have an alignment issue here. + + # Normalize phi over k. + (log_var_phi_v, _) = log_normalize(var_phi[v, :]) # NOTE: it might be possible to do this out of the v loop. + var_phi[v, :] = numpy.exp(log_var_phi_v) + + # Update mu. + for v in ids: + # Prior probability of observing author a in document d is one + # over the number of authors in document d. 
+ author_prior_prob = 1.0 / len(authors_d) + for aid in authors_d: + a = self.authorid2idx[aid] + # Average Elogtheta over topics k. + avgElogtheta = 0.0 + for k in xrange(self.num_topics): + avgElogtheta += var_phi[v, k] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute mu over a. + # TODO: avoid computing mu if possible. + var_mu[v, a] = author_prior_prob * expavgElogtheta + + # Normalize mu. + (log_var_mu_v, _) = log_normalize(var_mu[v, :]) + var_mu[v, :] = numpy.exp(log_var_mu_v) + + + # Update gamma. + for a in xrange(self.num_authors): + for k in xrange(self.num_topics): + tilde_gamma[a, k] = 0.0 + for vi, v in enumerate(ids): + tilde_gamma[a, k] += cts[vi] * var_mu[v, a] * var_phi[v, k] + aid = self.authoridx2id[a] + tilde_gamma[a, k] *= len(author2doc[aid]) + tilde_gamma[a, k] += self.alpha + + # Update lambda. + #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T + for k in xrange(self.num_topics): + for vi, v in enumerate(ids): + tilde_lambda[k, v] = self.eta + self.num_docs * cts[vi] * var_phi[v, k] + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. + if iteration > 0: + meanchange_gamma = numpy.mean(abs(tilde_gamma - lastgamma)) + meanchange_lambda = numpy.mean(abs(tilde_lambda - lastlambda)) + #logger.info('Mean change in gamma: %.3e', meanchange_gamma) + #logger.info('Mean change in lambda: %.3e', meanchange_lambda) + if meanchange_gamma < self.threshold and meanchange_lambda < self.threshold: + converged += 1 + break + # End of iterations loop. + + # Update gamma and lambda. + # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). Same goes for lambda. + rhot = self.rho(d) + var_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. 
+ var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + word_prob = self.eval_word_prob(Elogtheta, Elogbeta) + logger.info('Word probabilities: %.3e', word_prob) + logger.info('Converged documents: %d', converged) + # End of corpus loop. + + return var_gamma, var_lambda + + def eval_word_prob(self, Elogtheta, Elogbeta, doc_ids=None): + """ + Compute the conditional liklihood of a set of documents, + + p(D | theta, beta, A). + + theta and beta are estimated by exponentiating the expectations of + log theta and log beta. + """ + + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + word_prob = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] + word_prob_d = 0.0 + for vi, v in enumerate(ids): + for k in xrange(self.num_topics): + for aid in authors_d: + a = self.authorid2idx[aid] + word_prob_d += cts[vi] * numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + author_prior_prob = 1.0 / len(authors_d) + word_prob_d *= author_prior_prob + word_prob += word_prob_d + + # TODO: can I do this? + # bound += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) + + return word_prob + + + + + + + From 7ea76f293df004494e9c717cb1a62814ab38fb1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Thu, 29 Sep 2016 17:46:52 +0200 Subject: [PATCH 004/100] Using max change instead of mean change criterion. Computing a different likelihood measure. OnlineAtVb now extends (inherits) LdaModel. 
Other minor changes. --- gensim/models/onlineatvb.py | 125 +++++++++++++++++++++++++++++------- 1 file changed, 101 insertions(+), 24 deletions(-) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index f78ce762e2..4bd675a144 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -15,8 +15,9 @@ import numpy import numbers -from gensim import utils +from gensim import utils, matutils from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models import LdaModel from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. from six.moves import xrange @@ -31,7 +32,7 @@ logger = logging.getLogger(__name__) -class OnlineAtVb: +class OnlineAtVb(LdaModel): """ Train the author-topic model using online variational Bayes. """ @@ -65,6 +66,11 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, if self.num_terms == 0: raise ValueError("cannot compute LDA over an empty collection (no terms)") + logger.info('Vocabulary consists of %d words.', self.num_terms) + + if author2doc is None or doc2author is None: + raise ValueError('author2doc and doc2author must be supplied.') + self.corpus = corpus self.iterations = iterations self.num_topics = num_topics @@ -75,19 +81,27 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.offset = offset self.author2doc = author2doc self.doc2author = doc2author - self.num_docs = len(corpus) self.num_authors = len(author2doc) self.eval_every = eval_every self.random_state = random_state + # Some of the methods in LdaModel are used in this class. + # I.e. composition is used instead of inheriting the LdaModel class. + self.ldamodel = LdaModel(id2word=self.id2word) + + logger.info('Number of authors: %d.', self.num_authors) + # TODO: find a way out of this nonsense. 
self.authorid2idx = dict(zip(list(author2doc.keys()), xrange(self.num_authors))) self.authoridx2id = dict(zip(xrange(self.num_authors), list(author2doc.keys()))) self.random_state = get_random_state(random_state) - if corpus is not None and author2doc is not None and doc2author is not None: - self.inference(corpus, author2doc, doc2author) + if corpus is not None: + (self.var_gamma, self.var_lambda) = self.inference(corpus, author2doc, doc2author) + else: + self.var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) def rho(self, iteration): return pow(self.offset + iteration, -self.decay) @@ -96,6 +110,10 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): if corpus is None: corpus = self.corpus.copy() + self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. + + logger.info('Starting inference. Training on %d documents.', len(corpus)) + # Initial values of gamma and lambda. # NOTE: parameters of gamma distribution same as in `ldamodel`. init_gamma = self.random_state.gamma(100., 1. / 100., @@ -118,7 +136,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogtheta = dirichlet_expectation(var_gamma) Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - st() for d, doc in enumerate(corpus): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. @@ -197,11 +214,12 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Check for convergence. # Criterion is mean change in "local" gamma and lambda. 
if iteration > 0: - meanchange_gamma = numpy.mean(abs(tilde_gamma - lastgamma)) - meanchange_lambda = numpy.mean(abs(tilde_lambda - lastlambda)) - #logger.info('Mean change in gamma: %.3e', meanchange_gamma) - #logger.info('Mean change in lambda: %.3e', meanchange_lambda) - if meanchange_gamma < self.threshold and meanchange_lambda < self.threshold: + maxchange_gamma = numpy.max(abs(tilde_gamma - lastgamma)) + maxchange_lambda = numpy.max(abs(tilde_lambda - lastlambda)) + # logger.info('Max change in gamma: %.3e', maxchange_gamma) + # logger.info('Max change in lambda: %.3e', maxchange_lambda) + if maxchange_gamma < self.threshold and maxchange_lambda < self.threshold: + logger.info('Converged after %d iterations.', iteration) converged += 1 break # End of iterations loop. @@ -220,25 +238,74 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - word_prob = self.eval_word_prob(Elogtheta, Elogbeta) - logger.info('Word probabilities: %.3e', word_prob) - logger.info('Converged documents: %d', converged) + word_prob = self.eval_likelihood(var_gamma, var_lambda) + logger.info('Likelihood: %.3e', word_prob) + logger.info('Converged documents: %d/%d', converged, d + 1) # End of corpus loop. return var_gamma, var_lambda - def eval_word_prob(self, Elogtheta, Elogbeta, doc_ids=None): + def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): """ - Compute the conditional liklihood of a set of documents, + Note that this is not strictly speaking a likelihood. - p(D | theta, beta, A). + Compute the expectation of the log conditional likelihood of the data, - theta and beta are estimated by exponentiating the expectations of - log theta and log beta. + E_q[log p(w_d | theta, beta, A_d)], + + where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. """ + + # TODO: call this something other than "likelihood". # TODO: allow for evaluating test corpus. 
This will require inferring on unseen documents. + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + likelihood = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] + likelihood_d = 0.0 + for vi, v in enumerate(ids): + for k in xrange(self.num_topics): + for aid in authors_d: + a = self.authorid2idx[aid] + likelihood_d += numpy.log(cts[vi]) + Elogtheta[a, k] + Elogbeta[k, v] + author_prior_prob = 1.0 / len(authors_d) + likelihood_d += numpy.log(author_prior_prob) + likelihood += likelihood_d + + # For per-word likelihood, do: + # likelihood *= 1 /sum(len(doc) for doc in docs) + + # TODO: can I do something along the lines of: + # likelihood += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) + + return likelihood + + def eval_word_prob(self, var_gamma, var_lambda, doc_ids=None): + """ + Compute the liklihood of the corpus under the model, by first + computing the conditional probabilities of the words in a + document d, + + p(w_d | theta, beta, A_d), + + summing over all documents, and dividing by the number of documents. + """ + # NOTE: unsure if this is correct. 
+ + norm_gamma = var_gamma.copy() + norm_lambda = var_lambda.copy() + for a in xrange(self.num_authors): + norm_gamma[a, :] = var_gamma[a, :] / var_gamma.sum(axis=1)[a] + for k in xrange(self.num_topics): + norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] if doc_ids is None: docs = self.corpus @@ -255,15 +322,25 @@ def eval_word_prob(self, Elogtheta, Elogbeta, doc_ids=None): for k in xrange(self.num_topics): for aid in authors_d: a = self.authorid2idx[aid] - word_prob_d += cts[vi] * numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + word_prob_d += cts[vi] * norm_gamma[a, k] * norm_lambda[k, v] author_prior_prob = 1.0 / len(authors_d) - word_prob_d *= author_prior_prob + word_prob_d += numpy.log(author_prior_prob) word_prob += word_prob_d - - # TODO: can I do this? - # bound += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) + word_prob *= 1 / len(docs) return word_prob + + # Overriding LdaModel.get_topic_terms. + def get_topic_terms(self, topicid, topn=10): + """ + Return a list of `(word_id, probability)` 2-tuples for the most + probable words in topic `topicid`. + Only return 2-tuples for the topn most probable words (ignore the rest). + """ + topic = self.var_lambda[topicid, :] + topic = topic / topic.sum() # normalize to probability distribution + bestn = matutils.argsort(topic, topn, reverse=True) + return [(id, topic[id]) for id in bestn] From 839a8b33f90385c75ea1923bf0ecc6321bb2ce37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 30 Sep 2016 13:07:28 +0200 Subject: [PATCH 005/100] Fixed some things with var_mu. Also was passing the wrong arguments to eval_liklihood. 
--- gensim/models/onlineatvb.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 4bd675a144..eaed4f6112 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -21,6 +21,8 @@ from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. from six.moves import xrange +from pprint import pprint + # log(sum(exp(x))) that tries to avoid overflow try: # try importing from here if older scipy is installed @@ -125,7 +127,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # TODO: consider making phi and mu sparse. var_phi = numpy.zeros((self.num_terms, self.num_topics)) - var_mu = numpy.zeros((self.num_terms, self.num_authors)) var_gamma = init_gamma.copy() var_lambda = init_lambda.copy() @@ -144,9 +145,10 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Initialize mu. # mu is 1/|A_d| if a is in A_d, zero otherwise. # NOTE: I could do random initialization instead. + # NOTE: maybe not the best idea that mu changes shape every iteration. + var_mu = numpy.zeros((self.num_terms, len(authors_d))) for v in ids: - for aid in authors_d: - a = self.authorid2idx[aid] + for a in xrange(len(authors_d)): var_mu[v, a] = 1 / len(authors_d) for iteration in xrange(self.iterations): @@ -160,14 +162,13 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for k in xrange(self.num_topics): # Average Elogtheta over authors a in document d. avgElogtheta = 0.0 - for ad in authors_d: - a = self.authorid2idx[aid] + for a in xrange(len(authors_d)): avgElogtheta += var_mu[v, a] * Elogtheta[a, k] expavgElogtheta = numpy.exp(avgElogtheta) # Compute phi. # TODO: avoid computing phi if possible. - var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] # FIXME: may have an alignment issue here. 
+ var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi over k. (log_var_phi_v, _) = log_normalize(var_phi[v, :]) # NOTE: it might be possible to do this out of the v loop. @@ -178,8 +179,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Prior probability of observing author a in document d is one # over the number of authors in document d. author_prior_prob = 1.0 / len(authors_d) - for aid in authors_d: - a = self.authorid2idx[aid] + for a in xrange(len(authors_d)): # Average Elogtheta over topics k. avgElogtheta = 0.0 for k in xrange(self.num_topics): @@ -196,7 +196,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Update gamma. - for a in xrange(self.num_authors): + for a in xrange(len(authors_d)): for k in xrange(self.num_topics): tilde_gamma[a, k] = 0.0 for vi, v in enumerate(ids): @@ -238,9 +238,16 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - word_prob = self.eval_likelihood(var_gamma, var_lambda) - logger.info('Likelihood: %.3e', word_prob) + + # Print topics: + # self.var_lambda = var_lambda + # pprint(self.show_topics()) + + likelihood = self.eval_likelihood(Elogtheta, Elogbeta) + logger.info('Likelihood: %.3e', likelihood) logger.info('Converged documents: %d/%d', converged, d + 1) + # Evaluating word probabilities: + # likelihood = self.eval_word_prob(var_gamma, var_lambda) # End of corpus loop. return var_gamma, var_lambda From bd13c60695ecc416b3b96e255dc541ff4dd276df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 30 Sep 2016 14:24:39 +0200 Subject: [PATCH 006/100] Added 'offline' algorithm, and notebook for experiments. 
--- docs/notebooks/at_with_nips.ipynb | 681 ++++++++++++++++++++++++++++++ gensim/models/__init__.py | 1 + gensim/models/atvb.py | 299 +++++++++++++ 3 files changed, 981 insertions(+) create mode 100644 docs/notebooks/at_with_nips.ipynb create mode 100644 gensim/models/atvb.py diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb new file mode 100644 index 0000000000..e51f77b1d9 --- /dev/null +++ b/docs/notebooks/at_with_nips.ipynb @@ -0,0 +1,681 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "application/javascript": [ + "// Run for table of contents.\n", + "$.getScript('https://kmahelona.github.io/ipython_notebook_goodies/ipython_notebook_toc.js')\n", + "\n", + "// https://github.com/kmahelona/ipython_notebook_goodies" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%%javascript\n", + "// Run for table of contents.\n", + "$.getScript('https://kmahelona.github.io/ipython_notebook_goodies/ipython_notebook_toc.js')\n", + "\n", + "// https://github.com/kmahelona/ipython_notebook_goodies" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tests with NIPS data\n", + "\n", + "

Table of Contents

\n", + "
\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from nltk.tokenize import RegexpTokenizer\n", + "from nltk.stem.wordnet import WordNetLemmatizer\n", + "import gensim\n", + "from gensim.models import Phrases\n", + "from gensim.corpora import Dictionary\n", + "from imp import reload\n", + "\n", + "import logging\n", + "\n", + "from gensim.models import OnlineAtVb\n", + "from gensim.models import onlineatvb\n", + "from gensim.models import AtVb\n", + "from gensim.models import atvb\n", + "\n", + "%matplotlib inline" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Configure logging.\n", + "logger = logging.getLogger()\n", + "fhandler = logging.FileHandler(filename='../../../log_files/log.log', mode='a')\n", + "formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", + "fhandler.setFormatter(formatter)\n", + "logger.addHandler(fhandler)\n", + "logger.setLevel(logging.DEBUG)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load and pre-process data" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import os\n", + "import re\n", + "\n", + "# Folder containing all NIPS papers.\n", + "data_dir = '../../../../data/nipstxt/'\n", + "\n", + "# Folders containin individual NIPS papers.\n", + "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", + "yrs = ['00']\n", + "dirs = ['nips' + yr for yr in yrs]\n", + "\n", + "# Get all document texts and their corresponding IDs.\n", + "docs = []\n", + "doc_ids = []\n", + "for yr_dir in dirs:\n", + " files = os.listdir(data_dir + yr_dir) # List of filenames.\n", + " for filen in files:\n", 
+ " # Get document ID.\n", + " (idx1, idx2) = re.search('[0-9]+', filen).span() # Matches the indexes of the start end end of the ID.\n", + " doc_ids.append(yr_dir[4:] + '_' + str(int(filen[idx1:idx2])))\n", + " \n", + " # Read document text.\n", + " # Note: ignoring characters that cause encoding errors.\n", + " with open(data_dir + yr_dir + '/' + filen, errors='ignore', encoding='utf-8') as fid:\n", + " txt = fid.read()\n", + " docs.append(txt)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "filenames = [data_dir + 'idx/a' + yr + '.txt' for yr in yrs] # Using the years defined in previous cell.\n", + "\n", + "# Get all author names and their corresponding document IDs.\n", + "authors_names = []\n", + "author2doc = {}\n", + "author_id = 0\n", + "for yr in yrs:\n", + " filename = data_dir + 'idx/a' + yr + '.txt'\n", + " for line in open(filename, errors='ignore', encoding='utf-8'):\n", + " contents = re.split(',', line)\n", + " author_name = (contents[1] + contents[0]).strip()\n", + " ids = [c.strip() for c in contents[2:]]\n", + " authors_names.append(author_name)\n", + " author2doc[author_id] = [yr + '_' + id for id in ids]\n", + " author_id += 1" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Tokenize the documents.\n", + "\n", + "# Split the documents into tokens.\n", + "tokenizer = RegexpTokenizer(r'\\w+')\n", + "for idx in range(len(docs)):\n", + " docs[idx] = docs[idx].lower() # Convert to lowercase.\n", + " docs[idx] = tokenizer.tokenize(docs[idx]) # Split into words.\n", + "\n", + "# Remove numbers, but not words that contain numbers.\n", + "docs = [[token for token in doc if not token.isnumeric()] for doc in docs]\n", + "\n", + "# Remove words that are only one character.\n", + "docs = [[token for token in doc if len(token) > 1] for doc in docs]" + ] + }, + { + "cell_type": 
"code", + "execution_count": 7, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Lemmatize the documents.\n", + "\n", + "# Lemmatize all words in documents.\n", + "lemmatizer = WordNetLemmatizer()\n", + "docs = [[lemmatizer.lemmatize(token) for token in doc] for doc in docs]" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Compute bigrams.\n", + "\n", + "# Add bigrams and trigrams to docs (only ones that appear 20 times or more).\n", + "bigram = Phrases(docs, min_count=20)\n", + "for idx in range(len(docs)):\n", + " for token in bigram[docs[idx]]:\n", + " if '_' in token:\n", + " # Token is a bigram, add to document.\n", + " docs[idx].append(token)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Create a dictionary representation of the documents.\n", + "dictionary = Dictionary(docs)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+
eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g
/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sC
Ddbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaS
RD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQ
L9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELSh
qTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVI
OgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs
5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMy
sST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp
5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVl
LchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Frequency distribution of words.\n", + "\n", + "one_doc = []\n", + "for doc in docs:\n", + " one_doc.extend(doc)\n", + "\n", + "bow = dictionary.doc2bow(one_doc)\n", + "word_freq = [cnt for _, cnt in bow]\n", + "\n", + "plt.plot(sorted(word_freq))\n", + "plt.xlabel('Words')\n", + "plt.ylabel('#Occurences')\n", + "plt.title('Frequency distribution of words.\\nPower-law behaviour.')\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Remove rare and common tokens.\n", + "\n", + "# Filter out words that occur too frequently or too rarely.\n", + "max_freq = 0.5\n", + 
"min_wordcount = 20\n", + "dictionary.filter_extremes(no_below=min_wordcount, no_above=max_freq)\n", + "\n", + "dict0 = dictionary[0] # This sort of \"initializes\" dictionary.id2token." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Vectorize data.\n", + "\n", + "# Bag-of-words representation of the documents.\n", + "corpus = [dictionary.doc2bow(doc) for doc in docs]" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of unique tokens: 681\n", + "Number of documents: 90\n" + ] + } + ], + "source": [ + "print('Number of unique tokens: %d' % len(dictionary))\n", + "print('Number of documents: %d' % len(corpus))" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Make a mapping from document IDs to author IDs.\n", + "doc2author = {}\n", + "for i, doc_id in enumerate(doc_ids):\n", + " author_ids = []\n", + " for a, a_doc_ids in author2doc.items():\n", + " if doc_id in a_doc_ids:\n", + " author_ids.append(a)\n", + " doc2author[i] = author_ids" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## \"Offline\" AT VB" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "small_doc2author = [(d, a) for d, a in list(doc2author.items())[:20]]\n", + "small_doc2author = dict(small_doc2author)\n", + "\n", + "small_corpus = [corpus[d] for d in small_doc2author.keys()]" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "small_author2doc = {}\n", + "for d, author_ids in small_doc2author.items():\n", + " for a in author_ids:\n", + " small_author2doc[a] = set()\n", + "for d, 
author_ids in small_doc2author.items():\n", + " for a in author_ids:\n", + " small_author2doc[a].add(d)\n", + "for a in small_author2doc.keys():\n", + " small_author2doc[a] = list(small_author2doc[a])" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "phi is 20 x 681 x 10 (136200 elements)\n", + "mu is 20 x 681 x 43 (585660 elements)\n" + ] + } + ], + "source": [ + "print('phi is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), 10,\n", + " len(small_corpus) * len(dictionary.id2token) * 10))\n", + "print('mu is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), len(small_author2doc),\n", + " len(small_corpus) * len(dictionary.id2token) * len(small_author2doc)))" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(atvb)\n", + "AtVb = atvb.AtVb" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0,\n", + " '0.002*itself + 0.002*assumed + 0.002*somewhat + 0.002*update + 0.002*length '\n", + " '+ 0.002*design + 0.002*u + 0.002*criterion + 0.002*information_processing + '\n", + " '0.002*short'),\n", + " (1,\n", + " '0.002*among + 0.002*compare + 0.002*proportional_to + 0.002*contrast + '\n", + " '0.002*coordinate + 0.002*environment + 0.002*learn + 0.002*self + '\n", + " '0.002*distribution + 0.002*observed'),\n", + " (2,\n", + " '0.002*synapsis + 0.002*surface + 0.002*block + 0.002*search + 0.002*again + '\n", + " '0.002*selected + 0.002*differential + 0.002*construct + '\n", + " '0.002*associative_memory + 0.002*greater'),\n", + " (3,\n", + " '0.002*hidden_unit + 0.002*followed + 0.002*com + 0.002*sci + '\n", + " '0.002*additional + 0.002*full + 0.002*lower + 
0.002*oo + 0.002*suggest + '\n", + " '0.002*construct'),\n", + " (4,\n", + " '0.002*corresponds + 0.002*define + 0.002*convergence + 0.002*gain + '\n", + " '0.002*connection_between + 0.002*a_follows + 0.002*ca + 0.002*bound + '\n", + " '0.002*enough + 0.002*determines'),\n", + " (5,\n", + " '0.002*row + 0.002*usa + 0.002*according_to + 0.002*enough + '\n", + " '0.002*environment + 0.002*department + 0.002*involved + 0.002*sign + '\n", + " '0.002*seems + 0.002*manner'),\n", + " (6,\n", + " '0.002*minimize + 0.002*quantity + 0.002*natural + 0.002*bound + '\n", + " '0.002*acad_sci + 0.002*itself + 0.002*control + 0.002*depending_on + '\n", + " '0.002*become + 0.002*additional'),\n", + " (7,\n", + " '0.002*particularly + 0.002*functional + 0.002*depends_on + 0.002*selected + '\n", + " '0.002*column + 0.002*exhibit + 0.002*correspond_to + 0.002*processor + '\n", + " '0.002*proposed + 0.002*task'),\n", + " (8,\n", + " '0.002*achieve + 0.002*minimum + 0.002*u + 0.002*sci + 0.002*detail + '\n", + " '0.002*setting + 0.002*learned + 0.002*exhibit + 0.002*learning_algorithm + '\n", + " '0.002*additional'),\n", + " (9,\n", + " '0.002*binary + 0.002*via + 0.002*done + 0.002*exist + 0.002*mit_press + '\n", + " '0.002*composed + 0.002*followed + 0.002*noise + 0.002*determines + '\n", + " '0.002*consequence')]\n", + "[(0,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (1,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (2,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (3,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 
0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (4,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (5,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (6,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (7,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (8,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate'),\n", + " (9,\n", + " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", + " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", + " '0.007*rate')]\n" + ] + } + ], + "source": [ + "model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token,\n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-7,\n", + " iterations=10, alpha=None, eta=None,\n", + " eval_every=1, random_state=None)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Online AT VB" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(onlineatvb)\n", + 
"OnlineAtVb = onlineatvb.OnlineAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, author2doc=author2doc, doc2author=doc2author, threshold=1e-12,\n\u001b[0;32m----> 2\u001b[0;31m iterations=10, alpha=None, eta=None, decay=0.5, offset=64.0, eval_every=1, random_state=None)\n\u001b[0m", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, author2doc, doc2author, threshold, iterations, alpha, eta, decay, offset, eval_every, random_state)\u001b[0m\n\u001b[1;32m 99\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 100\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 101\u001b[0;31m \u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvar_gamma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvar_lambda\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc2author\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 102\u001b[0m 
\u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 103\u001b[0m self.var_lambda = self.random_state.gamma(100., 1. / 100.,\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, author2doc, doc2author)\u001b[0m\n\u001b[1;32m 201\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0.0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 202\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 203\u001b[0;31m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcts\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_mu\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 204\u001b[0m \u001b[0maid\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mauthoridx2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 205\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*=\u001b[0m 
\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0maid\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-12,\n", + " iterations=10, alpha=None, eta=None, decay=0.5, offset=64.0,\n", + " eval_every=1, random_state=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 3.78467562e-03, -5.81409165e-03, 1.96754089e-03,\n", + " -5.83668669e-04, 7.57789779e-05, 3.43176041e-03,\n", + " -4.04724774e-03, 8.99283293e-04, 1.00692016e-03,\n", + " 6.13459183e-03])" + ] + }, + "execution_count": 83, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "(model.var_lambda[0, :] - model.var_lambda[1, :])[:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 153, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(onlineatvb)\n", + "OnlineAtVb = onlineatvb.OnlineAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 154, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, \n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12,\n", + " iterations=10, alpha=None, eta=None, decay=0.5, offset=64.0,\n", + " eval_every=1, random_state=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 112, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "8.2220534365310787" + ] + }, + "execution_count": 112, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sum(model.var_gamma[0, :] - 
model.var_gamma[1, :]) / model.num_topics" + ] + }, + { + "cell_type": "code", + "execution_count": 110, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.0022532199189799953" + ] + }, + "execution_count": 110, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sum(model.var_lambda[0, :] - model.var_lambda[1, :]) / model.num_terms" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.2" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index 9e094c63ce..d94986da61 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -17,6 +17,7 @@ from .phrases import Phrases from .normmodel import NormModel from .onlineatvb import OnlineAtVb +from .atvb import AtVb from . import wrappers diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py new file mode 100644 index 0000000000..be48375ba9 --- /dev/null +++ b/gensim/models/atvb.py @@ -0,0 +1,299 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2011 Radim Rehurek +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Author-topic model. +""" + +import pdb +from pdb import set_trace as st + +import logging +import numpy +import numbers + +from gensim import utils, matutils +from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models import LdaModel +from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. 
+from six.moves import xrange + +from pprint import pprint + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger('gensim.models.atmodel') + + +class AtVb(LdaModel): + """ + Train the author-topic model using variational Bayes. + """ + # TODO: inherit interfaces.TransformationABC. + + def __init__(self, corpus=None, num_topics=100, id2word=None, + author2doc=None, doc2author=None, threshold=0.001, + iterations=10, alpha=None, eta=None, + eval_every=1, random_state=None): + + # TODO: require only author2doc OR doc2author, and construct the missing one automatically. + + if alpha is None: + alpha = 50 / num_topics + if eta is None: + eta = 0.01 + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + logger.info('Vocabulary consists of %d words.', self.num_terms) + + self.corpus = corpus + self.iterations = iterations + self.num_topics = num_topics + self.threshold = threshold + self.alpha = alpha + self.eta = eta + self.author2doc = author2doc + self.doc2author = doc2author + self.num_docs = len(corpus) + self.num_authors = len(author2doc) + self.eval_every = eval_every + self.random_state = random_state + + logger.info('Number of authors: %d.', self.num_authors) + + # 
TODO: find a way out of this nonsense. + self.authorid2idx = dict(zip(list(author2doc.keys()), xrange(self.num_authors))) + self.authoridx2id = dict(zip(xrange(self.num_authors), list(author2doc.keys()))) + + self.random_state = get_random_state(random_state) + + if corpus is not None: + self.inference(corpus, author2doc, doc2author) + + def inference(self, corpus=None, author2doc=None, doc2author=None): + if corpus is None: + corpus = self.corpus + + logger.info('Starting inference. Training on %d documents.', len(corpus)) + + # Initial value of gamma and lambda. + # NOTE: parameters of gamma distribution same as in `ldamodel`. + var_gamma = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + + # Initialize mu. + # mu is 1/|A_d| if a is in A_d, zero otherwise. + # var_mu is essentially a (self.num_docs, self.num_terms, self.num_authors) sparse matrix, + # which we represent using a dictionary. + var_mu = dict() + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + for v in ids: + authors_d = doc2author[d] # List of author IDs for document d. + for aid in authors_d: + a = self.authorid2idx[aid] + var_mu[(d, v, a)] = 1 / len(authors_d) + + var_phi = numpy.zeros((self.num_docs, self.num_terms, self.num_topics)) + + # TODO: consider how to vectorize opterations as much as + # possible. + # TODO: check vector and matrix dimensions, and ensure that + # things are multiplied along the correct dimensions. + + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + likelihood = self.eval_likelihood(Elogtheta, Elogbeta) + logger.info('Likelihood: %.3e', likelihood) + for iteration in xrange(self.iterations): + # Update phi. + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. 
+ authors_d = doc2author[d] # List of author IDs for document d. + + #expElogbetad = expElogbeta[:, ids] + + # Update phi. + for v in ids: + for k in xrange(self.num_topics): + # Average Elogtheta over authors a in document d. + avgElogtheta = 0.0 + for aid in authors_d: + a = self.authorid2idx[aid] + avgElogtheta += var_mu[(d, v, a)] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute phi. + # TODO: avoid computing phi if possible. + var_phi[d, v, k] = expavgElogtheta * expElogbeta[k, v] + # Normalize phi. + (log_var_phi_dv, _) = log_normalize(var_phi[d, v, :]) + var_phi[d, v, :] = numpy.exp(log_var_phi_dv) + + + # Update mu. + for v in ids: + # Prior probability of observing author a in document d is one + # over the number of authors in document d. + author_prior_prob = 1.0 / len(authors_d) + for aid in authors_d: + a = self.authorid2idx[aid] + # Average Elogtheta over topics k. + avgElogtheta = 0.0 + for k in xrange(self.num_topics): + avgElogtheta += var_phi[d, v, k] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute mu. + # TODO: avoid computing mu if possible. + var_mu[(d, v, a)] = author_prior_prob * expavgElogtheta + + # Normalize mu. + mu_sum = 0.0 + for aid_prime in authors_d: + a_prime = self.authorid2idx[aid] + mu_sum += var_mu[(d, v, a)] + + for aid_prime in authors_d: + a_prime = self.authorid2idx[aid] + var_mu[(d, v, a)] *= 1 / mu_sum + + # Update gamma. + for a in xrange(self.num_authors): + for k in xrange(self.num_topics): + aid = self.authoridx2id[a] + docs_a = self.author2doc[aid] + var_gamma[a, k] = 0.0 + var_gamma[a, k] += self.alpha + for d in docs_a: + # TODO: if this document doesn't exist, we will have problems here. Could to an "if corpus.get(d)" type of thing. + doc = corpus[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ for vi, v in enumerate(ids): + var_gamma[a, k] += cts[vi] * var_mu[(d, v, a)] * var_phi[d, v, k] + + # Update Elogtheta, since gamma has been updated. + Elogtheta = dirichlet_expectation(var_gamma) + + # Update lambda. + for k in xrange(self.num_topics): + for v in xrange(self.num_terms): + var_lambda[k, v] = 0.0 + var_lambda[k, v] += self.eta + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + for vi, v in enumerate(ids): + var_lambda[k, v] += cts[vi] * var_phi[d, v, k] + + # Update Elogbeta, since lambda has been updated. + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + + logger.info('All variables updated.') + + # Print topics: + self.var_lambda = var_lambda + pprint(self.show_topics()) + + # Evaluate likelihood. + if (iteration + 1) % self.eval_every == 0: + prev_likelihood = likelihood + likelihood = self.eval_likelihood(Elogtheta, Elogbeta) + logger.info('Likelihood: %.3e', likelihood) + if numpy.abs(likelihood - prev_likelihood) / abs(prev_likelihood) < self.threshold: + break + # End of update loop (iterations). + + return var_gamma, var_lambda + + def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): + """ + Note that this is not strictly speaking a likelihood. + + Compute the expectation of the log conditional likelihood of the data, + + E_q[log p(w_d | theta, beta, A_d)], + + where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. + """ + + # TODO: call this something other than "likelihood". + + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + likelihood = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ authors_d = self.doc2author[d] + likelihood_d = 0.0 + for vi, v in enumerate(ids): + for k in xrange(self.num_topics): + for aid in authors_d: + a = self.authorid2idx[aid] + likelihood_d += numpy.log(cts[vi]) + Elogtheta[a, k] + Elogbeta[k, v] + author_prior_prob = 1.0 / len(authors_d) + likelihood_d += numpy.log(author_prior_prob) + likelihood += likelihood_d + + # For per-word likelihood, do: + # likelihood *= 1 /sum(len(doc) for doc in docs) + + # TODO: can I do something along the lines of: + # likelihood += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) + + return likelihood + + # Overriding LdaModel.get_topic_terms. + def get_topic_terms(self, topicid, topn=10): + """ + Return a list of `(word_id, probability)` 2-tuples for the most + probable words in topic `topicid`. + Only return 2-tuples for the topn most probable words (ignore the rest). + """ + topic = self.var_lambda[topicid, :] + topic = topic / topic.sum() # normalize to probability distribution + bestn = matutils.argsort(topic, topn, reverse=True) + return [(id, topic[id]) for id in bestn] + + + + + + + From ebc808c717ee748d8c73429517fa2516316b4a95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Sun, 9 Oct 2016 15:55:27 +0200 Subject: [PATCH 007/100] Fixed log normalization. Also changed symmetric initilization of hyperparameters. Updated notebook accordingly. 
--- docs/notebooks/at_with_nips.ipynb | 253 ++++++++---------------------- gensim/models/atvb.py | 66 +++++--- gensim/models/onlineatvb.py | 12 +- 3 files changed, 112 insertions(+), 219 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index e51f77b1d9..8b15a218ff 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -234,7 +234,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/
O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9
O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiH
wFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9
a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EP
g+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQe
ht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBC
UzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwF
LA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMy
sSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYc
Cdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empId
bv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -344,7 +344,7 @@ }, "outputs": [], "source": [ - "small_doc2author = [(d, a) for d, a in list(doc2author.items())[:20]]\n", + "small_doc2author = [(d, a) for d, a in list(doc2author.items())[:10]]\n", "small_doc2author = dict(small_doc2author)\n", "\n", "small_corpus = [corpus[d] for d in 
small_doc2author.keys()]" @@ -380,8 +380,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 20 x 681 x 10 (136200 elements)\n", - "mu is 20 x 681 x 43 (585660 elements)\n" + "phi is 10 x 681 x 10 (68100 elements)\n", + "mu is 10 x 681 x 22 (149820 elements)\n" ] } ], @@ -394,9 +394,9 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 38, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ @@ -406,164 +406,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 41, "metadata": { "collapsed": false }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[(0,\n", - " '0.002*itself + 0.002*assumed + 0.002*somewhat + 0.002*update + 0.002*length '\n", - " '+ 0.002*design + 0.002*u + 0.002*criterion + 0.002*information_processing + '\n", - " '0.002*short'),\n", - " (1,\n", - " '0.002*among + 0.002*compare + 0.002*proportional_to + 0.002*contrast + '\n", - " '0.002*coordinate + 0.002*environment + 0.002*learn + 0.002*self + '\n", - " '0.002*distribution + 0.002*observed'),\n", - " (2,\n", - " '0.002*synapsis + 0.002*surface + 0.002*block + 0.002*search + 0.002*again + '\n", - " '0.002*selected + 0.002*differential + 0.002*construct + '\n", - " '0.002*associative_memory + 0.002*greater'),\n", - " (3,\n", - " '0.002*hidden_unit + 0.002*followed + 0.002*com + 0.002*sci + '\n", - " '0.002*additional + 0.002*full + 0.002*lower + 0.002*oo + 0.002*suggest + '\n", - " '0.002*construct'),\n", - " (4,\n", - " '0.002*corresponds + 0.002*define + 0.002*convergence + 0.002*gain + '\n", - " '0.002*connection_between + 0.002*a_follows + 0.002*ca + 0.002*bound + '\n", - " '0.002*enough + 0.002*determines'),\n", - " (5,\n", - " '0.002*row + 0.002*usa + 0.002*according_to + 0.002*enough + '\n", - " '0.002*environment + 0.002*department + 0.002*involved + 0.002*sign + '\n", - " '0.002*seems + 0.002*manner'),\n", - " (6,\n", - " '0.002*minimize + 0.002*quantity + 
0.002*natural + 0.002*bound + '\n", - " '0.002*acad_sci + 0.002*itself + 0.002*control + 0.002*depending_on + '\n", - " '0.002*become + 0.002*additional'),\n", - " (7,\n", - " '0.002*particularly + 0.002*functional + 0.002*depends_on + 0.002*selected + '\n", - " '0.002*column + 0.002*exhibit + 0.002*correspond_to + 0.002*processor + '\n", - " '0.002*proposed + 0.002*task'),\n", - " (8,\n", - " '0.002*achieve + 0.002*minimum + 0.002*u + 0.002*sci + 0.002*detail + '\n", - " '0.002*setting + 0.002*learned + 0.002*exhibit + 0.002*learning_algorithm + '\n", - " '0.002*additional'),\n", - " (9,\n", - " '0.002*binary + 0.002*via + 0.002*done + 0.002*exist + 0.002*mit_press + '\n", - " '0.002*composed + 0.002*followed + 0.002*noise + 0.002*determines + '\n", - " '0.002*consequence')]\n", - "[(0,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - " (1,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - " (2,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - " (3,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - " (4,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - " (5,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - 
" (6,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - " (7,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - " (8,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate'),\n", - " (9,\n", - " '0.017*image + 0.012*matrix + 0.011*training + 0.011*cell + 0.010*stimulus + '\n", - " '0.008*adaptive + 0.008*artificial + 0.008*operation + 0.007*noise + '\n", - " '0.007*rate')]\n" - ] - } - ], + "outputs": [], "source": [ "model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token,\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-7,\n", " iterations=10, alpha=None, eta=None,\n", - " eval_every=1, random_state=None)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Online AT VB" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(onlineatvb)\n", - "OnlineAtVb = onlineatvb.OnlineAtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 65, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m model 
= OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, author2doc=author2doc, doc2author=doc2author, threshold=1e-12,\n\u001b[0;32m----> 2\u001b[0;31m iterations=10, alpha=None, eta=None, decay=0.5, offset=64.0, eval_every=1, random_state=None)\n\u001b[0m", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, author2doc, doc2author, threshold, iterations, alpha, eta, decay, offset, eval_every, random_state)\u001b[0m\n\u001b[1;32m 99\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 100\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 101\u001b[0;31m \u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvar_gamma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvar_lambda\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc2author\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 102\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 103\u001b[0m self.var_lambda = self.random_state.gamma(100., 1. 
/ 100.,\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, author2doc, doc2author)\u001b[0m\n\u001b[1;32m 201\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0.0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 202\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 203\u001b[0;31m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcts\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_mu\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 204\u001b[0m \u001b[0maid\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mauthoridx2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 205\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0maid\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - 
"\u001b[0;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], - "source": [ - "model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-12,\n", - " iterations=10, alpha=None, eta=None, decay=0.5, offset=64.0,\n", - " eval_every=1, random_state=None)" + " eval_every=1, random_state=0)\n" ] }, { "cell_type": "code", - "execution_count": 83, + "execution_count": 42, "metadata": { "collapsed": false }, @@ -571,24 +428,47 @@ { "data": { "text/plain": [ - "array([ 3.78467562e-03, -5.81409165e-03, 1.96754089e-03,\n", - " -5.83668669e-04, 7.57789779e-05, 3.43176041e-03,\n", - " -4.04724774e-03, 8.99283293e-04, 1.00692016e-03,\n", - " 6.13459183e-03])" + "[(0,\n", + " '0.041*gradient + 0.032*image + 0.032*node + 0.018*flow + 0.018*technique + 0.017*constraint + 0.017*square + 0.016*training + 0.015*analog + 0.015*component'),\n", + " (1,\n", + " '0.064*processor + 0.050*activation + 0.038*edge + 0.030*update + 0.019*operation + 0.017*column + 0.016*stored + 0.016*machine + 0.016*store + 0.013*address'),\n", + " (2,\n", + " '0.102*map + 0.093*brain + 0.079*field + 0.041*location + 0.035*series + 0.034*functional + 0.029*potential + 0.026*activity + 0.025*left + 0.020*strategy'),\n", + " (3,\n", + " '0.028*cell + 0.023*interaction + 0.023*cycle + 0.022*dynamic + 0.021*respect + 0.019*exist + 0.018*with_respect + 0.018*control + 0.018*total + 0.017*path'),\n", + " (4,\n", + " '0.035*group + 0.029*scheme + 0.023*matrix + 0.020*noise + 0.020*representation + 0.019*probability + 0.018*capacity + 0.017*log + 0.016*image + 0.015*recognition'),\n", + " (5,\n", + " '0.053*edge + 0.040*mapping + 0.027*processor + 0.027*current + 0.025*energy + 0.023*associated + 0.020*propagation + 0.019*estimate + 0.019*constructed + 0.019*activation'),\n", + " (6,\n", + " '0.024*dynamic + 0.023*phase + 0.019*variable + 0.016*binary + 0.015*required + 0.014*noise + 0.014*becomes + 0.014*limit + 0.013*finally + 
0.013*via'),\n", + " (7,\n", + " '0.040*potential + 0.040*cell + 0.037*artificial + 0.027*computational + 0.023*connectivity + 0.020*fact_that + 0.019*though + 0.017*biological + 0.015*spatial + 0.015*architecture'),\n", + " (8,\n", + " '0.037*synaptic + 0.035*connectivity + 0.029*cell + 0.025*back_propagation + 0.023*capability + 0.023*architecture + 0.022*back + 0.021*potential + 0.021*propagation + 0.019*target'),\n", + " (9,\n", + " '0.080*image + 0.037*stimulus + 0.020*visual + 0.018*recall + 0.018*vision + 0.017*center + 0.017*associated + 0.017*phase + 0.017*dimensional + 0.017*stable')]" ] }, - "execution_count": 83, + "execution_count": 42, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "(model.var_lambda[0, :] - model.var_lambda[1, :])[:10]" + "model.show_topics()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Online AT VB" ] }, { "cell_type": "code", - "execution_count": 153, + "execution_count": 55, "metadata": { "collapsed": false }, @@ -600,7 +480,7 @@ }, { "cell_type": "code", - "execution_count": 154, + "execution_count": 60, "metadata": { "collapsed": false }, @@ -609,34 +489,12 @@ "model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, \n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12,\n", " iterations=10, alpha=None, eta=None, decay=0.5, offset=64.0,\n", - " eval_every=1, random_state=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 112, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "8.2220534365310787" - ] - }, - "execution_count": 112, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "sum(model.var_gamma[0, :] - model.var_gamma[1, :]) / model.num_topics" + " eval_every=1, random_state=0)" ] }, { "cell_type": "code", - "execution_count": 110, + "execution_count": 59, "metadata": { "collapsed": false }, @@ -644,16 +502,35 @@ { "data": { 
"text/plain": [ - "0.0022532199189799953" + "[(0,\n", + " '0.021*field + 0.015*synaptic + 0.014*matrix + 0.014*representation + 0.013*connectivity + 0.012*flow + 0.012*mapping + 0.012*gradient + 0.012*log + 0.010*architecture'),\n", + " (1,\n", + " '0.009*connectivity + 0.006*image + 0.006*log + 0.006*node + 0.005*scale + 0.005*associative_memory + 0.005*recognition + 0.005*analog + 0.005*series + 0.005*architecture'),\n", + " (2,\n", + " '0.013*recall + 0.010*architecture + 0.007*series + 0.007*connectivity + 0.007*capability + 0.006*node + 0.006*recognition + 0.005*analog + 0.005*gain + 0.005*storage'),\n", + " (3,\n", + " '0.015*scale + 0.013*synaptic + 0.007*architecture + 0.007*image + 0.006*series + 0.006*associative_memory + 0.006*connectivity + 0.005*subject + 0.005*bound + 0.005*recall'),\n", + " (4,\n", + " '0.016*connectivity + 0.013*associative_memory + 0.010*energy + 0.008*u + 0.007*mapping + 0.007*circuit + 0.006*proceeding + 0.006*log + 0.006*image + 0.006*experiment'),\n", + " (5,\n", + " '0.009*recall + 0.009*connectivity + 0.007*energy + 0.006*word + 0.006*series + 0.006*associative_memory + 0.006*synaptic + 0.005*technology + 0.005*suggested + 0.005*bound'),\n", + " (6,\n", + " '0.015*flow + 0.013*recall + 0.011*synaptic + 0.010*log + 0.008*gradient + 0.007*circuit + 0.006*image + 0.006*recognition + 0.006*associative_memory + 0.006*location'),\n", + " (7,\n", + " '0.013*image + 0.011*recall + 0.010*component + 0.008*ma + 0.008*connectivity + 0.007*phase + 0.007*limit + 0.006*log + 0.006*analog + 0.006*constraint'),\n", + " (8,\n", + " '0.012*scale + 0.011*connectivity + 0.010*recall + 0.007*series + 0.006*analog + 0.006*associative_memory + 0.005*architecture + 0.005*action + 0.005*come + 0.005*strength'),\n", + " (9,\n", + " '0.009*connectivity + 0.007*subject + 0.007*series + 0.006*recall + 0.006*analog + 0.005*storage + 0.005*associative_memory + 0.005*denker + 0.005*longer + 0.005*architecture')]" ] }, - "execution_count": 110, + 
"execution_count": 59, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "sum(model.var_lambda[0, :] - model.var_lambda[1, :]) / model.num_terms" + "model.show_topics()" ] } ], diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index be48375ba9..ff04f8c772 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -38,7 +38,7 @@ class AtVb(LdaModel): """ Train the author-topic model using variational Bayes. """ - # TODO: inherit interfaces.TransformationABC. + # TODO: inherit interfaces.TransformationABC. Probably not necessary if I'm inheriting LdaModel. def __init__(self, corpus=None, num_topics=100, id2word=None, author2doc=None, doc2author=None, threshold=0.001, @@ -48,9 +48,9 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, # TODO: require only author2doc OR doc2author, and construct the missing one automatically. if alpha is None: - alpha = 50 / num_topics + alpha = 1.0 / num_topics if eta is None: - eta = 0.01 + eta = 1.0 / num_topics self.id2word = id2word if corpus is None and self.id2word is None: @@ -111,6 +111,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # mu is 1/|A_d| if a is in A_d, zero otherwise. # var_mu is essentially a (self.num_docs, self.num_terms, self.num_authors) sparse matrix, # which we represent using a dictionary. + # TODO: consider initializing mu randomly. var_mu = dict() for d, doc in enumerate(corpus): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. @@ -118,7 +119,18 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): authors_d = doc2author[d] # List of author IDs for document d. for aid in authors_d: a = self.authorid2idx[aid] + # Draw mu from gamma distribution. + # var_mu[(d, v, a)] = self.random_state.gamma(100., 1. / 100., (1,))[0] var_mu[(d, v, a)] = 1 / len(authors_d) + # Normalize mu. 
+ # mu_sum = 0.0 + # for aid_prime in authors_d: + # a_prime = self.authorid2idx[aid] + # mu_sum += var_mu[(d, v, a)] + + # for aid_prime in authors_d: + # a_prime = self.authorid2idx[aid] + # var_mu[(d, v, a)] *= 1 / mu_sum var_phi = numpy.zeros((self.num_docs, self.num_terms, self.num_topics)) @@ -138,8 +150,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. authors_d = doc2author[d] # List of author IDs for document d. - #expElogbetad = expElogbeta[:, ids] - # Update phi. for v in ids: for k in xrange(self.num_topics): @@ -153,16 +163,18 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Compute phi. # TODO: avoid computing phi if possible. var_phi[d, v, k] = expavgElogtheta * expElogbeta[k, v] - # Normalize phi. - (log_var_phi_dv, _) = log_normalize(var_phi[d, v, :]) + # Normalize phi. + #(log_var_phi_dv, _) = log_normalize(var_phi[d, v, :]) + (log_var_phi_dv, _) = log_normalize(numpy.log(var_phi[d, v, :])) var_phi[d, v, :] = numpy.exp(log_var_phi_dv) - - # Update mu. + # Update mu. + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + authors_d = doc2author[d] # List of author IDs for document d. + # author_prior_prob = 1. / len(authors_d) for v in ids: - # Prior probability of observing author a in document d is one - # over the number of authors in document d. - author_prior_prob = 1.0 / len(authors_d) + mu_sum = 0.0 for aid in authors_d: a = self.authorid2idx[aid] # Average Elogtheta over topics k. @@ -173,17 +185,14 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Compute mu. # TODO: avoid computing mu if possible. - var_mu[(d, v, a)] = author_prior_prob * expavgElogtheta - - # Normalize mu. 
- mu_sum = 0.0 - for aid_prime in authors_d: - a_prime = self.authorid2idx[aid] + # var_mu[(d, v, a)] = author_prior_prob * expavgElogtheta + var_mu[(d, v, a)] = expavgElogtheta mu_sum += var_mu[(d, v, a)] - for aid_prime in authors_d: - a_prime = self.authorid2idx[aid] - var_mu[(d, v, a)] *= 1 / mu_sum + mu_norm_const = 1.0 / mu_sum + for aid in authors_d: + a = self.authorid2idx[aid] + var_mu[(d, v, a)] *= mu_norm_const # Update gamma. for a in xrange(self.num_authors): @@ -206,13 +215,18 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Update lambda. for k in xrange(self.num_topics): for v in xrange(self.num_terms): + # TODO: highly unnecessary: var_lambda[k, v] = 0.0 var_lambda[k, v] += self.eta for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - for vi, v in enumerate(ids): - var_lambda[k, v] += cts[vi] * var_phi[d, v, k] + # Get the count of v in doc. If v is not in doc, return 0. + cnt = dict(doc).get(v, 0) + var_lambda[k, v] += cnt * var_phi[d, v, k] + #ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + #cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + #for vi, v in enumerate(ids): + # # FIXME: I'm 90% sure this is wrong. + # var_lambda[k, v] += cts[vi] * var_phi[d, v, k] # Update Elogbeta, since lambda has been updated. Elogbeta = dirichlet_expectation(var_lambda) @@ -223,7 +237,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Print topics: self.var_lambda = var_lambda - pprint(self.show_topics()) + #pprint(self.show_topics()) # Evaluate likelihood. 
if (iteration + 1) % self.eval_every == 0: diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index eaed4f6112..b966e10ffe 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -48,9 +48,9 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, # TODO: require only author2doc OR doc2author, and construct the missing one automatically. if alpha is None: - alpha = 50 / num_topics + alpha = 1.0 / num_topics if eta is None: - eta = 0.01 + eta = 1.0 / num_topics self.id2word = id2word if corpus is None and self.id2word is None: @@ -171,7 +171,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi over k. - (log_var_phi_v, _) = log_normalize(var_phi[v, :]) # NOTE: it might be possible to do this out of the v loop. + (log_var_phi_v, _) = log_normalize(numpy.log(var_phi[v, :])) # NOTE: it might be possible to do this out of the v loop. var_phi[v, :] = numpy.exp(log_var_phi_v) # Update mu. @@ -191,7 +191,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): var_mu[v, a] = author_prior_prob * expavgElogtheta # Normalize mu. - (log_var_mu_v, _) = log_normalize(var_mu[v, :]) + (log_var_mu_v, _) = log_normalize(numpy.log(var_mu[v, :])) var_mu[v, :] = numpy.exp(log_var_mu_v) @@ -209,7 +209,9 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T for k in xrange(self.num_topics): for vi, v in enumerate(ids): - tilde_lambda[k, v] = self.eta + self.num_docs * cts[vi] * var_phi[v, k] + cnt = dict(doc).get(v, 0) + var_lambda[k, v] = self.eta + cnt * var_phi[v, k] + #tilde_lambda[k, v] = self.eta + self.num_docs * cts[vi] * var_phi[v, k] # Check for convergence. # Criterion is mean change in "local" gamma and lambda. 
From 16b26f7fbc0ed8e1d9471fca3afacc932721faf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Sun, 9 Oct 2016 15:57:43 +0200 Subject: [PATCH 008/100] Removed offline algorithm class as it is no longer necessary. --- gensim/models/atvb.py | 313 ------------------------------------------ 1 file changed, 313 deletions(-) delete mode 100644 gensim/models/atvb.py diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py deleted file mode 100644 index ff04f8c772..0000000000 --- a/gensim/models/atvb.py +++ /dev/null @@ -1,313 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (C) 2011 Radim Rehurek -# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html - -""" -Author-topic model. -""" - -import pdb -from pdb import set_trace as st - -import logging -import numpy -import numbers - -from gensim import utils, matutils -from gensim.models.ldamodel import dirichlet_expectation, get_random_state -from gensim.models import LdaModel -from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. -from six.moves import xrange - -from pprint import pprint - -# log(sum(exp(x))) that tries to avoid overflow -try: - # try importing from here if older scipy is installed - from scipy.maxentropy import logsumexp -except ImportError: - # maxentropy has been removed in recent releases, logsumexp now in misc - from scipy.misc import logsumexp - -logger = logging.getLogger('gensim.models.atmodel') - - -class AtVb(LdaModel): - """ - Train the author-topic model using variational Bayes. - """ - # TODO: inherit interfaces.TransformationABC. Probably not necessary if I'm inheriting LdaModel. - - def __init__(self, corpus=None, num_topics=100, id2word=None, - author2doc=None, doc2author=None, threshold=0.001, - iterations=10, alpha=None, eta=None, - eval_every=1, random_state=None): - - # TODO: require only author2doc OR doc2author, and construct the missing one automatically. 
- - if alpha is None: - alpha = 1.0 / num_topics - if eta is None: - eta = 1.0 / num_topics - - self.id2word = id2word - if corpus is None and self.id2word is None: - raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') - - if self.id2word is None: - logger.warning("no word id mapping provided; initializing from corpus, assuming identity") - self.id2word = utils.dict_from_corpus(corpus) - self.num_terms = len(self.id2word) - elif len(self.id2word) > 0: - self.num_terms = 1 + max(self.id2word.keys()) - else: - self.num_terms = 0 - - if self.num_terms == 0: - raise ValueError("cannot compute LDA over an empty collection (no terms)") - - logger.info('Vocabulary consists of %d words.', self.num_terms) - - self.corpus = corpus - self.iterations = iterations - self.num_topics = num_topics - self.threshold = threshold - self.alpha = alpha - self.eta = eta - self.author2doc = author2doc - self.doc2author = doc2author - self.num_docs = len(corpus) - self.num_authors = len(author2doc) - self.eval_every = eval_every - self.random_state = random_state - - logger.info('Number of authors: %d.', self.num_authors) - - # TODO: find a way out of this nonsense. - self.authorid2idx = dict(zip(list(author2doc.keys()), xrange(self.num_authors))) - self.authoridx2id = dict(zip(xrange(self.num_authors), list(author2doc.keys()))) - - self.random_state = get_random_state(random_state) - - if corpus is not None: - self.inference(corpus, author2doc, doc2author) - - def inference(self, corpus=None, author2doc=None, doc2author=None): - if corpus is None: - corpus = self.corpus - - logger.info('Starting inference. Training on %d documents.', len(corpus)) - - # Initial value of gamma and lambda. - # NOTE: parameters of gamma distribution same as in `ldamodel`. - var_gamma = self.random_state.gamma(100., 1. / 100., - (self.num_authors, self.num_topics)) - var_lambda = self.random_state.gamma(100., 1. 
/ 100., - (self.num_topics, self.num_terms)) - - # Initialize mu. - # mu is 1/|A_d| if a is in A_d, zero otherwise. - # var_mu is essentially a (self.num_docs, self.num_terms, self.num_authors) sparse matrix, - # which we represent using a dictionary. - # TODO: consider initializing mu randomly. - var_mu = dict() - for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - for v in ids: - authors_d = doc2author[d] # List of author IDs for document d. - for aid in authors_d: - a = self.authorid2idx[aid] - # Draw mu from gamma distribution. - # var_mu[(d, v, a)] = self.random_state.gamma(100., 1. / 100., (1,))[0] - var_mu[(d, v, a)] = 1 / len(authors_d) - # Normalize mu. - # mu_sum = 0.0 - # for aid_prime in authors_d: - # a_prime = self.authorid2idx[aid] - # mu_sum += var_mu[(d, v, a)] - - # for aid_prime in authors_d: - # a_prime = self.authorid2idx[aid] - # var_mu[(d, v, a)] *= 1 / mu_sum - - var_phi = numpy.zeros((self.num_docs, self.num_terms, self.num_topics)) - - # TODO: consider how to vectorize opterations as much as - # possible. - # TODO: check vector and matrix dimensions, and ensure that - # things are multiplied along the correct dimensions. - - Elogtheta = dirichlet_expectation(var_gamma) - Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) - likelihood = self.eval_likelihood(Elogtheta, Elogbeta) - logger.info('Likelihood: %.3e', likelihood) - for iteration in xrange(self.iterations): - # Update phi. - for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - authors_d = doc2author[d] # List of author IDs for document d. - - # Update phi. - for v in ids: - for k in xrange(self.num_topics): - # Average Elogtheta over authors a in document d. - avgElogtheta = 0.0 - for aid in authors_d: - a = self.authorid2idx[aid] - avgElogtheta += var_mu[(d, v, a)] * Elogtheta[a, k] - expavgElogtheta = numpy.exp(avgElogtheta) - - # Compute phi. 
- # TODO: avoid computing phi if possible. - var_phi[d, v, k] = expavgElogtheta * expElogbeta[k, v] - # Normalize phi. - #(log_var_phi_dv, _) = log_normalize(var_phi[d, v, :]) - (log_var_phi_dv, _) = log_normalize(numpy.log(var_phi[d, v, :])) - var_phi[d, v, :] = numpy.exp(log_var_phi_dv) - - # Update mu. - for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - authors_d = doc2author[d] # List of author IDs for document d. - # author_prior_prob = 1. / len(authors_d) - for v in ids: - mu_sum = 0.0 - for aid in authors_d: - a = self.authorid2idx[aid] - # Average Elogtheta over topics k. - avgElogtheta = 0.0 - for k in xrange(self.num_topics): - avgElogtheta += var_phi[d, v, k] * Elogtheta[a, k] - expavgElogtheta = numpy.exp(avgElogtheta) - - # Compute mu. - # TODO: avoid computing mu if possible. - # var_mu[(d, v, a)] = author_prior_prob * expavgElogtheta - var_mu[(d, v, a)] = expavgElogtheta - mu_sum += var_mu[(d, v, a)] - - mu_norm_const = 1.0 / mu_sum - for aid in authors_d: - a = self.authorid2idx[aid] - var_mu[(d, v, a)] *= mu_norm_const - - # Update gamma. - for a in xrange(self.num_authors): - for k in xrange(self.num_topics): - aid = self.authoridx2id[a] - docs_a = self.author2doc[aid] - var_gamma[a, k] = 0.0 - var_gamma[a, k] += self.alpha - for d in docs_a: - # TODO: if this document doesn't exist, we will have problems here. Could to an "if corpus.get(d)" type of thing. - doc = corpus[d] - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - for vi, v in enumerate(ids): - var_gamma[a, k] += cts[vi] * var_mu[(d, v, a)] * var_phi[d, v, k] - - # Update Elogtheta, since gamma has been updated. - Elogtheta = dirichlet_expectation(var_gamma) - - # Update lambda. 
- for k in xrange(self.num_topics): - for v in xrange(self.num_terms): - # TODO: highly unnecessary: - var_lambda[k, v] = 0.0 - var_lambda[k, v] += self.eta - for d, doc in enumerate(corpus): - # Get the count of v in doc. If v is not in doc, return 0. - cnt = dict(doc).get(v, 0) - var_lambda[k, v] += cnt * var_phi[d, v, k] - #ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - #cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - #for vi, v in enumerate(ids): - # # FIXME: I'm 90% sure this is wrong. - # var_lambda[k, v] += cts[vi] * var_phi[d, v, k] - - # Update Elogbeta, since lambda has been updated. - Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) - - - logger.info('All variables updated.') - - # Print topics: - self.var_lambda = var_lambda - #pprint(self.show_topics()) - - # Evaluate likelihood. - if (iteration + 1) % self.eval_every == 0: - prev_likelihood = likelihood - likelihood = self.eval_likelihood(Elogtheta, Elogbeta) - logger.info('Likelihood: %.3e', likelihood) - if numpy.abs(likelihood - prev_likelihood) / abs(prev_likelihood) < self.threshold: - break - # End of update loop (iterations). - - return var_gamma, var_lambda - - def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): - """ - Note that this is not strictly speaking a likelihood. - - Compute the expectation of the log conditional likelihood of the data, - - E_q[log p(w_d | theta, beta, A_d)], - - where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. - """ - - # TODO: call this something other than "likelihood". - - # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. - - if doc_ids is None: - docs = self.corpus - else: - docs = [self.corpus[d] for d in doc_ids] - - likelihood = 0.0 - for d, doc in enumerate(docs): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
- authors_d = self.doc2author[d] - likelihood_d = 0.0 - for vi, v in enumerate(ids): - for k in xrange(self.num_topics): - for aid in authors_d: - a = self.authorid2idx[aid] - likelihood_d += numpy.log(cts[vi]) + Elogtheta[a, k] + Elogbeta[k, v] - author_prior_prob = 1.0 / len(authors_d) - likelihood_d += numpy.log(author_prior_prob) - likelihood += likelihood_d - - # For per-word likelihood, do: - # likelihood *= 1 /sum(len(doc) for doc in docs) - - # TODO: can I do something along the lines of: - # likelihood += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) - - return likelihood - - # Overriding LdaModel.get_topic_terms. - def get_topic_terms(self, topicid, topn=10): - """ - Return a list of `(word_id, probability)` 2-tuples for the most - probable words in topic `topicid`. - Only return 2-tuples for the topn most probable words (ignore the rest). - """ - topic = self.var_lambda[topicid, :] - topic = topic / topic.sum() # normalize to probability distribution - bestn = matutils.argsort(topic, topn, reverse=True) - return [(id, topic[id]) for id in bestn] - - - - - - - From 10d2b36b2889cf76dd1688617206d40cab7172e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Sun, 9 Oct 2016 15:58:27 +0200 Subject: [PATCH 009/100] Changed name of online algorithm class and file. --- gensim/models/{onlineatvb.py => atvb.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename gensim/models/{onlineatvb.py => atvb.py} (99%) diff --git a/gensim/models/onlineatvb.py b/gensim/models/atvb.py similarity index 99% rename from gensim/models/onlineatvb.py rename to gensim/models/atvb.py index b966e10ffe..62ff1e221d 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/atvb.py @@ -34,7 +34,7 @@ logger = logging.getLogger(__name__) -class OnlineAtVb(LdaModel): +class AtVb(LdaModel): """ Train the author-topic model using online variational Bayes. 
""" From c94f5167ddef6bb964628a59342908fb1e6a3147 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Mon, 10 Oct 2016 12:00:47 +0200 Subject: [PATCH 010/100] Made some changes to how the likelihood is computed. --- gensim/models/atvb.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 62ff1e221d..e0deb91570 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -194,7 +194,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): (log_var_mu_v, _) = log_normalize(numpy.log(var_mu[v, :])) var_mu[v, :] = numpy.exp(log_var_mu_v) - # Update gamma. for a in xrange(len(authors_d)): for k in xrange(self.num_topics): @@ -276,25 +275,24 @@ def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): likelihood = 0.0 for d, doc in enumerate(docs): + authors_d = self.doc2author[d] ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
- authors_d = self.doc2author[d] likelihood_d = 0.0 for vi, v in enumerate(ids): + likelihood_v = 0.0 for k in xrange(self.num_topics): for aid in authors_d: a = self.authorid2idx[aid] - likelihood_d += numpy.log(cts[vi]) + Elogtheta[a, k] + Elogbeta[k, v] - author_prior_prob = 1.0 / len(authors_d) - likelihood_d += numpy.log(author_prior_prob) - likelihood += likelihood_d + likelihood_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + likelihood_d += cts[vi] * numpy.log(likelihood_v) + likelihood += numpy.log(1.0 / len(authors_d)) + likelihood_d + #authors_idxs = [self.authorid2idx[aid] for aid in authors_d] + #likelihood += author_prior_prob * numpy.sum(cnt * numpy.log(numpy.sum(numpy.exp(logsumexp(Elogtheta[a, :] + Elogbeta[:, id])) for a in authors_idxs)) for id, cnt in doc) # For per-word likelihood, do: # likelihood *= 1 /sum(len(doc) for doc in docs) - # TODO: can I do something along the lines of: - # likelihood += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) - return likelihood def eval_word_prob(self, var_gamma, var_lambda, doc_ids=None): From a1d758f8fcaeaa826d5334819b167ffa15b44a67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Mon, 10 Oct 2016 12:04:14 +0200 Subject: [PATCH 011/100] Changed the name of the online algorithm again. --- gensim/models/onlineatvb.py | 357 ++++++++++++++++++++++++++++++++++++ 1 file changed, 357 insertions(+) create mode 100644 gensim/models/onlineatvb.py diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py new file mode 100644 index 0000000000..4d0f073f4c --- /dev/null +++ b/gensim/models/onlineatvb.py @@ -0,0 +1,357 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2011 Radim Rehurek +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Author-topic model. 
+""" + +import pdb +from pdb import set_trace as st + +import logging +import numpy +import numbers + +from gensim import utils, matutils +from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models import LdaModel +from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. +from six.moves import xrange + +from pprint import pprint + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger(__name__) + + +class OnlineAtVb(LdaModel): + """ + Train the author-topic model using online variational Bayes. + """ + # TODO: inherit interfaces.TransformationABC. + + def __init__(self, corpus=None, num_topics=100, id2word=None, + author2doc=None, doc2author=None, threshold=0.001, + iterations=10, alpha=None, eta=None, decay=0.5, offset=1.0, + eval_every=1, random_state=None): + + # TODO: require only author2doc OR doc2author, and construct the missing one automatically. 
+ + if alpha is None: + alpha = 1.0 / num_topics + if eta is None: + eta = 1.0 / num_topics + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + logger.info('Vocabulary consists of %d words.', self.num_terms) + + if author2doc is None or doc2author is None: + raise ValueError('author2doc and doc2author must be supplied.') + + self.corpus = corpus + self.iterations = iterations + self.num_topics = num_topics + self.threshold = threshold + self.alpha = alpha + self.eta = eta + self.decay = decay + self.offset = offset + self.author2doc = author2doc + self.doc2author = doc2author + self.num_authors = len(author2doc) + self.eval_every = eval_every + self.random_state = random_state + + # Some of the methods in LdaModel are used in this class. + # I.e. composition is used instead of inheriting the LdaModel class. + self.ldamodel = LdaModel(id2word=self.id2word) + + logger.info('Number of authors: %d.', self.num_authors) + + # TODO: find a way out of this nonsense. + self.authorid2idx = dict(zip(list(author2doc.keys()), xrange(self.num_authors))) + self.authoridx2id = dict(zip(xrange(self.num_authors), list(author2doc.keys()))) + + self.random_state = get_random_state(random_state) + + if corpus is not None: + (self.var_gamma, self.var_lambda) = self.inference(corpus, author2doc, doc2author) + else: + self.var_lambda = self.random_state.gamma(100., 1. 
/ 100., + (self.num_topics, self.num_terms)) + + def rho(self, iteration): + return pow(self.offset + iteration, -self.decay) + + def inference(self, corpus=None, author2doc=None, doc2author=None): + if corpus is None: + corpus = self.corpus.copy() + + self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. + + logger.info('Starting inference. Training on %d documents.', len(corpus)) + + # Initial values of gamma and lambda. + # NOTE: parameters of gamma distribution same as in `ldamodel`. + init_gamma = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + init_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + + converged = 0 + + # TODO: consider making phi and mu sparse. + var_phi = numpy.zeros((self.num_terms, self.num_topics)) + + var_gamma = init_gamma.copy() + var_lambda = init_lambda.copy() + tilde_gamma = init_gamma.copy() + tilde_lambda = init_lambda.copy() + + # Initialize dirichlet expectations. + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = doc2author[d] # List of author IDs for document d. + + # Initialize mu. + # mu is 1/|A_d| if a is in A_d, zero otherwise. + # NOTE: I could do random initialization instead. + # NOTE: maybe not the best idea that mu changes shape every iteration. + var_mu = numpy.zeros((self.num_terms, len(authors_d))) + for v in ids: + for a in xrange(len(authors_d)): + var_mu[v, a] = 1 / len(authors_d) + + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) + + lastgamma = tilde_gamma.copy() + lastlambda = tilde_lambda.copy() + + # Update phi. + for v in ids: + for k in xrange(self.num_topics): + # Average Elogtheta over authors a in document d. 
+ avgElogtheta = 0.0 + for a in xrange(len(authors_d)): + avgElogtheta += var_mu[v, a] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute phi. + # TODO: avoid computing phi if possible. + var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] + + # Normalize phi over k. + (log_var_phi_v, _) = log_normalize(numpy.log(var_phi[v, :])) # NOTE: it might be possible to do this out of the v loop. + var_phi[v, :] = numpy.exp(log_var_phi_v) + + # Update mu. + for v in ids: + # Prior probability of observing author a in document d is one + # over the number of authors in document d. + author_prior_prob = 1.0 / len(authors_d) + for a in xrange(len(authors_d)): + # Average Elogtheta over topics k. + avgElogtheta = 0.0 + for k in xrange(self.num_topics): + avgElogtheta += var_phi[v, k] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute mu over a. + # TODO: avoid computing mu if possible. + var_mu[v, a] = author_prior_prob * expavgElogtheta + + # Normalize mu. + (log_var_mu_v, _) = log_normalize(numpy.log(var_mu[v, :])) + var_mu[v, :] = numpy.exp(log_var_mu_v) + + # Update gamma. + for a in xrange(len(authors_d)): + for k in xrange(self.num_topics): + tilde_gamma[a, k] = 0.0 + for vi, v in enumerate(ids): + tilde_gamma[a, k] += cts[vi] * var_mu[v, a] * var_phi[v, k] + aid = self.authoridx2id[a] + tilde_gamma[a, k] *= len(author2doc[aid]) + tilde_gamma[a, k] += self.alpha + + # Update lambda. + #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T + for k in xrange(self.num_topics): + for vi, v in enumerate(ids): + cnt = dict(doc).get(v, 0) + var_lambda[k, v] = self.eta + cnt * var_phi[v, k] + #tilde_lambda[k, v] = self.eta + self.num_docs * cts[vi] * var_phi[v, k] + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. 
+ if iteration > 0: + maxchange_gamma = numpy.max(abs(tilde_gamma - lastgamma)) + maxchange_lambda = numpy.max(abs(tilde_lambda - lastlambda)) + # logger.info('Max change in gamma: %.3e', maxchange_gamma) + # logger.info('Max change in lambda: %.3e', maxchange_lambda) + if maxchange_gamma < self.threshold and maxchange_lambda < self.threshold: + logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + # Update gamma and lambda. + # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). Same goes for lambda. + rhot = self.rho(d) + var_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. + var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + + # Print topics: + # self.var_lambda = var_lambda + # pprint(self.show_topics()) + + likelihood = self.eval_likelihood(Elogtheta, Elogbeta) + logger.info('Likelihood: %.3e', likelihood) + logger.info('Converged documents: %d/%d', converged, d + 1) + # Evaluating word probabilities: + # likelihood = self.eval_word_prob(var_gamma, var_lambda) + # End of corpus loop. + + return var_gamma, var_lambda + + def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): + """ + Note that this is not strictly speaking a likelihood. + + Compute the expectation of the log conditional likelihood of the data, + + E_q[log p(w_d | theta, beta, A_d)], + + where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. + """ + + # TODO: call this something other than "likelihood". + + # TODO: allow for evaluating test corpus. 
This will require inferring on unseen documents. + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + likelihood = 0.0 + for d, doc in enumerate(docs): + authors_d = self.doc2author[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + likelihood_d = 0.0 + for vi, v in enumerate(ids): + likelihood_v = 0.0 + for k in xrange(self.num_topics): + for aid in authors_d: + a = self.authorid2idx[aid] + likelihood_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + likelihood_d += cts[vi] * numpy.log(likelihood_v) + likelihood += numpy.log(1.0 / len(authors_d)) + likelihood_d + #authors_idxs = [self.authorid2idx[aid] for aid in authors_d] + #likelihood += author_prior_prob * numpy.sum(cnt * numpy.log(numpy.sum(numpy.exp(logsumexp(Elogtheta[a, :] + Elogbeta[:, id])) for a in authors_idxs)) for id, cnt in doc) + + # For per-word likelihood, do: + # likelihood *= 1 /sum(len(doc) for doc in docs) + + return likelihood + + def eval_word_prob(self, var_gamma, var_lambda, doc_ids=None): + """ + Compute the liklihood of the corpus under the model, by first + computing the conditional probabilities of the words in a + document d, + + p(w_d | theta, beta, A_d), + + summing over all documents, and dividing by the number of documents. + """ + # NOTE: unsure if this is correct. + + norm_gamma = var_gamma.copy() + norm_lambda = var_lambda.copy() + for a in xrange(self.num_authors): + norm_gamma[a, :] = var_gamma[a, :] / var_gamma.sum(axis=1)[a] + for k in xrange(self.num_topics): + norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + word_prob = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ authors_d = self.doc2author[d] + word_prob_d = 0.0 + for vi, v in enumerate(ids): + for k in xrange(self.num_topics): + for aid in authors_d: + a = self.authorid2idx[aid] + word_prob_d += cts[vi] * norm_gamma[a, k] * norm_lambda[k, v] + author_prior_prob = 1.0 / len(authors_d) + word_prob_d += numpy.log(author_prior_prob) + word_prob += word_prob_d + word_prob *= 1 / len(docs) + + return word_prob + + # Overriding LdaModel.get_topic_terms. + def get_topic_terms(self, topicid, topn=10): + """ + Return a list of `(word_id, probability)` 2-tuples for the most + probable words in topic `topicid`. + Only return 2-tuples for the topn most probable words (ignore the rest). + """ + topic = self.var_lambda[topicid, :] + topic = topic / topic.sum() # normalize to probability distribution + bestn = matutils.argsort(topic, topn, reverse=True) + return [(id, topic[id]) for id in bestn] + + + + + + + From 46cc8bf3ea0696b7c8fb41de03c61e49bb048e9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Mon, 10 Oct 2016 12:04:36 +0200 Subject: [PATCH 012/100] Brought the offline algorithm back. --- gensim/models/atvb.py | 300 ++++++++++++++++++------------------------ 1 file changed, 128 insertions(+), 172 deletions(-) diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index e0deb91570..ff04f8c772 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -31,18 +31,18 @@ # maxentropy has been removed in recent releases, logsumexp now in misc from scipy.misc import logsumexp -logger = logging.getLogger(__name__) +logger = logging.getLogger('gensim.models.atmodel') class AtVb(LdaModel): """ - Train the author-topic model using online variational Bayes. + Train the author-topic model using variational Bayes. """ - # TODO: inherit interfaces.TransformationABC. + # TODO: inherit interfaces.TransformationABC. Probably not necessary if I'm inheriting LdaModel. 
def __init__(self, corpus=None, num_topics=100, id2word=None, author2doc=None, doc2author=None, threshold=0.001, - iterations=10, alpha=None, eta=None, decay=0.5, offset=1.0, + iterations=10, alpha=None, eta=None, eval_every=1, random_state=None): # TODO: require only author2doc OR doc2author, and construct the missing one automatically. @@ -67,30 +67,22 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, if self.num_terms == 0: raise ValueError("cannot compute LDA over an empty collection (no terms)") - + logger.info('Vocabulary consists of %d words.', self.num_terms) - if author2doc is None or doc2author is None: - raise ValueError('author2doc and doc2author must be supplied.') - self.corpus = corpus self.iterations = iterations self.num_topics = num_topics self.threshold = threshold self.alpha = alpha self.eta = eta - self.decay = decay - self.offset = offset self.author2doc = author2doc self.doc2author = doc2author + self.num_docs = len(corpus) self.num_authors = len(author2doc) self.eval_every = eval_every self.random_state = random_state - # Some of the methods in LdaModel are used in this class. - # I.e. composition is used instead of inheriting the LdaModel class. - self.ldamodel = LdaModel(id2word=self.id2word) - logger.info('Number of authors: %d.', self.num_authors) # TODO: find a way out of this nonsense. @@ -100,156 +92,161 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.random_state = get_random_state(random_state) if corpus is not None: - (self.var_gamma, self.var_lambda) = self.inference(corpus, author2doc, doc2author) - else: - self.var_lambda = self.random_state.gamma(100., 1. 
/ 100., - (self.num_topics, self.num_terms)) - - def rho(self, iteration): - return pow(self.offset + iteration, -self.decay) + self.inference(corpus, author2doc, doc2author) def inference(self, corpus=None, author2doc=None, doc2author=None): if corpus is None: - corpus = self.corpus.copy() - - self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. + corpus = self.corpus logger.info('Starting inference. Training on %d documents.', len(corpus)) - # Initial values of gamma and lambda. + # Initial value of gamma and lambda. # NOTE: parameters of gamma distribution same as in `ldamodel`. - init_gamma = self.random_state.gamma(100., 1. / 100., + var_gamma = self.random_state.gamma(100., 1. / 100., (self.num_authors, self.num_topics)) - init_lambda = self.random_state.gamma(100., 1. / 100., + var_lambda = self.random_state.gamma(100., 1. / 100., (self.num_topics, self.num_terms)) - converged = 0 - - # TODO: consider making phi and mu sparse. - var_phi = numpy.zeros((self.num_terms, self.num_topics)) - - var_gamma = init_gamma.copy() - var_lambda = init_lambda.copy() - tilde_gamma = init_gamma.copy() - tilde_lambda = init_lambda.copy() - - # Initialize dirichlet expectations. - Elogtheta = dirichlet_expectation(var_gamma) - Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) + # Initialize mu. + # mu is 1/|A_d| if a is in A_d, zero otherwise. + # var_mu is essentially a (self.num_docs, self.num_terms, self.num_authors) sparse matrix, + # which we represent using a dictionary. + # TODO: consider initializing mu randomly. + var_mu = dict() for d, doc in enumerate(corpus): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - authors_d = doc2author[d] # List of author IDs for document d. - - # Initialize mu. - # mu is 1/|A_d| if a is in A_d, zero otherwise. - # NOTE: I could do random initialization instead. 
- # NOTE: maybe not the best idea that mu changes shape every iteration. - var_mu = numpy.zeros((self.num_terms, len(authors_d))) for v in ids: - for a in xrange(len(authors_d)): - var_mu[v, a] = 1 / len(authors_d) + authors_d = doc2author[d] # List of author IDs for document d. + for aid in authors_d: + a = self.authorid2idx[aid] + # Draw mu from gamma distribution. + # var_mu[(d, v, a)] = self.random_state.gamma(100., 1. / 100., (1,))[0] + var_mu[(d, v, a)] = 1 / len(authors_d) + # Normalize mu. + # mu_sum = 0.0 + # for aid_prime in authors_d: + # a_prime = self.authorid2idx[aid] + # mu_sum += var_mu[(d, v, a)] + + # for aid_prime in authors_d: + # a_prime = self.authorid2idx[aid] + # var_mu[(d, v, a)] *= 1 / mu_sum + + var_phi = numpy.zeros((self.num_docs, self.num_terms, self.num_topics)) + + # TODO: consider how to vectorize opterations as much as + # possible. + # TODO: check vector and matrix dimensions, and ensure that + # things are multiplied along the correct dimensions. - for iteration in xrange(self.iterations): - #logger.info('iteration %i', iteration) - - lastgamma = tilde_gamma.copy() - lastlambda = tilde_lambda.copy() + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + likelihood = self.eval_likelihood(Elogtheta, Elogbeta) + logger.info('Likelihood: %.3e', likelihood) + for iteration in xrange(self.iterations): + # Update phi. + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + authors_d = doc2author[d] # List of author IDs for document d. # Update phi. for v in ids: for k in xrange(self.num_topics): # Average Elogtheta over authors a in document d. avgElogtheta = 0.0 - for a in xrange(len(authors_d)): - avgElogtheta += var_mu[v, a] * Elogtheta[a, k] + for aid in authors_d: + a = self.authorid2idx[aid] + avgElogtheta += var_mu[(d, v, a)] * Elogtheta[a, k] expavgElogtheta = numpy.exp(avgElogtheta) # Compute phi. 
# TODO: avoid computing phi if possible. - var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] - - # Normalize phi over k. - (log_var_phi_v, _) = log_normalize(numpy.log(var_phi[v, :])) # NOTE: it might be possible to do this out of the v loop. - var_phi[v, :] = numpy.exp(log_var_phi_v) - - # Update mu. + var_phi[d, v, k] = expavgElogtheta * expElogbeta[k, v] + # Normalize phi. + #(log_var_phi_dv, _) = log_normalize(var_phi[d, v, :]) + (log_var_phi_dv, _) = log_normalize(numpy.log(var_phi[d, v, :])) + var_phi[d, v, :] = numpy.exp(log_var_phi_dv) + + # Update mu. + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + authors_d = doc2author[d] # List of author IDs for document d. + # author_prior_prob = 1. / len(authors_d) for v in ids: - # Prior probability of observing author a in document d is one - # over the number of authors in document d. - author_prior_prob = 1.0 / len(authors_d) - for a in xrange(len(authors_d)): + mu_sum = 0.0 + for aid in authors_d: + a = self.authorid2idx[aid] # Average Elogtheta over topics k. avgElogtheta = 0.0 for k in xrange(self.num_topics): - avgElogtheta += var_phi[v, k] * Elogtheta[a, k] + avgElogtheta += var_phi[d, v, k] * Elogtheta[a, k] expavgElogtheta = numpy.exp(avgElogtheta) - # Compute mu over a. + # Compute mu. # TODO: avoid computing mu if possible. - var_mu[v, a] = author_prior_prob * expavgElogtheta + # var_mu[(d, v, a)] = author_prior_prob * expavgElogtheta + var_mu[(d, v, a)] = expavgElogtheta + mu_sum += var_mu[(d, v, a)] - # Normalize mu. - (log_var_mu_v, _) = log_normalize(numpy.log(var_mu[v, :])) - var_mu[v, :] = numpy.exp(log_var_mu_v) + mu_norm_const = 1.0 / mu_sum + for aid in authors_d: + a = self.authorid2idx[aid] + var_mu[(d, v, a)] *= mu_norm_const - # Update gamma. - for a in xrange(len(authors_d)): - for k in xrange(self.num_topics): - tilde_gamma[a, k] = 0.0 + # Update gamma. 
+ for a in xrange(self.num_authors): + for k in xrange(self.num_topics): + aid = self.authoridx2id[a] + docs_a = self.author2doc[aid] + var_gamma[a, k] = 0.0 + var_gamma[a, k] += self.alpha + for d in docs_a: + # TODO: if this document doesn't exist, we will have problems here. Could to an "if corpus.get(d)" type of thing. + doc = corpus[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. for vi, v in enumerate(ids): - tilde_gamma[a, k] += cts[vi] * var_mu[v, a] * var_phi[v, k] - aid = self.authoridx2id[a] - tilde_gamma[a, k] *= len(author2doc[aid]) - tilde_gamma[a, k] += self.alpha + var_gamma[a, k] += cts[vi] * var_mu[(d, v, a)] * var_phi[d, v, k] - # Update lambda. - #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T - for k in xrange(self.num_topics): - for vi, v in enumerate(ids): - cnt = dict(doc).get(v, 0) - var_lambda[k, v] = self.eta + cnt * var_phi[v, k] - #tilde_lambda[k, v] = self.eta + self.num_docs * cts[vi] * var_phi[v, k] - - # Check for convergence. - # Criterion is mean change in "local" gamma and lambda. - if iteration > 0: - maxchange_gamma = numpy.max(abs(tilde_gamma - lastgamma)) - maxchange_lambda = numpy.max(abs(tilde_lambda - lastlambda)) - # logger.info('Max change in gamma: %.3e', maxchange_gamma) - # logger.info('Max change in lambda: %.3e', maxchange_lambda) - if maxchange_gamma < self.threshold and maxchange_lambda < self.threshold: - logger.info('Converged after %d iterations.', iteration) - converged += 1 - break - # End of iterations loop. - - # Update gamma and lambda. - # Interpolation between document d's "local" gamma (tilde_gamma), - # and "global" gamma (var_gamma). Same goes for lambda. - rhot = self.rho(d) - var_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma - # Note that we only changed the elements in lambda corresponding to - # the words in document d, hence the [:, ids] indexing. 
- var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] - - # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + # Update Elogtheta, since gamma has been updated. Elogtheta = dirichlet_expectation(var_gamma) + + # Update lambda. + for k in xrange(self.num_topics): + for v in xrange(self.num_terms): + # TODO: highly unnecessary: + var_lambda[k, v] = 0.0 + var_lambda[k, v] += self.eta + for d, doc in enumerate(corpus): + # Get the count of v in doc. If v is not in doc, return 0. + cnt = dict(doc).get(v, 0) + var_lambda[k, v] += cnt * var_phi[d, v, k] + #ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + #cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + #for vi, v in enumerate(ids): + # # FIXME: I'm 90% sure this is wrong. + # var_lambda[k, v] += cts[vi] * var_phi[d, v, k] + + # Update Elogbeta, since lambda has been updated. Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - # Print topics: - # self.var_lambda = var_lambda - # pprint(self.show_topics()) + logger.info('All variables updated.') - likelihood = self.eval_likelihood(Elogtheta, Elogbeta) - logger.info('Likelihood: %.3e', likelihood) - logger.info('Converged documents: %d/%d', converged, d + 1) - # Evaluating word probabilities: - # likelihood = self.eval_word_prob(var_gamma, var_lambda) - # End of corpus loop. + # Print topics: + self.var_lambda = var_lambda + #pprint(self.show_topics()) + + # Evaluate likelihood. + if (iteration + 1) % self.eval_every == 0: + prev_likelihood = likelihood + likelihood = self.eval_likelihood(Elogtheta, Elogbeta) + logger.info('Likelihood: %.3e', likelihood) + if numpy.abs(likelihood - prev_likelihood) / abs(prev_likelihood) < self.threshold: + break + # End of update loop (iterations). 
return var_gamma, var_lambda @@ -275,68 +272,27 @@ def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): likelihood = 0.0 for d, doc in enumerate(docs): - authors_d = self.doc2author[d] ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] likelihood_d = 0.0 for vi, v in enumerate(ids): - likelihood_v = 0.0 for k in xrange(self.num_topics): for aid in authors_d: a = self.authorid2idx[aid] - likelihood_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) - likelihood_d += cts[vi] * numpy.log(likelihood_v) - likelihood += numpy.log(1.0 / len(authors_d)) + likelihood_d - #authors_idxs = [self.authorid2idx[aid] for aid in authors_d] - #likelihood += author_prior_prob * numpy.sum(cnt * numpy.log(numpy.sum(numpy.exp(logsumexp(Elogtheta[a, :] + Elogbeta[:, id])) for a in authors_idxs)) for id, cnt in doc) + likelihood_d += numpy.log(cts[vi]) + Elogtheta[a, k] + Elogbeta[k, v] + author_prior_prob = 1.0 / len(authors_d) + likelihood_d += numpy.log(author_prior_prob) + likelihood += likelihood_d # For per-word likelihood, do: # likelihood *= 1 /sum(len(doc) for doc in docs) - return likelihood + # TODO: can I do something along the lines of: + # likelihood += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) - def eval_word_prob(self, var_gamma, var_lambda, doc_ids=None): - """ - Compute the liklihood of the corpus under the model, by first - computing the conditional probabilities of the words in a - document d, - - p(w_d | theta, beta, A_d), - - summing over all documents, and dividing by the number of documents. - """ - # NOTE: unsure if this is correct. 
- - norm_gamma = var_gamma.copy() - norm_lambda = var_lambda.copy() - for a in xrange(self.num_authors): - norm_gamma[a, :] = var_gamma[a, :] / var_gamma.sum(axis=1)[a] - for k in xrange(self.num_topics): - norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] - - if doc_ids is None: - docs = self.corpus - else: - docs = [self.corpus[d] for d in doc_ids] - - word_prob = 0.0 - for d, doc in enumerate(docs): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - authors_d = self.doc2author[d] - word_prob_d = 0.0 - for vi, v in enumerate(ids): - for k in xrange(self.num_topics): - for aid in authors_d: - a = self.authorid2idx[aid] - word_prob_d += cts[vi] * norm_gamma[a, k] * norm_lambda[k, v] - author_prior_prob = 1.0 / len(authors_d) - word_prob_d += numpy.log(author_prior_prob) - word_prob += word_prob_d - word_prob *= 1 / len(docs) + return likelihood - return word_prob - # Overriding LdaModel.get_topic_terms. def get_topic_terms(self, topicid, topn=10): """ From 3e536556ea7dcc2d5586580b7d3356c9399be7ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 11 Oct 2016 12:57:19 +0200 Subject: [PATCH 013/100] Working on bound computation. 
--- docs/notebooks/at_with_nips.ipynb | 116 +++++++++++------------------ gensim/models/atvb.py | 118 +++++++++++++++++++++++++----- gensim/models/onlineatvb.py | 88 +++++++++++++++------- 3 files changed, 200 insertions(+), 122 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 8b15a218ff..3bcb854e39 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": { "collapsed": false }, @@ -61,17 +61,17 @@ "\n", "import logging\n", "\n", - "from gensim.models import OnlineAtVb\n", - "from gensim.models import onlineatvb\n", "from gensim.models import AtVb\n", "from gensim.models import atvb\n", + "from gensim.models import OnlineAtVb\n", + "from gensim.models import onlineatvb\n", "\n", "%matplotlib inline" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -95,7 +95,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -131,7 +131,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -156,7 +156,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": { "collapsed": false }, @@ -179,7 +179,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": { "collapsed": true }, @@ -194,7 +194,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "metadata": { "collapsed": true }, @@ -213,7 +213,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 8, "metadata": { "collapsed": true }, @@ -225,7 +225,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -234,7 +234,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+
eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g
/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sC
Ddbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaS
RD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQ
L9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELSh
qTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVI
OgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs
5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMy
sST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp
5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVl
LchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -260,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 10, "metadata": { "collapsed": true }, @@ -278,7 +278,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 11, "metadata": { "collapsed": true }, @@ -292,7 +292,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 12, "metadata": { "collapsed": false }, @@ -313,7 +313,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 13, "metadata": { "collapsed": false }, @@ -338,7 +338,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 14, "metadata": { "collapsed": false }, @@ -352,7 +352,7 @@ }, { "cell_type": "code", - "execution_count": 16, + 
"execution_count": 15, "metadata": { "collapsed": false }, @@ -394,7 +394,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 85, "metadata": { "collapsed": false }, @@ -406,21 +406,21 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 86, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token,\n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-7,\n", - " iterations=10, alpha=None, eta=None,\n", - " eval_every=1, random_state=0)\n" + "model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, \n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12,\n", + " iterations=10, alpha=None, eta=None,\n", + " eval_every=1, random_state=1)" ] }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -429,28 +429,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.041*gradient + 0.032*image + 0.032*node + 0.018*flow + 0.018*technique + 0.017*constraint + 0.017*square + 0.016*training + 0.015*analog + 0.015*component'),\n", + " '0.043*gradient + 0.028*image + 0.026*constraint + 0.020*square + 0.019*technique + 0.019*training + 0.018*optimal + 0.017*matrix + 0.016*surface + 0.015*component'),\n", " (1,\n", - " '0.064*processor + 0.050*activation + 0.038*edge + 0.030*update + 0.019*operation + 0.017*column + 0.016*stored + 0.016*machine + 0.016*store + 0.013*address'),\n", + " '0.071*processor + 0.052*activation + 0.039*edge + 0.025*update + 0.021*operation + 0.018*machine + 0.018*column + 0.017*stored + 0.016*store + 0.015*address'),\n", " (2,\n", - " '0.102*map + 0.093*brain + 0.079*field + 0.041*location + 0.035*series + 0.034*functional + 0.029*potential + 0.026*activity + 0.025*left + 0.020*strategy'),\n", + " '0.099*brain + 0.095*map + 0.065*field + 0.038*functional + 0.031*series + 0.029*location + 
0.026*activity + 0.025*potential + 0.024*left + 0.020*subject'),\n", " (3,\n", - " '0.028*cell + 0.023*interaction + 0.023*cycle + 0.022*dynamic + 0.021*respect + 0.019*exist + 0.018*with_respect + 0.018*control + 0.018*total + 0.017*path'),\n", + " '0.055*update + 0.052*edge + 0.038*processor + 0.037*current + 0.037*control + 0.027*activation + 0.021*provided + 0.015*rate + 0.015*implementation + 0.014*total'),\n", " (4,\n", - " '0.035*group + 0.029*scheme + 0.023*matrix + 0.020*noise + 0.020*representation + 0.019*probability + 0.018*capacity + 0.017*log + 0.016*image + 0.015*recognition'),\n", + " '0.030*scheme + 0.024*group + 0.021*noise + 0.021*representation + 0.019*capacity + 0.019*probability + 0.018*matrix + 0.016*log + 0.015*recognition + 0.015*recall'),\n", " (5,\n", - " '0.053*edge + 0.040*mapping + 0.027*processor + 0.027*current + 0.025*energy + 0.023*associated + 0.020*propagation + 0.019*estimate + 0.019*constructed + 0.019*activation'),\n", + " '0.036*image + 0.026*flow + 0.021*associated + 0.021*analog + 0.018*approximation + 0.018*original + 0.018*degree + 0.016*moving + 0.015*mapping + 0.015*view'),\n", " (6,\n", - " '0.024*dynamic + 0.023*phase + 0.019*variable + 0.016*binary + 0.015*required + 0.014*noise + 0.014*becomes + 0.014*limit + 0.013*finally + 0.013*via'),\n", + " '0.028*dynamic + 0.020*feedback + 0.020*phase + 0.015*variable + 0.015*path + 0.015*cell + 0.014*group + 0.013*hopfield + 0.013*noise + 0.012*useful'),\n", " (7,\n", - " '0.040*potential + 0.040*cell + 0.037*artificial + 0.027*computational + 0.023*connectivity + 0.020*fact_that + 0.019*though + 0.017*biological + 0.015*spatial + 0.015*architecture'),\n", + " '0.055*cell + 0.045*potential + 0.040*connectivity + 0.032*artificial + 0.029*synaptic + 0.026*computational + 0.025*architecture + 0.018*fact_that + 0.017*though + 0.016*biological'),\n", " (8,\n", - " '0.037*synaptic + 0.035*connectivity + 0.029*cell + 0.025*back_propagation + 0.023*capability + 0.023*architecture + 
0.022*back + 0.021*potential + 0.021*propagation + 0.019*target'),\n", + " '0.064*node + 0.040*propagation + 0.038*back + 0.036*back_propagation + 0.022*flow + 0.019*target + 0.015*energy + 0.014*course + 0.014*hidden + 0.013*analog'),\n", " (9,\n", - " '0.080*image + 0.037*stimulus + 0.020*visual + 0.018*recall + 0.018*vision + 0.017*center + 0.017*associated + 0.017*phase + 0.017*dimensional + 0.017*stable')]" + " '0.097*image + 0.031*stimulus + 0.025*dimensional + 0.024*vision + 0.021*center + 0.017*visual + 0.015*scale + 0.015*phase + 0.014*recall + 0.012*non_linear')]" ] }, - "execution_count": 42, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } @@ -468,7 +468,7 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 83, "metadata": { "collapsed": false }, @@ -480,7 +480,7 @@ }, { "cell_type": "code", - "execution_count": 60, + "execution_count": 84, "metadata": { "collapsed": false }, @@ -494,44 +494,12 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": null, "metadata": { - "collapsed": false + "collapsed": true }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.021*field + 0.015*synaptic + 0.014*matrix + 0.014*representation + 0.013*connectivity + 0.012*flow + 0.012*mapping + 0.012*gradient + 0.012*log + 0.010*architecture'),\n", - " (1,\n", - " '0.009*connectivity + 0.006*image + 0.006*log + 0.006*node + 0.005*scale + 0.005*associative_memory + 0.005*recognition + 0.005*analog + 0.005*series + 0.005*architecture'),\n", - " (2,\n", - " '0.013*recall + 0.010*architecture + 0.007*series + 0.007*connectivity + 0.007*capability + 0.006*node + 0.006*recognition + 0.005*analog + 0.005*gain + 0.005*storage'),\n", - " (3,\n", - " '0.015*scale + 0.013*synaptic + 0.007*architecture + 0.007*image + 0.006*series + 0.006*associative_memory + 0.006*connectivity + 0.005*subject + 0.005*bound + 0.005*recall'),\n", - " (4,\n", - " '0.016*connectivity + 0.013*associative_memory + 
0.010*energy + 0.008*u + 0.007*mapping + 0.007*circuit + 0.006*proceeding + 0.006*log + 0.006*image + 0.006*experiment'),\n", - " (5,\n", - " '0.009*recall + 0.009*connectivity + 0.007*energy + 0.006*word + 0.006*series + 0.006*associative_memory + 0.006*synaptic + 0.005*technology + 0.005*suggested + 0.005*bound'),\n", - " (6,\n", - " '0.015*flow + 0.013*recall + 0.011*synaptic + 0.010*log + 0.008*gradient + 0.007*circuit + 0.006*image + 0.006*recognition + 0.006*associative_memory + 0.006*location'),\n", - " (7,\n", - " '0.013*image + 0.011*recall + 0.010*component + 0.008*ma + 0.008*connectivity + 0.007*phase + 0.007*limit + 0.006*log + 0.006*analog + 0.006*constraint'),\n", - " (8,\n", - " '0.012*scale + 0.011*connectivity + 0.010*recall + 0.007*series + 0.006*analog + 0.006*associative_memory + 0.005*architecture + 0.005*action + 0.005*come + 0.005*strength'),\n", - " (9,\n", - " '0.009*connectivity + 0.007*subject + 0.007*series + 0.006*recall + 0.006*analog + 0.005*storage + 0.005*associative_memory + 0.005*denker + 0.005*longer + 0.005*architecture')]" - ] - }, - "execution_count": 59, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.show_topics()" - ] + "outputs": [], + "source": [] } ], "metadata": { diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index ff04f8c772..3886fd93d3 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -19,6 +19,7 @@ from gensim.models.ldamodel import dirichlet_expectation, get_random_state from gensim.models import LdaModel from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. 
+from scipy.special import gammaln from six.moves import xrange from pprint import pprint @@ -142,8 +143,13 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogtheta = dirichlet_expectation(var_gamma) Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - likelihood = self.eval_likelihood(Elogtheta, Elogbeta) - logger.info('Likelihood: %.3e', likelihood) + + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta, var_gamma) + beta_bound = self.beta_bound(Elogbeta, var_lambda) + bound = word_bound + theta_bound + beta_bound + #likelihood = self.log_word_prob(var_gamma, var_lambda) + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) for iteration in xrange(self.iterations): # Update phi. for d, doc in enumerate(corpus): @@ -239,18 +245,22 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): self.var_lambda = var_lambda #pprint(self.show_topics()) - # Evaluate likelihood. + # Evaluate bound. if (iteration + 1) % self.eval_every == 0: - prev_likelihood = likelihood - likelihood = self.eval_likelihood(Elogtheta, Elogbeta) - logger.info('Likelihood: %.3e', likelihood) - if numpy.abs(likelihood - prev_likelihood) / abs(prev_likelihood) < self.threshold: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta, var_gamma) + beta_bound = self.beta_bound(Elogbeta, var_lambda) + bound = word_bound + theta_bound + beta_bound + #likelihood = self.log_word_prob(var_gamma, var_lambda) + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.threshold: break # End of update loop (iterations). 
return var_gamma, var_lambda - def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): + def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): """ Note that this is not strictly speaking a likelihood. @@ -261,8 +271,6 @@ def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. """ - # TODO: call this something other than "likelihood". - # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. if doc_ids is None: @@ -270,28 +278,98 @@ def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): else: docs = [self.corpus[d] for d in doc_ids] - likelihood = 0.0 + bound= 0.0 for d, doc in enumerate(docs): + authors_d = self.doc2author[d] ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - authors_d = self.doc2author[d] - likelihood_d = 0.0 + bound_d = 0.0 for vi, v in enumerate(ids): + bound_v = 0.0 for k in xrange(self.num_topics): for aid in authors_d: a = self.authorid2idx[aid] - likelihood_d += numpy.log(cts[vi]) + Elogtheta[a, k] + Elogbeta[k, v] - author_prior_prob = 1.0 / len(authors_d) - likelihood_d += numpy.log(author_prior_prob) - likelihood += likelihood_d + bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + bound_d += cts[vi] * numpy.log(bound_v) + bound += numpy.log(1.0 / len(authors_d)) + bound_d # For per-word likelihood, do: # likelihood *= 1 /sum(len(doc) for doc in docs) - # TODO: can I do something along the lines of: - # likelihood += author_prior_prob * numpy.sum(cnt * sum(logsumexp(Elogtheta[authors_d, :] + Elogbeta[:, id])) for id, cnt in doc) + # TODO: can I do something along the lines of (as in ldamodel): + # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) + + return bound + + def theta_bound(self, Elogtheta, var_gamma, doc_ids=None): + """ + """ + + if doc_ids is None: + docs = self.corpus + else: + 
docs = [self.corpus[d] for d in doc_ids] + + bound = 0.0 + for a in xrange(self.num_authors): + var_gamma_a = var_gamma[a, :] + Elogtheta_a = Elogtheta[a, :] + # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector + bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) + bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) + bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) + + return bound + + def beta_bound(self, Elogbeta, var_lambda, doc_ids=None): + bound = 0.0 + bound += numpy.sum((self.eta - var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(var_lambda, 1))) + + return bound + + def log_word_prob(self, var_gamma, var_lambda, doc_ids=None): + """ + Compute the liklihood of the corpus under the model, by first + computing the conditional probabilities of the words in a + document d, + + p(w_d | theta, beta, A_d), + + summing over all documents, and dividing by the number of documents. + """ + + norm_gamma = var_gamma.copy() + norm_lambda = var_lambda.copy() + for a in xrange(self.num_authors): + norm_gamma[a, :] = var_gamma[a, :] / var_gamma.sum(axis=1)[a] + for k in xrange(self.num_topics): + norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + log_word_prob = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ authors_d = self.doc2author[d] + log_word_prob_d = 0.0 + for vi, v in enumerate(ids): + log_word_prob_v = 0.0 + for k in xrange(self.num_topics): + for aid in authors_d: + a = self.authorid2idx[aid] + log_word_prob_v += norm_gamma[a, k] * norm_lambda[k, v] + log_word_prob_d += cts[vi] * numpy.log(log_word_prob_v) + log_word_prob += numpy.log(1.0 / len(authors_d)) + log_word_prob_d + #authors_idxs = [self.authorid2idx[aid] for aid in authors_d] + #likelihood += author_prior_prob * numpy.sum(cnt * numpy.log(numpy.sum(numpy.exp(logsumexp(Elogtheta[a, :] + Elogbeta[:, id])) for a in authors_idxs)) for id, cnt in doc) - return likelihood + return log_word_prob # Overriding LdaModel.get_topic_terms. def get_topic_terms(self, topicid, topn=10): diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 4d0f073f4c..f6c78d85b8 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -20,6 +20,7 @@ from gensim.models import LdaModel from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. from six.moves import xrange +from scipy.special import gammaln from pprint import pprint @@ -244,16 +245,20 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # self.var_lambda = var_lambda # pprint(self.show_topics()) - likelihood = self.eval_likelihood(Elogtheta, Elogbeta) - logger.info('Likelihood: %.3e', likelihood) + # Evaluate bound. + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta, var_gamma) + beta_bound = self.beta_bound(Elogbeta, var_lambda) + bound = word_bound + theta_bound + beta_bound + #likelihood = self.log_word_prob(var_gamma, var_lambda) + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + logger.info('Converged documents: %d/%d', converged, d + 1) - # Evaluating word probabilities: - # likelihood = self.eval_word_prob(var_gamma, var_lambda) # End of corpus loop. return var_gamma, var_lambda - def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): + def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): """ Note that this is not strictly speaking a likelihood. @@ -263,8 +268,6 @@ def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. """ - - # TODO: call this something other than "likelihood". # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. @@ -273,29 +276,58 @@ def eval_likelihood(self, Elogtheta, Elogbeta, doc_ids=None): else: docs = [self.corpus[d] for d in doc_ids] - likelihood = 0.0 + bound= 0.0 for d, doc in enumerate(docs): authors_d = self.doc2author[d] ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
- likelihood_d = 0.0 + bound_d = 0.0 for vi, v in enumerate(ids): - likelihood_v = 0.0 + bound_v = 0.0 for k in xrange(self.num_topics): for aid in authors_d: a = self.authorid2idx[aid] - likelihood_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) - likelihood_d += cts[vi] * numpy.log(likelihood_v) - likelihood += numpy.log(1.0 / len(authors_d)) + likelihood_d - #authors_idxs = [self.authorid2idx[aid] for aid in authors_d] - #likelihood += author_prior_prob * numpy.sum(cnt * numpy.log(numpy.sum(numpy.exp(logsumexp(Elogtheta[a, :] + Elogbeta[:, id])) for a in authors_idxs)) for id, cnt in doc) + bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + bound_d += cts[vi] * numpy.log(bound_v) + bound += numpy.log(1.0 / len(authors_d)) + bound_d # For per-word likelihood, do: # likelihood *= 1 /sum(len(doc) for doc in docs) - return likelihood + # TODO: can I do something along the lines of (as in ldamodel): + # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) - def eval_word_prob(self, var_gamma, var_lambda, doc_ids=None): + return bound + + def theta_bound(self, Elogtheta, var_gamma, doc_ids=None): + """ + """ + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + bound = 0.0 + for a in xrange(self.num_authors): + var_gamma_a = var_gamma[a, :] + Elogtheta_a = Elogtheta[a, :] + # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector + bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) + bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) + bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) + + return bound + + def beta_bound(self, Elogbeta, var_lambda, doc_ids=None): + bound = 0.0 + bound += numpy.sum((self.eta - var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(var_lambda, 1))) + + return bound + + def 
log_word_prob(self, var_gamma, var_lambda, doc_ids=None): """ Compute the liklihood of the corpus under the model, by first computing the conditional probabilities of the words in a @@ -305,7 +337,6 @@ def eval_word_prob(self, var_gamma, var_lambda, doc_ids=None): summing over all documents, and dividing by the number of documents. """ - # NOTE: unsure if this is correct. norm_gamma = var_gamma.copy() norm_lambda = var_lambda.copy() @@ -319,24 +350,25 @@ def eval_word_prob(self, var_gamma, var_lambda, doc_ids=None): else: docs = [self.corpus[d] for d in doc_ids] - word_prob = 0.0 + log_word_prob = 0.0 for d, doc in enumerate(docs): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. authors_d = self.doc2author[d] - word_prob_d = 0.0 + log_word_prob_d = 0.0 for vi, v in enumerate(ids): + log_word_prob_v = 0.0 for k in xrange(self.num_topics): for aid in authors_d: a = self.authorid2idx[aid] - word_prob_d += cts[vi] * norm_gamma[a, k] * norm_lambda[k, v] - author_prior_prob = 1.0 / len(authors_d) - word_prob_d += numpy.log(author_prior_prob) - word_prob += word_prob_d - word_prob *= 1 / len(docs) - - return word_prob - + log_word_prob_v += norm_gamma[a, k] * norm_lambda[k, v] + log_word_prob_d += cts[vi] * numpy.log(log_word_prob_v) + log_word_prob += numpy.log(1.0 / len(authors_d)) + log_word_prob_d + #authors_idxs = [self.authorid2idx[aid] for aid in authors_d] + #likelihood += author_prior_prob * numpy.sum(cnt * numpy.log(numpy.sum(numpy.exp(logsumexp(Elogtheta[a, :] + Elogbeta[:, id])) for a in authors_idxs)) for id, cnt in doc) + + return log_word_prob + # Overriding LdaModel.get_topic_terms. def get_topic_terms(self, topicid, topn=10): """ From 09666c428a5b8644be39fd83a51ebbf274673409 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 12 Oct 2016 15:40:29 +0200 Subject: [PATCH 014/100] Changed the way the data structure is prepared and how the model accepts it. 
Still work to be done in that area. --- docs/notebooks/at_with_nips.ipynb | 213 ++++++++++++++++++++++-------- gensim/models/atvb.py | 53 +++++++- 2 files changed, 204 insertions(+), 62 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 3bcb854e39..e195373d66 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -71,7 +71,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -90,12 +90,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Load and pre-process data" + "## Load and prepare data structure" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 171, "metadata": { "collapsed": false }, @@ -131,7 +131,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 172, "metadata": { "collapsed": false }, @@ -156,7 +156,63 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 173, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Make a mapping from author ID to author name.\n", + "id2author = dict(zip(range(len(authors_names)), authors_names))" + ] + }, + { + "cell_type": "code", + "execution_count": 174, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Use an integer ID in author2doc, instead of the IDs provided in the NIPS dataset.\n", + "\n", + "# Mapping from ID of document in NIPS datast, to an integer ID.\n", + "doc_id_dict = dict(zip(doc_ids, range(len(doc_ids))))\n", + "\n", + "for a, a_doc_ids in author2doc.items():\n", + " for i, doc_id in enumerate(a_doc_ids):\n", + " author2doc[a][i] = doc_id_dict[doc_id]" + ] + }, + { + "cell_type": "code", + "execution_count": 175, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Make a 
mapping from document IDs to author IDs.\n", + "# Same as in the atvb code.\n", + "doc2author = {}\n", + "for d, _ in enumerate(corpus):\n", + " author_ids = []\n", + " for a, a_doc_ids in author2doc.items():\n", + " if d in a_doc_ids:\n", + " author_ids.append(a)\n", + " doc2author[d] = author_ids" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Pre-process and vectorize data" + ] + }, + { + "cell_type": "code", + "execution_count": 176, "metadata": { "collapsed": false }, @@ -179,7 +235,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 177, "metadata": { "collapsed": true }, @@ -194,7 +250,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 178, "metadata": { "collapsed": true }, @@ -213,7 +269,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 179, "metadata": { "collapsed": true }, @@ -225,7 +281,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 180, "metadata": { "collapsed": false }, @@ -234,7 +290,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+
eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g
/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sC
Ddbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaS
RD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQ
L9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELSh
qTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVI
OgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs
5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMy
sST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp
5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVl
LchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -260,7 +316,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 181, "metadata": { "collapsed": true }, @@ -278,7 +334,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 182, "metadata": { "collapsed": true }, @@ -292,7 +348,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 183, "metadata": { "collapsed": false }, @@ -311,62 +367,107 @@ "print('Number of documents: %d' % len(corpus))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## \"Offline\" AT VB" + ] + }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": { - "collapsed": false + "collapsed": true }, "outputs": [], "source": [ - "# Make a 
mapping from document IDs to author IDs.\n", - "doc2author = {}\n", - "for i, doc_id in enumerate(doc_ids):\n", - " author_ids = []\n", - " for a, a_doc_ids in author2doc.items():\n", - " if doc_id in a_doc_ids:\n", - " author_ids.append(a)\n", - " doc2author[i] = author_ids" + "#model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author,\n", + "# author2doc=author2doc, doc2author=doc2author, threshold=1e-12,\n", + "# iterations=10, alpha=None, eta=None,\n", + "# eval_every=1, random_state=1)" ] }, { - "cell_type": "markdown", - "metadata": {}, + "cell_type": "code", + "execution_count": 185, + "metadata": { + "collapsed": false + }, + "outputs": [], "source": [ - "## \"Offline\" AT VB" + "n_docs = 10\n", + "\n", + "from copy import deepcopy\n", + "\n", + "small_doc2author = deepcopy(dict(list(doc2author.items())[:n_docs]))\n", + "small_doc2author = dict(small_doc2author)\n", + "\n", + "small_corpus = corpus[:n_docs]" ] }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 186, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "small_doc2author = [(d, a) for d, a in list(doc2author.items())[:10]]\n", - "small_doc2author = dict(small_doc2author)\n", + "authors_ids = set()\n", + "author_id_dict\n", + "for d, a_doc_ids in small_doc2author.items():\n", + " for a in a_doc_ids:\n", + " authors_ids.add(a)\n", "\n", - "small_corpus = [corpus[d] for d in small_doc2author.keys()]" + "authors_ids = list(authors_ids)\n", + "author_id_dict = dict(zip(authors_ids, range(len(authors_ids))))" ] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 187, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "small_author2doc = {}\n", - "for d, author_ids in small_doc2author.items():\n", - " for a in author_ids:\n", - " small_author2doc[a] = set()\n", - "for d, author_ids in small_doc2author.items():\n", - " for a in author_ids:\n", - " small_author2doc[a].add(d)\n", - "for a in 
small_author2doc.keys():\n", - " small_author2doc[a] = list(small_author2doc[a])" + "for d, a_ids in small_doc2author.items():\n", + " for i, a in enumerate(a_ids):\n", + " small_doc2author[d][i] = author_id_dict[a]" + ] + }, + { + "cell_type": "code", + "execution_count": 188, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Make a mapping from author IDs to document IDs.\n", + "author2doc = {}\n", + "for a in range(len(author_id_dict)):\n", + " author2doc[a] = []\n", + " for d, a_ids in small_doc2author.items():\n", + " if a in a_ids:\n", + " author2doc[a].append(d)" + ] + }, + { + "cell_type": "code", + "execution_count": 194, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "author_id_dict_rev = dict(zip(range(len(authors_ids)), authors_ids))\n", + "\n", + "small_id2author = {}\n", + "for a, a_id in author_id_dict_rev.items():\n", + " small_id2author[a] = id2author[a_id]" ] }, { @@ -394,7 +495,7 @@ }, { "cell_type": "code", - "execution_count": 85, + "execution_count": 197, "metadata": { "collapsed": false }, @@ -406,13 +507,13 @@ }, { "cell_type": "code", - "execution_count": 86, + "execution_count": 198, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, \n", + "model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author,\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12,\n", " iterations=10, alpha=None, eta=None,\n", " eval_every=1, random_state=1)" @@ -420,7 +521,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 199, "metadata": { "collapsed": false }, @@ -429,28 +530,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.043*gradient + 0.028*image + 0.026*constraint + 0.020*square + 0.019*technique + 0.019*training + 0.018*optimal + 0.017*matrix + 0.016*surface + 0.015*component'),\n", + " '0.080*group + 
0.032*feedback + 0.030*whose + 0.030*matrix + 0.020*obtain + 0.017*computational + 0.016*constraint + 0.015*scheme + 0.015*hence + 0.015*expression'),\n", " (1,\n", - " '0.071*processor + 0.052*activation + 0.039*edge + 0.025*update + 0.021*operation + 0.018*machine + 0.018*column + 0.017*stored + 0.016*store + 0.015*address'),\n", + " '0.037*activation + 0.030*propagation + 0.029*back_propagation + 0.027*back + 0.024*node + 0.023*column + 0.023*address + 0.021*processor + 0.019*edge + 0.017*update'),\n", " (2,\n", - " '0.099*brain + 0.095*map + 0.065*field + 0.038*functional + 0.031*series + 0.029*location + 0.026*activity + 0.025*potential + 0.024*left + 0.020*subject'),\n", + " '0.048*flow + 0.043*analog + 0.038*circuit + 0.037*field + 0.030*node + 0.026*energy + 0.024*location + 0.019*edge + 0.018*current + 0.018*square'),\n", " (3,\n", - " '0.055*update + 0.052*edge + 0.038*processor + 0.037*current + 0.037*control + 0.027*activation + 0.021*provided + 0.015*rate + 0.015*implementation + 0.014*total'),\n", + " '0.037*gradient + 0.026*technique + 0.024*optimal + 0.022*constraint + 0.021*image + 0.018*whether + 0.016*surface + 0.015*right + 0.015*visual + 0.014*assumption'),\n", " (4,\n", - " '0.030*scheme + 0.024*group + 0.021*noise + 0.021*representation + 0.019*capacity + 0.019*probability + 0.018*matrix + 0.016*log + 0.015*recognition + 0.015*recall'),\n", + " '0.023*dynamic + 0.019*phase + 0.017*respect + 0.016*cell + 0.016*path + 0.016*variable + 0.013*noise + 0.013*with_respect + 0.013*design + 0.011*limit'),\n", " (5,\n", - " '0.036*image + 0.026*flow + 0.021*associated + 0.021*analog + 0.018*approximation + 0.018*original + 0.018*degree + 0.016*moving + 0.015*mapping + 0.015*view'),\n", + " '0.123*processor + 0.067*activation + 0.064*edge + 0.046*update + 0.026*operation + 0.025*implementation + 0.020*store + 0.019*control + 0.018*weighted + 0.018*current'),\n", " (6,\n", - " '0.028*dynamic + 0.020*feedback + 0.020*phase + 0.015*variable + 0.015*path + 
0.015*cell + 0.014*group + 0.013*hopfield + 0.013*noise + 0.012*useful'),\n", + " '0.075*map + 0.069*brain + 0.034*functional + 0.032*field + 0.027*stimulus + 0.024*series + 0.017*left + 0.017*activity + 0.017*subject + 0.016*location'),\n", " (7,\n", - " '0.055*cell + 0.045*potential + 0.040*connectivity + 0.032*artificial + 0.029*synaptic + 0.026*computational + 0.025*architecture + 0.018*fact_that + 0.017*though + 0.016*biological'),\n", + " '0.052*cell + 0.049*potential + 0.041*connectivity + 0.030*synaptic + 0.028*artificial + 0.026*architecture + 0.016*computational + 0.016*temporal + 0.015*action + 0.015*capability'),\n", " (8,\n", - " '0.064*node + 0.040*propagation + 0.038*back + 0.036*back_propagation + 0.022*flow + 0.019*target + 0.015*energy + 0.014*course + 0.014*hidden + 0.013*analog'),\n", + " '0.084*image + 0.029*log + 0.025*dimensional + 0.023*recall + 0.021*matrix + 0.021*noise + 0.018*associative_memory + 0.017*recognition + 0.016*vision + 0.015*scale'),\n", " (9,\n", - " '0.097*image + 0.031*stimulus + 0.025*dimensional + 0.024*vision + 0.021*center + 0.017*visual + 0.015*scale + 0.015*phase + 0.014*recall + 0.012*non_linear')]" + " '0.053*scheme + 0.042*capacity + 0.037*representation + 0.034*probability + 0.030*stored + 0.027*represented + 0.026*binary + 0.025*code + 0.020*relationship + 0.018*bound')]" ] }, - "execution_count": 28, + "execution_count": 199, "metadata": {}, "output_type": "execute_result" } diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 3886fd93d3..8b8f111edd 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -41,13 +41,12 @@ class AtVb(LdaModel): """ # TODO: inherit interfaces.TransformationABC. Probably not necessary if I'm inheriting LdaModel. 
- def __init__(self, corpus=None, num_topics=100, id2word=None, + def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author2doc=None, doc2author=None, threshold=0.001, iterations=10, alpha=None, eta=None, eval_every=1, random_state=None): - # TODO: require only author2doc OR doc2author, and construct the missing one automatically. - + # TODO: allow for asymmetric priors. if alpha is None: alpha = 1.0 / num_topics if eta is None: @@ -57,6 +56,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + # NOTE: this stuff is confusing to me (from LDA code). Why would id2word not be none, but have length 0? if self.id2word is None: logger.warning("no word id mapping provided; initializing from corpus, assuming identity") self.id2word = utils.dict_from_corpus(corpus) @@ -71,20 +71,61 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, logger.info('Vocabulary consists of %d words.', self.num_terms) + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). + # If either doc2author or author2doc is missing, construct them from the other. + if doc2author is None: + # Make a mapping from document IDs to author IDs. + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + elif author2doc is None: + # Make a mapping from author IDs to document IDs. + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. 
+ author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + + self.author2doc = author2doc + self.doc2author = doc2author + + self.num_authors = len(self.author2doc) + logger.info('Number of authors: %d.', self.num_authors) + + # TODO: finish this. Initialize id2author with integer "names" if actual names are not provided. + self.id2author = id2author + if self.id2author is None: + logger.warning("no author id mapping provided; initializing from corpus, assuming identity") + author_integer_ids = [str(i) for i in range(len(author2doc))] + self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) + self.corpus = corpus self.iterations = iterations self.num_topics = num_topics self.threshold = threshold self.alpha = alpha self.eta = eta - self.author2doc = author2doc - self.doc2author = doc2author self.num_docs = len(corpus) self.num_authors = len(author2doc) self.eval_every = eval_every self.random_state = random_state - logger.info('Number of authors: %d.', self.num_authors) # TODO: find a way out of this nonsense. self.authorid2idx = dict(zip(list(author2doc.keys()), xrange(self.num_authors))) From a562fca5232c96fcad1060953bcf133741dfd419 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 12 Oct 2016 16:11:33 +0200 Subject: [PATCH 015/100] Cleaned the code up a bit. Added a simple method to get author topics. 
--- docs/notebooks/at_with_nips.ipynb | 151 +++++++++++++++++++++--------- gensim/models/atvb.py | 49 +++++----- 2 files changed, 135 insertions(+), 65 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index e195373d66..ed95314cbb 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -57,6 +57,7 @@ "import gensim\n", "from gensim.models import Phrases\n", "from gensim.corpora import Dictionary\n", + "from gensim.models import LdaModel\n", "from imp import reload\n", "\n", "import logging\n", @@ -95,7 +96,7 @@ }, { "cell_type": "code", - "execution_count": 171, + "execution_count": 14, "metadata": { "collapsed": false }, @@ -131,7 +132,7 @@ }, { "cell_type": "code", - "execution_count": 172, + "execution_count": 15, "metadata": { "collapsed": false }, @@ -156,7 +157,7 @@ }, { "cell_type": "code", - "execution_count": 173, + "execution_count": 16, "metadata": { "collapsed": true }, @@ -168,7 +169,7 @@ }, { "cell_type": "code", - "execution_count": 174, + "execution_count": 17, "metadata": { "collapsed": false }, @@ -186,16 +187,16 @@ }, { "cell_type": "code", - "execution_count": 175, + "execution_count": 18, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ "# Make a mapping from document IDs to author IDs.\n", "# Same as in the atvb code.\n", "doc2author = {}\n", - "for d, _ in enumerate(corpus):\n", + "for d, _ in enumerate(docs):\n", " author_ids = []\n", " for a, a_doc_ids in author2doc.items():\n", " if d in a_doc_ids:\n", @@ -212,7 +213,7 @@ }, { "cell_type": "code", - "execution_count": 176, + "execution_count": 19, "metadata": { "collapsed": false }, @@ -235,7 +236,7 @@ }, { "cell_type": "code", - "execution_count": 177, + "execution_count": 20, "metadata": { "collapsed": true }, @@ -250,7 +251,7 @@ }, { "cell_type": "code", - "execution_count": 178, + "execution_count": 21, "metadata": { "collapsed": true }, @@ -269,7 +270,7 @@ }, { 
"cell_type": "code", - "execution_count": 179, + "execution_count": 22, "metadata": { "collapsed": true }, @@ -281,7 +282,7 @@ }, { "cell_type": "code", - "execution_count": 180, + "execution_count": 23, "metadata": { "collapsed": false }, @@ -290,7 +291,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n
0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejY
i69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2k
YR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb8
3AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S
08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpn
Yd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064Htqh
fHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FA
RFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS
+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD
8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77
X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -316,7 +317,7 @@ }, { "cell_type": "code", - "execution_count": 181, + "execution_count": 24, "metadata": { "collapsed": true }, @@ -334,7 +335,7 @@ }, { "cell_type": "code", - "execution_count": 182, + "execution_count": 25, "metadata": { "collapsed": true }, @@ -348,7 +349,7 @@ }, { "cell_type": "code", - "execution_count": 183, + "execution_count": 26, "metadata": { "collapsed": false }, @@ -376,7 +377,7 @@ }, { "cell_type": "code", - 
"execution_count": null, + "execution_count": 27, "metadata": { "collapsed": true }, @@ -390,7 +391,7 @@ }, { "cell_type": "code", - "execution_count": 185, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -408,14 +409,13 @@ }, { "cell_type": "code", - "execution_count": 186, + "execution_count": 29, "metadata": { "collapsed": false }, "outputs": [], "source": [ "authors_ids = set()\n", - "author_id_dict\n", "for d, a_doc_ids in small_doc2author.items():\n", " for a in a_doc_ids:\n", " authors_ids.add(a)\n", @@ -426,7 +426,7 @@ }, { "cell_type": "code", - "execution_count": 187, + "execution_count": 30, "metadata": { "collapsed": false }, @@ -439,24 +439,24 @@ }, { "cell_type": "code", - "execution_count": 188, + "execution_count": 31, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ "# Make a mapping from author IDs to document IDs.\n", - "author2doc = {}\n", + "small_author2doc = {}\n", "for a in range(len(author_id_dict)):\n", - " author2doc[a] = []\n", + " small_author2doc[a] = []\n", " for d, a_ids in small_doc2author.items():\n", " if a in a_ids:\n", - " author2doc[a].append(d)" + " small_author2doc[a].append(d)" ] }, { "cell_type": "code", - "execution_count": 194, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -472,7 +472,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 33, "metadata": { "collapsed": false }, @@ -495,7 +495,7 @@ }, { "cell_type": "code", - "execution_count": 197, + "execution_count": 42, "metadata": { "collapsed": false }, @@ -507,7 +507,7 @@ }, { "cell_type": "code", - "execution_count": 198, + "execution_count": 43, "metadata": { "collapsed": false }, @@ -521,7 +521,7 @@ }, { "cell_type": "code", - "execution_count": 199, + "execution_count": 44, "metadata": { "collapsed": false }, @@ -530,28 +530,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.080*group + 0.032*feedback + 0.030*whose + 0.030*matrix + 0.020*obtain + 0.017*computational + 
0.016*constraint + 0.015*scheme + 0.015*hence + 0.015*expression'),\n", + " '0.064*group + 0.032*whose + 0.028*matrix + 0.023*obtain + 0.019*scheme + 0.016*expression + 0.014*computational + 0.014*every + 0.013*more_than + 0.013*become'),\n", " (1,\n", - " '0.037*activation + 0.030*propagation + 0.029*back_propagation + 0.027*back + 0.024*node + 0.023*column + 0.023*address + 0.021*processor + 0.019*edge + 0.017*update'),\n", + " '0.053*node + 0.039*propagation + 0.036*back_propagation + 0.035*back + 0.017*target + 0.016*complexity + 0.016*requires + 0.015*probability + 0.013*supported + 0.013*forward'),\n", " (2,\n", - " '0.048*flow + 0.043*analog + 0.038*circuit + 0.037*field + 0.030*node + 0.026*energy + 0.024*location + 0.019*edge + 0.018*current + 0.018*square'),\n", + " '0.042*content + 0.042*cycle + 0.025*choice + 0.024*selected + 0.020*relation + 0.019*symmetric + 0.019*linearly + 0.015*include + 0.014*requires + 0.014*described_above'),\n", " (3,\n", - " '0.037*gradient + 0.026*technique + 0.024*optimal + 0.022*constraint + 0.021*image + 0.018*whether + 0.016*surface + 0.015*right + 0.015*visual + 0.014*assumption'),\n", + " '0.062*brain + 0.058*map + 0.049*field + 0.025*location + 0.023*functional + 0.020*left + 0.018*series + 0.017*spatial + 0.017*visual + 0.016*potential'),\n", " (4,\n", - " '0.023*dynamic + 0.019*phase + 0.017*respect + 0.016*cell + 0.016*path + 0.016*variable + 0.013*noise + 0.013*with_respect + 0.013*design + 0.011*limit'),\n", + " '0.025*dynamic + 0.020*variable + 0.019*phase + 0.018*feedback + 0.017*cell + 0.017*path + 0.016*with_respect + 0.013*energy + 0.013*design + 0.013*respect'),\n", " (5,\n", - " '0.123*processor + 0.067*activation + 0.064*edge + 0.046*update + 0.026*operation + 0.025*implementation + 0.020*store + 0.019*control + 0.018*weighted + 0.018*current'),\n", + " '0.104*processor + 0.072*activation + 0.068*edge + 0.048*update + 0.026*operation + 0.020*current + 0.019*machine + 0.019*control + 0.018*implementation + 
0.018*store'),\n", " (6,\n", - " '0.075*map + 0.069*brain + 0.034*functional + 0.032*field + 0.027*stimulus + 0.024*series + 0.017*left + 0.017*activity + 0.017*subject + 0.016*location'),\n", + " '0.031*recall + 0.029*noise + 0.028*image + 0.026*stimulus + 0.023*associative_memory + 0.022*scale + 0.019*recognition + 0.016*associated + 0.014*log + 0.014*phase'),\n", " (7,\n", - " '0.052*cell + 0.049*potential + 0.041*connectivity + 0.030*synaptic + 0.028*artificial + 0.026*architecture + 0.016*computational + 0.016*temporal + 0.015*action + 0.015*capability'),\n", + " '0.045*cell + 0.038*potential + 0.035*connectivity + 0.027*synaptic + 0.026*artificial + 0.023*architecture + 0.015*computational + 0.013*action + 0.013*capability + 0.013*technique'),\n", " (8,\n", - " '0.084*image + 0.029*log + 0.025*dimensional + 0.023*recall + 0.021*matrix + 0.021*noise + 0.018*associative_memory + 0.017*recognition + 0.016*vision + 0.015*scale'),\n", + " '0.091*image + 0.025*log + 0.022*matrix + 0.021*dimensional + 0.017*vision + 0.015*training + 0.015*gradient + 0.014*component + 0.014*square + 0.012*mapping'),\n", " (9,\n", - " '0.053*scheme + 0.042*capacity + 0.037*representation + 0.034*probability + 0.030*stored + 0.027*represented + 0.026*binary + 0.025*code + 0.020*relationship + 0.018*bound')]" + " '0.052*scheme + 0.041*capacity + 0.030*representation + 0.030*stored + 0.029*probability + 0.027*code + 0.027*binary + 0.026*represented + 0.018*bound + 0.018*feature')]" ] }, - "execution_count": 199, + "execution_count": 44, "metadata": {}, "output_type": "execute_result" } @@ -560,6 +560,31 @@ "model.show_topics()" ] }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0, 0.34230097594591424),\n", + " (4, 0.2487692783005907),\n", + " (6, 0.19935367234756304),\n", + " (8, 0.20805744284415623)]" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" 
+ } + ], + "source": [ + "model.get_author_topics(0)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -593,14 +618,52 @@ " eval_every=1, random_state=0)" ] }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "## LDA" + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 51, "metadata": { "collapsed": true }, "outputs": [], - "source": [] + "source": [ + "lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token)" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0, 0.14679045510589872),\n", + " (2, 0.024722847345847499),\n", + " (3, 0.036692535207794273),\n", + " (4, 0.10874558108160597),\n", + " (6, 0.29675634369596471),\n", + " (9, 0.38555538612902118)]" + ] + }, + "execution_count": 59, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lda[corpus[0]]" + ] } ], "metadata": { diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 8b8f111edd..b9cf1b8c9a 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -43,7 +43,7 @@ class AtVb(LdaModel): def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author2doc=None, doc2author=None, threshold=0.001, - iterations=10, alpha=None, eta=None, + iterations=10, alpha=None, eta=None, minimum_probability=0.01, eval_every=1, random_state=None): # TODO: allow for asymmetric priors. 
@@ -119,6 +119,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.iterations = iterations self.num_topics = num_topics self.threshold = threshold + self.minimum_probability = minimum_probability self.alpha = alpha self.eta = eta self.num_docs = len(corpus) @@ -126,11 +127,6 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.eval_every = eval_every self.random_state = random_state - - # TODO: find a way out of this nonsense. - self.authorid2idx = dict(zip(list(author2doc.keys()), xrange(self.num_authors))) - self.authoridx2id = dict(zip(xrange(self.num_authors), list(author2doc.keys()))) - self.random_state = get_random_state(random_state) if corpus is not None: @@ -159,8 +155,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. for v in ids: authors_d = doc2author[d] # List of author IDs for document d. - for aid in authors_d: - a = self.authorid2idx[aid] + for a in authors_d: # Draw mu from gamma distribution. # var_mu[(d, v, a)] = self.random_state.gamma(100., 1. / 100., (1,))[0] var_mu[(d, v, a)] = 1 / len(authors_d) @@ -202,8 +197,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for k in xrange(self.num_topics): # Average Elogtheta over authors a in document d. avgElogtheta = 0.0 - for aid in authors_d: - a = self.authorid2idx[aid] + for a in authors_d: avgElogtheta += var_mu[(d, v, a)] * Elogtheta[a, k] expavgElogtheta = numpy.exp(avgElogtheta) @@ -222,8 +216,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # author_prior_prob = 1. / len(authors_d) for v in ids: mu_sum = 0.0 - for aid in authors_d: - a = self.authorid2idx[aid] + for a in authors_d: # Average Elogtheta over topics k. 
avgElogtheta = 0.0 for k in xrange(self.num_topics): @@ -237,15 +230,13 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): mu_sum += var_mu[(d, v, a)] mu_norm_const = 1.0 / mu_sum - for aid in authors_d: - a = self.authorid2idx[aid] + for a in authors_d: var_mu[(d, v, a)] *= mu_norm_const # Update gamma. for a in xrange(self.num_authors): for k in xrange(self.num_topics): - aid = self.authoridx2id[a] - docs_a = self.author2doc[aid] + docs_a = self.author2doc[a] var_gamma[a, k] = 0.0 var_gamma[a, k] += self.alpha for d in docs_a: @@ -279,13 +270,14 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - logger.info('All variables updated.') # Print topics: self.var_lambda = var_lambda #pprint(self.show_topics()) + self.var_gamma = var_gamma + # Evaluate bound. if (iteration + 1) % self.eval_every == 0: prev_bound = bound @@ -328,8 +320,7 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): for vi, v in enumerate(ids): bound_v = 0.0 for k in xrange(self.num_topics): - for aid in authors_d: - a = self.authorid2idx[aid] + for a in authors_d: bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) bound_d += cts[vi] * numpy.log(bound_v) bound += numpy.log(1.0 / len(authors_d)) + bound_d @@ -402,8 +393,7 @@ def log_word_prob(self, var_gamma, var_lambda, doc_ids=None): for vi, v in enumerate(ids): log_word_prob_v = 0.0 for k in xrange(self.num_topics): - for aid in authors_d: - a = self.authorid2idx[aid] + for a in authors_d: log_word_prob_v += norm_gamma[a, k] * norm_lambda[k, v] log_word_prob_d += cts[vi] * numpy.log(log_word_prob_v) log_word_prob += numpy.log(1.0 / len(authors_d)) + log_word_prob_d @@ -425,6 +415,23 @@ def get_topic_terms(self, topicid, topn=10): return [(id, topic[id]) for id in bestn] + def get_author_topics(self, author_id, minimum_probability=None): + """ + Return topic distribution the given author, as a list of + (topic_id, 
topic_probability) 2-tuples. + Ignore topics with very low probability (below `minimum_probability`). + """ + if minimum_probability is None: + minimum_probability = self.minimum_probability + minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output + + topic_dist = self.var_gamma[author_id, :] / sum(self.var_gamma[author_id, :]) + + author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) + if topicvalue >= minimum_probability] + + return author_topics + From 0de43a59c603b3a3fa3c9de8ab2dc347b81f3782 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 12 Oct 2016 17:13:53 +0200 Subject: [PATCH 016/100] Removed some comments, mostly TODOs. --- gensim/models/atvb.py | 28 +++++----------------------- 1 file changed, 5 insertions(+), 23 deletions(-) diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index b9cf1b8c9a..58ba69fe12 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -39,7 +39,6 @@ class AtVb(LdaModel): """ Train the author-topic model using variational Bayes. """ - # TODO: inherit interfaces.TransformationABC. Probably not necessary if I'm inheriting LdaModel. def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author2doc=None, doc2author=None, threshold=0.001, @@ -108,7 +107,6 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.num_authors = len(self.author2doc) logger.info('Number of authors: %d.', self.num_authors) - # TODO: finish this. Initialize id2author with integer "names" if actual names are not provided. self.id2author = id2author if self.id2author is None: logger.warning("no author id mapping provided; initializing from corpus, assuming identity") @@ -149,7 +147,8 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # mu is 1/|A_d| if a is in A_d, zero otherwise. 
# var_mu is essentially a (self.num_docs, self.num_terms, self.num_authors) sparse matrix, # which we represent using a dictionary. - # TODO: consider initializing mu randomly. + # TODO: consider initializing mu randomly. i.e.: + # var_mu[(d, v, a)] = self.random_state.gamma(100., 1. / 100., (1,))[0] var_mu = dict() for d, doc in enumerate(corpus): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. @@ -157,17 +156,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): authors_d = doc2author[d] # List of author IDs for document d. for a in authors_d: # Draw mu from gamma distribution. - # var_mu[(d, v, a)] = self.random_state.gamma(100., 1. / 100., (1,))[0] var_mu[(d, v, a)] = 1 / len(authors_d) - # Normalize mu. - # mu_sum = 0.0 - # for aid_prime in authors_d: - # a_prime = self.authorid2idx[aid] - # mu_sum += var_mu[(d, v, a)] - - # for aid_prime in authors_d: - # a_prime = self.authorid2idx[aid] - # var_mu[(d, v, a)] *= 1 / mu_sum var_phi = numpy.zeros((self.num_docs, self.num_terms, self.num_topics)) @@ -205,7 +194,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # TODO: avoid computing phi if possible. var_phi[d, v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi. - #(log_var_phi_dv, _) = log_normalize(var_phi[d, v, :]) (log_var_phi_dv, _) = log_normalize(numpy.log(var_phi[d, v, :])) var_phi[d, v, :] = numpy.exp(log_var_phi_dv) @@ -213,7 +201,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for d, doc in enumerate(corpus): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. authors_d = doc2author[d] # List of author IDs for document d. - # author_prior_prob = 1. / len(authors_d) for v in ids: mu_sum = 0.0 for a in authors_d: @@ -225,7 +212,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Compute mu. # TODO: avoid computing mu if possible. 
- # var_mu[(d, v, a)] = author_prior_prob * expavgElogtheta var_mu[(d, v, a)] = expavgElogtheta mu_sum += var_mu[(d, v, a)] @@ -254,17 +240,11 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for k in xrange(self.num_topics): for v in xrange(self.num_terms): # TODO: highly unnecessary: - var_lambda[k, v] = 0.0 - var_lambda[k, v] += self.eta + var_lambda[k, v] = self.eta for d, doc in enumerate(corpus): # Get the count of v in doc. If v is not in doc, return 0. cnt = dict(doc).get(v, 0) var_lambda[k, v] += cnt * var_phi[d, v, k] - #ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - #cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - #for vi, v in enumerate(ids): - # # FIXME: I'm 90% sure this is wrong. - # var_lambda[k, v] += cts[vi] * var_phi[d, v, k] # Update Elogbeta, since lambda has been updated. Elogbeta = dirichlet_expectation(var_lambda) @@ -430,6 +410,8 @@ def get_author_topics(self, author_id, minimum_probability=None): author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) if topicvalue >= minimum_probability] + # author_name = self.id2author[author_id] + return author_topics From a892564ef997c1d99423671e09ca332185b6ce94 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Thu, 13 Oct 2016 16:58:18 +0200 Subject: [PATCH 017/100] Ran some very successful experiments on 286 documents. Offline algorithm works. Updated notebook. 
--- docs/notebooks/at_with_nips.ipynb | 309 +++++++++++++++++++++++++----- 1 file changed, 257 insertions(+), 52 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index ed95314cbb..11f0735c76 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 89, "metadata": { "collapsed": false }, @@ -59,6 +59,7 @@ "from gensim.corpora import Dictionary\n", "from gensim.models import LdaModel\n", "from imp import reload\n", + "from pprint import pprint\n", "\n", "import logging\n", "\n", @@ -80,7 +81,7 @@ "source": [ "# Configure logging.\n", "logger = logging.getLogger()\n", - "fhandler = logging.FileHandler(filename='../../../log_files/log.log', mode='a')\n", + "fhandler = logging.FileHandler(filename='../../../../log_files/log.log', mode='a')\n", "formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", "fhandler.setFormatter(formatter)\n", "logger.addHandler(fhandler)\n", @@ -96,7 +97,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 65, "metadata": { "collapsed": false }, @@ -106,11 +107,11 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "data_dir = '../../../../data/nipstxt/'\n", + "data_dir = '../../../nipstxt/'\n", "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00']\n", + "yrs = ['00', '01', '02']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -132,7 +133,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 66, "metadata": { "collapsed": false }, @@ -157,19 +158,22 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 67, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Make a mapping from 
author ID to author name.\n", - "id2author = dict(zip(range(len(authors_names)), authors_names))" + "id2author = dict(zip(range(len(authors_names)), authors_names))\n", + "\n", + "# Also the reverse mapping.\n", + "author2id = dict(zip(authors_names, range(len(authors_names))))" ] }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 68, "metadata": { "collapsed": false }, @@ -187,7 +191,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 69, "metadata": { "collapsed": false }, @@ -213,7 +217,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 70, "metadata": { "collapsed": false }, @@ -236,7 +240,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 71, "metadata": { "collapsed": true }, @@ -251,7 +255,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 72, "metadata": { "collapsed": true }, @@ -270,7 +274,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 73, "metadata": { "collapsed": true }, @@ -282,16 +286,16 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 74, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+
eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g
/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sC
Ddbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaS
RD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQ
L9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELSh
qTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVI
OgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs
5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMy
sST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp
5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVl
LchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv5nU63Bp6dw+1N0+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0vH+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn
8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EAe01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9FxKxc3z8lPUz6+fhFKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AApyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+
w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwKb\nSnpz6Rh7YlpE3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSP
irREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnSNPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8C9oqI4qyQdtJt8ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpN
mZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX1FwBJw0hjfc7O8XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4
C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgAElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKRGt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBw
L356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNgDuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzMzOz1anWMUEASFpb0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx
6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5mS3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJzleAp4DDImJqIeayfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZfOYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qv
pnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpTC5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJS4nJCIeYxSfuQ\nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvAkfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+
TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0mQmZlZE1qyBNZeu9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9QfVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmb
WZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDLwGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3AXZXvfgiY3+q4jCBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAtzqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3Qt
MBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4Lx8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eapPOrdZJ0rqRnJfVJ6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacBtle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR
0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -317,7 +321,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 75, "metadata": { "collapsed": true }, @@ -335,7 +339,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 76, "metadata": { "collapsed": true }, @@ -349,7 +353,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 77, "metadata": { "collapsed": false }, @@ -358,12 +362,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of unique tokens: 681\n", - "Number of documents: 90\n" + "Number of authors: 578\n", + "Number of unique tokens: 2245\n", + "Number of documents: 286\n" ] } ], "source": [ + "print('Number of authors: %d' % len(author2doc))\n", "print('Number of unique tokens: %d' % len(dictionary))\n", "print('Number of documents: %d' % len(corpus))" ] @@ -377,21 +383,179 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 78, "metadata": { - "collapsed": true + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "phi is 286 x 2245 x 10 (6420700 elements)\n", + "mu is 286 x 2245 x 578 (371116460 elements)\n" + ] + } + ], + "source": [ + 
"print('phi is %d x %d x %d (%d elements)' %(len(corpus), len(dictionary.id2token), 10,\n", + " len(corpus) * len(dictionary.id2token) * 10))\n", + "print('mu is %d x %d x %d (%d elements)' %(len(corpus), len(dictionary.id2token), len(author2doc),\n", + " len(corpus) * len(dictionary.id2token) * len(author2doc)))" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(atvb)\n", + "AtVb = atvb.AtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 91, + "metadata": { + "collapsed": false }, "outputs": [], "source": [ - "#model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author,\n", - "# author2doc=author2doc, doc2author=doc2author, threshold=1e-12,\n", - "# iterations=10, alpha=None, eta=None,\n", - "# eval_every=1, random_state=1)" + "model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author,\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-12,\n", + " iterations=40, alpha=None, eta=None,\n", + " eval_every=1, random_state=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 92, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.046*image + 0.022*object + 0.014*visual + 0.013*field + 0.012*motion + 0.011*filter + 0.011*velocity + 0.009*pixel + 0.009*line + 0.008*noise'),\n", + " (1,\n", + " '0.023*fig + 0.023*cell + 0.012*delay + 0.011*cortex + 0.010*noise + 0.009*eye + 0.009*phase + 0.008*activity + 0.008*cortical + 0.008*oscillation'),\n", + " (2,\n", + " '0.020*map + 0.015*field + 0.013*cell + 0.013*region + 0.012*human + 0.012*receptive + 0.011*receptive_field + 0.009*response + 0.008*chain + 0.008*orientation'),\n", + " (3,\n", + " '0.023*vector + 0.019*matrix + 0.015*hopfield + 0.009*probability + 0.008*let + 0.008*code + 0.008*optimization + 0.008*convergence + 0.008*theorem + 0.007*minimum'),\n", + " (4,\n", + 
" '0.027*memory + 0.015*vector + 0.013*activation + 0.010*bit + 0.009*processor + 0.008*associative + 0.008*capacity + 0.008*hidden + 0.007*machine + 0.006*address'),\n", + " (5,\n", + " '0.012*hidden + 0.010*energy + 0.009*gradient + 0.007*probability + 0.007*hidden_unit + 0.006*learning_algorithm + 0.006*adaptive + 0.006*forward + 0.006*procedure + 0.006*recurrent'),\n", + " (6,\n", + " '0.019*classifier + 0.019*recognition + 0.019*hidden + 0.018*speech + 0.011*classification + 0.011*trained + 0.009*test + 0.009*class + 0.008*hidden_layer + 0.008*propagation'),\n", + " (7,\n", + " '0.031*node + 0.007*surface + 0.006*sample + 0.006*distribution + 0.005*noise + 0.005*scale + 0.005*back_propagation + 0.005*dimensional + 0.005*propagation + 0.005*neural_net'),\n", + " (8,\n", + " '0.034*circuit + 0.023*chip + 0.021*analog + 0.018*voltage + 0.017*current + 0.014*synapse + 0.010*vlsi + 0.010*transistor + 0.010*threshold + 0.009*implementation'),\n", + " (9,\n", + " '0.029*cell + 0.019*firing + 0.015*activity + 0.014*response + 0.013*stimulus + 0.013*potential + 0.012*synaptic + 0.012*spike + 0.010*frequency + 0.009*motor')]" + ] + }, + "execution_count": 92, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 118, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [21]\n", + "[(0, 0.090225715808980797),\n", + " (1, 0.014047723409152875),\n", + " (3, 0.38971799227229242),\n", + " (4, 0.30695125800680684),\n", + " (5, 0.11680215128570454),\n", + " (7, 0.012641840087616362),\n", + " (8, 0.069095036605336377)]\n", + "\n", + "Geoffrey E. 
Hinton\n", + "Docs: [276, 235, 270]\n", + "[(0, 0.17326190127690461),\n", + " (2, 0.062709625689712375),\n", + " (3, 0.023215349136065065),\n", + " (4, 0.096803072840719678),\n", + " (5, 0.1267901905748583),\n", + " (6, 0.47635551675437715),\n", + " (7, 0.025581291656655011),\n", + " (9, 0.013530262666658776)]\n", + "\n", + "Michael I. Jordan\n", + "Docs: [205]\n", + "[(0, 0.22189029162114421),\n", + " (2, 0.033072831647105602),\n", + " (4, 0.051509519512663651),\n", + " (5, 0.63361728214218349),\n", + " (7, 0.045992411979857574),\n", + " (9, 0.012757930948596466)]\n", + "\n", + "James M. Bower\n", + "Docs: [188, 251, 244]\n", + "[(1, 0.29194178492747924),\n", + " (2, 0.47740737076112999),\n", + " (3, 0.023636461735819542),\n", + " (4, 0.010413505064807139),\n", + " (7, 0.018554608959817139),\n", + " (9, 0.17063597622983562)]\n" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. 
Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Test on a small dataset" ] }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 44, "metadata": { "collapsed": false }, @@ -409,7 +573,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 45, "metadata": { "collapsed": false }, @@ -426,7 +590,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 46, "metadata": { "collapsed": false }, @@ -439,7 +603,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 47, "metadata": { "collapsed": false }, @@ -456,7 +620,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 48, "metadata": { "collapsed": false }, @@ -472,7 +636,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 49, "metadata": { "collapsed": false }, @@ -482,7 +646,7 @@ "output_type": "stream", "text": [ "phi is 10 x 681 x 10 (68100 elements)\n", - "mu is 10 x 681 x 22 (149820 elements)\n" + "mu is 10 x 681 x 21 (143010 elements)\n" ] } ], @@ -495,7 +659,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 50, "metadata": { "collapsed": false }, @@ -507,7 +671,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 51, "metadata": { "collapsed": false }, @@ -521,7 +685,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 52, "metadata": { "collapsed": false }, @@ -530,28 +694,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.064*group + 0.032*whose + 0.028*matrix + 0.023*obtain + 0.019*scheme + 0.016*expression + 0.014*computational + 0.014*every + 0.013*more_than + 0.013*become'),\n", + " '0.045*hopfield + 0.042*matrix + 0.025*gradient + 0.022*classification + 0.022*descent + 0.019*minimum + 0.018*training + 0.014*positive + 0.014*indicated 
+ 0.014*called'),\n", " (1,\n", - " '0.053*node + 0.039*propagation + 0.036*back_propagation + 0.035*back + 0.017*target + 0.016*complexity + 0.016*requires + 0.015*probability + 0.013*supported + 0.013*forward'),\n", + " '0.063*cell + 0.037*activity + 0.026*region + 0.016*interaction + 0.016*connectivity + 0.014*along + 0.014*synaptic + 0.014*principle + 0.013*spatial + 0.013*robust'),\n", " (2,\n", - " '0.042*content + 0.042*cycle + 0.025*choice + 0.024*selected + 0.020*relation + 0.019*symmetric + 0.019*linearly + 0.015*include + 0.014*requires + 0.014*described_above'),\n", + " '0.056*principle + 0.033*region + 0.031*noise + 0.029*position + 0.024*center + 0.020*dimensional + 0.018*mapping + 0.018*map + 0.016*previous + 0.016*coordinate'),\n", " (3,\n", - " '0.062*brain + 0.058*map + 0.049*field + 0.025*location + 0.023*functional + 0.020*left + 0.018*series + 0.017*spatial + 0.017*visual + 0.016*potential'),\n", + " '0.049*scheme + 0.037*capacity + 0.027*probability + 0.025*stored + 0.025*binary + 0.024*representation + 0.017*feature + 0.016*represented + 0.016*code + 0.016*bound'),\n", " (4,\n", - " '0.025*dynamic + 0.020*variable + 0.019*phase + 0.018*feedback + 0.017*cell + 0.017*path + 0.016*with_respect + 0.013*energy + 0.013*design + 0.013*respect'),\n", + " '0.063*control + 0.033*brain + 0.026*action + 0.024*situation + 0.024*goal + 0.018*associative_memory + 0.018*task + 0.017*iv + 0.017*basic + 0.016*higher'),\n", " (5,\n", - " '0.104*processor + 0.072*activation + 0.068*edge + 0.048*update + 0.026*operation + 0.020*current + 0.019*machine + 0.019*control + 0.018*implementation + 0.018*store'),\n", + " '0.059*architecture + 0.046*potential + 0.037*connectivity + 0.036*energy + 0.035*computational + 0.029*storage + 0.019*current + 0.017*artificial + 0.015*dynamic + 0.015*though'),\n", " (6,\n", - " '0.031*recall + 0.029*noise + 0.028*image + 0.026*stimulus + 0.023*associative_memory + 0.022*scale + 0.019*recognition + 0.016*associated + 0.014*log + 
0.014*phase'),\n", + " '0.052*dynamic + 0.042*training + 0.021*hidden + 0.021*noise + 0.019*propagation + 0.017*matrix + 0.015*hidden_unit + 0.015*context + 0.014*back + 0.014*architecture'),\n", " (7,\n", - " '0.045*cell + 0.038*potential + 0.035*connectivity + 0.027*synaptic + 0.026*artificial + 0.023*architecture + 0.015*computational + 0.013*action + 0.013*capability + 0.013*technique'),\n", + " '0.056*training + 0.030*human + 0.029*speed + 0.027*block + 0.026*he + 0.020*decision + 0.019*control + 0.018*distance + 0.017*distribution + 0.015*artificial'),\n", " (8,\n", - " '0.091*image + 0.025*log + 0.022*matrix + 0.021*dimensional + 0.017*vision + 0.015*training + 0.015*gradient + 0.014*component + 0.014*square + 0.012*mapping'),\n", + " '0.020*produced + 0.018*relative + 0.017*capability + 0.016*other_hand + 0.016*potential + 0.015*magnitude + 0.015*circuit + 0.015*cell + 0.014*consequence + 0.014*interconnected'),\n", " (9,\n", - " '0.052*scheme + 0.041*capacity + 0.030*representation + 0.030*stored + 0.029*probability + 0.027*code + 0.027*binary + 0.026*represented + 0.018*bound + 0.018*feature')]" + " '0.093*cell + 0.054*firing + 0.045*synaptic + 0.040*produce + 0.035*probability + 0.029*potential + 0.029*relation + 0.027*correlation + 0.023*produced + 0.023*connection_between')]" ] }, - "execution_count": 44, + "execution_count": 52, "metadata": {}, "output_type": "execute_result" } @@ -629,13 +793,54 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 96, "metadata": { "collapsed": true }, "outputs": [], "source": [ - "lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token)" + "lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10)" + ] + }, + { + "cell_type": "code", + "execution_count": 98, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.025*node + 0.015*processor + 0.012*constraint + 0.012*propagation + 0.010*activation + 
0.009*back_propagation + 0.009*classifier + 0.009*update + 0.007*hidden + 0.007*energy'),\n", + " (1,\n", + " '0.022*image + 0.019*classifier + 0.010*classification + 0.009*noise + 0.008*region + 0.007*gaussian + 0.007*class + 0.007*node + 0.006*decision + 0.006*vector'),\n", + " (2,\n", + " '0.028*hidden + 0.020*speech + 0.012*hidden_unit + 0.011*chain + 0.010*region + 0.009*hidden_layer + 0.008*human + 0.007*propagation + 0.007*orientation + 0.007*acoustic'),\n", + " (3,\n", + " '0.028*memory + 0.024*vector + 0.010*capacity + 0.010*bit + 0.010*associative + 0.008*code + 0.008*associative_memory + 0.007*stored + 0.006*matrix + 0.006*threshold'),\n", + " (4,\n", + " '0.023*circuit + 0.018*chip + 0.015*analog + 0.014*voltage + 0.013*current + 0.011*synapse + 0.008*pulse + 0.008*transistor + 0.007*vlsi + 0.007*synaptic'),\n", + " (5,\n", + " '0.009*node + 0.008*hidden + 0.007*activation + 0.005*generalization + 0.005*hidden_unit + 0.005*connectionist + 0.004*training_set + 0.004*trained + 0.004*word + 0.004*probability'),\n", + " (6,\n", + " '0.033*cell + 0.012*response + 0.011*stimulus + 0.011*firing + 0.008*synaptic + 0.008*activity + 0.008*potential + 0.007*spike + 0.006*fig + 0.006*cortex'),\n", + " (7,\n", + " '0.009*energy + 0.007*gradient + 0.007*field + 0.007*hopfield + 0.006*matrix + 0.006*minimum + 0.006*convergence + 0.006*hidden + 0.005*vector + 0.004*equilibrium'),\n", + " (8,\n", + " '0.019*recognition + 0.009*joint + 0.009*visual + 0.009*speech + 0.008*field + 0.007*speaker + 0.007*object + 0.007*motion + 0.005*aspect + 0.005*control'),\n", + " (9,\n", + " '0.016*map + 0.010*delay + 0.010*region + 0.008*cortex + 0.008*activity + 0.008*brain + 0.008*oscillation + 0.008*distribution + 0.007*phase + 0.007*cell')]" + ] + }, + "execution_count": 98, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lda.show_topics()" ] }, { @@ -682,7 +887,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - 
"version": "3.5.2" + "version": "3.4.3+" } }, "nbformat": 4, From 388a5e9654211a49bd8036cbca0184043c354fd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 14 Oct 2016 12:57:10 +0200 Subject: [PATCH 018/100] Changed the online algorithm according to all the changes that have happened to the offline lately. --- gensim/models/onlineatvb.py | 111 ++++++++++++++++++++++++++---------- 1 file changed, 80 insertions(+), 31 deletions(-) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index f6c78d85b8..d0ab583fe8 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -41,13 +41,12 @@ class OnlineAtVb(LdaModel): """ # TODO: inherit interfaces.TransformationABC. - def __init__(self, corpus=None, num_topics=100, id2word=None, - author2doc=None, doc2author=None, threshold=0.001, + def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, + author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, iterations=10, alpha=None, eta=None, decay=0.5, offset=1.0, eval_every=1, random_state=None): - # TODO: require only author2doc OR doc2author, and construct the missing one automatically. - + # TODO: allow for asymmetric priors. if alpha is None: alpha = 1.0 / num_topics if eta is None: @@ -57,6 +56,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + # NOTE: this stuff is confusing to me (from LDA code). Why would id2word not be none, but have length 0? 
if self.id2word is None: logger.warning("no word id mapping provided; initializing from corpus, assuming identity") self.id2word = utils.dict_from_corpus(corpus) @@ -71,40 +71,67 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, logger.info('Vocabulary consists of %d words.', self.num_terms) - if author2doc is None or doc2author is None: - raise ValueError('author2doc and doc2author must be supplied.') + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). + # If either doc2author or author2doc is missing, construct them from the other. + if doc2author is None: + # Make a mapping from document IDs to author IDs. + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + elif author2doc is None: + # Make a mapping from author IDs to document IDs. + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. 
+ author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + + self.author2doc = author2doc + self.doc2author = doc2author + + self.num_authors = len(self.author2doc) + logger.info('Number of authors: %d.', self.num_authors) + + self.id2author = id2author + if self.id2author is None: + logger.warning("no author id mapping provided; initializing from corpus, assuming identity") + author_integer_ids = [str(i) for i in range(len(author2doc))] + self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) self.corpus = corpus self.iterations = iterations self.num_topics = num_topics self.threshold = threshold + self.minimum_probability = minimum_probability self.alpha = alpha self.eta = eta self.decay = decay self.offset = offset - self.author2doc = author2doc - self.doc2author = doc2author + self.num_docs = len(corpus) self.num_authors = len(author2doc) self.eval_every = eval_every self.random_state = random_state - # Some of the methods in LdaModel are used in this class. - # I.e. composition is used instead of inheriting the LdaModel class. - self.ldamodel = LdaModel(id2word=self.id2word) - - logger.info('Number of authors: %d.', self.num_authors) - - # TODO: find a way out of this nonsense. - self.authorid2idx = dict(zip(list(author2doc.keys()), xrange(self.num_authors))) - self.authoridx2id = dict(zip(xrange(self.num_authors), list(author2doc.keys()))) - self.random_state = get_random_state(random_state) if corpus is not None: - (self.var_gamma, self.var_lambda) = self.inference(corpus, author2doc, doc2author) - else: - self.var_lambda = self.random_state.gamma(100., 1. 
/ 100., - (self.num_topics, self.num_terms)) + self.inference(corpus, author2doc, doc2author) def rho(self, iteration): return pow(self.offset + iteration, -self.decay) @@ -149,7 +176,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # NOTE: maybe not the best idea that mu changes shape every iteration. var_mu = numpy.zeros((self.num_terms, len(authors_d))) for v in ids: - for a in xrange(len(authors_d)): + for a in xrange(len(authors_d)): # TODO: not 100% sure this makes sense. var_mu[v, a] = 1 / len(authors_d) for iteration in xrange(self.iterations): @@ -172,6 +199,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi over k. + # TODO: replace log_normalization. Also in offline algo. (log_var_phi_v, _) = log_normalize(numpy.log(var_phi[v, :])) # NOTE: it might be possible to do this out of the v loop. var_phi[v, :] = numpy.exp(log_var_phi_v) @@ -192,6 +220,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): var_mu[v, a] = author_prior_prob * expavgElogtheta # Normalize mu. + # TODO: replace log_normalization. Also in offline algo. (log_var_mu_v, _) = log_normalize(numpy.log(var_mu[v, :])) var_mu[v, :] = numpy.exp(log_var_mu_v) @@ -201,8 +230,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): tilde_gamma[a, k] = 0.0 for vi, v in enumerate(ids): tilde_gamma[a, k] += cts[vi] * var_mu[v, a] * var_phi[v, k] - aid = self.authoridx2id[a] - tilde_gamma[a, k] *= len(author2doc[aid]) + tilde_gamma[a, k] *= len(author2doc[a]) tilde_gamma[a, k] += self.alpha # Update lambda. @@ -210,11 +238,13 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for k in xrange(self.num_topics): for vi, v in enumerate(ids): cnt = dict(doc).get(v, 0) + # TODO: I'm supposed to multiply the "sufficient statistic" by the size of the corpus. 
var_lambda[k, v] = self.eta + cnt * var_phi[v, k] #tilde_lambda[k, v] = self.eta + self.num_docs * cts[vi] * var_phi[v, k] + # TODO: probably use mean change, since that is used in LDA. # Check for convergence. - # Criterion is mean change in "local" gamma and lambda. + # Criterion is max change in "local" gamma and lambda. if iteration > 0: maxchange_gamma = numpy.max(abs(tilde_gamma - lastgamma)) maxchange_lambda = numpy.max(abs(tilde_lambda - lastlambda)) @@ -240,7 +270,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - # Print topics: # self.var_lambda = var_lambda # pprint(self.show_topics()) @@ -256,6 +285,9 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): logger.info('Converged documents: %d/%d', converged, d + 1) # End of corpus loop. + self.var_lambda = var_lambda + self.var_gamma = var_gamma + return var_gamma, var_lambda def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): @@ -285,8 +317,7 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): for vi, v in enumerate(ids): bound_v = 0.0 for k in xrange(self.num_topics): - for aid in authors_d: - a = self.authorid2idx[aid] + for a in authors_d: bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) bound_d += cts[vi] * numpy.log(bound_v) bound += numpy.log(1.0 / len(authors_d)) + bound_d @@ -359,8 +390,7 @@ def log_word_prob(self, var_gamma, var_lambda, doc_ids=None): for vi, v in enumerate(ids): log_word_prob_v = 0.0 for k in xrange(self.num_topics): - for aid in authors_d: - a = self.authorid2idx[aid] + for a in authors_d: log_word_prob_v += norm_gamma[a, k] * norm_lambda[k, v] log_word_prob_d += cts[vi] * numpy.log(log_word_prob_v) log_word_prob += numpy.log(1.0 / len(authors_d)) + log_word_prob_d @@ -381,6 +411,25 @@ def get_topic_terms(self, topicid, topn=10): bestn = matutils.argsort(topic, topn, reverse=True) return [(id, topic[id]) for id in bestn] + def 
get_author_topics(self, author_id, minimum_probability=None): + """ + Return topic distribution the given author, as a list of + (topic_id, topic_probability) 2-tuples. + Ignore topics with very low probability (below `minimum_probability`). + """ + if minimum_probability is None: + minimum_probability = self.minimum_probability + minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output + + topic_dist = self.var_gamma[author_id, :] / sum(self.var_gamma[author_id, :]) + + author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) + if topicvalue >= minimum_probability] + + # author_name = self.id2author[author_id] + + return author_topics + From 2b2a896080ad73b15b660150002ec5b112e01c1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 14 Oct 2016 13:06:06 +0200 Subject: [PATCH 019/100] Fixed mistake with mu variable. --- gensim/models/onlineatvb.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index d0ab583fe8..ae3d71f7f9 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -173,11 +173,10 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Initialize mu. # mu is 1/|A_d| if a is in A_d, zero otherwise. # NOTE: I could do random initialization instead. - # NOTE: maybe not the best idea that mu changes shape every iteration. - var_mu = numpy.zeros((self.num_terms, len(authors_d))) + var_mu = dict() for v in ids: - for a in xrange(len(authors_d)): # TODO: not 100% sure this makes sense. 
- var_mu[v, a] = 1 / len(authors_d) + for a in authors_d: + var_mu[(v, a)] = 1 / len(authors_d) for iteration in xrange(self.iterations): #logger.info('iteration %i', iteration) @@ -190,8 +189,8 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for k in xrange(self.num_topics): # Average Elogtheta over authors a in document d. avgElogtheta = 0.0 - for a in xrange(len(authors_d)): - avgElogtheta += var_mu[v, a] * Elogtheta[a, k] + for a in authors_d: + avgElogtheta += var_mu[(v, a)] * Elogtheta[a, k] expavgElogtheta = numpy.exp(avgElogtheta) # Compute phi. @@ -208,7 +207,8 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Prior probability of observing author a in document d is one # over the number of authors in document d. author_prior_prob = 1.0 / len(authors_d) - for a in xrange(len(authors_d)): + mu_sum = 0.0 + for a in authors_d: # Average Elogtheta over topics k. avgElogtheta = 0.0 for k in xrange(self.num_topics): @@ -217,19 +217,20 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Compute mu over a. # TODO: avoid computing mu if possible. - var_mu[v, a] = author_prior_prob * expavgElogtheta + var_mu[(v, a)] = author_prior_prob * expavgElogtheta + mu_sum += var_mu[(v, a)] # Normalize mu. - # TODO: replace log_normalization. Also in offline algo. - (log_var_mu_v, _) = log_normalize(numpy.log(var_mu[v, :])) - var_mu[v, :] = numpy.exp(log_var_mu_v) + mu_norm_const = 1.0 / mu_sum + for a in authors_d: + var_mu[(v, a)] *= mu_norm_const # Update gamma. 
- for a in xrange(len(authors_d)): + for a in authors_d: for k in xrange(self.num_topics): tilde_gamma[a, k] = 0.0 for vi, v in enumerate(ids): - tilde_gamma[a, k] += cts[vi] * var_mu[v, a] * var_phi[v, k] + tilde_gamma[a, k] += cts[vi] * var_mu[(v, a)] * var_phi[v, k] tilde_gamma[a, k] *= len(author2doc[a]) tilde_gamma[a, k] += self.alpha From 3756435f07993a8371c0b6c027e11762507a85d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 14 Oct 2016 13:16:41 +0200 Subject: [PATCH 020/100] Fixed lambda update, multiplication by size of corpus was missing. Removed author_prior_prob from mu update. --- gensim/models/onlineatvb.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index ae3d71f7f9..eb5e60cf22 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -153,7 +153,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): converged = 0 - # TODO: consider making phi and mu sparse. + # TODO: consider making phi sparse. Each document does not contain all terms. var_phi = numpy.zeros((self.num_terms, self.num_topics)) var_gamma = init_gamma.copy() @@ -172,7 +172,8 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Initialize mu. # mu is 1/|A_d| if a is in A_d, zero otherwise. - # NOTE: I could do random initialization instead. + # TODO: consider doing random initialization instead. + # TODO: consider making mu a sparse matrix instead of a dictionary. var_mu = dict() for v in ids: for a in authors_d: @@ -206,7 +207,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for v in ids: # Prior probability of observing author a in document d is one # over the number of authors in document d. - author_prior_prob = 1.0 / len(authors_d) mu_sum = 0.0 for a in authors_d: # Average Elogtheta over topics k. 
@@ -217,7 +217,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Compute mu over a. # TODO: avoid computing mu if possible. - var_mu[(v, a)] = author_prior_prob * expavgElogtheta + var_mu[(v, a)] = expavgElogtheta mu_sum += var_mu[(v, a)] # Normalize mu. @@ -239,9 +239,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for k in xrange(self.num_topics): for vi, v in enumerate(ids): cnt = dict(doc).get(v, 0) - # TODO: I'm supposed to multiply the "sufficient statistic" by the size of the corpus. - var_lambda[k, v] = self.eta + cnt * var_phi[v, k] - #tilde_lambda[k, v] = self.eta + self.num_docs * cts[vi] * var_phi[v, k] + var_lambda[k, v] = self.eta + self.num_docs * cnt * var_phi[v, k] # TODO: probably use mean change, since that is used in LDA. # Check for convergence. From ed3416de2466c897a9354da704712ab24b71a70b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 14 Oct 2016 15:30:27 +0200 Subject: [PATCH 021/100] Added a loop for passing over entire corpus. Discarded use of log_normalize. Various other changes. --- gensim/models/onlineatvb.py | 242 +++++++++++++++++++----------------- 1 file changed, 130 insertions(+), 112 deletions(-) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index eb5e60cf22..51cb0a9597 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -43,7 +43,7 @@ class OnlineAtVb(LdaModel): def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, - iterations=10, alpha=None, eta=None, decay=0.5, offset=1.0, + iterations=10, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, eval_every=1, random_state=None): # TODO: allow for asymmetric priors. 
@@ -114,8 +114,12 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author_integer_ids = [str(i) for i in range(len(author2doc))] self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) + # Make the reverse mapping, from author names to author IDs. + self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + self.corpus = corpus self.iterations = iterations + self.passes = passes self.num_topics = num_topics self.threshold = threshold self.minimum_probability = minimum_probability @@ -131,13 +135,14 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.random_state = get_random_state(random_state) if corpus is not None: - self.inference(corpus, author2doc, doc2author) + self.inference(corpus) def rho(self, iteration): return pow(self.offset + iteration, -self.decay) - def inference(self, corpus=None, author2doc=None, doc2author=None): + def inference(self, corpus=None): if corpus is None: + # TODO: I can't remember why I used "copy()" here. corpus = self.corpus.copy() self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. @@ -151,8 +156,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): init_lambda = self.random_state.gamma(100., 1. / 100., (self.num_topics, self.num_terms)) - converged = 0 - # TODO: consider making phi sparse. Each document does not contain all terms. var_phi = numpy.zeros((self.num_terms, self.num_topics)) @@ -165,124 +168,139 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogtheta = dirichlet_expectation(var_gamma) Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - for d, doc in enumerate(corpus): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - authors_d = doc2author[d] # List of author IDs for document d. - - # Initialize mu. 
- # mu is 1/|A_d| if a is in A_d, zero otherwise. - # TODO: consider doing random initialization instead. - # TODO: consider making mu a sparse matrix instead of a dictionary. - var_mu = dict() - for v in ids: - for a in authors_d: - var_mu[(v, a)] = 1 / len(authors_d) - for iteration in xrange(self.iterations): - #logger.info('iteration %i', iteration) - - lastgamma = tilde_gamma.copy() - lastlambda = tilde_lambda.copy() - - # Update phi. + # Evaluate bound. + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta, var_gamma) + beta_bound = self.beta_bound(Elogbeta, var_lambda) + bound = word_bound + theta_bound + beta_bound + #likelihood = self.log_word_prob(var_gamma, var_lambda) + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + for _ in xrange(self.passes): + converged = 0 # Number of documents converged for current pass over corpus. + prev_bound = bound + for d, doc in enumerate(corpus): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] # List of author IDs for document d. + + # Initialize mu. + # mu is 1/|A_d| if a is in A_d, zero otherwise. + # TODO: consider doing random initialization instead. + # TODO: consider making mu a sparse matrix instead of a dictionary. + var_mu = dict() for v in ids: - for k in xrange(self.num_topics): - # Average Elogtheta over authors a in document d. - avgElogtheta = 0.0 - for a in authors_d: - avgElogtheta += var_mu[(v, a)] * Elogtheta[a, k] - expavgElogtheta = numpy.exp(avgElogtheta) + for a in authors_d: + var_mu[(v, a)] = 1 / len(authors_d) - # Compute phi. - # TODO: avoid computing phi if possible. - var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) - # Normalize phi over k. 
- # TODO: replace log_normalization. Also in offline algo. - (log_var_phi_v, _) = log_normalize(numpy.log(var_phi[v, :])) # NOTE: it might be possible to do this out of the v loop. - var_phi[v, :] = numpy.exp(log_var_phi_v) + lastgamma = tilde_gamma.copy() + lastlambda = tilde_lambda.copy() - # Update mu. - for v in ids: - # Prior probability of observing author a in document d is one - # over the number of authors in document d. - mu_sum = 0.0 - for a in authors_d: - # Average Elogtheta over topics k. - avgElogtheta = 0.0 + # Update phi. + for v in ids: for k in xrange(self.num_topics): - avgElogtheta += var_phi[v, k] * Elogtheta[a, k] - expavgElogtheta = numpy.exp(avgElogtheta) - - # Compute mu over a. - # TODO: avoid computing mu if possible. - var_mu[(v, a)] = expavgElogtheta - mu_sum += var_mu[(v, a)] + # Average Elogtheta over authors a in document d. + avgElogtheta = 0.0 + for a in authors_d: + avgElogtheta += var_mu[(v, a)] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute phi. + # TODO: avoid computing phi if possible. + var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] + + # Normalize phi over k. + var_phi[v, :] = var_phi[v, :] / var_phi[v, :].sum() + + # Update mu. + for v in ids: + # Prior probability of observing author a in document d is one + # over the number of authors in document d. + mu_sum = 0.0 + for a in authors_d: + # Average Elogtheta over topics k. + avgElogtheta = 0.0 + for k in xrange(self.num_topics): + avgElogtheta += var_phi[v, k] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute mu over a. + # TODO: avoid computing mu if possible. + var_mu[(v, a)] = expavgElogtheta + mu_sum += var_mu[(v, a)] + + # Normalize mu. + mu_norm_const = 1.0 / mu_sum + for a in authors_d: + var_mu[(v, a)] *= mu_norm_const - # Normalize mu. - mu_norm_const = 1.0 / mu_sum + # Update gamma. for a in authors_d: - var_mu[(v, a)] *= mu_norm_const - - # Update gamma. 
- for a in authors_d: + for k in xrange(self.num_topics): + tilde_gamma[a, k] = 0.0 + for vi, v in enumerate(ids): + tilde_gamma[a, k] += cts[vi] * var_mu[(v, a)] * var_phi[v, k] + tilde_gamma[a, k] *= len(self.author2doc[a]) + tilde_gamma[a, k] += self.alpha + + # Update lambda. + #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T for k in xrange(self.num_topics): - tilde_gamma[a, k] = 0.0 for vi, v in enumerate(ids): - tilde_gamma[a, k] += cts[vi] * var_mu[(v, a)] * var_phi[v, k] - tilde_gamma[a, k] *= len(author2doc[a]) - tilde_gamma[a, k] += self.alpha + cnt = dict(doc).get(v, 0) + var_lambda[k, v] = self.eta + self.num_docs * cnt * var_phi[v, k] + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. + # TODO: consider using separate thresholds for lambda and gamma. + if iteration > 0: + meanchange_gamma = numpy.mean(abs(tilde_gamma - lastgamma)) + meanchange_lambda = numpy.mean(abs(tilde_lambda - lastlambda)) + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + # logger.info('Mean change in lambda: %.3e', meanchange_lambda) + if meanchange_gamma < self.threshold and meanchange_lambda < self.threshold: + logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + # Update gamma and lambda. + # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). Same goes for lambda. + rhot = self.rho(d) + var_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. + var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. 
+ Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + # Print topics: + # self.var_lambda = var_lambda + # pprint(self.show_topics()) + + # Evaluate bound. + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta, var_gamma) + beta_bound = self.beta_bound(Elogbeta, var_lambda) + bound = word_bound + theta_bound + beta_bound + #likelihood = self.log_word_prob(var_gamma, var_lambda) + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + # End of corpus loop. + + logger.info('Converged documents: %d/%d', converged, self.num_docs) + + # TODO: consider whether to include somthing like this: + #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold: + # break + # End of pass over corpus loop. - # Update lambda. - #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T - for k in xrange(self.num_topics): - for vi, v in enumerate(ids): - cnt = dict(doc).get(v, 0) - var_lambda[k, v] = self.eta + self.num_docs * cnt * var_phi[v, k] - - # TODO: probably use mean change, since that is used in LDA. - # Check for convergence. - # Criterion is max change in "local" gamma and lambda. - if iteration > 0: - maxchange_gamma = numpy.max(abs(tilde_gamma - lastgamma)) - maxchange_lambda = numpy.max(abs(tilde_lambda - lastlambda)) - # logger.info('Max change in gamma: %.3e', maxchange_gamma) - # logger.info('Max change in lambda: %.3e', maxchange_lambda) - if maxchange_gamma < self.threshold and maxchange_lambda < self.threshold: - logger.info('Converged after %d iterations.', iteration) - converged += 1 - break - # End of iterations loop. - - # Update gamma and lambda. - # Interpolation between document d's "local" gamma (tilde_gamma), - # and "global" gamma (var_gamma). Same goes for lambda. 
- rhot = self.rho(d) - var_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma - # Note that we only changed the elements in lambda corresponding to - # the words in document d, hence the [:, ids] indexing. - var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] - - # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. - Elogtheta = dirichlet_expectation(var_gamma) - Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) - - # Print topics: - # self.var_lambda = var_lambda - # pprint(self.show_topics()) - - # Evaluate bound. - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta, var_gamma) - beta_bound = self.beta_bound(Elogbeta, var_lambda) - bound = word_bound + theta_bound + beta_bound - #likelihood = self.log_word_prob(var_gamma, var_lambda) - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - - logger.info('Converged documents: %d/%d', converged, d + 1) - # End of corpus loop. self.var_lambda = var_lambda self.var_gamma = var_gamma From 994f212adbfab2ff22c796564b73566682313a5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 14 Oct 2016 16:22:18 +0200 Subject: [PATCH 022/100] Moved bound computation out of corpus-wide loop. 
--- gensim/models/onlineatvb.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 51cb0a9597..986b0a6020 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -262,7 +262,7 @@ def inference(self, corpus=None): # logger.info('Mean change in gamma: %.3e', meanchange_gamma) # logger.info('Mean change in lambda: %.3e', meanchange_lambda) if meanchange_gamma < self.threshold and meanchange_lambda < self.threshold: - logger.info('Converged after %d iterations.', iteration) + # logger.info('Converged after %d iterations.', iteration) converged += 1 break # End of iterations loop. @@ -285,15 +285,16 @@ def inference(self, corpus=None): # self.var_lambda = var_lambda # pprint(self.show_topics()) - # Evaluate bound. - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta, var_gamma) - beta_bound = self.beta_bound(Elogbeta, var_lambda) - bound = word_bound + theta_bound + beta_bound - #likelihood = self.log_word_prob(var_gamma, var_lambda) - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) # End of corpus loop. + # Evaluate bound. + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta, var_gamma) + beta_bound = self.beta_bound(Elogbeta, var_lambda) + bound = word_bound + theta_bound + beta_bound + #likelihood = self.log_word_prob(var_gamma, var_lambda) + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + logger.info('Converged documents: %d/%d', converged, self.num_docs) # TODO: consider whether to include somthing like this: From 956fbd5fbfd1a6de09bb3851b0a2b7170c5c6310 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 14 Oct 2016 16:22:47 +0200 Subject: [PATCH 023/100] Updated notebook. --- docs/notebooks/at_with_nips.ipynb | 510 +++++++++++++++++++----------- 1 file changed, 324 insertions(+), 186 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 11f0735c76..78d9879898 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 89, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -81,7 +81,7 @@ "source": [ "# Configure logging.\n", "logger = logging.getLogger()\n", - "fhandler = logging.FileHandler(filename='../../../../log_files/log.log', mode='a')\n", + "fhandler = logging.FileHandler(filename='../../../log_files/log.log', mode='a')\n", "formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", "fhandler.setFormatter(formatter)\n", "logger.addHandler(fhandler)\n", @@ -97,7 +97,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 13, "metadata": { "collapsed": false }, @@ -107,11 +107,11 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "data_dir = '../../../nipstxt/'\n", + "data_dir = '../../../../data/nipstxt/'\n", "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00', '01', '02']\n", + "yrs = ['00']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -133,7 +133,7 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": 14, "metadata": { "collapsed": false 
}, @@ -158,7 +158,7 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 15, "metadata": { "collapsed": true }, @@ -173,7 +173,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 16, "metadata": { "collapsed": false }, @@ -191,7 +191,7 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": 17, "metadata": { "collapsed": false }, @@ -217,7 +217,7 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": 18, "metadata": { "collapsed": false }, @@ -240,7 +240,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 19, "metadata": { "collapsed": true }, @@ -255,7 +255,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 20, "metadata": { "collapsed": true }, @@ -274,7 +274,7 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 21, "metadata": { "collapsed": true }, @@ -286,16 +286,16 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 22, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv5nU63Bp6dw+1N0+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0vH+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn
8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EAe01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9FxKxc3z8lPUz6+fhFKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AApyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+
w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwKb\nSnpz6Rh7YlpE3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSP
irREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnSNPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8C9oqI4qyQdtJt8ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpN
mZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX1FwBJw0hjfc7O8XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4
C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgAElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKRGt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBw
L356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNgDuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzMzOz1anWMUEASFpb0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx
6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5mS3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJzleAp4DDImJqIeayfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZfOYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qv
pnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpTC5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJS4nJCIeYxSfuQ\nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvAkfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+
TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0mQmZlZE1qyBNZeu9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9QfVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmb
WZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDLwGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3AXZXvfgiY3+q4jCBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAtzqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3Qt
MBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4Lx8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eapPOrdZJ0rqRnJfVJ6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacBtle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR
0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbg
DeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxt
TZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J
+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf
0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1
JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwA
M9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5
A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n
1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR
5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/U
sZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyv
eu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", 
"text/plain": [ - "" + "" ] }, "metadata": {}, @@ -321,7 +321,7 @@ }, { "cell_type": "code", - "execution_count": 75, + "execution_count": 23, "metadata": { "collapsed": true }, @@ -339,7 +339,7 @@ }, { "cell_type": "code", - "execution_count": 76, + "execution_count": 24, "metadata": { "collapsed": true }, @@ -353,7 +353,7 @@ }, { "cell_type": "code", - "execution_count": 77, + "execution_count": 25, "metadata": { "collapsed": false }, @@ -362,9 +362,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 578\n", - "Number of unique tokens: 2245\n", - "Number of documents: 286\n" + "Number of authors: 166\n", + "Number of unique tokens: 681\n", + "Number of documents: 90\n" ] } ], @@ -374,6 +374,247 @@ "print('Number of documents: %d' % len(corpus))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Online AT VB" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Test on a small dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "n_docs = 10\n", + "\n", + "from copy import deepcopy\n", + "\n", + "small_doc2author = deepcopy(dict(list(doc2author.items())[:n_docs]))\n", + "small_doc2author = dict(small_doc2author)\n", + "\n", + "small_corpus = corpus[:n_docs]" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "authors_ids = set()\n", + "for d, a_doc_ids in small_doc2author.items():\n", + " for a in a_doc_ids:\n", + " authors_ids.add(a)\n", + "\n", + "authors_ids = list(authors_ids)\n", + "author_id_dict = dict(zip(authors_ids, range(len(authors_ids))))" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "for d, a_ids in small_doc2author.items():\n", + " for i, a in enumerate(a_ids):\n", + " small_doc2author[d][i] = 
author_id_dict[a]" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Make a mapping from author IDs to document IDs.\n", + "small_author2doc = {}\n", + "for a in range(len(author_id_dict)):\n", + " small_author2doc[a] = []\n", + " for d, a_ids in small_doc2author.items():\n", + " if a in a_ids:\n", + " small_author2doc[a].append(d)" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "author_id_dict_rev = dict(zip(range(len(authors_ids)), authors_ids))\n", + "\n", + "small_id2author = {}\n", + "for a, a_id in author_id_dict_rev.items():\n", + " small_id2author[a] = id2author[a_id]" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "phi is 10 x 681 x 10 (68100 elements)\n", + "mu is 10 x 681 x 22 (149820 elements)\n" + ] + } + ], + "source": [ + "print('phi is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), 10,\n", + " len(small_corpus) * len(dictionary.id2token) * 10))\n", + "print('mu is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), len(small_author2doc),\n", + " len(small_corpus) * len(dictionary.id2token) * len(small_author2doc)))" + ] + }, + { + "cell_type": "code", + "execution_count": 165, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(onlineatvb)\n", + "OnlineAtVb = onlineatvb.OnlineAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 167, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 13.4 s, sys: 8 ms, total: 13.4 s\n", + "Wall time: 13.4 s\n" + ] + } + ], + "source": [ + "%time model = OnlineAtVb(corpus=small_corpus, num_topics=10, 
id2word=dictionary.id2token, id2author=small_id2author, \\\n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-3, \\\n", + " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=64.0, \\\n", + " eval_every=1, random_state=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 168, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.059*though + 0.052*subject + 0.033*estimate + 0.033*produced + 0.030*get + 0.027*assume_that + 0.027*technology + 0.026*actual + 0.026*lead + 0.020*taken'),\n", + " (1,\n", + " '0.066*node + 0.036*eq + 0.024*course + 0.024*goal + 0.023*comparison + 0.018*associated + 0.018*ma + 0.018*equivalent + 0.018*feature + 0.018*construct'),\n", + " (2,\n", + " '0.069*current + 0.069*synapsis + 0.057*inhibition + 0.046*cycle + 0.042*detailed + 0.038*circuit + 0.035*dynamic + 0.034*total + 0.034*combination + 0.027*rate'),\n", + " (3,\n", + " '0.144*connectivity + 0.073*image + 0.047*phase + 0.031*constraint + 0.026*vision + 0.026*spatial + 0.026*non_linear + 0.023*interaction + 0.021*biological + 0.021*characteristic'),\n", + " (4,\n", + " '0.088*associative_memory + 0.071*analog + 0.056*matrix + 0.041*edge + 0.030*variety + 0.020*assume + 0.020*required + 0.020*appropriate + 0.019*final + 0.015*implementation'),\n", + " (5,\n", + " '0.100*recall + 0.087*synaptic + 0.070*architecture + 0.065*scale + 0.057*flow + 0.032*circuit + 0.029*energy + 0.020*role + 0.014*path + 0.012*variable'),\n", + " (6,\n", + " '0.075*recognition + 0.057*representation + 0.042*capability + 0.034*u + 0.029*learning_algorithm + 0.029*might + 0.023*experiment + 0.023*pattern_recognition + 0.023*more_than + 0.023*view'),\n", + " (7,\n", + " '0.070*series + 0.069*field + 0.048*brain + 0.046*action + 0.042*location + 0.037*activity + 0.037*related + 0.037*limit + 0.036*adaptive + 0.031*strength'),\n", + " (8,\n", + " '0.083*gradient + 0.048*square + 0.042*component + 
0.035*moving + 0.028*learn + 0.028*communication + 0.021*additional + 0.021*configuration + 0.020*solve + 0.020*connection_between'),\n", + " (9,\n", + " '0.152*log + 0.093*mapping + 0.070*bound + 0.044*connectionist + 0.026*viewed + 0.022*appear + 0.017*previously + 0.017*determined_by + 0.017*john + 0.017*yield')]" + ] + }, + "execution_count": 168, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 133, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Amir F.Atiya\n", + "Docs: [5]\n", + "[(0, 0.26236663424329809),\n", + " (1, 0.055837758145413023),\n", + " (2, 0.32385947243135804),\n", + " (4, 0.031231118362347546),\n", + " (5, 0.049702348068489471),\n", + " (6, 0.063277167602715914),\n", + " (7, 0.11515798924424819),\n", + " (9, 0.098115022122885684)]\n", + "\n", + "FrankWilczek\n", + "Docs: [1]\n", + "[(0, 0.21018310687516228),\n", + " (1, 0.39886126379385306),\n", + " (2, 0.18071281961456737),\n", + " (3, 0.052218386110533886),\n", + " (5, 0.039636353968810233),\n", + " (8, 0.032375816267307712),\n", + " (9, 0.073725725628590477)]\n" + ] + } + ], + "source": [ + "name = 'Amir F.Atiya'\n", + "print('\\n%s' % name)\n", + "print('Docs:', model.author2doc[model.author2id[name]])\n", + "pprint(model.get_author_topics(model.author2id[name]))\n", + "\n", + "name = 'FrankWilczek'\n", + "print('\\n%s' % name)\n", + "print('Docs:', model.author2doc[model.author2id[name]])\n", + "pprint(model.get_author_topics(model.author2id[name]))\n" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -406,7 +647,7 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": 73, "metadata": { "collapsed": false }, @@ -418,15 +659,35 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": 76, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + 
"ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, author2doc=author2doc, doc2author=doc2author, threshold=1e-12, iterations=40, alpha=None, eta=None, eval_every=1, random_state=1)'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' '\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m 
\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m 
\u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", + 
"\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/atvb.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, id2author, author2doc, doc2author, threshold, iterations, alpha, eta, minimum_probability, eval_every, random_state)\u001b[0m\n\u001b[1;32m 129\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 130\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 131\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc2author\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 132\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 133\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcorpus\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc2author\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/atvb.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, author2doc, doc2author)\u001b[0m\n\u001b[1;32m 243\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 244\u001b[0m \u001b[0;31m# Get the count of v in doc. 
If v is not in doc, return 0.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 245\u001b[0;31m \u001b[0mcnt\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdoc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 246\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcnt\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 247\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ - "model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author,\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-12,\n", - " iterations=40, alpha=None, eta=None,\n", + "%time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", + " iterations=40, alpha=None, eta=None, \\\n", " eval_every=1, random_state=1)" ] }, @@ -546,97 +807,21 @@ "pprint(model.get_author_topics(author2id[name]))" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Test on a small dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "n_docs = 10\n", - "\n", - "from copy import deepcopy\n", - "\n", - "small_doc2author = deepcopy(dict(list(doc2author.items())[:n_docs]))\n", - "small_doc2author = dict(small_doc2author)\n", - 
"\n", - "small_corpus = corpus[:n_docs]" - ] - }, - { - "cell_type": "code", - "execution_count": 45, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "authors_ids = set()\n", - "for d, a_doc_ids in small_doc2author.items():\n", - " for a in a_doc_ids:\n", - " authors_ids.add(a)\n", - "\n", - "authors_ids = list(authors_ids)\n", - "author_id_dict = dict(zip(authors_ids, range(len(authors_ids))))" - ] - }, - { - "cell_type": "code", - "execution_count": 46, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "for d, a_ids in small_doc2author.items():\n", - " for i, a in enumerate(a_ids):\n", - " small_doc2author[d][i] = author_id_dict[a]" - ] - }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 169, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "# Make a mapping from author IDs to document IDs.\n", - "small_author2doc = {}\n", - "for a in range(len(author_id_dict)):\n", - " small_author2doc[a] = []\n", - " for d, a_ids in small_doc2author.items():\n", - " if a in a_ids:\n", - " small_author2doc[a].append(d)" - ] - }, - { - "cell_type": "code", - "execution_count": 48, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "author_id_dict_rev = dict(zip(range(len(authors_ids)), authors_ids))\n", - "\n", - "small_id2author = {}\n", - "for a, a_id in author_id_dict_rev.items():\n", - " small_id2author[a] = id2author[a_id]" + "reload(atvb)\n", + "AtVb = atvb.AtVb" ] }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 170, "metadata": { "collapsed": false }, @@ -645,47 +830,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 10 x 681 x 10 (68100 elements)\n", - "mu is 10 x 681 x 21 (143010 elements)\n" + "CPU times: user 15.6 s, sys: 8 ms, total: 15.6 s\n", + "Wall time: 15.6 s\n" ] } ], "source": [ - "print('phi is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), 10,\n", - " len(small_corpus) 
* len(dictionary.id2token) * 10))\n", - "print('mu is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), len(small_author2doc),\n", - " len(small_corpus) * len(dictionary.id2token) * len(small_author2doc)))" - ] - }, - { - "cell_type": "code", - "execution_count": 50, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(atvb)\n", - "AtVb = atvb.AtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 51, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author,\n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12,\n", - " iterations=10, alpha=None, eta=None,\n", + "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", + " iterations=10, alpha=None, eta=None, \\\n", " eval_every=1, random_state=1)" ] }, { "cell_type": "code", - "execution_count": 52, + "execution_count": 34, "metadata": { "collapsed": false }, @@ -694,28 +853,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.045*hopfield + 0.042*matrix + 0.025*gradient + 0.022*classification + 0.022*descent + 0.019*minimum + 0.018*training + 0.014*positive + 0.014*indicated + 0.014*called'),\n", + " '0.071*group + 0.039*matrix + 0.032*feedback + 0.027*whose + 0.018*obtain + 0.016*scheme + 0.015*constraint + 0.015*expression + 0.014*unique + 0.013*computational'),\n", " (1,\n", - " '0.063*cell + 0.037*activity + 0.026*region + 0.016*interaction + 0.016*connectivity + 0.014*along + 0.014*synaptic + 0.014*principle + 0.013*spatial + 0.013*robust'),\n", + " '0.041*map + 0.040*field + 0.034*location + 0.033*brain + 0.030*node + 0.021*requires + 0.020*propagation + 0.016*back_propagation + 0.016*distribution + 0.014*mechanism'),\n", " (2,\n", - " '0.056*principle + 
0.033*region + 0.031*noise + 0.029*position + 0.024*center + 0.020*dimensional + 0.018*mapping + 0.018*map + 0.016*previous + 0.016*coordinate'),\n", + " '0.084*processor + 0.075*edge + 0.052*activation + 0.034*update + 0.021*column + 0.020*run + 0.019*implementation + 0.018*control + 0.018*operation + 0.017*content'),\n", " (3,\n", - " '0.049*scheme + 0.037*capacity + 0.027*probability + 0.025*stored + 0.025*binary + 0.024*representation + 0.017*feature + 0.016*represented + 0.016*code + 0.016*bound'),\n", + " '0.046*image + 0.038*gradient + 0.027*flow + 0.025*field + 0.024*analog + 0.023*circuit + 0.022*constraint + 0.018*square + 0.017*vision + 0.017*technique'),\n", " (4,\n", - " '0.063*control + 0.033*brain + 0.026*action + 0.024*situation + 0.024*goal + 0.018*associative_memory + 0.018*task + 0.017*iv + 0.017*basic + 0.016*higher'),\n", + " '0.023*dynamic + 0.021*phase + 0.018*cell + 0.018*variable + 0.017*with_respect + 0.017*respect + 0.016*path + 0.015*noise + 0.014*energy + 0.011*limit'),\n", " (5,\n", - " '0.059*architecture + 0.046*potential + 0.037*connectivity + 0.036*energy + 0.035*computational + 0.029*storage + 0.019*current + 0.017*artificial + 0.015*dynamic + 0.015*though'),\n", + " '0.080*processor + 0.061*activation + 0.040*edge + 0.040*update + 0.021*store + 0.020*operation + 0.018*required + 0.018*address + 0.017*stored + 0.016*machine'),\n", " (6,\n", - " '0.052*dynamic + 0.042*training + 0.021*hidden + 0.021*noise + 0.019*propagation + 0.017*matrix + 0.015*hidden_unit + 0.015*context + 0.014*back + 0.014*architecture'),\n", + " '0.038*map + 0.037*brain + 0.033*stimulus + 0.024*functional + 0.021*noise + 0.020*associative_memory + 0.020*recall + 0.017*series + 0.015*scale + 0.015*associated'),\n", " (7,\n", - " '0.056*training + 0.030*human + 0.029*speed + 0.027*block + 0.026*he + 0.020*decision + 0.019*control + 0.018*distance + 0.017*distribution + 0.015*artificial'),\n", + " '0.049*potential + 0.044*cell + 0.035*connectivity + 
0.026*synaptic + 0.025*artificial + 0.023*architecture + 0.015*temporal + 0.014*brain + 0.014*computational + 0.013*action'),\n", " (8,\n", - " '0.020*produced + 0.018*relative + 0.017*capability + 0.016*other_hand + 0.016*potential + 0.015*magnitude + 0.015*circuit + 0.015*cell + 0.014*consequence + 0.014*interconnected'),\n", + " '0.075*image + 0.032*log + 0.024*dimensional + 0.018*mapping + 0.017*matrix + 0.016*center + 0.015*node + 0.014*recall + 0.013*back + 0.013*th'),\n", " (9,\n", - " '0.093*cell + 0.054*firing + 0.045*synaptic + 0.040*produce + 0.035*probability + 0.029*potential + 0.029*relation + 0.027*correlation + 0.023*produced + 0.023*connection_between')]" + " '0.058*scheme + 0.048*capacity + 0.047*probability + 0.040*representation + 0.030*stored + 0.028*binary + 0.025*represented + 0.023*code + 0.022*relationship + 0.021*bound')]" ] }, - "execution_count": 52, + "execution_count": 34, "metadata": {}, "output_type": "execute_result" } @@ -726,7 +885,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 35, "metadata": { "collapsed": false }, @@ -734,13 +893,13 @@ { "data": { "text/plain": [ - "[(0, 0.34230097594591424),\n", - " (4, 0.2487692783005907),\n", - " (6, 0.19935367234756304),\n", - " (8, 0.20805744284415623)]" + "[(0, 0.55485121572041607),\n", + " (4, 0.17897884328936686),\n", + " (6, 0.14414251935372879),\n", + " (8, 0.11957893769069983)]" ] }, - "execution_count": 45, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } @@ -751,49 +910,28 @@ }, { "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Online AT VB" - ] - }, - { - "cell_type": "code", - "execution_count": 83, "metadata": { - "collapsed": false + "collapsed": true }, - "outputs": [], "source": [ - "reload(onlineatvb)\n", - "OnlineAtVb = onlineatvb.OnlineAtVb" + "## LDA" ] }, { "cell_type": "code", - "execution_count": 84, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "model = 
OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, \n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12,\n", - " iterations=10, alpha=None, eta=None, decay=0.5, offset=64.0,\n", - " eval_every=1, random_state=0)" - ] - }, - { - "cell_type": "markdown", + "execution_count": 158, "metadata": { "collapsed": true }, + "outputs": [], "source": [ - "## LDA" + "reload(gensim.models.ldamodel)\n", + "LdaModel = gensim.models.ldamodel.LdaModel" ] }, { "cell_type": "code", - "execution_count": 96, + "execution_count": null, "metadata": { "collapsed": true }, @@ -887,7 +1025,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.4.3+" + "version": "3.5.2" } }, "nbformat": 4, From 40bbabf491ac3af4398404bac40360ed541ce66a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 14 Oct 2016 17:49:25 +0200 Subject: [PATCH 024/100] Computing rho in a different way. Added the possibility to evaluate only occasionally. Updated notebook. 
--- docs/notebooks/at_with_nips.ipynb | 257 +++++++++++++++++++++--------- gensim/models/onlineatvb.py | 28 ++-- 2 files changed, 201 insertions(+), 84 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 78d9879898..4b62f4e520 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -80,8 +80,12 @@ "outputs": [], "source": [ "# Configure logging.\n", + "\n", + "# log_dir = '../../../log_files/log.log' # On my own machine.\n", + "log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "\n", "logger = logging.getLogger()\n", - "fhandler = logging.FileHandler(filename='../../../log_files/log.log', mode='a')\n", + "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", "formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", "fhandler.setFormatter(formatter)\n", "logger.addHandler(fhandler)\n", @@ -97,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 27, "metadata": { "collapsed": false }, @@ -107,11 +111,12 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "data_dir = '../../../../data/nipstxt/'\n", + "# data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00']\n", + "yrs = ['00', '01', '02']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -133,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -158,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 29, "metadata": { "collapsed": true }, @@ -173,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 30, "metadata": { 
"collapsed": false }, @@ -191,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 31, "metadata": { "collapsed": false }, @@ -217,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -240,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 33, "metadata": { "collapsed": true }, @@ -255,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 34, "metadata": { "collapsed": true }, @@ -274,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 35, "metadata": { "collapsed": true }, @@ -286,16 +291,16 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 36, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbg
mygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLL
V9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0
\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmt
aMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZ
tSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEP
BL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B
3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip
5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GV
gReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Lt
kl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0
IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa
1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv5nU63Bp6dw+1N0+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0v
H+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EAe01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9FxKxc3z8lPUz6+fhFKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AA
pyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwKb\nSnpz6Rh7YlpE3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK
+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSPirREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnSNPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8C9oqI4qyQdtJt8ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp
3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpNmZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX1FwBJw0hjfc7O8XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY
8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgAElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKR
Gt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBwL356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNgDuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzMzOz1anWMUEASFpb0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9
IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5mS3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJzleAp4DDImJqIeayfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZf
OYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qvpnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpTC5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJS4nJCIeYxSfuQ\nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvA
kfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0mQmZlZE1qyBNZeu9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9Q
fVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmbWZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDLwGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3AXZXvfgiY3+q4jCBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAt
zqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3QtMBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4Lx8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eapPOrdZJ0rqRnJfVJ6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacB
tle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -321,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 37, "metadata": { "collapsed": true }, @@ -339,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 38, "metadata": { "collapsed": true }, @@ -353,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 39, "metadata": { "collapsed": false }, @@ -362,9 +367,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 166\n", - "Number of unique tokens: 681\n", - "Number of documents: 90\n" + "Number of authors: 578\n", + "Number of unique tokens: 2245\n", + "Number of documents: 286\n" ] } ], @@ -381,6 +386,151 @@ "## Online AT VB" ] }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 8min 11s, sys: 88 ms, total: 8min 11s\n", + "Wall time: 8min 12s\n" + ] + } + ], + "source": [ + "%time model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, 
\\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-3, \\\n", + " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=64.0, \\\n", + " eval_every=1, random_state=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.098*object + 0.029*frequency + 0.022*tree + 0.016*structured + 0.015*intrinsic + 0.013*impulse + 0.012*time_step + 0.012*induced + 0.010*bar + 0.009*experiment'),\n", + " (1,\n", + " '0.047*potential + 0.042*synapsis + 0.037*firing_rate + 0.037*cerebral + 0.034*hebbian + 0.034*synapse + 0.034*hebb + 0.029*ii + 0.026*dt + 0.024*expression'),\n", + " (2,\n", + " '0.041*environment + 0.039*visual + 0.039*reconstruction + 0.035*orientation + 0.029*spatial + 0.028*action + 0.016*image + 0.016*receptive + 0.016*filter + 0.016*receptive_field'),\n", + " (3,\n", + " '0.078*class + 0.078*competitive + 0.026*block + 0.026*competition + 0.024*field + 0.024*winner + 0.021*square + 0.019*operation + 0.019*column + 0.017*ideal'),\n", + " (4,\n", + " '0.042*propagation + 0.036*machine + 0.034*update + 0.034*back_propagation + 0.031*hidden + 0.027*hidden_unit + 0.025*bp + 0.025*classifier + 0.024*test_set + 0.022*variance'),\n", + " (5,\n", + " '0.044*implementation + 0.030*dimension + 0.028*polynomial + 0.019*measure + 0.017*find + 0.017*recurrent + 0.015*forward + 0.015*stanford + 0.013*sum + 0.013*per'),\n", + " (6,\n", + " '0.089*processor + 0.051*cm + 0.035*code + 0.022*communication + 0.018*generator + 0.018*asynchronous + 0.014*connected + 0.014*transfer + 0.014*reduction + 0.012*compute'),\n", + " (7,\n", + " '0.073*node + 0.035*perceptron + 0.035*likelihood + 0.024*robot + 0.022*perceptton + 0.020*accuracy + 0.016*fast + 0.015*testing + 0.015*speech + 0.015*multi'),\n", + " (8,\n", + " '0.050*power + 0.030*uniform + 0.021*capacitor + 0.020*transistor + 0.018*curve + 0.018*maximum + 0.014*next + 0.014*formed + 
0.011*every + 0.011*implement'),\n", + " (9,\n", + " '0.137*cell + 0.059*modulation + 0.052*fiber + 0.037*cortex + 0.033*plasticity + 0.033*consequently + 0.033*chemical + 0.030*visual_cortex + 0.026*action_potential + 0.026*neurosci')]" + ] + }, + "execution_count": 54, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [21]\n", + "[(0, 0.21583496794863521),\n", + " (1, 0.097266339574133789),\n", + " (2, 0.046104075223616918),\n", + " (3, 0.082806188712471071),\n", + " (4, 0.038965292793156206),\n", + " (5, 0.087321487508770779),\n", + " (6, 0.27427875618494268),\n", + " (8, 0.11559328603434335),\n", + " (9, 0.033930683980366742)]\n", + "\n", + "Geoffrey E. Hinton\n", + "Docs: [276, 235, 270]\n", + "[(0, 0.21478874086777811),\n", + " (2, 0.045025956135116882),\n", + " (3, 0.018058748789869943),\n", + " (4, 0.035433940765978195),\n", + " (5, 0.017238697764781256),\n", + " (6, 0.55389866230226381),\n", + " (7, 0.052574483064497073),\n", + " (8, 0.059424559853111729)]\n", + "\n", + "Michael I. Jordan\n", + "Docs: [205]\n", + "[(0, 0.28200285471582809),\n", + " (1, 0.019207440913240986),\n", + " (2, 0.036697731732562668),\n", + " (3, 0.028229183886206974),\n", + " (4, 0.047970907798814945),\n", + " (5, 0.049451568961465901),\n", + " (6, 0.41516092316824699),\n", + " (7, 0.040312982014292426),\n", + " (8, 0.079229887840147561)]\n", + "\n", + "James M. 
Bower\n", + "Docs: [188, 251, 244]\n", + "[(0, 0.26698792921714365),\n", + " (1, 0.15878442632165649),\n", + " (2, 0.060474251888253387),\n", + " (3, 0.010249883539547755),\n", + " (6, 0.40223568615538446),\n", + " (8, 0.052211994055734137),\n", + " (9, 0.033610105811001288)]\n" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -390,7 +540,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 17, "metadata": { "collapsed": false }, @@ -408,7 +558,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 18, "metadata": { "collapsed": false }, @@ -425,7 +575,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 19, "metadata": { "collapsed": false }, @@ -438,7 +588,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 20, "metadata": { "collapsed": false }, @@ -455,7 +605,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 21, "metadata": { "collapsed": false }, @@ -471,7 +621,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 23, "metadata": { "collapsed": false }, @@ -481,7 +631,7 @@ "output_type": "stream", "text": [ "phi is 10 x 681 x 10 (68100 elements)\n", - "mu is 10 x 681 x 22 (149820 elements)\n" + "mu 
is 10 x 681 x 21 (143010 elements)\n" ] } ], @@ -494,7 +644,7 @@ }, { "cell_type": "code", - "execution_count": 165, + "execution_count": 24, "metadata": { "collapsed": false }, @@ -506,7 +656,7 @@ }, { "cell_type": "code", - "execution_count": 167, + "execution_count": 25, "metadata": { "collapsed": false }, @@ -515,8 +665,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 13.4 s, sys: 8 ms, total: 13.4 s\n", - "Wall time: 13.4 s\n" + "CPU times: user 7.46 s, sys: 0 ns, total: 7.46 s\n", + "Wall time: 7.48 s\n" ] } ], @@ -527,47 +677,6 @@ " eval_every=1, random_state=1)" ] }, - { - "cell_type": "code", - "execution_count": 168, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.059*though + 0.052*subject + 0.033*estimate + 0.033*produced + 0.030*get + 0.027*assume_that + 0.027*technology + 0.026*actual + 0.026*lead + 0.020*taken'),\n", - " (1,\n", - " '0.066*node + 0.036*eq + 0.024*course + 0.024*goal + 0.023*comparison + 0.018*associated + 0.018*ma + 0.018*equivalent + 0.018*feature + 0.018*construct'),\n", - " (2,\n", - " '0.069*current + 0.069*synapsis + 0.057*inhibition + 0.046*cycle + 0.042*detailed + 0.038*circuit + 0.035*dynamic + 0.034*total + 0.034*combination + 0.027*rate'),\n", - " (3,\n", - " '0.144*connectivity + 0.073*image + 0.047*phase + 0.031*constraint + 0.026*vision + 0.026*spatial + 0.026*non_linear + 0.023*interaction + 0.021*biological + 0.021*characteristic'),\n", - " (4,\n", - " '0.088*associative_memory + 0.071*analog + 0.056*matrix + 0.041*edge + 0.030*variety + 0.020*assume + 0.020*required + 0.020*appropriate + 0.019*final + 0.015*implementation'),\n", - " (5,\n", - " '0.100*recall + 0.087*synaptic + 0.070*architecture + 0.065*scale + 0.057*flow + 0.032*circuit + 0.029*energy + 0.020*role + 0.014*path + 0.012*variable'),\n", - " (6,\n", - " '0.075*recognition + 0.057*representation + 0.042*capability + 0.034*u + 0.029*learning_algorithm + 0.029*might + 
0.023*experiment + 0.023*pattern_recognition + 0.023*more_than + 0.023*view'),\n", - " (7,\n", - " '0.070*series + 0.069*field + 0.048*brain + 0.046*action + 0.042*location + 0.037*activity + 0.037*related + 0.037*limit + 0.036*adaptive + 0.031*strength'),\n", - " (8,\n", - " '0.083*gradient + 0.048*square + 0.042*component + 0.035*moving + 0.028*learn + 0.028*communication + 0.021*additional + 0.021*configuration + 0.020*solve + 0.020*connection_between'),\n", - " (9,\n", - " '0.152*log + 0.093*mapping + 0.070*bound + 0.044*connectionist + 0.026*viewed + 0.022*appear + 0.017*previously + 0.017*determined_by + 0.017*john + 0.017*yield')]" - ] - }, - "execution_count": 168, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.show_topics()" - ] - }, { "cell_type": "code", "execution_count": 133, @@ -1025,7 +1134,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.5.2" + "version": "3.4.3+" } }, "nbformat": 4, diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 986b0a6020..9cefee43a2 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -137,8 +137,8 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, if corpus is not None: self.inference(corpus) - def rho(self, iteration): - return pow(self.offset + iteration, -self.decay) + def rho(self, t): + return pow(self.offset + t, -self.decay) def inference(self, corpus=None): if corpus is None: @@ -176,7 +176,8 @@ def inference(self, corpus=None): bound = word_bound + theta_bound + beta_bound #likelihood = self.log_word_prob(var_gamma, var_lambda) logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - for _ in xrange(self.passes): + t = 0 + for _pass in xrange(self.passes): converged = 0 # Number of documents converged for current pass over corpus. 
prev_bound = bound for d, doc in enumerate(corpus): @@ -267,10 +268,16 @@ def inference(self, corpus=None): break # End of iterations loop. + # TODO: I don't need to update the entire gamma, as I only updated a few rows of it, + # corresponding to the authors in the document. The same goes for Elogtheta. + # Update gamma and lambda. # Interpolation between document d's "local" gamma (tilde_gamma), # and "global" gamma (var_gamma). Same goes for lambda. - rhot = self.rho(d) + # TODO: I may need to be smarter about computing rho. In ldamodel, + # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). + rhot = self.rho(t) + t += 1 var_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma # Note that we only changed the elements in lambda corresponding to # the words in document d, hence the [:, ids] indexing. @@ -288,12 +295,13 @@ def inference(self, corpus=None): # End of corpus loop. # Evaluate bound. - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta, var_gamma) - beta_bound = self.beta_bound(Elogbeta, var_lambda) - bound = word_bound + theta_bound + beta_bound - #likelihood = self.log_word_prob(var_gamma, var_lambda) - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if _pass % self.eval_every == 0: + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta, var_gamma) + beta_bound = self.beta_bound(Elogbeta, var_lambda) + bound = word_bound + theta_bound + beta_bound + #likelihood = self.log_word_prob(var_gamma, var_lambda) + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) logger.info('Converged documents: %d/%d', converged, self.num_docs) From ed96b23b9a87d7fd4932e2e4b7ce4fc8560f8921 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Mon, 17 Oct 2016 15:53:31 +0200 Subject: [PATCH 025/100] Implemented hyperparam MLE for eta and alpha in offline algo. Removed use of log_normalize in offline algo. Update notebook. --- docs/notebooks/at_with_nips.ipynb | 71 +++++++++--------- gensim/models/atvb.py | 115 +++++++++++++++++++++++++----- 2 files changed, 135 insertions(+), 51 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 4b62f4e520..13aad0eb55 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -81,8 +81,8 @@ "source": [ "# Configure logging.\n", "\n", - "# log_dir = '../../../log_files/log.log' # On my own machine.\n", - "log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "log_dir = '../../../log_files/log.log' # On my own machine.\n", + "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", "\n", "logger = logging.getLogger()\n", "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", @@ -101,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 31, "metadata": { "collapsed": false }, @@ -111,12 +111,12 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "# data_dir = '../../../../data/nipstxt/' # On my own machine.\n", - "data_dir = '../../../nipstxt/' # On Hetzner.\n", + "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "#data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00', '01', '02']\n", + "yrs = ['00']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ 
-138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -163,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 33, "metadata": { "collapsed": true }, @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 34, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 35, "metadata": { "collapsed": false }, @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 36, "metadata": { "collapsed": false }, @@ -245,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 37, "metadata": { "collapsed": true }, @@ -260,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 38, "metadata": { "collapsed": true }, @@ -279,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 39, "metadata": { "collapsed": true }, @@ -291,16 +291,16 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 40, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv5nU63Bp6dw+1N0+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0vH+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn
8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EAe01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9FxKxc3z8lPUz6+fhFKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AApyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+
w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwKb\nSnpz6Rh7YlpE3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSP
irREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnSNPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8C9oqI4qyQdtJt8ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpN
mZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX1FwBJw0hjfc7O8XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4
C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgAElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKRGt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBw
L356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNgDuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzMzOz1anWMUEASFpb0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx
6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5mS3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJzleAp4DDImJqIeayfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZfOYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qv
pnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpTC5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJS4nJCIeYxSfuQ\nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvAkfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+
TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0mQmZlZE1qyBNZeu9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9QfVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmb
WZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDLwGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3AXZXvfgiY3+q4jCBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAtzqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3Qt
MBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4Lx8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eapPOrdZJ0rqRnJfVJ6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacBtle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR
0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbg
DeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxt
TZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J
+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf
0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1
JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwA
M9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5
A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n
1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR
5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/U
sZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyv
eu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", 
"text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 41, "metadata": { "collapsed": true }, @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 42, "metadata": { "collapsed": true }, @@ -358,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 43, "metadata": { "collapsed": false }, @@ -367,9 +367,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 578\n", - "Number of unique tokens: 2245\n", - "Number of documents: 286\n" + "Number of authors: 166\n", + "Number of unique tokens: 681\n", + "Number of documents: 90\n" ] } ], @@ -621,7 +621,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 22, "metadata": { "collapsed": false }, @@ -630,8 +630,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 10 x 681 x 10 (68100 elements)\n", - "mu is 10 x 681 x 21 (143010 elements)\n" + "phi is 10 x 2245 x 10 (224500 elements)\n", + "mu is 10 x 2245 x 22 (493900 elements)\n" ] } ], @@ -916,9 +916,16 @@ "pprint(model.get_author_topics(author2id[name]))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Test on small corpus" + ] + }, { "cell_type": "code", - "execution_count": 169, + "execution_count": 27, "metadata": { "collapsed": false }, @@ -930,7 +937,7 @@ }, { "cell_type": "code", - "execution_count": 170, + "execution_count": 30, "metadata": { "collapsed": false }, @@ -939,15 +946,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 15.6 s, sys: 8 ms, total: 15.6 s\n", - "Wall time: 15.6 s\n" + "CPU times: user 1min 29s, sys: 52 ms, total: 1min 29s\n", + "Wall time: 1min 29s\n" ] } ], "source": [ "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", - " 
iterations=10, alpha=None, eta=None, \\\n", + " iterations=10, alpha='auto', eta='auto', \\\n", " eval_every=1, random_state=1)" ] }, @@ -1134,7 +1141,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.4.3+" + "version": "3.5.2" } }, "nbformat": 4, diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 58ba69fe12..918f693135 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -18,8 +18,9 @@ from gensim import utils, matutils from gensim.models.ldamodel import dirichlet_expectation, get_random_state from gensim.models import LdaModel -from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. -from scipy.special import gammaln +from scipy.special import gammaln, psi # gamma function utils +from scipy.special import polygamma + from six.moves import xrange from pprint import pprint @@ -34,6 +35,31 @@ logger = logging.getLogger('gensim.models.atmodel') +def update_dir_prior(prior, N, logphat, rho): + """ + Updates a given prior using Newton's method, described in + **Huang: Maximum Likelihood Estimation of Dirichlet Distribution Parameters.** + http://jonathan-huang.org/research/dirichlet/dirichlet.pdf + """ + dprior = numpy.copy(prior) + gradf = N * (psi(numpy.sum(prior)) - psi(prior) + logphat) + + c = N * polygamma(1, numpy.sum(prior)) + q = -N * polygamma(1, prior) + + b = numpy.sum(gradf / q) / (1 / c + numpy.sum(1 / q)) + + dprior = -(gradf - b) / q + + # NOTE: in the LDA code, the criterion below is: + # if all(rho * dprior + prior > 0) + # But this causes an error for me, but the below criterion works. 
+ if (rho * dprior + prior > 0).all(): + prior += rho * dprior + else: + logger.warning("updated prior not positive") + + return prior class AtVb(LdaModel): """ @@ -42,15 +68,9 @@ class AtVb(LdaModel): def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author2doc=None, doc2author=None, threshold=0.001, - iterations=10, alpha=None, eta=None, minimum_probability=0.01, + iterations=10, alpha='symmetric', eta='symmetric', minimum_probability=0.01, eval_every=1, random_state=None): - # TODO: allow for asymmetric priors. - if alpha is None: - alpha = 1.0 / num_topics - if eta is None: - eta = 1.0 / num_topics - self.id2word = id2word if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') @@ -118,8 +138,6 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.num_topics = num_topics self.threshold = threshold self.minimum_probability = minimum_probability - self.alpha = alpha - self.eta = eta self.num_docs = len(corpus) self.num_authors = len(author2doc) self.eval_every = eval_every @@ -127,9 +145,57 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.random_state = get_random_state(random_state) + # NOTE: I don't think this necessarily is a good way to initialize the topics. + self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + + if alpha == 'auto': + self.optimize_alpha = True + else: + self.optimize_alpha = False + + if eta == 'auto': + self.optimize_eta = True + else: + self.optimize_eta = False + if corpus is not None: self.inference(corpus, author2doc, doc2author) + def update_alpha(self, var_gamma, rho): + """ + Update parameters for the Dirichlet prior on the per-document + topic weights `alpha` given the last `var_gamma`. 
+ """ + N = float(len(var_gamma)) + + logphat = 0.0 + for a in xrange(self.num_authors): + logphat += dirichlet_expectation(var_gamma[a, :]) + logphat *= 1 / N + + self.alpha = update_dir_prior(self.alpha, N, logphat, rho) + # logger.info("optimized eta %s", list(self.alpha)) + + return self.alpha + + def update_eta(self, var_lambda, rho): + """ + Update parameters for the Dirichlet prior on the per-document + topic weights `eta` given the last `var_lambda`. + """ + N = float(len(var_lambda)) + + logphat = 0.0 + for k in xrange(self.num_topics): + logphat += dirichlet_expectation(var_lambda[k, :]) + logphat *= 1 / N + + self.eta = update_dir_prior(self.eta, N, logphat, rho) + # logger.info("optimized eta %s", list(self.eta)) + + return self.eta + def inference(self, corpus=None, author2doc=None, doc2author=None): if corpus is None: corpus = self.corpus @@ -194,8 +260,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # TODO: avoid computing phi if possible. var_phi[d, v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi. - (log_var_phi_dv, _) = log_normalize(numpy.log(var_phi[d, v, :])) - var_phi[d, v, :] = numpy.exp(log_var_phi_dv) + var_phi[d, v, :] = var_phi[d, v, :] / var_phi[d, v, :].sum() # Update mu. for d, doc in enumerate(corpus): @@ -224,7 +289,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for k in xrange(self.num_topics): docs_a = self.author2doc[a] var_gamma[a, k] = 0.0 - var_gamma[a, k] += self.alpha + var_gamma[a, k] += self.alpha[k] for d in docs_a: # TODO: if this document doesn't exist, we will have problems here. Could to an "if corpus.get(d)" type of thing. doc = corpus[d] @@ -233,30 +298,42 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): for vi, v in enumerate(ids): var_gamma[a, k] += cts[vi] * var_mu[(d, v, a)] * var_phi[d, v, k] + if self.optimize_alpha: + # NOTE: taking a full Newton step seems to yield good results. 
+ # In the LDA code, they use rho() as step size. This seems + # very arbitrary; if a carefully chosen stepsize is needed, + # linesearch would probably be better. + stepsize = 1 + self.update_alpha(var_gamma, stepsize) + # Update Elogtheta, since gamma has been updated. Elogtheta = dirichlet_expectation(var_gamma) # Update lambda. for k in xrange(self.num_topics): for v in xrange(self.num_terms): - # TODO: highly unnecessary: - var_lambda[k, v] = self.eta + var_lambda[k, v] = self.eta[v] for d, doc in enumerate(corpus): # Get the count of v in doc. If v is not in doc, return 0. cnt = dict(doc).get(v, 0) var_lambda[k, v] += cnt * var_phi[d, v, k] + if self.optimize_eta: + stepsize = 1 + self.update_eta(var_lambda, stepsize) + # Update Elogbeta, since lambda has been updated. Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) logger.info('All variables updated.') - # Print topics: + self.var_gamma = var_gamma self.var_lambda = var_lambda + + # Print topics: #pprint(self.show_topics()) - self.var_gamma = var_gamma # Evaluate bound. if (iteration + 1) % self.eval_every == 0: @@ -283,7 +360,7 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. """ - + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. if doc_ids is None: From a2253998ead6fddceed24c1c88b3111ab04c9c1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Mon, 17 Oct 2016 17:20:53 +0200 Subject: [PATCH 026/100] Made it possible to sample a subset of documents in lambda update to speed up large experiments. Made it possible to initialize the model with LDA topics (lambda). 
--- docs/notebooks/at_with_nips.ipynb | 152 +++++++++++++++++------------- gensim/models/atvb.py | 29 ++++-- 2 files changed, 109 insertions(+), 72 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 13aad0eb55..29b391fb7e 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -73,7 +73,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -81,8 +81,8 @@ "source": [ "# Configure logging.\n", "\n", - "log_dir = '../../../log_files/log.log' # On my own machine.\n", - "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "#log_dir = '../../../log_files/log.log' # On my own machine.\n", + "log_dir = '../../../../log_files/log.log' # On Hetzner\n", "\n", "logger = logging.getLogger()\n", "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", @@ -101,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 93, "metadata": { "collapsed": false }, @@ -111,12 +111,12 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", - "#data_dir = '../../../nipstxt/' # On Hetzner.\n", + "#data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", - "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00']\n", + "yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", + "#yrs = ['00']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 94, "metadata": { "collapsed": false }, @@ -163,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 95, "metadata": { 
"collapsed": true }, @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 96, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 97, "metadata": { "collapsed": false }, @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 98, "metadata": { "collapsed": false }, @@ -245,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 99, "metadata": { "collapsed": true }, @@ -260,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 100, "metadata": { "collapsed": true }, @@ -279,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 101, "metadata": { "collapsed": true }, @@ -291,16 +291,16 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 102, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk
\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B
7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWN
lfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4
HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1p
IcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDo
LMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r
6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhR
DSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9h
YMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngS
eA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQV
AFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o
/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkoAAAGcCAYAAAAmrI82AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHFW5//HPNwGiLAlcuQkgKCAGQWVJQMhFAt5IkEXU\nC4oDekFwYZU7iqAIPxAUBSVhVxCQRR1kuSh7MHCFAMFAgggSguyLJBDABMISkjy/P85pqBRdMz2d\nkZ6ZfN+vV786feqpc05VTzJPTp06pYjAzMzMzN5uQKs7YGZmZtZbOVEyMzMzq+BEyczMzKyCEyUz\nMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMrOWkPSUpLMLn8dIWiTp\nP96Btn8o6Y3C54G57XH/6rZze1/N7a3xTrTXLEnflfSIpAWSprS6P42S9IF8fvdodV+s73OiZEsV\nSXvlf0DrvY5vdf+WMvWen9TtZypJ+r6kTzfR9qLuttVdnfQtaOJY30mSdgSOB/4P2Bs4qqUdMmuR\nZVrdAbMWCNI/+o+Vyu9757tiNRFxo6R3R8T8bu56JHARcFU39jkaOLab7TSjqm/nARc1cazvpE8A\nbwBfDT8U1JZiTpRsaXV9RExrNFiSgOUi4vV/YZ+Wev/qxEHS8hHxSkQs4h0YUaqSE4/enCQBDAPm\n9cYkyX8f7Z3kS29mJcX5KpK+LOlvwGvAmLxdkr4l6W+SXpP0jKQzJQ0u1SNJ/y/PxXlZ0kRJH5L0\nZGluzmLzZQrldeexSNpJ0qRc5xxJV0r6UCnm15JelLRm3v6SpGcl/aROO5LULumvkl7NcddK2iRv\nv03SXRXn6mFJnY7kVJ2HOnFvm6Mkabik/5U0M/ftCUm/kbRC7XsClgNq52pR7dzm87oo1/E7SS+S\nLiNVnvO87cuSZuT2ppTnTOVz+/c6+71ZZwN9q/puDy78XD0t6dQ6P1e3Spom6cOS/k/SK/ncfquz\n76Gw/zKSjs7f3WtKc5COlbRsqe97AkNyPxeqYr5P/tl5Q9IKhbLD834/KZQtk7//YwtlK0oan/9O\nvCZpuqT/KdXf1d/HVSRdKOmfkl6QdC6w2DnLcatLuiCfq9ck/UPSFZLWbOS82dLLI0q2tBoi6T3F\ngoh4vhQzFvgicAbwAvBELj8PaMvvJwPrAgcDG0vaOo9WQJrfcThwJTABGAncALy71E7VfJW3lUva\nGzgXuBY4DFgBOACYJGnTiHiqsO8yub1JwLfz8XxH0t8j4txCtReSfileBZxN+uU+GtgC+Evefqak\n4RHxYKEvo4B1gO/V6XtRo+eh1u9a/YNy3ADSeZ4FrAl8GhgcEfMkfQn4FXBrPi8AD5Xq+l/gAeC7\nhbKqcz4G2AM4lXTZ6UBggqTNImJGF/u+WR4RCxvoW/m7/SFwBHA96WduA9J3O7L0cxXAqsB1wKXA\nxcAXgJ9KuicibqzTt6Lz8zFeTPrZ2JJ0iXB9YPdC3w8ANga+Dgi4raK+SaTvaCvS9wXwcWAhsHUh\nbiTpO78lH6+Aa/J+vwT+CuwAjJO0ekQcXmrnbX8fcx1XkX5WzwRmALuSznv5O/o9sB7pu32CNGI2\nlvQz9RRmVSLCL7+WmhewF+mSS/m1sBAzMJfNB9Yr7b9t3rZrqXyHXL5b/jw07395Ke4nOe7sQtlx\nwPw6fd2X9Mtmjfx5JeCfwGmluGG5/PRC2UV538NKsX8Bbi983i7358ROztnKwKvAsaXyM3K77
+pk\n3+6chzG5z/+RP4/MMZ/u4jt9tVhP6bwuAs6v2Da/8Ln2nS8APlIofz9p9OLi0rl9sKs6u+hb+bsd\nls/TlaW4b+a4PQtlk3LZFwply5ESyd92ca5G5OM8o1Q+Lte5Vek4X2jg79RA4CXguELZC6RE7LXa\nzwfwnXyMK+bPu+a+HFqq73JSkvq+Bv4+1ur4ZqFsACk5XQjskcv+rRznl1+NvnzpzZZGAewPfLLw\n2q5O3I0R8VCpbDfSL4E/SXpP7QXcRfql+Ikctz3pH/jTSvufvAT9/hQpWbq41PZC4M5C20Vnlz7f\nShoBq9mVlBwcV9VoRPwTuJo0CgGkyyHA50kJ0Gud9HkszZ+Hf+b3HSS9q4H4egL4RTfiJ0XEm5P6\nI+Jx0ojFp5psv1Hbkc5T+bycBbwC7FQqnxMRl9Q+RJrbdSeLf7f17Eg6J+VlEE4ijRqV2+lSRCwE\nJpNGIZG0ETAE+DGwLGm0B9Io0z0R8XL+vAMp+TmjVOU40rkon/N6fx93AF6n8HMeaeTt9Hw8Na+Q\nkq9PSBrSzUO0pZwTJVta3RkRNxVfdWIeq1P2QdL/Tp8rvWYB7yKNoAC8L78v9g97RMwk/e+7GeuR\n/vGfVGr7WeA/C23XvJyTnKIXgVUKn9cFnoqIrvp0IbCOpC3z508B7yGNOnTm/fm92+chIh4GTgG+\nATwv6TpJ+0taqYs2yx7tRmz5FzHAg8BKklaps62n1M7Tg8XCSJOVHy1sr3myTh3l77aqnQX53Bbb\neZr0fZTbadStwOZ5ntPWwJMRcQ/pTtLa5betSD+7xb48FRGvluqaXthe9Fiddt8PPF0nWZ9R/JC3\nHwHsDDwr6U+SDpVU/jtj9jaeo2RWrfwPOKT/XPwD+DKL/4+15tn8XtvWyB1DVTED67QdpPlRs+vE\nlycnL6yoVxV/7sx1uc0vAXfk96cj4k9d7Ned8/A2EdGeJ+d+hjQ6dTpwuKQtc7LViHrfY3eUz1Gj\n39eStNGVRr7b7m7vbh+KJpGWXNiCNHI0qVC+taQPk/6DccsStFfvexT1v4+31R0RJ0m6AvgsacT3\nh8D3JG1THEU0K/OIkln3PEyaSHtreUQqv2r/4D6W34cXd5a0GunyWdGLwEBJy5fK167TNsCzFW1P\novseAtYs31lVFhELyJOGJa1MmlD9mwbqfyy/N3Ieqtq+LyJ+FBHbANuQRuu+XgxppJ4GfbBO2XDg\npYh4MX9+kTRvq2ztOmWN9u2x/L5+sVDScrnexxusp5F2lpH0gVI7awArLkE7d5Au4Y4mjSDVfhZv\nAf6DdFk4SCNPxb6sKak8qX+D/N5IX2p1lC/Nrl8nloh4JCLGRcT2wEdJk8sbulvQll5OlMy65xLS\nxNkjyxvy7c+1hOOPpP/1H1wKa69T58Ok/wGPLtS1ImnUqug64GXg+3mOULn9VRs8hqLLSSPLjay6\nfBEpSTyL9AumkUSpO+dhMZIGSyr/G3Uf6RfuoELZPOonLs34eJ5jU+vD2qTLNdcXYh4G3iNpg0Lc\ne0nJY1mjfaudp0NK5d8g3dl4dQN1NOJa0s/a/5TKv006r9c0U2m+fDaN9DO7OouPKK0AHATMiIji\nSOi1pL9LB5Sqayedi+saaPpa0s/CN2oF+e/GQSx+B+W7812URY+Q/j4NKsStJmn9Oj93thTzpTdb\nGjV9iSEibsqXgo6UNAKYSPqf9HDSRO/9SXcuzZI0HjhU0pWkf/Q3I00cf6FU7XXA08D5kn6Wy/YB\nngHeXGcnIuZIOoi0LME0SReTLoe9nzQJ9//o5v+OI2KipA7gW0prG91AuoS0NTAhIoqTZO+SNJ00\nifuvjVyu6OZ5gMW/m+2A8ZIuBf5Omhi8F+kS4/8W4qYCY/P6O88AD0dE3XWfGnAfcIOk00jf6wH5\n/QeFmN+Sljy4MsetCOxHWoJg41J9DfUtn6cTgCMkXUtKj
DbI9U4mjeYtsYiYJuk3wAH5RoBJwCjS\npdRLIqJqCYBGTAIOBZ6PiOm5vWckPUz6+/HLUvwVpBGnEyStx1vLA+wE/DQi6s3DKruCNJr1szxK\nVlseoDw6uyFwvaRLgPtJidhupHl2HYW4n5FuWliTdIndzMsD+LV0vUi/aBcCIzqJGZhjTuok5muk\nu4xeJl2KuRv4ETC0FPf/SEnQy6RRg/VJE3HPLsWNIP1CfJX0P90DKd1CXojdljTC8WKudwZwDrBJ\nIeYi0i+scr+PA14vlYn0C+7+3P5M0p1eG9XZ/7u5T9/q5nmvdx6eAM4qxJSXB1g3H9ffSSMzz+Z9\nR5fq/hDwp1z3wtq5zce6kLTmUqfnofidk5KGB/O5mFLrT2n/scC9pNvf/0Zax6je8gBVfav6bg/M\n9b2Wz9cpwEqlmEnA1Dp9uog0atPVdzEwfx8P53YeJSWCy9Sp720/Q53U++l8TFeUys+jtMRBYdsK\npLvcnsp9eQA4pDt/H0kT2C8k3SX5PGnNqk1ZfHmAVUl3Xt4PzCUl6bcBn61zzAvK34tfS/dLET15\ned/MuiLpSeC6iPh6l8G9jKRvk9ZAel9EPNPq/piZ/av5OqyZdcc+pPVsnCSZ2VLBc5TMrFNKz/Da\nhTSv6EP4LiEzW4o4UTJ751U9K6y3Wo10h9sLpMeYTGhxf8zM3jGeo2RmZmZWwXOUzMzMzCo4UTIz\nMzOr4ETJzFpO0g8llZ9V9073YaCkRZLG9WCdY3Kdu/RUnd1o+9eS/v5Ot2vW3zhRMuvlJO2Vf9nW\nXq9KmiHptH709PO+NsG9O1p1XAEsalHbZv2G73oz6xuC9Dy2x4B3kZ7Qvj+wg6SPRMRrLeybda7p\nR+Ysob1b2LZZv+FEyazvuD4ipuU/nyfpBdIDRD8D/K513eqapOUj4pVW92NpEhELW9Guv2vrb3zp\nzazvuok0YrBOrUDSOpIulfS8pHmSJkvasbiTpOcKD99FyT8lvSFpcKH88Fy2fKFsfUmX5fpflXSn\npE+X6q9dKhwt6UxJs0jPt+sWSftKulHSrNzWfZK+Voo5RdLMUtnPc/v7FcrWyGX7NNj2l/PlzVcl\nTZH0H3Vi3ivpfEkzJb0m6V5Je9WpLoABko6S9JSkVyT9UdI6pfq2yd/dE7m+xyX9rPjUe0nflbRQ\n0hrlRnLsq5JWyp/fNkdJ0oqSxkt6MrcxPT+wtxjzgXyu9iiV1+ZwHVEo+2EuGy7pd5JeJD2c2azf\ncKJk1netl9+fB8jzlSYD2wGnA0cAg4CrJH2msN9twOjC542AWoK0VaH848C02uiApA+TntS+PvBj\n0grdLwO/L9VfcyZpJe8fkJ4P1137kx4Q/CPg26SHxJ5VSpYmAf8uaXip3wuBrQtlo0kJy6QG2h0D\n/BS4gPTw2KHABEnr1wIkrUZ6YO42wKnAIbmvv5J0QKk+kS6b7gSckF//QXqQa9EXSN/X6cBBpAcA\nH0J6qGzNxbm+z9fp927AtRHxUv682LwvSQKuAQ4GriaNRv4dGCfphM5OSCdq9f8v6eG13yU9lNas\n/2j1U3n98suvzl/AXqRf/J8A3gO8F9gdeI6UqKye48bnuFGFfVcgPSX+4ULZt4H5wAr580GkX/KT\ngeMLcS8APyt8ngjczdufMn8r8ECpv4uAP5EXtW3gGI8D5pfKBtWJ+yMwvfB5WG5r3/x5lXwOLgae\nKMSdDszsog8Dc10LgI8Uyt9PerL9xYWy84EngCGlOi4BZgPL5s9jcp33AAMLce25n8O7ON7v5/6s\nXij7M3B7KW5UbucLhbKLgAcLn3fNMYeW9r0ceIP0oGOAD+S4PSrOzxGl720RcH6r/5745de/6uUR\nJbO+QcCNpOToSeC3wFzgs/HWA2p3AKZExOTaThExDzgbWFvShrl4Eml+Yu1y0ta5bFL+M5I2AlbO\nZUhahZSoXQoMkfSe2
gu4AfigpNUL/Q3glxHR9B1fEfH6mwcvDc5t3QwMl/TuHDMLeIi3Rsi2Bl4H\nTgLWlPT+0jE2YlJE3Ffox+PAVcCncl8EfA74A7BMnXOxCrBJqc5zY/E5Q5NI3+m6Fce7fK7v9hxX\nrO93wBaS3lco2x14hTRSVGUHUoJ8Rql8HCkJ+lQn+3YmgF80ua9Zr+dEyaxvCNKlqE8C2wIbRsQH\nImJiIeb9wIw6+04vbAeYRvqlWrs09XHeSpQ2k7Rc3hak0SJIl/lEGkF4rvQ6JseUlyp4rPhB0rKS\nhhVfnR2wpK0l3STpZeCfua1j8+YhhdBbS8cyBbgLmANsLWkI8BEaT5QeqlP2ILBSThhXA1YCDuDt\n5+LsHF8+F+U5Wi/m91VqBZLeL+lCSc+TRgqfIyXHsPjxXpLfv1Ao2xW4OjqfRP1+4KmIeLVUXv75\naMajS7CvWa/mu97M+o4746273poWEQsk/RkYLekDwOrALaRfzMsCW5ASjukR8Xzerfafqp8BVQ/F\nLScY5V/Io0mXzoKUdIWktSLiH+WKJH0wx95Hukz1JGk0ZBfSHJvif/ImAXtJWouUME2MiJB0W/5c\nS0puqeh3I4q32dfavgD4dUX8PaXPVXegCdJEadKlzZWA40kJ7yvA+0hzlN483oh4StJkUqL0M0lb\nky7HXtyNY+hM1SjgwE72KX/XZv2GEyWz/uNx0kTrsg0K22smAYeRJn4/FxEPAkj6Gymh2Zp0uanm\nkfz+RkTc1GT/ppJGxIqeq4jdhZS07ZQvr5H7t32d2NpI0fbACODo/PkW4CukROkl3p68VPlgnbLh\nwEsR8aKkucA8YMASnIuyTUhzg9oi4s2lHiRVXQ67GDhF0rqky24vAdd10cZjwMclvbs0qlT++agl\nliuX9l+SESezPsuX3sz6j2uBj0naolYgaQXg68CjEXF/IXYSaeHKQ3jr8hr5z18mjTK9eakqIp4j\nTc7+Rr7jazGSVu2qcxHxz4i4qfSqemxJbQTmzX+j8mWv/65T70PALNIk9QGkeT21Y1yfNJ/o9m7M\nl/p4nqNVa3dtYGfg+tzeQuAK4AuSNijvXOdcNNJuveMV6fupt/+l5AnXpMtuVxbnOFW4FliOdMmw\nqDax/DqAiHiRdKlzdCnuoIq+1CVpiNJyEis2uo9Zb+QRJbO+oZHLJj8B2oDrJZ1Kumttb9JIwH+V\nYieT7qYaDpxVKL+FNBeq3q30B+ayeyX9kjTKNIx0x9V7gU272d/OTCDdRn9tbmsw8DXgGd4+/wdS\ngrcbaTmDl3PZnaRLQuuR7lJr1H3ADZJOI52jA/L7Dwoxh5ESiSm5f9OBfwM2I43GFZPJRs7F30jz\nfE7OE9BfzsczuF5wRMySNAn4DrAijS04egXp+z1B0nrAX0kTvHcCfhoRxXlU5wCHSppDmtO2LWnE\nqzvf6xeBn+f3S7qINeu1PKJk1jd0+T/5iHiWlLTcQPrf//Gk29p3jogrS7GvkG71L07YhpQIBenW\n+idL+0wnJQJXk5YAOB34Bmk04lgW18zdbm/uk9vajfRv1M+ArwKnkdZmqqfW7+Io2ALSrfSNrp9U\n68ONwKGkYzyGNFo1NvepVvdMYHPSPKX/yn37JimxObzquKrK88jazqTk5QjgSFLy9JVO+vo7UpL0\nT6rnjRXbCFJSdCrwadJyEsOBb0XEd0v7HU2aG/UFUsK6IPevu8/k66/P77OliJbg7l0zMzOzfq3X\njShJ+l5eEn9coWyQpDMkzZb0ktIjFIaW9ltL0jVKj22YKelESQNKMdtKmpqX7n9QdR43IOlASY/m\nRwHcIWnz0vYu+2JmZmb9Q69KlHJS8jXefnfKyaQh411J8wLWIK0mW9tvAGmi4jLAlqQh870pXA7I\nEzKvJg2rbwycApwjabtCzO6kheqOJs23uIf06ILi5MxO+2JmZmb9R6+59JbvjJhKmkh
6FHB3RHxL\n6SGdzwFfjIgrcuz6pMmTW0bEFEk7AFeSlvmfnWO+QZrc+u953ZgTgB0iong3SwfpEQQ75s93AH+O\niEPyZ5HWbzk1Ik5spC//0pNkZmZm76jeNKJ0BnBVnXVJNiONFNVWqCUiZpCeszQqF20J3FtLkrIJ\npNVsP1yIKa5iXIsZBWnVYGBkqZ3I+9TaaaQvZmZm1k/0ikRJ0hdJC659r87mYaSHZc4tlc/irVtw\nV8ufy9tpIGawpEHAqqSVZ+vF1OpopC+Lyc9sGiFp+XrbzczMrL7e8Du05esoSVqTNO9nu04Wn6u7\nK43detpZjBqM6aqdzmI2AW4DpuVnVhVdT/VtvWZmZkuT7Xn7w5lXJK24vxVvLSb7jmp5okS63PXv\nwNQ8JwjSyM5oSQeRTtogSYNLIzlDeWv0p7amSdGwwrbae/khnEOBuRExX9Js0now9WKK7SzXRV/K\n1s7vI+psG01a68bMzMyqrc1SnChNBD5aKjufNEH6J8DTwBvAGNLKskgaTnpYZO2kTQaOkLRqYZ7S\nWNLTw6cXYnYotTM2lxMRb0iamtu5Mrej/PnUHD+VtPBavb5Mrji+xwB+/etfs8EGb3vaQb/S3t7O\n+PHjW92Nf7ml5Thh6TlWH2f/4uPsP6ZPn86XvvQlyL9LW6HliVJEzAOKz6BC0jzg+dpKuJLOBcZJ\nqj3c8lTgtoi4M+9yQ67jIkmHk55TdRxweuFy3i+Ag/Ldb+eRkp3dgB0LTY8DLsgJ0xTSM5CWJz/+\nICLmdtKXqjveXgPYYIMNGDGi3qBS/zFkyJB+f4yw9BwnLD3H6uPsX3yc/dJrrWq45YlShfJ8n9pD\nGy8DBpHm9hz4ZnDEIkk7k54rdDvpyd7n89ZTxImIxyTtREqGvgk8BewbERMLMZfkNZOOJV2C+wuw\nfX4gaEN9MTMzs/6jVyZKEfGfpc+vAwfnV9U+T5KeRdRZvTeT5kR1FnMm1c+TaqgvZmZm1j/0iuUB\nzMzMzHojJ0rWY9ra2lrdhXfE0nKcsPQcq4+zf/FxWk/qNY8w6a8kjQCmTp06dWmadGdmZrbEpk2b\nxsiRIwFGRsS0VvTBI0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCi\nZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVw\nomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlV\ncKJkZmZmvcr//A987Wut7kWyTKs7YGZmZlb01FPw8sut7kXS8hElSftJukfSnPy6XdKnCtv/JGlR\n4bVQ0pmlOtaSdI2keZJmSjpR0oBSzLaSpkp6TdKDkvaq05cDJT0q6VVJd0javLR9kKQzJM2W9JKk\nyyQN7elzYmZmZr1DyxMl4EngcGBkft0E/EHSBnl7AGcDw4DVgNWBw2o754ToWtLo2JbAXsDewLGF\nmLWBq4EbgY2BU4BzJG1XiNkdOAk4GtgUuAeYIGnVQl9PBnYCdgVGA2sAly/pCTAzM7PeqeWJUkRc\nExHXR8RD+XUk8DIp6al5JSKei4hn86s4ILc98CFgz4i4NyImAEcBB0qqXVrcH3gkIg6LiBkRcQZw\nGdBeqKcdOCsiLoyIB4D9gFeAfQAkDc5/bo+ImyPibuArwFaSPtbDp8XMzMx6gZYnSkWSBkj6IrA8\ncHth056SnpN0r6TjJb27sG1L4N6ImF0omwAMAT5ciJlYam4CMCq3uyxpNOvG2saIiLzPqFy0GWnU\nqhgzA3iiEGNmZmb9SK+YzC3pI8Bk4F3AS8DnchIC8BvgceAfwEbAicBwYLe8fTVgVqnKWYVt93QS\nM1jSIODfgIEVMevnPw8D5kfE3DoxqzV0oGZmZta
n9IpECXiANHdoZdL8nwsljY6IByLinELc3yTN\nBG6UtE5EPNpFvdHJNjUY09n2RmPMzMysD+oViVJELAAeyR+n5Tk/h5DmFpX9Ob+vBzwKzAQ2L8UM\ny+8zC+/DSjFDgbkRMV/SbGBhRUxtlGkmsJykwaVRpWJMpfb2doYMGbJYWVtbG21tbV3tamZm1u91\ndHTQ0dEBwF13wYIF0N4+p8W96iWJUh0DgEEV2zYljeA8kz9PBo6QtGphntJYYA4wvRCzQ6mesbmc\niHhD0lRgDHAlgCTlz6fm+KnAglx2RY4ZDryvVk9nxo8fz4gRI7oKMzMzWyoVBw922y2to3T88dMY\nOXJkS/vV8kRJ0o+A60jLBKwE7AlsA4yVtC6wB+n2/+dJl+fGATdHxH25ihuA+4GLJB1OWj7gOOD0\niHgjx/wCOEjSCcB5pGRnN2DHQlfGARfkhGkK6S645YHzASJirqRzgXGSXiTNpToVuC0ipvToSTEz\nM7NeoeWJEuly14WkBGcO8FdgbETcJGlN4JOky3ArkJKpS4Ef1XaOiEWSdgZ+TrpTbh4puTm6EPOY\npJ1IydA3gaeAfSNiYiHmkrxm0rG5T38Bto+I5wp9bSddoruMNOJ1PXBgj50JMzMz61VanihFxFc7\n2fYUsG0DdTwJ7NxFzM2kJQA6izkTOLOT7a8DB+eXmZmZ9XO9ah0lMzMzs97EiZKZmZlZBSdKZmZm\nZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZm\nZtbrSK3uQeJEyczMzHqViFb34C1OlMzMzMwqOFEyMzMzq+BEyczMzKyCEyUzMzOzCk6UzMzMzCo4\nUTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMjMzM6vgRMnMzMysghMlMzMzswpOlMzMzMwq\nOFEyMzMzq+BEyczMzKyCEyUzMzOzCi1PlCTtJ+keSXPy63ZJnypsHyTpDEmzJb0k6TJJQ0t1rCXp\nGknzJM2UdKKkAaWYbSVNlfSapAcl7VWnLwdKelTSq5LukLR5aXuXfTEzM7P+o+WJEvAkcDgwMr9u\nAv4gaYO8/WRgJ2BXYDSwBnB5beecEF0LLANsCewF7A0cW4hZG7gauBHYGDgFOEfSdoWY3YGTgKOB\nTYF7gAmSVi30tdO+mJmZWf/S8kQpIq6JiOsj4qH8OhJ4GdhS0mBgH6A9Im6OiLuBrwBbSfpYrmJ7\n4EPAnhFxb0RMAI4CDpS0TI7ZH3gkIg6LiBkRcQZwGdBe6Eo7cFZEXBgRDwD7Aa/k9mmwL2ZmZtaP\ntDxRKpI0QNIXgeWByaQRpmVII0EARMQM4AlgVC7aErg3ImYXqpoADAE+XIiZWGpuQq0OScvmtort\nRN6n1s5mDfTFzMzM+pFekShJ+oikl4DXgTOBz+VRndWA+RExt7TLrLyN/D6rznYaiBksaRCwKjCw\nIqZWx7B7GWMFAAAgAElEQVQG+mJmZmY9QGp1D5Jlug55RzxAmju0Mmn+z4WSRncSLyAaqLezGDUY\n01U7jfbFzMzM+phekShFxALgkfxxWp7zcwhwCbCcpMGlkZyhvDX6MxNY7O400uhPbVvtfVgpZigw\nNyLmS5oNLKyIKbbTVV8qtbe3M2TIkMXK2traaGtr62pXMzOzfq+jo4OOjg4A7rwTFi2C9vY5Le5V\nL0mU6hgADAKmAguAMcAVAJKGA+8Dbs+xk4EjJK1amKc0FpgDTC/E7FBqY2wuJyLekDQ1t3Nlbkf5\n86k5vrO+TO7qgMaPH8+IESMaO3ozM7OlTHHwYNdd4dVX4Yc/nMbIkSNb2q+WJ0qSfgRcR1omYCVg\nT2AbYGxEzJV0LjBO0ovAS6TE5baIuDNXcQNwP3CRpMOB1YHjgNMj4o0c8wvgIEknAOeRkp3dgB0L\nXRkHXJATpim
ku+CWB84H6KIvU3r4tJiZmS21ohdNaGl5okS63HUhKcGZA/yVlCTdlLe3ky6LXUYa\nZboeOLC2c0QskrQz8HPSKNM8UnJzdCHmMUk7kZKhbwJPAftGxMRCzCV5zaRjc5/+AmwfEc8V+tpp\nX8zMzKx/aXmiFBFf7WL768DB+VUV8ySwcxf13ExaAqCzmDNJd9013RczMzPrP3rF8gBmZmZmvZET\nJTMzM7MKTpTMzMzMKjhRMjMzM6vgRMnMzMysghMlMzMzswpOlMzMzMwqOFEyMzMzq+BEyczMzKyC\nEyUzMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMjMzs15HanUPEidK\nZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZWa8S0eoevMWJkpmZmVkFJ0pm\nZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVWp4oSfqepCmS5kqaJekKScNLMX+StKjw\nWijpzFLMWpKukTRP0kxJJ0oaUIrZVtJUSa9JelDSXnX6c6CkRyW9KukOSZuXtg+SdIak2ZJeknSZ\npKE9eU7MzMysd+iRREnSQEmbSFqlid23Bk4DtgA+CSwL3CDp3YWYAM4GhgGrAasDhxXaHwBcCywD\nbAnsBewNHFuIWRu4GrgR2Bg4BThH0naFmN2Bk4CjgU2Be4AJklYt9OVkYCdgV2A0sAZweRPHbWZm\nZr1cU4mSpJMl7Zv/PBC4GZgGPClp2+7UFRE7RsRFETE9Iu4lJTjvA0aWQl+JiOci4tn8ermwbXvg\nQ8CeEXFvREwAjgIOlLRMjtkfeCQiDouIGRFxBnAZ0F6opx04KyIujIgHgP2AV4B98rEOzn9uj4ib\nI+Ju4CvAVpI+1p3jNjMzs96v2RGl3UijLQCfBtYhJSrjgR8tYZ9WJo0gvVAq31PSc5LulXR8acRp\nS+DeiJhdKJsADAE+XIiZWKpzAjAKQNKypOTsxtrGiIi8z6hctBlp1KoYMwN4ohBjZmZm/USzidKq\nwMz85x2BSyPiQeA84KPNdkaSSJe2bo2I+wubfgN8CdgWOB74MnBRYftqwKxSdbMK2zqLGSxpEOmY\nBlbE1OoYBsyPiLmdxJiZmVk/sUzXIXXNAjaU9AzwKeCAXL48sHAJ+nMmsCGwVbEwIs4pfPybpJnA\njZLWiYhHu6izsyfGqMGYrp4600iMmZmZ9THNJkq/Ai4BniElCH/M5VsADzRToaTTSaNTW0fEM12E\n/zm/rwc8Shrd2rwUMyy/zyy8DyvFDAXmRsR8SbNJSV69mNoo00xgOUmDS6NKxZi62tvbGTJkyGJl\nbW1ttLW1dbabmZnZUqGjo4OOjg4A7rwTFi2C9vY5Le5Vk4lSRBwj6T5gLdJlt9fzpoXAT7pbX06S\nPgNsExFPNLDLpqQErZZQTQaOkLRqYZ7SWGAOML0Qs0OpnrG5nIh4Q9JUYAxwZe6X8udTc/xUYEEu\nuyLHDCdNPp/cWYfHjx/PiBEjGjg0MzOzpU9x8OBzn4P58+G446YxcmT53q53VrMjSkTEZQCS3lUo\nu6C79eT1kNqAXYB5kmojOnMi4jVJ6wJ7kG7/f550a/844OaIuC/H3gDcD1wk6XDS8gHHAadHxBs5\n5hfAQZJOIM2lGkOalL5joTvjgAtywjSFdBfc8sD5+fjmSjoXGCfpReAlUhJ1W0RM6e6xm5mZWe/W\n7PIAAyUdJelp4OWczCDpuNqyAd2wHzAY+BPwj8LrC3n7fNL6ShNIo0M/BS4lJVYARMQiYGfSiNbt\nwIWk5OboQsxjpPWPPgn8hZQE7RsREwsxlwDfJq2/dDewEbB9RDxX6G87aT2mywp93rWbx2xmZmZ9\nQLMjSt8nLep4GPDLQvl9wP8A5zZaUUR0mqxFxFOku926qudJUrLUWczNvH19p
nLMmaRJ5VXbXwcO\nzi8zMzP7F5C6jnknNLs8wH8DX4+I37D4XW73kNZTMjMzM+vzmk2U3gs8VFHfss13x8zMzKz3aDZR\nup/0jLay3Uhze8zMzMz6vGbnKB1LujvsvaRk678krU+6JNfpPCEzMzOzvqKpEaWI+AMpIfokMI+U\nOG0AfDoi/tjZvmZmZmZ9xZKso3QrsF0P9sXMzMysV2l2HaXNJW1Rp3wLSZstebfMzMzMWq/Zydxn\nkB5fUvbevM3MzMysKdGLHjPfbKK0ITCtTvndeZuZmZlZn9dsovQ6MKxO+eqkh8aamZmZ9XnNJko3\nAD+WNKRWIGll4HjAd72ZmZlZv9DsXW+HArcAj0uqLTC5CTAL+HJPdMzMzMys1ZpKlCLiaUkbAXsC\nGwOvAr8COiLijR7sn5mZmVnLLMk6SvOAs3uwL2ZmZma9StOJkqThwLbAUEpznSLi2CXrlpmZmVnr\nNZUoSfoa8HNgNjATKK54EKRHmpiZmZn1ac2OKB0JfD8iTujJzpiZmZn1Js0uD7AKcGlPdsTMzMys\nt2k2UboUGNuTHTEzMzPrbZq99PYQcJykLYF7gcWWBIiIU5e0Y2ZmZmat1myi9HXgZWCb/CoKwImS\nmZmZ9XnNLji5Tk93xMzMzKxGanUPkmbnKAEgaTlJ60tqej0mMzMzs96qqURJ0vKSzgVeAf4GvC+X\nnybpuz3YPzMzM7OWaXZE6cekZ7xtC7xWKJ8I7L6EfTIzMzPrFZq9ZPZZYPeIuENScVXuvwEfWPJu\nmZmZmbVesyNK/w48W6d8BRZ/nImZmZlZn9VsonQXsFPhcy05+ioweYl6ZGZmZtZLNJsoHQEcL+nn\npMt3h0j6I/AV4PvdqUjS9yRNkTRX0ixJV0gaXooZJOkMSbMlvSTpMklDSzFrSbpG0jxJMyWdKGlA\nKWZbSVMlvSbpQUl71enPgZIelfSqpDskbd7dvpiZmVn/0FSiFBG3kiZzL0NamXssMAsYFRFTu1nd\n1sBpwBbAJ4FlgRskvbsQczJpBGtXYDSwBnB5bWNOiK7N/dkS2AvYGzi2ELM2cDVwY+77KcA5krYr\nxOwOnAQcDWwK3ANMkLRqo30xMzOz/qPbk7nzmkl7ABMi4mtL2oGI2LFU/96k+U8jgVslDQb2Ab4Y\nETfnmK8A0yV9LCKmANsDHwI+ERGzgXslHQX8RNIxEbEA2B94JCIOy03NkPRxoB34Yy5rB86KiAtz\nO/uRkqJ9gBMb7IuZmZn1E90eUcpJxy+Ad/V8dwBYmTTn6YX8eSQpobux0IcZwBPAqFy0JXBvTpJq\nJgBDgA8XYiaW2ppQq0PSsrmtYjuR96m1s1kDfTEzM7MlEL3otrBm5yhNIV2a6lGSRLq0dWtE3J+L\nVwPmR8TcUvisvK0WM6vOdhqIGSxpELAqMLAiplbHsAb6YmZmZv1Es+sonQmcJGlNYCowr7gxIv66\nBPVuCHy8gVjR2FIEncWowZiu2ukypr29nSFDhixW1tbWRltbWxdVm5mZ9X8dHR10dHQAMCVPZGlv\nn9PCHiXNJkoX5/dTC2XBWwnDwO5WKOl0YEdg64j4R2HTTGA5SYNLIzlDeWv0Zyaw2N1ppNGf2rba\n+7BSzFBgbkTMlzQbWFgRU2ynq77UNX78eEaMGNFZiJmZ2VKrOHjwmc/AokXwgx9MY+TIkS3tV7OX\n3tap81q38N4tOUn6DGky9hOlzVOBBcCYQvxw0vPlbs9Fk4GPlu5OGwvMAaYXYsawuLG5nIh4I7dV\nbEf5c62dzvri9aPMzMz6maZGlCLi8Z7qgKQzgTZgF2CepNqIzpyIeC0i5uYH8I6T9CLwEmkk67aI\nuDPH3gDcD1wk6XBgdeA44PScAEGagH6QpBOA80jJzm6kUayaccAFkqaS5mG1A8sD5+fj7qwvvuPN\nzMysn2kqUZL0351tr91e36D9SJfr/lQq/
wpQq6eddFnsMmAQcD1wYKG9RZJ2Bn5OGv2ZR0puji7E\nPCZpJ1Iy9E3gKWDfiJhYiLkkj0odS7oE9xdg+4h4rtCvTvtiZmZm/Uezc5ROKX1eljTyMh94hbcS\nnC5FRJeX/yLideDg/KqKeRLYuYt6biYtAdBZzJmkSeVN98XMzMz6h2Yvva1SLpP0QdKIzk+XtFNm\nZmZmvUGzk7nfJiL+DnyXt482mZmZmfVJPZYoZQtIzz4zMzMz6/Oancy9S7mIdKfZQcBtS9opMzMz\ns96g2cncvy99DuA54Cbg20vUIzMzM1vqSV3HvBOanczd05fszMzMzHodJzxmZmZmFZpKlCRdJum7\ndcq/I+nSJe+WmZmZWes1O6K0DXBNnfLrgdHNd8fMzMys92g2UVqRtAp32RvA4Oa7Y2ZmZtZ7NJso\n3QvsXqf8i6SH05qZmZn1ec0uD3Ac8L+SPkBaEgBgDNAGfL4nOmZmZmbWas0uD3CVpM8CRwC7Aa8C\nfwU+mR88a2ZmZtbnNTuiRERcQ/0J3WZmZmb9QrPLA2wuaYs65VtI2mzJu2VmZmbWes1O5j4DWKtO\n+XvzNjMzM7M+r9lEaUNgWp3yu/M2MzMzsz6v2UTpdWBYnfLVgQXNd8fMzMyWdhGt7sFbmk2UbgB+\nLGlIrUDSysDxwB97omNmZmZmrdbsXW+HArcAj0u6O5dtAswCvtwTHTMzMzNrtWbXUXpa0kbAnsDG\npHWUfgV0RMQbPdg/MzMzs5ZZknWU5gFn92BfzMzMzHqVphIlSZ8nPa5kOBDA34HfRsRlPdg3MzMz\ns5bq1mRuSQMk/Q74HWkZgIeAR4APA5dIuliSer6bZmZmZu+87o4oHQJ8EtglIq4ubpC0C2me0iHA\nyT3TPTMzM7PW6e7yAF8BvlNOkgAi4krgMGCfnuiYmZmZWat1N1H6IDCxk+0Tc4yZmZlZn9fdROlV\nYOVOtg8GXutuJyRtLelKSU9LWpQv4xW3/yqXF1/XlmJWkfQbSXMkvSjpHEkrlGI2knSLpFclPS7p\nO3X68nlJ03PMPZJ2qBNzrKR/SHpF0h8lrdfdYzYzM7NqvWXGc3cTpcnA/p1sPzDHdNcKwF/y/lUL\nl19HemzKavnVVtr+W2ADYAywEzAaOKu2UdJKwATgUWAE8B3gGElfLcSMyvX8krSA5u+B30vasBBz\nOHAQ8A3gY8A8YIKk5Zo4bjMzM+vFujuZ+0fAnyS9B/gZ8AAgUoLybeAzwCe624mIuB64HqCTu+Ze\nj4jn6m2Q9CFge2BkRNydyw4GrpF0aETMBL4ELAvsGxELgOmSNgW+BZyTqzoEuC4ixuXPR0saS0qM\nDijEHBcRV+V2/pu0IvlngUu6e+xmZmbWe3VrRCkibgd2JyVDk4EXgReA23JZW0Tc1tOdzLaVNEvS\nA5LOlPRvhW2jgBdrSVI2kTQ6tUX+vCVwS06SaiYA6xeeWTeKt8/BmpDLkbQuaTTrxtrGiJgL/LkW\nY2ZmZv1HtxecjIgrJE0AxpIWnAR4ELghIl7pyc4VXAdcTrps9gHgx8C1kkZFRJCSl2dL/Vwo6YW8\njfz+SKneWYVtc/L7rDoxtTqGkZKvzmLMzMysn2j2WW+vSPok8P8i4oUe7lO99oqXtP4m6V7gYWBb\n4P862VVUz3mqbW8kprPtjcaYmZlZH9OtREnSmhHxVP64B3Ai8EJOXHaMiCd7uoP1RMSjkmYD65ES\npZnA0FJfBwKr5G3k92Glqoay+AhRVUxxu3LMrFLM3XSivb2dIUOGLFbW1tZGW1t5TrqZmdnSp6Oj\ng46ODgCmTEll7e1zWtijpLsjSg9Iep40J+ldwFrAE8DapInS7whJawLvAZ7JRZOBlSVtWpinNIaU\n1EwpxPxQ0sCIWJjLxgIzImJOIWYMcGqhue1yeS1Bm5lj/pr7Mpg0D+qMzvo8fvx4RowY0czhmpmZ\n9XvFw
YNddknLAxx99DRGjhzZ0n51d3mAIcDngal532slPQgMAraX1NQ8HUkrSNpY0ia5aN38ea28\n7URJW0h6v6QxpNv2HyRNtCYiHsh//qWkzSVtBZwGdOQ73iDd9j8fOE/ShpJ2B74JnFToyinADpK+\nJWl9SccAI4HTCzEnA0dK+rSkjwIXAk8Bf2jm2M3MzKz36m6itGxETImIk0iLT25KeqzJQtKjSx6W\nNKOJfmxGunQ1lXQp7CRgGvCDXPdGpERkBmmNozuB0RHxRqGOPUjLFUwErgZuIa11BLx5d9r2pNGv\nu4CfAsdExLmFmMmk9Zm+TlrX6b+Az0TE/YWYE0lJ2Fmku93eDewQEfObOG4zMzPrxbp76W2upLtJ\nl96WA5aPiNskLSAtG/AUaRHGbomIm+k8aftUA3X8k7RWUmcx9wLbdBFzOekOu85ijgGO6apPZmZm\n1rd1d0RpDeCHwOukJOsuSZNISdMIICLi1p7topmZmVlrdHfBydkRcVVEfA94BdicdBkqSCt1z5V0\nc89308zMzOyd190RpbI5eY2jN4D/BNYBzlziXpmZmZn1Ak0tOJltBDyd//w48Ea+w+x3S9wrMzMz\nW2pFpOUBeoOmE6Xi4pIR8ZGe6Y6ZmZlZ77Gkl97MzMzM+i0nSmZmZmYVnCiZmZmZVXCiZGZmZlbB\niZKZmZlZBSdKZmZmZhWcKJmZmVmv01vWUXKiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJk\nZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCi\nZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlV6BWJkqStJV0p6WlJiyTtUifmWEn/kPSKpD9KWq+0\nfRVJv5E0R9KLks6RtEIpZiNJt0h6VdLjkr5Tp53PS5qeY+6RtEN3+2JmZmbNi2h1D97SKxIlYAXg\nL8CBwNtOj6TDgYOAbwAfA+YBEyQtVwj7LbABMAbYCRgNnFWoYyVgAvAoMAL4DnCMpK8WYkblen4J\nbAL8Hvi9pA272RczMzPrB5ZpdQcAIuJ64HoASaoTcghwXERclWP+G5gFfBa4RNIGwPbAyIi4O8cc\nDFwj6dCImAl8CVgW2DciFgDTJW0KfAs4p9DOdRExLn8+WtJYUmJ0QCN96ZETYmZmZr1CbxlRqiRp\nHWA14MZaWUTMBf4MjMpFWwIv1pKkbCJpdGqLQswtOUmqmQCsL2lI/jwq70cpZlTuy7oN9MXMzMz6\niV6fKJESkyCN2hTNyttqMc8WN0bEQuCFUky9OmggprZ9WAN9MTMzs36iLyRKVUSd+UzdjFGDMUva\njpmZmXVD3Yk4LdAr5ih1YSYpERnG4iM5Q4G7CzFDiztJGgiskrfVYoaV6h7K4iNEVTHF7V31pa72\n9naGDBmyWFlbWxttbW2d7WZmZrZU6OjooKOjA4A774QBA6C9fU6Le9UHEqWIeFTSTNLdbH8FkDSY\nNPfojBw2GVhZ0qaFeUpjSEnNlELMDyUNzJflAMYCMyJiTiFmDHBqoQvb5fJG+1LX+PHjGTFiRHcP\n38zMbKlQHDzYeWdYdlk46qhpjBw5sqX96hWX3iStIGljSZvkonXz57Xy55OBIyV9WtJHgQuBp4A/\nAETEA6RJ17+UtLmkrYDTgI58xxuk2/7nA+dJ2lDS7sA3gZMKXTkF2EHStyStL+kYYCRweiGm076Y\nmZlZ/9FbRpQ2A/6PdBkseCt5uQDYJyJOlLQ8aV2klYFJwA4RMb9Qxx6khGYisAi4jHQrP5DuTpO0\nfY65C5gNHBMR5xZiJktqA36UX38HPhMR9xdiGumLmZmZ9QO9IlGKiJvpYnQrIo4Bjulk+z9JayV1\nVse9wDZdxFwOXL4kfTEzM7P+oVdcejMzMzPrjZwomZmZmVVwomRmZmZ
WwYmSmZmZWQUnSmZmZmYV\nnCiZmZmZVXCiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZm\nFZwomZmZWa8S0eoevMWJkpmZmVkFJ0pmZmZmFZwomZmZWa8jtboHiRMlMzMzswpOlMzMzMwqOFEy\nMzMzq+BEyczMzKyCEyUzMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhR\nMjMzM6vQJxIlSUdLWlR63V/YPkjSGZJmS3pJ0mWShpbqWEvSNZLmSZop6URJA0ox20qaKuk1SQ9K\n2qtOXw6U9KikVyXdIWnzf92Rm5mZWSv1iUQpuw8YBqyWXx8vbDsZ2AnYFRgNrAFcXtuYE6JrgWWA\nLYG9gL2BYwsxawNXAzcCGwOnAOdI2q4QsztwEnA0sClwDzBB0qo9eJxmZmbWS/SlRGlBRDwXEc/m\n1wsAkgYD+wDtEXFzRNwNfAXYStLH8r7bAx8C9oyIeyNiAnAUcKCkZXLM/sAjEXFYRMyIiDOAy4D2\nQh/agbMi4sKIeADYD3glt29mZmb9TF9KlD4o6WlJD0v6taS1cvlI0kjRjbXAiJgBPAGMykVbAvdG\nxOxCfROAIcCHCzETS21OqNUhadncVrGdyPuMwszMzPqdvpIo3UG6VLY9aRRnHeAWSSuQLsPNj4i5\npX1m5W3k91l1ttNAzGBJg4BVgYEVMathZmZm/c4yXYe0Xr5UVnOfpCnA48AXgNcqdhMQjVTfyTY1\nGNNIO2ZmZtbH9IlEqSwi5kh6EFiPdOlrOUmDS6NKQ3lr9GcmUL47bVhhW+19WClmKDA3IuZLmg0s\nrIgpjzK9TXt7O0OGDFmsrK2tjba2tq52NTMz6/c6Ojro6OgA4K67YMAAaG+f0+Je9dFESdKKwAeA\nC4CpwAJgDHBF3j4ceB9we95lMnCEpFUL85TGAnOA6YWYHUpNjc3lRMQbkqbmdq7M7Sh/PrWrPo8f\nP54RI0Z0+1jNzMyWBsXBg512gkGD4MgjpzFy5MiW9qtPJEqSfgpcRbrc9l7gB6Tk6OKImCvpXGCc\npBeBl0iJy20RcWeu4gb4/+3de7QdZXnH8e8vYBIIDaFACK2BIJGbIGCCBLmFWwLBaFELLARtwSUU\nLBZX1dLaFW4WwZqKQERRW26CyFpFRCgFCUkVbZYhRgohwRDCJSQkBEJuwMk5T/94300mO3tyPefs\ns+f8PmvtleyZd2ae58ycOc9+553ZPA3cJumrwO7AlcANEdGW29wEfEHSNcCPSAXQp4BxhVAmArfk\ngmka6S647YH/6JLEzczMrKlaolAC3gv8GNgZWAz8ChgVEa/l+ZeQLovdA/QD/gu4qLZwRHRI+ijw\nXVIv00pScTOh0OZ5SaeSiqGLgZeA8yLikUKbu/Mzk64gXYL7PTA2IhZ3Qc5mZmbWZC1RKEXEBgfy\nRMTbwN/mV1mbF4GPbmQ9U0iPANhQm0nApA21MTMzsy0XPegWqVZ5PICZmZn1Eh0daTB3T9BDwjAz\nMzNLIkDaeLvu4ELJzMzMepQI9yiZmZmZNdTR4R4lMzMzs4Z86c3MzMyshAslMzMzsxK+683MzMys\nhHuUzMzMzEp4MLeZmZlZCT8ewMzMzKyEL72ZmZmZlfClNzMzM7MSvvRmZmZmVsKX3szMzMxK+NKb\nmZmZWQlfejMzMzMr4R4lMzMzsxIeo2RmZmZWwpfezMzMzEr40puZmZlZCV96MzMzMyvhS29mZmZm\nJdrbXSiZmZmZNbR6NWy3XbOjSFwomZmZWY+yYgUMGNDsKBIXSmZmZtajrFwJO+zQ7CgSF0pmZmbW\nY7S3w1tvuUfJzMzMbD0rV6Z/XSi1OEkXSZonabWk30o6rNkxNdudd97Z7BC6RW/JE3pPrs6zWpxn\na1uwIP27++7NjaPGhdIWkHQG8C1
gAnAoMBN4SNIuTQ2syar6S1uvt+QJvSdX51ktzrO1zZuX/t1z\nz+bGUeNCactcAnwvIm6NiGeAC4BVwLnNDcvMzKy13Xtv6k3aY49mR5K4UNpMkt4DjAB+WZsWEQE8\nAhzRrLjMzMxa2dKlcOGF8P3vw8UX95wHTm7b7ABa0C7ANsCiuumLgH3LFpo1Kz2SfXNtyTLNWv6N\nN2DatOZtv7OXL1t26VJ4/PGu3XZPWf6112Dq1O7ffnfnvmQJTJ7cvO131/KLF8PDD3ft9ntC7q++\nCg8+2Lztd9fyixbB/fc3Z9tly3d0pLvW1qxJr7a2tf9fswbefhtWrUqvFSvSOebVV2H+fJg7F/r1\ng5tugs9/futi6UwulDqPgEaHWX+As8+e1b3RNMUyDj/8iWYH0Q2WceSRvSFPgGUce2xvyHUZxx/f\nO/IcM6Z35DluXO/Ic/z41shzm21SD1HfvtC/f3rqdv/+MGhQeo0aBeeck/7ddVeYMSMtN2vWu387\n+zcrdhdKm28J0A7sVjd9MOv3MgEMS/+c3ZUx9SAjmh1AN+kteULvydV5Vovz7Ena29OrrW3t7f+b\naRiwCX35nc+F0maKiDZJ04ETgPsAJCm//06DRR4CPg08D7zVTWGamZlVQX9SkfRQswJQbO1FyV5I\n0unALcD5wDTSXXCfAvaLiMXNjM3MzMw6j3uUtkBE3J2fmXQF6RLc74GxLpLMzMyqxT1KZmZmZiV6\nyCSLcAIAAAsFSURBVFMKzMzMzHoeF0pdrCd/J5ykoyXdJ+llSR2SPtagzRWSFkhaJelhScPr5u8k\n6Q5JyyS9LukHkgbUtfmgpKn5ZzBf0pcbbOcvJc3KbWZKOqWTcrxU0jRJb0paJOk/Je1T16afpBsl\nLZG0XNI9kgbXtRkq6ReSVkpaKOlaSX3q2oyWNF3SW5LmSPpsg3i67HiQdEH+2S3Lr8clnVy1POu2\nc2k+didWLU9JE3JuxdfTVcszr//PJN2Wc1mVj+MP1bVp9XPRvAb7s0PS9Xl+JfanpD6SrpT0XN5X\nf5T0tQbtWmd/RoRfXfQCziDd6fYZYD/ge8BSYJdmx5bjO5k0zuovSI88+Fjd/K/meMcDBwL3AnOB\nvoU2DwJPACOBjwBzgNsL8/8EeIU0+H1/4HRgJfC5QpsjgDbgS6SHdl4OvA0c0Ak5PgCck7d9EHA/\n6Q7E7QptvpunHUv67r7Hgf8pzO8DPEm66+IgYCzwKnBVoc0wYAVwbc7hopzTSd11PACn5n06PL+u\nyj/H/auUZ2E7hwHPATOAiRXcnxOAPwC7kh4/Mhj40wrmOQiYB/yAdK/7nsCJwF4VOxftXNiPg0l3\nSrcDR1dsf/5jjutkYA/gE8CbwBdadX922knLr4YHzG+B6wrvBbwEfKXZsTWItYP1C6UFwCWF9wOB\n1cDp+f3+eblDC23GAmuAIfn935CePbVtoc3VwNOF93cB99Vt+zfApC7Ic5cc81GFnN4GTiu02Te3\n+XB+f0r+Zdul0OZ84PVaXsA1wB/qtnUn8EAzjwfgNeCvq5YnsAMwGzgemEwulKqUJ6lQeqJkXpXy\n/AYwZSNtqngu+jYwp4L78+fAzXXT7gFubdX96UtvXUQt/p1wkvYChrBu/G8C/8va+EcBr0fEjMKi\nj5CeUH54oc3UiFhTaPMQsK+kHfP7I/Jy1LXpip/ToBzf0vx+BOnuz2Kes4EXWDfPJyNiSV18OwIf\nKLQpzaG7j4fc/X0msD3pxFC1PG8Efh4Rj9ZNH0m18ny/0qXxuZJulzQ0T6/S/hwP/E7S3UqXx5+Q\n9LnazCqei/LP9dPAD/OkKh23jwMnSHp/3ubBwJGk3v2W3J8ulLrOhr4Tbkj3h7PZhpAOyg3FP4TU\nxfquiGgnFSHFNo3WwSa06dSfkySRPsX9KiJqYz2GAO/kX9Sy7W9NDgMl9aObjgdJB0paTvp0Oon0\
nCfUZKpRnLgAPAS5tMHs3KpIn6ZP/X5E+SV8A7AVMzeM0KrM/gfeRegdmA2OAm4DvSKp9nUHlzkXA\naaQC55b8vkrH7TeAnwDPSHoHmA58OyLuKsTYUvvTz1HqfmXfCdcqNiX+jbXRJrbp7J/TJOAA4KhN\naLup299YDpvSpjPzfAY4mNRz9kngVknHdML2e0Sekt5LKnZPioi2zVl0E7ffI/IEiIjik4j/T9I0\nYD5pLEbZU/5bLk/SB/ZpEfHP+f1MSR8gFU+3b2UMPfVcdC7wYEQs3Ei7VtyfZwBnAWcCT5M+1Fwn\naUFE3LaVMTRlf7pHqets7nfC9TQLSQfUhuJfmN+/S9I2wE55Xq1No3UUP1GUtem0n5OkG4BxwOiI\nWFCYtRDoK2ngBrbfKL7dCvPK2gwG3oyId+im4yEi1kTEcxHxRET8EzAT+CLVyXMEaXDzdEltktpI\ng1+/mD+9LgL6VSDP9UTEMtKA1uFUZ39CGpBb/63hs0gDgWsxVulctAdpsPrNhclV2p/XAldHxE8j\n4qmIuAP4N9b2ALfc/nSh1EXyp93ad8IB63wnXFO+2G9zRMQ80kFWjH8g6fpwLf7fAIMkHVpY9ATS\nL8G0Qptj8kFeMwaYnU/8tTYnsK6T8vStloukjwPHRcQLdbOnkwYIFvPch3SSLuZ5kNLT2Is5LGPt\nCb5RDmNqOTTxeOgD9KM6eT5CuuPnEFLP2cHA70g9D7X/t9H6ea5H0g7A3qSBsFXZnwC/Jg1cLtqX\n1HtWqXNRdi7pD/UDhWlV2p/bs36PTQe53mjJ/dkZo9z9Kh39fzppJH/xNszXgF2bHVuObwDpj8sh\n+UD+u/x+aJ7/lRzveNIfp3uBZ1n3Fs4HSH+cDiMN2JsN3FaYP5B0Yr+FdNnrDNLtq+cV2hwBvMPa\nWzgvI11a6IxbcieR7go5mvTJovbqX9dmHjCa1GPxa9a/LXcm6XbVD5LGjCwCriy0GZbzuibncGHO\n6cTuOh6Ar5MuK+5JuuX2atLJ9/gq5dkg73fveqtSnsA3gWPy/vwI8HCOc+eK5TmSNKbuUlIheBaw\nHDiz0Kblz0V5/SI9AuDrDeZVZX/+O2kQ+rh87J5GGm/0L626Pzv9pOXXegfNhfkXYzWpih3Z7JgK\nsR1LKpDa614/KrS5LB+Mq0h3CwyvW8cg0qf5ZaSC5GZg+7o2BwFT8jpeAP6+QSyfJI2vWU16dszY\nTsqxUX7twGcKbfoB15O6pZcDPwUG161nKOkZTCvyyekaoE+Dn+f0nMOzwDndeTyQnkPzXF73QuC/\nyUVSlfJssK1HWbdQqkSepNu6X8rrfgH4Mes+W6gSeeb1jyP93q8CngLObdDmMlr4XJTXfRLp/DO8\nwbxK7E/SB/CJpKJvZY7hcgq38bfa/vR3vZmZmZmV8BglMzMzsxIulMzMzMxKuFAyMzMzK+FCyczM\nzKyECyUzMzOzEi6UzMzMzEq4UDIzMzMr4ULJzMzMrIQLJTMzM7MSLpTMzDaDpMmSJjY7DjPrHi6U\nzKxlSDpf0puS+hSmDZDUJumXdW2Pk9QhaVh3x2lm1eFCycxayWTSl26OLEw7GngFGCWpb2H6scD8\niHh+czciadutCdLMqsOFkpm1jIiYQyqKRhcmjwbuJX1b+ai66ZMBJA2V9DNJyyUtk/QTSYNrDSVN\nkDRD0nmSngPeytO3l3RrXu5lSV+qj0nShZLmSFotaaGkuzs3azNrJhdKZtZqHgOOK7w/Lk+bUpsu\nqR9wOPBobvMzYBCp9+lEYG/grrr1Dgc+AZwGHJKn/WteZjwwhlR8jagtIGkkcB3wNWAfYCwwdSvz\nM7MexN3LZtZqHgMm5nFKA0hFzVSgL3A+cDlwZH7/mKSTgAOBYRGxAEDSOcBTkkZExPS83vcA50TE\n0txmAHAucFZEPJanfRZ4qRDLUGAF8IuIWAm8CMzsorzNrAnco
2RmraY2Tukw4ChgTkQsIfUoHZ7H\nKY0G5kbES8B+wIu1IgkgImYBbwD7F9Y7v1YkZXuTiqdpheVeB2YX2jwMzAfm5Ut0Z0nartMyNbOm\nc6FkZi0lIuYCL5Musx1HKpCIiFdIPTpHUhifBAiIBquqn76ywXxKlq3FsgL4EHAmsIDUmzVT0sBN\nTsjMejQXSmbWiiaTiqTRpEtxNVOBU4APs7ZQehrYQ9Kf1xpJOgDYMc8r80dgDYUB4pJ2Io1FeldE\ndETEoxHxD8DBwDDg+C3Iycx6II9RMrNWNBm4kXQOm1KYPhW4gXTJ7DGAiHhE0pPAHZIuyfNuBCZH\nxIyyDUTESkk/BL4paSmwGLgKaK+1kXQq8L683deBU0k9UbPXX6OZtSIXSmbWiiYD/YFZEbG4MH0K\nsAPwTEQsLEz/OHB9nt8BPAhcvAnb+TJpPNR9wHLgW0DxstobpDvlJuR4ngXOzGOgzKwCFFF6+d3M\nzMysV/MYJTMzM7MSLpTMzMzMSrhQMjMzMyvhQsnMzMyshAslMzMzsxIulMzMzMxKuFAyMzMzK+FC\nyczMzKyECyUzMzOzEi6UzMzMzEq4UDIzMzMr4ULJzMzMrMT/A3OQOlsNsogaAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 103, "metadata": { "collapsed": true }, @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 104, "metadata": { "collapsed": true }, @@ -358,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 105, "metadata": { "collapsed": false }, @@ -367,9 +367,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 166\n", - "Number of unique tokens: 681\n", - "Number of documents: 90\n" + "Number of authors: 3467\n", + "Number of unique tokens: 8640\n", + "Number of documents: 1740\n" ] } ], @@ -540,7 +540,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 18, "metadata": { "collapsed": false }, @@ -558,7 +558,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 19, "metadata": { "collapsed": false }, @@ -575,7 +575,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 20, "metadata": { "collapsed": false }, @@ -588,7 +588,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 21, "metadata": { "collapsed": false }, @@ -605,7 +605,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 22, 
"metadata": { "collapsed": false }, @@ -621,7 +621,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 23, "metadata": { "collapsed": false }, @@ -630,8 +630,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 10 x 2245 x 10 (224500 elements)\n", - "mu is 10 x 2245 x 22 (493900 elements)\n" + "phi is 10 x 681 x 10 (68100 elements)\n", + "mu is 10 x 681 x 21 (143010 elements)\n" ] } ], @@ -733,7 +733,7 @@ }, { "cell_type": "code", - "execution_count": 78, + "execution_count": 106, "metadata": { "collapsed": false }, @@ -742,8 +742,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 286 x 2245 x 10 (6420700 elements)\n", - "mu is 286 x 2245 x 578 (371116460 elements)\n" + "phi is 1740 x 8640 x 10 (150336000 elements)\n", + "mu is 1740 x 8640 x 3467 (52121491200 elements)\n" ] } ], @@ -756,7 +756,7 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 113, "metadata": { "collapsed": false }, @@ -768,11 +768,23 @@ }, { "cell_type": "code", - "execution_count": 76, + "execution_count": 114, "metadata": { "collapsed": false }, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/olavur/author_topic_models/gensim/gensim/models/atvb.py:395: RuntimeWarning: divide by zero encountered in log\n", + " bound_d += cts[vi] * numpy.log(bound_v)\n", + "/home/olavur/author_topic_models/gensim/gensim/models/atvb.py:268: RuntimeWarning: invalid value encountered in true_divide\n", + " var_phi[d, v, :] = var_phi[d, v, :] / var_phi[d, v, :].sum()\n", + "/home/olavur/author_topic_models/gensim/gensim/models/atvb.py:58: RuntimeWarning: invalid value encountered in greater\n", + " if (rho * dprior + prior > 0).all():\n" + ] + }, { "ename": "KeyboardInterrupt", "evalue": "", @@ -780,15 +792,15 @@ "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call 
last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, author2doc=author2doc, doc2author=doc2author, threshold=1e-12, iterations=40, alpha=None, eta=None, eval_every=1, random_state=1)'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' '\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - 
"\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, author2doc=author2doc, doc2author=doc2author, threshold=1e-12, iterations=40, alpha='auto', eta='auto', eval_every=1, random_state=1)\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/home/olavur/.vew/author-topic/local/lib/python3.4/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m 
\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' '\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.vew/author-topic/local/lib/python3.4/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m 
\u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.vew/author-topic/local/lib/python3.4/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m 
\u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.vew/author-topic/local/lib/python3.4/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/atvb.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, id2author, author2doc, doc2author, threshold, iterations, alpha, eta, minimum_probability, eval_every, random_state)\u001b[0m\n\u001b[1;32m 129\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 130\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 131\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mauthor2doc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc2author\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 132\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 133\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcorpus\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc2author\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/atvb.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, author2doc, doc2author)\u001b[0m\n\u001b[1;32m 243\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 244\u001b[0m \u001b[0;31m# Get the count of v in doc. 
If v is not in doc, return 0.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 245\u001b[0;31m \u001b[0mcnt\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdoc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 246\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcnt\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 247\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/author_topic_models/gensim/gensim/models/atvb.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, id2author, author2doc, doc2author, threshold, iterations, alpha, eta, minimum_probability, eval_every, random_state, var_lambda)\u001b[0m\n\u001b[1;32m 162\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 163\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 164\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc2author\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 165\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 166\u001b[0m \u001b[0;32mdef\u001b[0m 
\u001b[0mupdate_alpha\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_gamma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrho\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/author_topic_models/gensim/gensim/models/atvb.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, author2doc, doc2author, var_lambda)\u001b[0m\n\u001b[1;32m 330\u001b[0m \u001b[0mdoc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcorpus\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 331\u001b[0m \u001b[0;31m# Get the count of v in doc. If v is not in doc, return 0.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 332\u001b[0;31m \u001b[0mcnt\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdoc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 333\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcnt\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 334\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mKeyboardInterrupt\u001b[0m: " ] } @@ -796,13 +808,13 @@ "source": [ "%time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=40, alpha=None, eta=None, \\\n", + " iterations=40, alpha='auto', eta='auto', \\\n", " 
eval_every=1, random_state=1)" ] }, { "cell_type": "code", - "execution_count": 92, + "execution_count": 31, "metadata": { "collapsed": false }, @@ -811,28 +823,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.046*image + 0.022*object + 0.014*visual + 0.013*field + 0.012*motion + 0.011*filter + 0.011*velocity + 0.009*pixel + 0.009*line + 0.008*noise'),\n", + " '0.019*cell + 0.008*matrix + 0.008*representation + 0.008*training + 0.007*activity + 0.007*node + 0.006*dynamic + 0.006*field + 0.006*probability + 0.005*hopfield'),\n", " (1,\n", - " '0.023*fig + 0.023*cell + 0.012*delay + 0.011*cortex + 0.010*noise + 0.009*eye + 0.009*phase + 0.008*activity + 0.008*cortical + 0.008*oscillation'),\n", + " '0.016*cell + 0.007*matrix + 0.007*capacity + 0.006*feature + 0.006*activity + 0.006*node + 0.006*field + 0.006*dynamic + 0.006*training + 0.006*stimulus'),\n", " (2,\n", - " '0.020*map + 0.015*field + 0.013*cell + 0.013*region + 0.012*human + 0.012*receptive + 0.011*receptive_field + 0.009*response + 0.008*chain + 0.008*orientation'),\n", + " '0.012*cell + 0.010*training + 0.008*matrix + 0.007*stimulus + 0.007*hopfield + 0.006*image + 0.006*noise + 0.006*representation + 0.006*hidden + 0.006*convergence'),\n", " (3,\n", - " '0.023*vector + 0.019*matrix + 0.015*hopfield + 0.009*probability + 0.008*let + 0.008*code + 0.008*optimization + 0.008*convergence + 0.008*theorem + 0.007*minimum'),\n", + " '0.011*cell + 0.008*hopfield + 0.007*activity + 0.007*rate + 0.006*matrix + 0.006*hidden + 0.006*field + 0.006*training + 0.005*node + 0.005*representation'),\n", " (4,\n", - " '0.027*memory + 0.015*vector + 0.013*activation + 0.010*bit + 0.009*processor + 0.008*associative + 0.008*capacity + 0.008*hidden + 0.007*machine + 0.006*address'),\n", + " '0.012*cell + 0.008*activity + 0.007*matrix + 0.007*training + 0.006*field + 0.006*code + 0.006*representation + 0.006*firing + 0.006*current + 0.005*synaptic'),\n", " (5,\n", - " '0.012*hidden + 0.010*energy + 0.009*gradient + 
0.007*probability + 0.007*hidden_unit + 0.006*learning_algorithm + 0.006*adaptive + 0.006*forward + 0.006*procedure + 0.006*recurrent'),\n", + " '0.014*cell + 0.008*hidden + 0.007*sequence + 0.007*training + 0.006*field + 0.006*noise + 0.006*node + 0.006*dynamic + 0.006*hopfield + 0.006*representation'),\n", " (6,\n", - " '0.019*classifier + 0.019*recognition + 0.019*hidden + 0.018*speech + 0.011*classification + 0.011*trained + 0.009*test + 0.009*class + 0.008*hidden_layer + 0.008*propagation'),\n", + " '0.025*cell + 0.011*matrix + 0.009*training + 0.006*activity + 0.006*probability + 0.006*hopfield + 0.006*synaptic + 0.005*node + 0.005*stimulus + 0.005*representation'),\n", " (7,\n", - " '0.031*node + 0.007*surface + 0.006*sample + 0.006*distribution + 0.005*noise + 0.005*scale + 0.005*back_propagation + 0.005*dimensional + 0.005*propagation + 0.005*neural_net'),\n", + " '0.016*cell + 0.008*training + 0.007*activity + 0.007*representation + 0.007*matrix + 0.007*hidden + 0.007*noise + 0.006*hopfield + 0.006*probability + 0.006*firing'),\n", " (8,\n", - " '0.034*circuit + 0.023*chip + 0.021*analog + 0.018*voltage + 0.017*current + 0.014*synapse + 0.010*vlsi + 0.010*transistor + 0.010*threshold + 0.009*implementation'),\n", + " '0.012*cell + 0.008*image + 0.007*training + 0.006*feature + 0.006*hopfield + 0.006*representation + 0.006*probability + 0.006*firing + 0.006*activity + 0.005*synaptic'),\n", " (9,\n", - " '0.029*cell + 0.019*firing + 0.015*activity + 0.014*response + 0.013*stimulus + 0.013*potential + 0.012*synaptic + 0.012*spike + 0.010*frequency + 0.009*motor')]" + " '0.012*cell + 0.008*matrix + 0.008*activity + 0.007*representation + 0.007*training + 0.006*image + 0.006*capacity + 0.006*rate + 0.006*hopfield + 0.006*node')]" ] }, - "execution_count": 92, + "execution_count": 31, "metadata": {}, "output_type": "execute_result" } @@ -925,7 +937,19 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 77, + "metadata": { + "collapsed": 
true + }, + "outputs": [], + "source": [ + "lda = LdaModel(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, passes=10)\n", + "var_lambda_init = lda.state.get_lambda()" + ] + }, + { + "cell_type": "code", + "execution_count": 90, "metadata": { "collapsed": false }, @@ -937,7 +961,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 92, "metadata": { "collapsed": false }, @@ -946,8 +970,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1min 29s, sys: 52 ms, total: 1min 29s\n", - "Wall time: 1min 29s\n" + "CPU times: user 3.43 s, sys: 4 ms, total: 3.43 s\n", + "Wall time: 3.44 s\n" ] } ], @@ -955,7 +979,7 @@ "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", " iterations=10, alpha='auto', eta='auto', \\\n", - " eval_every=1, random_state=1)" + " eval_every=1, random_state=1, var_lambda=var_lambda_init)" ] }, { @@ -1141,7 +1165,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.5.2" + "version": "3.4.3+" } }, "nbformat": 4, diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 918f693135..dce231c276 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -24,6 +24,7 @@ from six.moves import xrange from pprint import pprint +from random import sample # log(sum(exp(x))) that tries to avoid overflow try: @@ -69,7 +70,7 @@ class AtVb(LdaModel): def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author2doc=None, doc2author=None, threshold=0.001, iterations=10, alpha='symmetric', eta='symmetric', minimum_probability=0.01, - eval_every=1, random_state=None): + eval_every=1, random_state=None, var_lambda=None): self.id2word = id2word if corpus is None and self.id2word is None: @@ -160,7 +161,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, 
id2author=None, self.optimize_eta = False if corpus is not None: - self.inference(corpus, author2doc, doc2author) + self.inference(corpus, author2doc, doc2author, var_lambda) def update_alpha(self, var_gamma, rho): """ @@ -196,7 +197,7 @@ def update_eta(self, var_lambda, rho): return self.eta - def inference(self, corpus=None, author2doc=None, doc2author=None): + def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=None): if corpus is None: corpus = self.corpus @@ -206,8 +207,10 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # NOTE: parameters of gamma distribution same as in `ldamodel`. var_gamma = self.random_state.gamma(100., 1. / 100., (self.num_authors, self.num_topics)) - var_lambda = self.random_state.gamma(100., 1. / 100., - (self.num_topics, self.num_terms)) + + if var_lambda is None: + var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) # Initialize mu. # mu is 1/|A_d| if a is in A_d, zero otherwise. @@ -242,8 +245,10 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): #likelihood = self.log_word_prob(var_gamma, var_lambda) logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) for iteration in xrange(self.iterations): + #logger.info('Starting iteration %d.', iteration) # Update phi. for d, doc in enumerate(corpus): + #logger.info('Updating phi, document %d.', d) ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. authors_d = doc2author[d] # List of author IDs for document d. @@ -264,6 +269,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Update mu. for d, doc in enumerate(corpus): + #logger.info('Updating mu, document %d.', d) ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. authors_d = doc2author[d] # List of author IDs for document d. 
for v in ids: @@ -285,6 +291,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): var_mu[(d, v, a)] *= mu_norm_const # Update gamma. + #logger.info('Updating gamma.') for a in xrange(self.num_authors): for k in xrange(self.num_topics): docs_a = self.author2doc[a] @@ -310,10 +317,17 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogtheta = dirichlet_expectation(var_gamma) # Update lambda. + #logger.info('Updating lambda.') for k in xrange(self.num_topics): + #logger.info('k = %d.', k) for v in xrange(self.num_terms): + #logger.info('v = %d.', v) var_lambda[k, v] = self.eta[v] - for d, doc in enumerate(corpus): + sample_ratio = 1.0 # When sample_ratio is 1.0, the whole dataset is used. + nsamples = int(numpy.ceil(self.num_docs * sample_ratio)) + doc_idxs = sample(xrange(self.num_docs), nsamples) + for d in doc_idxs: + doc = corpus[d] # Get the count of v in doc. If v is not in doc, return 0. cnt = dict(doc).get(v, 0) var_lambda[k, v] += cnt * var_phi[d, v, k] @@ -326,8 +340,6 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - logger.info('All variables updated.') - self.var_gamma = var_gamma self.var_lambda = var_lambda @@ -337,6 +349,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None): # Evaluate bound. if (iteration + 1) % self.eval_every == 0: + #logger.info('Computing bound.') prev_bound = bound word_bound = self.word_bound(Elogtheta, Elogbeta) theta_bound = self.theta_bound(Elogtheta, var_gamma) From 938daff7d0179e944c632a4c633c211907d3a063 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 18 Oct 2016 15:39:00 +0200 Subject: [PATCH 027/100] Now, if LDA topics are supplied lambda is not estimated at all. Added a small number to mu and phi normalization term to avoid divide by zero. Made some comments (NOTEs and TODOs) about numerical stability. 
--- docs/notebooks/at_with_nips.ipynb | 252 +++++++++++++++++++++--------- gensim/models/atvb.py | 94 ++++++++--- 2 files changed, 247 insertions(+), 99 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 29b391fb7e..cdcec88c65 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -101,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 93, + "execution_count": 115, "metadata": { "collapsed": false }, @@ -115,8 +115,8 @@ "data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", - "yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "#yrs = ['00']\n", + "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", + "yrs = ['00', '01', '02']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 94, + "execution_count": 116, "metadata": { "collapsed": false }, @@ -163,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 95, + "execution_count": 117, "metadata": { "collapsed": true }, @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 96, + "execution_count": 118, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 97, + "execution_count": 119, "metadata": { "collapsed": false }, @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 98, + "execution_count": 120, "metadata": { "collapsed": false }, @@ -245,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 99, + "execution_count": 121, "metadata": { "collapsed": true }, @@ -260,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 100, + "execution_count": 122, "metadata": { "collapsed": true }, @@ -279,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 101, + "execution_count": 123, 
"metadata": { "collapsed": true }, @@ -291,16 +291,16 @@ }, { "cell_type": "code", - "execution_count": 102, + "execution_count": 124, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkoAAAGcCAYAAAAmrI82AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHFW5//HPNwGiLAlcuQkgKCAGQWVJQMhFAt5IkEXU\nC4oDekFwYZU7iqAIPxAUBSVhVxCQRR1kuSh7MHCFAMFAgggSguyLJBDABMISkjy/P85pqBRdMz2d\nkZ6ZfN+vV786feqpc05VTzJPTp06pYjAzMzMzN5uQKs7YGZmZtZbOVEyMzMzq+BEyczMzKyCEyUz\nMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMrOWkPSUpLMLn8dIWiTp\nP96Btn8o6Y3C54G57XH/6rZze1/N7a3xTrTXLEnflfSIpAWSprS6P42S9IF8fvdodV+s73OiZEsV\nSXvlf0DrvY5vdf+WMvWen9TtZypJ+r6kTzfR9qLuttVdnfQtaOJY30mSdgSOB/4P2Bs4qqUdMmuR\nZVrdAbMWCNI/+o+Vyu9757tiNRFxo6R3R8T8bu56JHARcFU39jkaOLab7TSjqm/nARc1cazvpE8A\nbwBfDT8U1JZiTpRsaXV9RExrNFiSgOUi4vV/YZ+Wev/qxEHS8hHxSkQs4h0YUaqSE4/enCQBDAPm\n9cYkyX8f7Z3kS29mJcX5KpK+LOlvwGvAmLxdkr4l6W+SXpP0jKQzJQ0u1SNJ/y/PxXlZ0kRJH5L0\nZGluzmLzZQrldeexSNpJ0qRc5xxJV0r6UCnm15JelLRm3v6SpGcl/aROO5LULumvkl7NcddK2iRv\nv03SXRXn6mFJnY7kVJ2HOnFvm6Mkabik/5U0M/ftCUm/kbRC7XsClgNq52pR7dzm87oo1/E7SS+S\nLiNVnvO87cuSZuT2ppTnTOVz+/c6+71ZZwN9q/puDy78XD0t6dQ6P1e3Spom6cOS/k/SK/ncfquz\n76Gw/zKSjs7f3WtKc5COlbRsqe97AkNyPxeqYr5P/tl5Q9IKhbLD834/KZQtk7//YwtlK0oan/9O\nvCZpuqT/KdXf1d/HVSRdKOmfkl6QdC6w2DnLcatLuiCfq9ck/UPSFZLWbOS82dLLI0q2tBoi6T3F\ngoh4vhQzFvgicAbwAvBELj8PaMvvJwPrAgcDG0vaOo9WQJrfcThwJTABGAncALy71E7VfJW3lUva\nGzgXuBY4DFgBOACYJGnTiHiqsO8yub1JwLfz8XxH0t8j4txCtReSfileBZxN+uU+GtgC+Evefqak\n4RHxYKEvo4B1gO/V6XtRo+eh1u9a/YNy3ADSeZ4FrAl8GhgcEfMkfQn4FXBrPi8AD5Xq+l/gAeC7\nhbKqcz4G2AM4lXTZ6UBggqTNImJGF/u+WR4RCxvoW/m7/SFwBHA96WduA9J3O7L0cxXAqsB1wKXA\nxcAXgJ9KuicibqzTt6Lz8zFeTPrZ2JJ0iXB9YPdC3w8ANga+Dgi4raK+SaTvaCvS9wXwcWAhsHUh\nbiTpO78lH6+Aa/J+vwT+CuwAjJO0ekQcXmrnbX8fcx1XkX5WzwRmALuSznv5O/o9sB7pu32CNGI2\nlvQz9RRmVSLCL7+WmhewF+mSS/m1sBAzMJfNB9Yr7b9t3rZrqXyHXL5b/jw07395Ke4nOe7sQtlx\nwPw6fd2X9Mtmjfx5JeCfwGmluGG5/PRC2UV538NKsX8Bbi983i7358ROztnKwKvAsaXyM3K7
7+pk\n3+6chzG5z/+RP4/MMZ/u4jt9tVhP6bwuAs6v2Da/8Ln2nS8APlIofz9p9OLi0rl9sKs6u+hb+bsd\nls/TlaW4b+a4PQtlk3LZFwply5ESyd92ca5G5OM8o1Q+Lte5Vek4X2jg79RA4CXguELZC6RE7LXa\nzwfwnXyMK+bPu+a+HFqq73JSkvq+Bv4+1ur4ZqFsACk5XQjskcv+rRznl1+NvnzpzZZGAewPfLLw\n2q5O3I0R8VCpbDfSL4E/SXpP7QXcRfql+Ikctz3pH/jTSvufvAT9/hQpWbq41PZC4M5C20Vnlz7f\nShoBq9mVlBwcV9VoRPwTuJo0CgGkyyHA50kJ0Gud9HkszZ+Hf+b3HSS9q4H4egL4RTfiJ0XEm5P6\nI+Jx0ojFp5psv1Hbkc5T+bycBbwC7FQqnxMRl9Q+RJrbdSeLf7f17Eg6J+VlEE4ijRqV2+lSRCwE\nJpNGIZG0ETAE+DGwLGm0B9Io0z0R8XL+vAMp+TmjVOU40rkon/N6fx93AF6n8HMeaeTt9Hw8Na+Q\nkq9PSBrSzUO0pZwTJVta3RkRNxVfdWIeq1P2QdL/Tp8rvWYB7yKNoAC8L78v9g97RMwk/e+7GeuR\n/vGfVGr7WeA/C23XvJyTnKIXgVUKn9cFnoqIrvp0IbCOpC3z508B7yGNOnTm/fm92+chIh4GTgG+\nATwv6TpJ+0taqYs2yx7tRmz5FzHAg8BKklaps62n1M7Tg8XCSJOVHy1sr3myTh3l77aqnQX53Bbb\neZr0fZTbadStwOZ5ntPWwJMRcQ/pTtLa5betSD+7xb48FRGvluqaXthe9Fiddt8PPF0nWZ9R/JC3\nHwHsDDwr6U+SDpVU/jtj9jaeo2RWrfwPOKT/XPwD+DKL/4+15tn8XtvWyB1DVTED67QdpPlRs+vE\nlycnL6yoVxV/7sx1uc0vAXfk96cj4k9d7Ned8/A2EdGeJ+d+hjQ6dTpwuKQtc7LViHrfY3eUz1Gj\n39eStNGVRr7b7m7vbh+KJpGWXNiCNHI0qVC+taQPk/6DccsStFfvexT1v4+31R0RJ0m6AvgsacT3\nh8D3JG1THEU0K/OIkln3PEyaSHtreUQqv2r/4D6W34cXd5a0GunyWdGLwEBJy5fK167TNsCzFW1P\novseAtYs31lVFhELyJOGJa1MmlD9mwbqfyy/N3Ieqtq+LyJ+FBHbANuQRuu+XgxppJ4GfbBO2XDg\npYh4MX9+kTRvq2ztOmWN9u2x/L5+sVDScrnexxusp5F2lpH0gVI7awArLkE7d5Au4Y4mjSDVfhZv\nAf6DdFk4SCNPxb6sKak8qX+D/N5IX2p1lC/Nrl8nloh4JCLGRcT2wEdJk8sbulvQll5OlMy65xLS\nxNkjyxvy7c+1hOOPpP/1H1wKa69T58Ok/wGPLtS1ImnUqug64GXg+3mOULn9VRs8hqLLSSPLjay6\nfBEpSTyL9AumkUSpO+dhMZIGSyr/G3Uf6RfuoELZPOonLs34eJ5jU+vD2qTLNdcXYh4G3iNpg0Lc\ne0nJY1mjfaudp0NK5d8g3dl4dQN1NOJa0s/a/5TKv006r9c0U2m+fDaN9DO7OouPKK0AHATMiIji\nSOi1pL9LB5Sqayedi+saaPpa0s/CN2oF+e/GQSx+B+W7812URY+Q/j4NKsStJmn9Oj93thTzpTdb\nGjV9iSEibsqXgo6UNAKYSPqf9HDSRO/9SXcuzZI0HjhU0pWkf/Q3I00cf6FU7XXA08D5kn6Wy/YB\nngHeXGcnIuZIOoi0LME0SReTLoe9nzQJ9//o5v+OI2KipA7gW0prG91AuoS0NTAhIoqTZO+SNJ00\nifuvjVyu6OZ5gMW/m+2A8ZIuBf5Omhi8F+kS4/8W4qYCY/P6O88AD0dE3XWfGnAfcIOk00jf6wH5\n/QeFmN+Sljy4MsetCOxHWoJg41J9DfUtn6cTgCMkXUtK
jDbI9U4mjeYtsYiYJuk3wAH5RoBJwCjS\npdRLIqJqCYBGTAIOBZ6PiOm5vWckPUz6+/HLUvwVpBGnEyStx1vLA+wE/DQi6s3DKruCNJr1szxK\nVlseoDw6uyFwvaRLgPtJidhupHl2HYW4n5FuWliTdIndzMsD+LV0vUi/aBcCIzqJGZhjTuok5muk\nu4xeJl2KuRv4ETC0FPf/SEnQy6RRg/VJE3HPLsWNIP1CfJX0P90DKd1CXojdljTC8WKudwZwDrBJ\nIeYi0i+scr+PA14vlYn0C+7+3P5M0p1eG9XZ/7u5T9/q5nmvdx6eAM4qxJSXB1g3H9ffSSMzz+Z9\nR5fq/hDwp1z3wtq5zce6kLTmUqfnofidk5KGB/O5mFLrT2n/scC9pNvf/0Zax6je8gBVfav6bg/M\n9b2Wz9cpwEqlmEnA1Dp9uog0atPVdzEwfx8P53YeJSWCy9Sp720/Q53U++l8TFeUys+jtMRBYdsK\npLvcnsp9eQA4pDt/H0kT2C8k3SX5PGnNqk1ZfHmAVUl3Xt4PzCUl6bcBn61zzAvK34tfS/dLET15\ned/MuiLpSeC6iPh6l8G9jKRvk9ZAel9EPNPq/piZ/av5OqyZdcc+pPVsnCSZ2VLBc5TMrFNKz/Da\nhTSv6EP4LiEzW4o4UTJ751U9K6y3Wo10h9sLpMeYTGhxf8zM3jGeo2RmZmZWwXOUzMzMzCo4UTIz\nMzOr4ETJzFpO0g8llZ9V9073YaCkRZLG9WCdY3Kdu/RUnd1o+9eS/v5Ot2vW3zhRMuvlJO2Vf9nW\nXq9KmiHptH709PO+NsG9O1p1XAEsalHbZv2G73oz6xuC9Dy2x4B3kZ7Qvj+wg6SPRMRrLeybda7p\nR+Ysob1b2LZZv+FEyazvuD4ipuU/nyfpBdIDRD8D/K513eqapOUj4pVW92NpEhELW9Guv2vrb3zp\nzazvuok0YrBOrUDSOpIulfS8pHmSJkvasbiTpOcKD99FyT8lvSFpcKH88Fy2fKFsfUmX5fpflXSn\npE+X6q9dKhwt6UxJs0jPt+sWSftKulHSrNzWfZK+Voo5RdLMUtnPc/v7FcrWyGX7NNj2l/PlzVcl\nTZH0H3Vi3ivpfEkzJb0m6V5Je9WpLoABko6S9JSkVyT9UdI6pfq2yd/dE7m+xyX9rPjUe0nflbRQ\n0hrlRnLsq5JWyp/fNkdJ0oqSxkt6MrcxPT+wtxjzgXyu9iiV1+ZwHVEo+2EuGy7pd5JeJD2c2azf\ncKJk1netl9+fB8jzlSYD2wGnA0cAg4CrJH2msN9twOjC542AWoK0VaH848C02uiApA+TntS+PvBj\n0grdLwO/L9VfcyZpJe8fkJ4P1137kx4Q/CPg26SHxJ5VSpYmAf8uaXip3wuBrQtlo0kJy6QG2h0D\n/BS4gPTw2KHABEnr1wIkrUZ6YO42wKnAIbmvv5J0QKk+kS6b7gSckF//QXqQa9EXSN/X6cBBpAcA\nH0J6qGzNxbm+z9fp927AtRHxUv682LwvSQKuAQ4GriaNRv4dGCfphM5OSCdq9f8v6eG13yU9lNas\n/2j1U3n98suvzl/AXqRf/J8A3gO8F9gdeI6UqKye48bnuFGFfVcgPSX+4ULZt4H5wAr580GkX/KT\ngeMLcS8APyt8ngjczdufMn8r8ECpv4uAP5EXtW3gGI8D5pfKBtWJ+yMwvfB5WG5r3/x5lXwOLgae\nKMSdDszsog8Dc10LgI8Uyt9PerL9xYWy84EngCGlOi4BZgPL5s9jcp33AAMLce25n8O7ON7v5/6s\nXij7M3B7KW5UbucLhbKLgAcLn3fNMYeW9r0ceIP0oGOAD+S4PSrOzxGl720RcH6r/5745de/6uUR\nJbO+QcCNpOToSeC3wFzgs/HWA2p3AKZExOTaThExDzgbWFvShrl4Eml+Yu1y0ta5bFL+M5I2AlbO\nZUhahZSoXQoMkfSe
2gu4AfigpNUL/Q3glxHR9B1fEfH6mwcvDc5t3QwMl/TuHDMLeIi3Rsi2Bl4H\nTgLWlPT+0jE2YlJE3Ffox+PAVcCncl8EfA74A7BMnXOxCrBJqc5zY/E5Q5NI3+m6Fce7fK7v9hxX\nrO93wBaS3lco2x14hTRSVGUHUoJ8Rql8HCkJ+lQn+3YmgF80ua9Zr+dEyaxvCNKlqE8C2wIbRsQH\nImJiIeb9wIw6+04vbAeYRvqlWrs09XHeSpQ2k7Rc3hak0SJIl/lEGkF4rvQ6JseUlyp4rPhB0rKS\nhhVfnR2wpK0l3STpZeCfua1j8+YhhdBbS8cyBbgLmANsLWkI8BEaT5QeqlP2ILBSThhXA1YCDuDt\n5+LsHF8+F+U5Wi/m91VqBZLeL+lCSc+TRgqfIyXHsPjxXpLfv1Ao2xW4OjqfRP1+4KmIeLVUXv75\naMajS7CvWa/mu97M+o4746273poWEQsk/RkYLekDwOrALaRfzMsCW5ASjukR8Xzerfafqp8BVQ/F\nLScY5V/Io0mXzoKUdIWktSLiH+WKJH0wx95Hukz1JGk0ZBfSHJvif/ImAXtJWouUME2MiJB0W/5c\nS0puqeh3I4q32dfavgD4dUX8PaXPVXegCdJEadKlzZWA40kJ7yvA+0hzlN483oh4StJkUqL0M0lb\nky7HXtyNY+hM1SjgwE72KX/XZv2GEyWz/uNx0kTrsg0K22smAYeRJn4/FxEPAkj6Gymh2Zp0uanm\nkfz+RkTc1GT/ppJGxIqeq4jdhZS07ZQvr5H7t32d2NpI0fbACODo/PkW4CukROkl3p68VPlgnbLh\nwEsR8aKkucA8YMASnIuyTUhzg9oi4s2lHiRVXQ67GDhF0rqky24vAdd10cZjwMclvbs0qlT++agl\nliuX9l+SESezPsuX3sz6j2uBj0naolYgaQXg68CjEXF/IXYSaeHKQ3jr8hr5z18mjTK9eakqIp4j\nTc7+Rr7jazGSVu2qcxHxz4i4qfSqemxJbQTmzX+j8mWv/65T70PALNIk9QGkeT21Y1yfNJ/o9m7M\nl/p4nqNVa3dtYGfg+tzeQuAK4AuSNijvXOdcNNJuveMV6fupt/+l5AnXpMtuVxbnOFW4FliOdMmw\nqDax/DqAiHiRdKlzdCnuoIq+1CVpiNJyEis2uo9Zb+QRJbO+oZHLJj8B2oDrJZ1Kumttb9JIwH+V\nYieT7qYaDpxVKL+FNBeq3q30B+ayeyX9kjTKNIx0x9V7gU272d/OTCDdRn9tbmsw8DXgGd4+/wdS\ngrcbaTmDl3PZnaRLQuuR7lJr1H3ADZJOI52jA/L7Dwoxh5ESiSm5f9OBfwM2I43GFZPJRs7F30jz\nfE7OE9BfzsczuF5wRMySNAn4DrAijS04egXp+z1B0nrAX0kTvHcCfhoRxXlU5wCHSppDmtO2LWnE\nqzvf6xeBn+f3S7qINeu1PKJk1jd0+T/5iHiWlLTcQPrf//Gk29p3jogrS7GvkG71L07YhpQIBenW\n+idL+0wnJQJXk5YAOB34Bmk04lgW18zdbm/uk9vajfRv1M+ArwKnkdZmqqfW7+Io2ALSrfSNrp9U\n68ONwKGkYzyGNFo1NvepVvdMYHPSPKX/yn37JimxObzquKrK88jazqTk5QjgSFLy9JVO+vo7UpL0\nT6rnjRXbCFJSdCrwadJyEsOBb0XEd0v7HU2aG/UFUsK6IPevu8/k66/P77OliJbg7l0zMzOzfq3X\njShJ+l5eEn9coWyQpDMkzZb0ktIjFIaW9ltL0jVKj22YKelESQNKMdtKmpqX7n9QdR43IOlASY/m\nRwHcIWnz0vYu+2JmZmb9Q69KlHJS8jXefnfKyaQh411J8wLWIK0mW9tvAGmi4jLAlqQh870pXA7I\nEzKvJg2rbwycApwjabtCzO6kheqOJs23uIf06ILi5MxO+2JmZmb9R6+59JbvjJhKmk
h6FHB3RHxL\n6SGdzwFfjIgrcuz6pMmTW0bEFEk7AFeSlvmfnWO+QZrc+u953ZgTgB0iong3SwfpEQQ75s93AH+O\niEPyZ5HWbzk1Ik5spC//0pNkZmZm76jeNKJ0BnBVnXVJNiONFNVWqCUiZpCeszQqF20J3FtLkrIJ\npNVsP1yIKa5iXIsZBWnVYGBkqZ3I+9TaaaQvZmZm1k/0ikRJ0hdJC659r87mYaSHZc4tlc/irVtw\nV8ufy9tpIGawpEHAqqSVZ+vF1OpopC+Lyc9sGiFp+XrbzczMrL7e8Du05esoSVqTNO9nu04Wn6u7\nK43detpZjBqM6aqdzmI2AW4DpuVnVhVdT/VtvWZmZkuT7Xn7w5lXJK24vxVvLSb7jmp5okS63PXv\nwNQ8JwjSyM5oSQeRTtogSYNLIzlDeWv0p7amSdGwwrbae/khnEOBuRExX9Js0now9WKK7SzXRV/K\n1s7vI+psG01a68bMzMyqrc1SnChNBD5aKjufNEH6J8DTwBvAGNLKskgaTnpYZO2kTQaOkLRqYZ7S\nWNLTw6cXYnYotTM2lxMRb0iamtu5Mrej/PnUHD+VtPBavb5Mrji+xwB+/etfs8EGb3vaQb/S3t7O\n+PHjW92Nf7ml5Thh6TlWH2f/4uPsP6ZPn86XvvQlyL9LW6HliVJEzAOKz6BC0jzg+dpKuJLOBcZJ\nqj3c8lTgtoi4M+9yQ67jIkmHk55TdRxweuFy3i+Ag/Ldb+eRkp3dgB0LTY8DLsgJ0xTSM5CWJz/+\nICLmdtKXqjveXgPYYIMNGDGi3qBS/zFkyJB+f4yw9BwnLD3H6uPsX3yc/dJrrWq45YlShfJ8n9pD\nGy8DBpHm9hz4ZnDEIkk7k54rdDvpyd7n89ZTxImIxyTtREqGvgk8BewbERMLMZfkNZOOJV2C+wuw\nfX4gaEN9MTMzs/6jVyZKEfGfpc+vAwfnV9U+T5KeRdRZvTeT5kR1FnMm1c+TaqgvZmZm1j/0iuUB\nzMzMzHojJ0rWY9ra2lrdhXfE0nKcsPQcq4+zf/FxWk/qNY8w6a8kjQCmTp06dWmadGdmZrbEpk2b\nxsiRIwFGRsS0VvTBI0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCi\nZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVw\nomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlV\ncKJkZmZmvcr//A987Wut7kWyTKs7YGZmZlb01FPw8sut7kXS8hElSftJukfSnPy6XdKnCtv/JGlR\n4bVQ0pmlOtaSdI2keZJmSjpR0oBSzLaSpkp6TdKDkvaq05cDJT0q6VVJd0javLR9kKQzJM2W9JKk\nyyQN7elzYmZmZr1DyxMl4EngcGBkft0E/EHSBnl7AGcDw4DVgNWBw2o754ToWtLo2JbAXsDewLGF\nmLWBq4EbgY2BU4BzJG1XiNkdOAk4GtgUuAeYIGnVQl9PBnYCdgVGA2sAly/pCTAzM7PeqeWJUkRc\nExHXR8RD+XUk8DIp6al5JSKei4hn86s4ILc98CFgz4i4NyImAEcBB0qqXVrcH3gkIg6LiBkRcQZw\nGdBeqKcdOCsiLoyIB4D9gFeAfQAkDc5/bo+ImyPibuArwFaSPtbDp8XMzMx6gZYnSkWSBkj6IrA8\ncHth056SnpN0r6TjJb27sG1L4N6ImF0omwAMAT5ciJlYam4CMCq3uyxpNOvG2saIiLzPqFy0GWnU\nqhgzA3iiEGNmZmb9SK+YzC3pI8Bk4F3AS8DnchIC8BvgceAfwEbAicBwYLe8fTVgVqnKWYVt93QS\nM1jSIODfgIEVMevnPw8D5kfE3DoxqzV0oGZmZt
an9IpECXiANHdoZdL8nwsljY6IByLinELc3yTN\nBG6UtE5EPNpFvdHJNjUY09n2RmPMzMysD+oViVJELAAeyR+n5Tk/h5DmFpX9Ob+vBzwKzAQ2L8UM\ny+8zC+/DSjFDgbkRMV/SbGBhRUxtlGkmsJykwaVRpWJMpfb2doYMGbJYWVtbG21tbV3tamZm1u91\ndHTQ0dEBwF13wYIF0N4+p8W96iWJUh0DgEEV2zYljeA8kz9PBo6QtGphntJYYA4wvRCzQ6mesbmc\niHhD0lRgDHAlgCTlz6fm+KnAglx2RY4ZDryvVk9nxo8fz4gRI7oKMzMzWyoVBw922y2to3T88dMY\nOXJkS/vV8kRJ0o+A60jLBKwE7AlsA4yVtC6wB+n2/+dJl+fGATdHxH25ihuA+4GLJB1OWj7gOOD0\niHgjx/wCOEjSCcB5pGRnN2DHQlfGARfkhGkK6S645YHzASJirqRzgXGSXiTNpToVuC0ipvToSTEz\nM7NeoeWJEuly14WkBGcO8FdgbETcJGlN4JOky3ArkJKpS4Ef1XaOiEWSdgZ+TrpTbh4puTm6EPOY\npJ1IydA3gaeAfSNiYiHmkrxm0rG5T38Bto+I5wp9bSddoruMNOJ1PXBgj50JMzMz61VanihFxFc7\n2fYUsG0DdTwJ7NxFzM2kJQA6izkTOLOT7a8DB+eXmZmZ9XO9ah0lMzMzs97EiZKZmZlZBSdKZmZm\nZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZm\nZtbrSK3uQeJEyczMzHqViFb34C1OlMzMzMwqOFEyMzMzq+BEyczMzKyCEyUzMzOzCk6UzMzMzCo4\nUTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMjMzM6vgRMnMzMysghMlMzMzswpOlMzMzMwq\nOFEyMzMzq+BEyczMzKyCEyUzMzOzCi1PlCTtJ+keSXPy63ZJnypsHyTpDEmzJb0k6TJJQ0t1rCXp\nGknzJM2UdKKkAaWYbSVNlfSapAcl7VWnLwdKelTSq5LukLR5aXuXfTEzM7P+o+WJEvAkcDgwMr9u\nAv4gaYO8/WRgJ2BXYDSwBnB5beecEF0LLANsCewF7A0cW4hZG7gauBHYGDgFOEfSdoWY3YGTgKOB\nTYF7gAmSVi30tdO+mJmZWf/S8kQpIq6JiOsj4qH8OhJ4GdhS0mBgH6A9Im6OiLuBrwBbSfpYrmJ7\n4EPAnhFxb0RMAI4CDpS0TI7ZH3gkIg6LiBkRcQZwGdBe6Eo7cFZEXBgRDwD7Aa/k9mmwL2ZmZtaP\ntDxRKpI0QNIXgeWByaQRpmVII0EARMQM4AlgVC7aErg3ImYXqpoADAE+XIiZWGpuQq0OScvmtort\nRN6n1s5mDfTFzMzM+pFekShJ+oikl4DXgTOBz+VRndWA+RExt7TLrLyN/D6rznYaiBksaRCwKjCw\nIqZWx7B7GWMFAAAgAElEQVQG+mJmZmY9QGp1D5Jlug55RzxAmju0Mmn+z4WSRncSLyAaqLezGDUY\n01U7jfbFzMzM+phekShFxALgkfxxWp7zcwhwCbCcpMGlkZyhvDX6MxNY7O400uhPbVvtfVgpZigw\nNyLmS5oNLKyIKbbTVV8qtbe3M2TIkMXK2traaGtr62pXMzOzfq+jo4OOjg4A7rwTFi2C9vY5Le5V\nL0mU6hgADAKmAguAMcAVAJKGA+8Dbs+xk4EjJK1amKc0FpgDTC/E7FBqY2wuJyLekDQ1t3Nlbkf5\n86k5vrO+TO7qgMaPH8+IESMaO3ozM7OlTHHwYNdd4dVX4Yc/nMbIkSNb2q+WJ0qSfgRcR1omYCVg\nT2AbYGxEzJV0LjBO0ovAS6TE5baIuDNXcQNwP3CRpMOB1YHjgNMj4o0c8wvgIEknAOeRkp3dgB0L\nXRkHXJATpi
mku+CWB84H6KIvU3r4tJiZmS21ohdNaGl5okS63HUhKcGZA/yVlCTdlLe3ky6LXUYa\nZboeOLC2c0QskrQz8HPSKNM8UnJzdCHmMUk7kZKhbwJPAftGxMRCzCV5zaRjc5/+AmwfEc8V+tpp\nX8zMzKx/aXmiFBFf7WL768DB+VUV8ySwcxf13ExaAqCzmDNJd9013RczMzPrP3rF8gBmZmZmvZET\nJTMzM7MKTpTMzMzMKjhRMjMzM6vgRMnMzMysghMlMzMzswpOlMzMzMwqOFEyMzMzq+BEyczMzKyC\nEyUzMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMjMzs15HanUPEidK\nZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZWa8S0eoevMWJkpmZmVkFJ0pm\nZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVWp4oSfqepCmS5kqaJekKScNLMX+StKjw\nWijpzFLMWpKukTRP0kxJJ0oaUIrZVtJUSa9JelDSXnX6c6CkRyW9KukOSZuXtg+SdIak2ZJeknSZ\npKE9eU7MzMysd+iRREnSQEmbSFqlid23Bk4DtgA+CSwL3CDp3YWYAM4GhgGrAasDhxXaHwBcCywD\nbAnsBewNHFuIWRu4GrgR2Bg4BThH0naFmN2Bk4CjgU2Be4AJklYt9OVkYCdgV2A0sAZweRPHbWZm\nZr1cU4mSpJMl7Zv/PBC4GZgGPClp2+7UFRE7RsRFETE9Iu4lJTjvA0aWQl+JiOci4tn8ermwbXvg\nQ8CeEXFvREwAjgIOlLRMjtkfeCQiDouIGRFxBnAZ0F6opx04KyIujIgHgP2AV4B98rEOzn9uj4ib\nI+Ju4CvAVpI+1p3jNjMzs96v2RGl3UijLQCfBtYhJSrjgR8tYZ9WJo0gvVAq31PSc5LulXR8acRp\nS+DeiJhdKJsADAE+XIiZWKpzAjAKQNKypOTsxtrGiIi8z6hctBlp1KoYMwN4ohBjZmZm/USzidKq\nwMz85x2BSyPiQeA84KPNdkaSSJe2bo2I+wubfgN8CdgWOB74MnBRYftqwKxSdbMK2zqLGSxpEOmY\nBlbE1OoYBsyPiLmdxJiZmVk/sUzXIXXNAjaU9AzwKeCAXL48sHAJ+nMmsCGwVbEwIs4pfPybpJnA\njZLWiYhHu6izsyfGqMGYrp4600iMmZmZ9THNJkq/Ai4BniElCH/M5VsADzRToaTTSaNTW0fEM12E\n/zm/rwc8Shrd2rwUMyy/zyy8DyvFDAXmRsR8SbNJSV69mNoo00xgOUmDS6NKxZi62tvbGTJkyGJl\nbW1ttLW1dbabmZnZUqGjo4OOjg4A7rwTFi2C9vY5Le5Vk4lSRBwj6T5gLdJlt9fzpoXAT7pbX06S\nPgNsExFPNLDLpqQErZZQTQaOkLRqYZ7SWGAOML0Qs0OpnrG5nIh4Q9JUYAxwZe6X8udTc/xUYEEu\nuyLHDCdNPp/cWYfHjx/PiBEjGjg0MzOzpU9x8OBzn4P58+G446YxcmT53q53VrMjSkTEZQCS3lUo\nu6C79eT1kNqAXYB5kmojOnMi4jVJ6wJ7kG7/f550a/844OaIuC/H3gDcD1wk6XDS8gHHAadHxBs5\n5hfAQZJOIM2lGkOalL5joTvjgAtywjSFdBfc8sD5+fjmSjoXGCfpReAlUhJ1W0RM6e6xm5mZWe/W\n7PIAAyUdJelp4OWczCDpuNqyAd2wHzAY+BPwj8LrC3n7fNL6ShNIo0M/BS4lJVYARMQiYGfSiNbt\nwIWk5OboQsxjpPWPPgn8hZQE7RsREwsxlwDfJq2/dDewEbB9RDxX6G87aT2mywp93rWbx2xmZmZ9\nQLMjSt8nLep4GPDLQvl9wP8A5zZaUUR0mqxFxFOku926qudJUrLUWczNvH19
pnLMmaRJ5VXbXwcO\nzi8zMzP7F5C6jnknNLs8wH8DX4+I37D4XW73kNZTMjMzM+vzmk2U3gs8VFHfss13x8zMzKz3aDZR\nup/0jLay3Uhze8zMzMz6vGbnKB1LujvsvaRk678krU+6JNfpPCEzMzOzvqKpEaWI+AMpIfokMI+U\nOG0AfDoi/tjZvmZmZmZ9xZKso3QrsF0P9sXMzMysV2l2HaXNJW1Rp3wLSZstebfMzMzMWq/Zydxn\nkB5fUvbevM3MzMysKdGLHjPfbKK0ITCtTvndeZuZmZlZn9dsovQ6MKxO+eqkh8aamZmZ9XnNJko3\nAD+WNKRWIGll4HjAd72ZmZlZv9DsXW+HArcAj0uqLTC5CTAL+HJPdMzMzMys1ZpKlCLiaUkbAXsC\nGwOvAr8COiLijR7sn5mZmVnLLMk6SvOAs3uwL2ZmZma9StOJkqThwLbAUEpznSLi2CXrlpmZmVnr\nNZUoSfoa8HNgNjATKK54EKRHmpiZmZn1ac2OKB0JfD8iTujJzpiZmZn1Js0uD7AKcGlPdsTMzMys\nt2k2UboUGNuTHTEzMzPrbZq99PYQcJykLYF7gcWWBIiIU5e0Y2ZmZmat1myi9HXgZWCb/CoKwImS\nmZmZ9XnNLji5Tk93xMzMzKxGanUPkmbnKAEgaTlJ60tqej0mMzMzs96qqURJ0vKSzgVeAf4GvC+X\nnybpuz3YPzMzM7OWaXZE6cekZ7xtC7xWKJ8I7L6EfTIzMzPrFZq9ZPZZYPeIuENScVXuvwEfWPJu\nmZmZmbVesyNK/w48W6d8BRZ/nImZmZlZn9VsonQXsFPhcy05+ioweYl6ZGZmZtZLNJsoHQEcL+nn\npMt3h0j6I/AV4PvdqUjS9yRNkTRX0ixJV0gaXooZJOkMSbMlvSTpMklDSzFrSbpG0jxJMyWdKGlA\nKWZbSVMlvSbpQUl71enPgZIelfSqpDskbd7dvpiZmVn/0FSiFBG3kiZzL0NamXssMAsYFRFTu1nd\n1sBpwBbAJ4FlgRskvbsQczJpBGtXYDSwBnB5bWNOiK7N/dkS2AvYGzi2ELM2cDVwY+77KcA5krYr\nxOwOnAQcDWwK3ANMkLRqo30xMzOz/qPbk7nzmkl7ABMi4mtL2oGI2LFU/96k+U8jgVslDQb2Ab4Y\nETfnmK8A0yV9LCKmANsDHwI+ERGzgXslHQX8RNIxEbEA2B94JCIOy03NkPRxoB34Yy5rB86KiAtz\nO/uRkqJ9gBMb7IuZmZn1E90eUcpJxy+Ad/V8dwBYmTTn6YX8eSQpobux0IcZwBPAqFy0JXBvTpJq\nJgBDgA8XYiaW2ppQq0PSsrmtYjuR96m1s1kDfTEzM7MlEL3otrBm5yhNIV2a6lGSRLq0dWtE3J+L\nVwPmR8TcUvisvK0WM6vOdhqIGSxpELAqMLAiplbHsAb6YmZmZv1Es+sonQmcJGlNYCowr7gxIv66\nBPVuCHy8gVjR2FIEncWowZiu2ukypr29nSFDhixW1tbWRltbWxdVm5mZ9X8dHR10dHQAMCVPZGlv\nn9PCHiXNJkoX5/dTC2XBWwnDwO5WKOl0YEdg64j4R2HTTGA5SYNLIzlDeWv0Zyaw2N1ppNGf2rba\n+7BSzFBgbkTMlzQbWFgRU2ynq77UNX78eEaMGNFZiJmZ2VKrOHjwmc/AokXwgx9MY+TIkS3tV7OX\n3tap81q38N4tOUn6DGky9hOlzVOBBcCYQvxw0vPlbs9Fk4GPlu5OGwvMAaYXYsawuLG5nIh4I7dV\nbEf5c62dzvri9aPMzMz6maZGlCLi8Z7qgKQzgTZgF2CepNqIzpyIeC0i5uYH8I6T9CLwEmkk67aI\nuDPH3gDcD1wk6XBgdeA44PScAEGagH6QpBOA80jJzm6kUayaccAFkqaS5mG1A8sD5+fj7qwvvuPN\nzMysn2kqUZL0351tr91e36D9SJfr/lQq
/wpQq6eddFnsMmAQcD1wYKG9RZJ2Bn5OGv2ZR0puji7E\nPCZpJ1Iy9E3gKWDfiJhYiLkkj0odS7oE9xdg+4h4rtCvTvtiZmZm/Uezc5ROKX1eljTyMh94hbcS\nnC5FRJeX/yLideDg/KqKeRLYuYt6biYtAdBZzJmkSeVN98XMzMz6h2Yvva1SLpP0QdKIzk+XtFNm\nZmZmvUGzk7nfJiL+DnyXt482mZmZmfVJPZYoZQtIzz4zMzMz6/Oancy9S7mIdKfZQcBtS9opMzMz\ns96g2cncvy99DuA54Cbg20vUIzMzM1vqSV3HvBOanczd05fszMzMzHodJzxmZmZmFZpKlCRdJum7\ndcq/I+nSJe+WmZmZWes1O6K0DXBNnfLrgdHNd8fMzMys92g2UVqRtAp32RvA4Oa7Y2ZmZtZ7NJso\n3QvsXqf8i6SH05qZmZn1ec0uD3Ac8L+SPkBaEgBgDNAGfL4nOmZmZmbWas0uD3CVpM8CRwC7Aa8C\nfwU+mR88a2ZmZtbnNTuiRERcQ/0J3WZmZmb9QrPLA2wuaYs65VtI2mzJu2VmZmbWes1O5j4DWKtO\n+XvzNjMzM7M+r9lEaUNgWp3yu/M2MzMzsz6v2UTpdWBYnfLVgQXNd8fMzMyWdhGt7sFbmk2UbgB+\nLGlIrUDSysDxwB97omNmZmZmrdbsXW+HArcAj0u6O5dtAswCvtwTHTMzMzNrtWbXUXpa0kbAnsDG\npHWUfgV0RMQbPdg/MzMzs5ZZknWU5gFn92BfzMzMzHqVphIlSZ8nPa5kOBDA34HfRsRlPdg3MzMz\ns5bq1mRuSQMk/Q74HWkZgIeAR4APA5dIuliSer6bZmZmZu+87o4oHQJ8EtglIq4ubpC0C2me0iHA\nyT3TPTMzM7PW6e7yAF8BvlNOkgAi4krgMGCfnuiYmZmZWat1N1H6IDCxk+0Tc4yZmZlZn9fdROlV\nYOVOtg8GXutuJyRtLelKSU9LWpQv4xW3/yqXF1/XlmJWkfQbSXMkvSjpHEkrlGI2knSLpFclPS7p\nO3X68nlJ03PMPZJ2qBNzrKR/SHpF0h8lrdfdYzYzM7NqvWXGc3cTpcnA/p1sPzDHdNcKwF/y/lUL\nl19HemzKavnVVtr+W2ADYAywEzAaOKu2UdJKwATgUWAE8B3gGElfLcSMyvX8krSA5u+B30vasBBz\nOHAQ8A3gY8A8YIKk5Zo4bjMzM+vFujuZ+0fAnyS9B/gZ8AAgUoLybeAzwCe624mIuB64HqCTu+Ze\nj4jn6m2Q9CFge2BkRNydyw4GrpF0aETMBL4ELAvsGxELgOmSNgW+BZyTqzoEuC4ixuXPR0saS0qM\nDijEHBcRV+V2/pu0IvlngUu6e+xmZmbWe3VrRCkibgd2JyVDk4EXgReA23JZW0Tc1tOdzLaVNEvS\nA5LOlPRvhW2jgBdrSVI2kTQ6tUX+vCVwS06SaiYA6xeeWTeKt8/BmpDLkbQuaTTrxtrGiJgL/LkW\nY2ZmZv1HtxecjIgrJE0AxpIWnAR4ELghIl7pyc4VXAdcTrps9gHgx8C1kkZFRJCSl2dL/Vwo6YW8\njfz+SKneWYVtc/L7rDoxtTqGkZKvzmLMzMysn2j2WW+vSPok8P8i4oUe7lO99oqXtP4m6V7gYWBb\n4P862VVUz3mqbW8kprPtjcaYmZlZH9OtREnSmhHxVP64B3Ai8EJOXHaMiCd7uoP1RMSjkmYD65ES\npZnA0FJfBwKr5G3k92Glqoay+AhRVUxxu3LMrFLM3XSivb2dIUOGLFbW1tZGW1t5TrqZmdnSp6Oj\ng46ODgCmTEll7e1zWtijpLsjSg9Iep40J+ldwFrAE8DapInS7whJawLvAZ7JRZOBlSVtWpinNIaU\n1EwpxPxQ0sCIWJjLxgIzImJOIWYMcGqhue1yeS1Bm5lj/pr7Mpg0D+qMzvo8fvx4RowY0czhmpmZ\n9XvF
wYNddknLAxx99DRGjhzZ0n51d3mAIcDngal532slPQgMAraX1NQ8HUkrSNpY0ia5aN38ea28\n7URJW0h6v6QxpNv2HyRNtCYiHsh//qWkzSVtBZwGdOQ73iDd9j8fOE/ShpJ2B74JnFToyinADpK+\nJWl9SccAI4HTCzEnA0dK+rSkjwIXAk8Bf2jm2M3MzKz36m6itGxETImIk0iLT25KeqzJQtKjSx6W\nNKOJfmxGunQ1lXQp7CRgGvCDXPdGpERkBmmNozuB0RHxRqGOPUjLFUwErgZuIa11BLx5d9r2pNGv\nu4CfAsdExLmFmMmk9Zm+TlrX6b+Az0TE/YWYE0lJ2Fmku93eDewQEfObOG4zMzPrxbp76W2upLtJ\nl96WA5aPiNskLSAtG/AUaRHGbomIm+k8aftUA3X8k7RWUmcx9wLbdBFzOekOu85ijgGO6apPZmZm\n1rd1d0RpDeCHwOukJOsuSZNISdMIICLi1p7topmZmVlrdHfBydkRcVVEfA94BdicdBkqSCt1z5V0\nc89308zMzOyd190RpbI5eY2jN4D/BNYBzlziXpmZmZn1Ak0tOJltBDyd//w48Ea+w+x3S9wrMzMz\nW2pFpOUBeoOmE6Xi4pIR8ZGe6Y6ZmZlZ77Gkl97MzMzM+i0nSmZmZmYVnCiZmZmZVXCiZGZmZlbB\niZKZmZlZBSdKZmZmZhWcKJmZmVmv01vWUXKiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJk\nZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCi\nZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlV6BWJkqStJV0p6WlJiyTtUifmWEn/kPSKpD9KWq+0\nfRVJv5E0R9KLks6RtEIpZiNJt0h6VdLjkr5Tp53PS5qeY+6RtEN3+2JmZmbNi2h1D97SKxIlYAXg\nL8CBwNtOj6TDgYOAbwAfA+YBEyQtVwj7LbABMAbYCRgNnFWoYyVgAvAoMAL4DnCMpK8WYkblen4J\nbAL8Hvi9pA272RczMzPrB5ZpdQcAIuJ64HoASaoTcghwXERclWP+G5gFfBa4RNIGwPbAyIi4O8cc\nDFwj6dCImAl8CVgW2DciFgDTJW0KfAs4p9DOdRExLn8+WtJYUmJ0QCN96ZETYmZmZr1CbxlRqiRp\nHWA14MZaWUTMBf4MjMpFWwIv1pKkbCJpdGqLQswtOUmqmQCsL2lI/jwq70cpZlTuy7oN9MXMzMz6\niV6fKJESkyCN2hTNyttqMc8WN0bEQuCFUky9OmggprZ9WAN9MTMzs36iLyRKVUSd+UzdjFGDMUva\njpmZmXVD3Yk4LdAr5ih1YSYpERnG4iM5Q4G7CzFDiztJGgiskrfVYoaV6h7K4iNEVTHF7V31pa72\n9naGDBmyWFlbWxttbW2d7WZmZrZU6OjooKOjA4A774QBA6C9fU6Le9UHEqWIeFTSTNLdbH8FkDSY\nNPfojBw2GVhZ0qaFeUpjSEnNlELMDyUNzJflAMYCMyJiTiFmDHBqoQvb5fJG+1LX+PHjGTFiRHcP\n38zMbKlQHDzYeWdYdlk46qhpjBw5sqX96hWX3iStIGljSZvkonXz57Xy55OBIyV9WtJHgQuBp4A/\nAETEA6RJ17+UtLmkrYDTgI58xxuk2/7nA+dJ2lDS7sA3gZMKXTkF2EHStyStL+kYYCRweiGm076Y\nmZlZ/9FbRpQ2A/6PdBkseCt5uQDYJyJOlLQ8aV2klYFJwA4RMb9Qxx6khGYisAi4jHQrP5DuTpO0\nfY65C5gNHBMR5xZiJktqA36UX38HPhMR9xdiGumLmZmZ9QO9IlGKiJvpYnQrIo4Bjulk+z9JayV1\nVse9wDZdxFwOXL4kfTEzM7P+oVdcejMzMzPrjZwomZmZmVVwomRmZm
ZWwYmSmZmZWQUnSmZmZmYV\nnCiZmZmZVXCiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZm\nFZwomZmZWa8S0eoevMWJkpmZmVkFJ0pmZmZmFZwomZmZWa8jtboHiRMlMzMzswpOlMzMzMwqOFEy\nMzMzq+BEyczMzKyCEyUzMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhR\nMjMzM6vQJxIlSUdLWlR63V/YPkjSGZJmS3pJ0mWShpbqWEvSNZLmSZop6URJA0ox20qaKuk1SQ9K\n2qtOXw6U9KikVyXdIWnzf92Rm5mZWSv1iUQpuw8YBqyWXx8vbDsZ2AnYFRgNrAFcXtuYE6JrgWWA\nLYG9gL2BYwsxawNXAzcCGwOnAOdI2q4QsztwEnA0sClwDzBB0qo9eJxmZmbWS/SlRGlBRDwXEc/m\n1wsAkgYD+wDtEXFzRNwNfAXYStLH8r7bAx8C9oyIeyNiAnAUcKCkZXLM/sAjEXFYRMyIiDOAy4D2\nQh/agbMi4sKIeADYD3glt29mZmb9TF9KlD4o6WlJD0v6taS1cvlI0kjRjbXAiJgBPAGMykVbAvdG\nxOxCfROAIcCHCzETS21OqNUhadncVrGdyPuMwszMzPqdvpIo3UG6VLY9aRRnHeAWSSuQLsPNj4i5\npX1m5W3k91l1ttNAzGBJg4BVgYEVMathZmZm/c4yXYe0Xr5UVnOfpCnA48AXgNcqdhMQjVTfyTY1\nGNNIO2ZmZtbH9IlEqSwi5kh6EFiPdOlrOUmDS6NKQ3lr9GcmUL47bVhhW+19WClmKDA3IuZLmg0s\nrIgpjzK9TXt7O0OGDFmsrK2tjba2tq52NTMz6/c6Ojro6OgA4K67YMAAaG+f0+Je9dFESdKKwAeA\nC4CpwAJgDHBF3j4ceB9we95lMnCEpFUL85TGAnOA6YWYHUpNjc3lRMQbkqbmdq7M7Sh/PrWrPo8f\nP54RI0Z0+1jNzMyWBsXBg512gkGD4MgjpzFy5MiW9qtPJEqSfgpcRbrc9l7gB6Tk6OKImCvpXGCc\npBeBl0iJy20RcWeu4gb4/+3de7QdZXnH8e8vYBIIDaFACK2BIJGbIGCCBLmFWwLBaFELLARtwSUU\nLBZX1dLaFW4WwZqKQERRW26CyFpFRCgFCUkVbZYhRgohwRDCJSQkBEJuwMk5T/94300mO3tyPefs\ns+f8PmvtleyZd2ae58ycOc9+553ZPA3cJumrwO7AlcANEdGW29wEfEHSNcCPSAXQp4BxhVAmArfk\ngmka6S647YH/6JLEzczMrKlaolAC3gv8GNgZWAz8ChgVEa/l+ZeQLovdA/QD/gu4qLZwRHRI+ijw\nXVIv00pScTOh0OZ5SaeSiqGLgZeA8yLikUKbu/Mzk64gXYL7PTA2IhZ3Qc5mZmbWZC1RKEXEBgfy\nRMTbwN/mV1mbF4GPbmQ9U0iPANhQm0nApA21MTMzsy0XPegWqVZ5PICZmZn1Eh0daTB3T9BDwjAz\nMzNLIkDaeLvu4ELJzMzMepQI9yiZmZmZNdTR4R4lMzMzs4Z86c3MzMyshAslMzMzsxK+683MzMys\nhHuUzMzMzEp4MLeZmZlZCT8ewMzMzKyEL72ZmZmZlfClNzMzM7MSvvRmZmZmVsKX3szMzMxK+NKb\nmZmZWQlfejMzMzMr4R4lMzMzsxIeo2RmZmZWwpfezMzMzEr40puZmZlZCV96MzMzMyvhS29mZmZm\nJdrbXSiZmZmZNbR6NWy3XbOjSFwomZmZWY+yYgUMGNDsKBIXSmZmZtajrFwJO+zQ7CgSF0pmZmbW\nY7S3w1tvuUfJzMzMbD0rV6Z/XSi1OEkXSZonabWk30o6rNkxNdudd97Z7BC6RW/JE3pPrs6zWpxn\na1uwIP27++7NjaPGhdIWkHQG8C
1gAnAoMBN4SNIuTQ2syar6S1uvt+QJvSdX51ktzrO1zZuX/t1z\nz+bGUeNCactcAnwvIm6NiGeAC4BVwLnNDcvMzKy13Xtv6k3aY49mR5K4UNpMkt4DjAB+WZsWEQE8\nAhzRrLjMzMxa2dKlcOGF8P3vw8UX95wHTm7b7ABa0C7ANsCiuumLgH3LFpo1Kz2SfXNtyTLNWv6N\nN2DatOZtv7OXL1t26VJ4/PGu3XZPWf6112Dq1O7ffnfnvmQJTJ7cvO131/KLF8PDD3ft9ntC7q++\nCg8+2Lztd9fyixbB/fc3Z9tly3d0pLvW1qxJr7a2tf9fswbefhtWrUqvFSvSOebVV2H+fJg7F/r1\ng5tugs9/futi6UwulDqPgEaHWX+As8+e1b3RNMUyDj/8iWYH0Q2WceSRvSFPgGUce2xvyHUZxx/f\nO/IcM6Z35DluXO/Ic/z41shzm21SD1HfvtC/f3rqdv/+MGhQeo0aBeeck/7ddVeYMSMtN2vWu387\n+zcrdhdKm28J0A7sVjd9MOv3MgEMS/+c3ZUx9SAjmh1AN+kteULvydV5Vovz7Ena29OrrW3t7f+b\naRiwCX35nc+F0maKiDZJ04ETgPsAJCm//06DRR4CPg08D7zVTWGamZlVQX9SkfRQswJQbO1FyV5I\n0unALcD5wDTSXXCfAvaLiMXNjM3MzMw6j3uUtkBE3J2fmXQF6RLc74GxLpLMzMyqxT1KZmZmZiV6\nyCSLcAIAAAsFSURBVFMKzMzMzHoeF0pdrCd/J5ykoyXdJ+llSR2SPtagzRWSFkhaJelhScPr5u8k\n6Q5JyyS9LukHkgbUtfmgpKn5ZzBf0pcbbOcvJc3KbWZKOqWTcrxU0jRJb0paJOk/Je1T16afpBsl\nLZG0XNI9kgbXtRkq6ReSVkpaKOlaSX3q2oyWNF3SW5LmSPpsg3i67HiQdEH+2S3Lr8clnVy1POu2\nc2k+didWLU9JE3JuxdfTVcszr//PJN2Wc1mVj+MP1bVp9XPRvAb7s0PS9Xl+JfanpD6SrpT0XN5X\nf5T0tQbtWmd/RoRfXfQCziDd6fYZYD/ge8BSYJdmx5bjO5k0zuovSI88+Fjd/K/meMcDBwL3AnOB\nvoU2DwJPACOBjwBzgNsL8/8EeIU0+H1/4HRgJfC5QpsjgDbgS6SHdl4OvA0c0Ak5PgCck7d9EHA/\n6Q7E7QptvpunHUv67r7Hgf8pzO8DPEm66+IgYCzwKnBVoc0wYAVwbc7hopzTSd11PACn5n06PL+u\nyj/H/auUZ2E7hwHPATOAiRXcnxOAPwC7kh4/Mhj40wrmOQiYB/yAdK/7nsCJwF4VOxftXNiPg0l3\nSrcDR1dsf/5jjutkYA/gE8CbwBdadX922knLr4YHzG+B6wrvBbwEfKXZsTWItYP1C6UFwCWF9wOB\n1cDp+f3+eblDC23GAmuAIfn935CePbVtoc3VwNOF93cB99Vt+zfApC7Ic5cc81GFnN4GTiu02Te3\n+XB+f0r+Zdul0OZ84PVaXsA1wB/qtnUn8EAzjwfgNeCvq5YnsAMwGzgemEwulKqUJ6lQeqJkXpXy\n/AYwZSNtqngu+jYwp4L78+fAzXXT7gFubdX96UtvXUQt/p1wkvYChrBu/G8C/8va+EcBr0fEjMKi\nj5CeUH54oc3UiFhTaPMQsK+kHfP7I/Jy1LXpip/ToBzf0vx+BOnuz2Kes4EXWDfPJyNiSV18OwIf\nKLQpzaG7j4fc/X0msD3pxFC1PG8Efh4Rj9ZNH0m18ny/0qXxuZJulzQ0T6/S/hwP/E7S3UqXx5+Q\n9LnazCqei/LP9dPAD/OkKh23jwMnSHp/3ubBwJGk3v2W3J8ulLrOhr4Tbkj3h7PZhpAOyg3FP4TU\nxfquiGgnFSHFNo3WwSa06dSfkySRPsX9KiJqYz2GAO/kX9Sy7W9NDgMl9aObjgdJB0paTvp0Oon0
\nCfUZKpRnLgAPAS5tMHs3KpIn6ZP/X5E+SV8A7AVMzeM0KrM/gfeRegdmA2OAm4DvSKp9nUHlzkXA\naaQC55b8vkrH7TeAnwDPSHoHmA58OyLuKsTYUvvTz1HqfmXfCdcqNiX+jbXRJrbp7J/TJOAA4KhN\naLup299YDpvSpjPzfAY4mNRz9kngVknHdML2e0Sekt5LKnZPioi2zVl0E7ffI/IEiIjik4j/T9I0\nYD5pLEbZU/5bLk/SB/ZpEfHP+f1MSR8gFU+3b2UMPfVcdC7wYEQs3Ei7VtyfZwBnAWcCT5M+1Fwn\naUFE3LaVMTRlf7pHqets7nfC9TQLSQfUhuJfmN+/S9I2wE55Xq1No3UUP1GUtem0n5OkG4BxwOiI\nWFCYtRDoK2ngBrbfKL7dCvPK2gwG3oyId+im4yEi1kTEcxHxRET8EzAT+CLVyXMEaXDzdEltktpI\ng1+/mD+9LgL6VSDP9UTEMtKA1uFUZ39CGpBb/63hs0gDgWsxVulctAdpsPrNhclV2p/XAldHxE8j\n4qmIuAP4N9b2ALfc/nSh1EXyp93ad8IB63wnXFO+2G9zRMQ80kFWjH8g6fpwLf7fAIMkHVpY9ATS\nL8G0Qptj8kFeMwaYnU/8tTYnsK6T8vStloukjwPHRcQLdbOnkwYIFvPch3SSLuZ5kNLT2Is5LGPt\nCb5RDmNqOTTxeOgD9KM6eT5CuuPnEFLP2cHA70g9D7X/t9H6ea5H0g7A3qSBsFXZnwC/Jg1cLtqX\n1HtWqXNRdi7pD/UDhWlV2p/bs36PTQe53mjJ/dkZo9z9Kh39fzppJH/xNszXgF2bHVuObwDpj8sh\n+UD+u/x+aJ7/lRzveNIfp3uBZ1n3Fs4HSH+cDiMN2JsN3FaYP5B0Yr+FdNnrDNLtq+cV2hwBvMPa\nWzgvI11a6IxbcieR7go5mvTJovbqX9dmHjCa1GPxa9a/LXcm6XbVD5LGjCwCriy0GZbzuibncGHO\n6cTuOh6Ar5MuK+5JuuX2atLJ9/gq5dkg73fveqtSnsA3gWPy/vwI8HCOc+eK5TmSNKbuUlIheBaw\nHDiz0Kblz0V5/SI9AuDrDeZVZX/+O2kQ+rh87J5GGm/0L626Pzv9pOXXegfNhfkXYzWpih3Z7JgK\nsR1LKpDa614/KrS5LB+Mq0h3CwyvW8cg0qf5ZaSC5GZg+7o2BwFT8jpeAP6+QSyfJI2vWU16dszY\nTsqxUX7twGcKbfoB15O6pZcDPwUG161nKOkZTCvyyekaoE+Dn+f0nMOzwDndeTyQnkPzXF73QuC/\nyUVSlfJssK1HWbdQqkSepNu6X8rrfgH4Mes+W6gSeeb1jyP93q8CngLObdDmMlr4XJTXfRLp/DO8\nwbxK7E/SB/CJpKJvZY7hcgq38bfa/vR3vZmZmZmV8BglMzMzsxIulMzMzMxKuFAyMzMzK+FCyczM\nzKyECyUzMzOzEi6UzMzMzEq4UDIzMzMr4ULJzMzMrIQLJTMzM7MSLpTMzDaDpMmSJjY7DjPrHi6U\nzKxlSDpf0puS+hSmDZDUJumXdW2Pk9QhaVh3x2lm1eFCycxayWTSl26OLEw7GngFGCWpb2H6scD8\niHh+czciadutCdLMqsOFkpm1jIiYQyqKRhcmjwbuJX1b+ai66ZMBJA2V9DNJyyUtk/QTSYNrDSVN\nkDRD0nmSngPeytO3l3RrXu5lSV+qj0nShZLmSFotaaGkuzs3azNrJhdKZtZqHgOOK7w/Lk+bUpsu\nqR9wOPBobvMzYBCp9+lEYG/grrr1Dgc+AZwGHJKn/WteZjwwhlR8jagtIGkkcB3wNWAfYCwwdSvz\nM7MexN3LZtZqHgMm5nFKA0hFzVSgL3A+cDlwZH7/mKSTgAOBYRGxAEDSOcBTkkZExPS83vcA50TE\n0txmAHAucFZEPJanfRZ4qRDLUGAF8IuIWAm8CMzsorzNrAnc
o2RmraY2Tukw4ChgTkQsIfUoHZ7H\nKY0G5kbES8B+wIu1IgkgImYBbwD7F9Y7v1YkZXuTiqdpheVeB2YX2jwMzAfm5Ut0Z0nartMyNbOm\nc6FkZi0lIuYCL5Musx1HKpCIiFdIPTpHUhifBAiIBquqn76ywXxKlq3FsgL4EHAmsIDUmzVT0sBN\nTsjMejQXSmbWiiaTiqTRpEtxNVOBU4APs7ZQehrYQ9Kf1xpJOgDYMc8r80dgDYUB4pJ2Io1FeldE\ndETEoxHxD8DBwDDg+C3Iycx6II9RMrNWNBm4kXQOm1KYPhW4gXTJ7DGAiHhE0pPAHZIuyfNuBCZH\nxIyyDUTESkk/BL4paSmwGLgKaK+1kXQq8L683deBU0k9UbPXX6OZtSIXSmbWiiYD/YFZEbG4MH0K\nsAPwTEQsLEz/OHB9nt8BPAhcvAnb+TJpPNR9wHLgW0DxstobpDvlJuR4ngXOzGOgzKwCFFF6+d3M\nzMysV/MYJTMzM7MSLpTMzMzMSrhQMjMzMyvhQsnMzMyshAslMzMzsxIulMzMzMxKuFAyMzMzK+FC\nyczMzKyECyUzMzOzEi6UzMzMzEq4UDIzMzMr4ULJzMzMrMT/A3OQOlsNsogaAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4
qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv5nU63Bp6dw+1N0+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0vH+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EA
e01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9FxKxc3z8lPUz6+fhFKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AApyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8
IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwKb\nSnpz6Rh7YlpE3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSPirREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnS
NPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8C9oqI4qyQdtJt8ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpNmZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/
vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX1FwBJw0hjfc7O8XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgA
ElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKRGt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBwL356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNg
DuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzMzOz1anWMUEASFpb0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5m
S3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJzleAp4DDImJqIeayfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZfOYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qvpnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpT
C5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJS4nJCIeYxSfuQ\nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvAkfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA
9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0mQmZlZE1qyBNZeu9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9QfVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmbWZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDL
wGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3AXZXvfgiY3+q4jCBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAtzqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3QtMBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4L
x8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eapPOrdZJ0rqRnJfVJ6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacBtle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 103, + "execution_count": 125, "metadata": { "collapsed": true }, @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 104, + "execution_count": 126, "metadata": { "collapsed": true }, @@ -358,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 105, + "execution_count": 127, "metadata": { "collapsed": false }, @@ -367,9 +367,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 3467\n", - "Number of 
unique tokens: 8640\n", - "Number of documents: 1740\n" + "Number of authors: 578\n", + "Number of unique tokens: 2245\n", + "Number of documents: 286\n" ] } ], @@ -540,7 +540,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 128, "metadata": { "collapsed": false }, @@ -558,7 +558,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 129, "metadata": { "collapsed": false }, @@ -575,7 +575,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 130, "metadata": { "collapsed": false }, @@ -588,7 +588,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 131, "metadata": { "collapsed": false }, @@ -605,7 +605,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 132, "metadata": { "collapsed": false }, @@ -621,7 +621,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 133, "metadata": { "collapsed": false }, @@ -630,8 +630,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 10 x 681 x 10 (68100 elements)\n", - "mu is 10 x 681 x 21 (143010 elements)\n" + "phi is 10 x 2245 x 10 (224500 elements)\n", + "mu is 10 x 2245 x 21 (471450 elements)\n" ] } ], @@ -733,7 +733,7 @@ }, { "cell_type": "code", - "execution_count": 106, + "execution_count": 134, "metadata": { "collapsed": false }, @@ -742,8 +742,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 1740 x 8640 x 10 (150336000 elements)\n", - "mu is 1740 x 8640 x 3467 (52121491200 elements)\n" + "phi is 286 x 2245 x 10 (6420700 elements)\n", + "mu is 286 x 2245 x 578 (371116460 elements)\n" ] } ], @@ -756,7 +756,7 @@ }, { "cell_type": "code", - "execution_count": 113, + "execution_count": 157, "metadata": { "collapsed": false }, @@ -768,50 +768,142 @@ }, { "cell_type": "code", - "execution_count": 114, + "execution_count": 159, "metadata": { "collapsed": false }, "outputs": [ { - "name": "stderr", + "name": "stdout", "output_type": "stream", "text": [ 
- "/home/olavur/author_topic_models/gensim/gensim/models/atvb.py:395: RuntimeWarning: divide by zero encountered in log\n", - " bound_d += cts[vi] * numpy.log(bound_v)\n", - "/home/olavur/author_topic_models/gensim/gensim/models/atvb.py:268: RuntimeWarning: invalid value encountered in true_divide\n", - " var_phi[d, v, :] = var_phi[d, v, :] / var_phi[d, v, :].sum()\n", - "/home/olavur/author_topic_models/gensim/gensim/models/atvb.py:58: RuntimeWarning: invalid value encountered in greater\n", - " if (rho * dprior + prior > 0).all():\n" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, author2doc=author2doc, doc2author=doc2author, threshold=1e-12, iterations=40, alpha='auto', eta='auto', eval_every=1, random_state=1)\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/home/olavur/.vew/author-topic/local/lib/python3.4/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' '\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.vew/author-topic/local/lib/python3.4/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - 
"\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", - "\u001b[0;32m/home/olavur/.vew/author-topic/local/lib/python3.4/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.vew/author-topic/local/lib/python3.4/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m/home/olavur/author_topic_models/gensim/gensim/models/atvb.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, id2author, author2doc, doc2author, threshold, iterations, alpha, eta, minimum_probability, eval_every, random_state, var_lambda)\u001b[0m\n\u001b[1;32m 162\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 163\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 164\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdoc2author\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 165\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 166\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mupdate_alpha\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_gamma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrho\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/author_topic_models/gensim/gensim/models/atvb.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, author2doc, doc2author, var_lambda)\u001b[0m\n\u001b[1;32m 330\u001b[0m \u001b[0mdoc\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mcorpus\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 331\u001b[0m \u001b[0;31m# Get the count of v in doc. If v is not in doc, return 0.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 332\u001b[0;31m \u001b[0mcnt\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdoc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 333\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcnt\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 334\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + "CPU times: user 1min 30s, sys: 536 ms, total: 1min 31s\n", + "Wall time: 1min 30s\n" ] } ], "source": [ "%time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=40, alpha='auto', eta='auto', \\\n", + " iterations=10, alpha='auto', eta='auto', var_lambda=var_lambda, \\\n", " eval_every=1, random_state=1)" ] }, + { + "cell_type": "code", + "execution_count": 162, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.019*memory + 0.016*chip + 0.012*synapse + 0.010*energy + 0.009*circuit + 0.009*analog + 0.009*bit + 0.008*hidden + 0.007*pulse + 0.007*associative'),\n", + " (1,\n", + " '0.013*node + 0.009*vector + 0.009*role + 
0.008*dynamic + 0.007*temporal + 0.006*propagation + 0.006*noise + 0.006*matrix + 0.006*transition + 0.006*variable'),\n", + " (2,\n", + " '0.019*activation + 0.015*processor + 0.011*cycle + 0.010*machine + 0.009*path + 0.007*update + 0.007*action + 0.007*element + 0.006*array + 0.006*operation'),\n", + " (3,\n", + " '0.017*node + 0.010*threshold + 0.009*circuit + 0.008*probability + 0.008*classifier + 0.007*distribution + 0.005*class + 0.005*let + 0.005*vector + 0.004*bit'),\n", + " (4,\n", + " '0.016*memory + 0.010*cell + 0.010*fig + 0.010*delay + 0.010*vector + 0.009*cortex + 0.009*matrix + 0.008*map + 0.007*phase + 0.007*associative'),\n", + " (5,\n", + " '0.040*cell + 0.016*firing + 0.014*response + 0.012*stimulus + 0.010*frequency + 0.010*spike + 0.010*activity + 0.010*potential + 0.007*current + 0.007*synaptic'),\n", + " (6,\n", + " '0.020*hidden + 0.017*recognition + 0.017*speech + 0.010*propagation + 0.009*hidden_unit + 0.009*back_propagation + 0.009*trained + 0.009*classifier + 0.008*training_set + 0.007*hidden_layer'),\n", + " (7,\n", + " '0.014*vector + 0.009*region + 0.009*code + 0.008*class + 0.008*chain + 0.006*human + 0.006*matrix + 0.006*domain + 0.006*probability + 0.006*equilibrium'),\n", + " (8,\n", + " '0.013*field + 0.009*constraint + 0.007*line + 0.007*analog + 0.007*noise + 0.006*velocity + 0.006*gradient + 0.006*energy + 0.006*minimum + 0.005*optimization'),\n", + " (9,\n", + " '0.031*image + 0.018*object + 0.011*visual + 0.008*joint + 0.007*vector + 0.007*position + 0.007*region + 0.007*pixel + 0.006*view + 0.006*fig')]" + ] + }, + "execution_count": 162, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 163, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [21]\n", + "[(0, 0.037231992567152018),\n", + " (1, 
0.016698200630964527),\n", + " (2, 0.0342011513633367),\n", + " (3, 0.67231870768323798),\n", + " (4, 0.013204120807888714),\n", + " (7, 0.18016218284618693),\n", + " (8, 0.015739022916034465),\n", + " (9, 0.02510090606398686)]\n", + "\n", + "Geoffrey E. Hinton\n", + "Docs: [276, 235, 270]\n", + "[(0, 0.05309541750092419),\n", + " (1, 0.19415617091685813),\n", + " (2, 0.031847363809780198),\n", + " (3, 0.019551957323076154),\n", + " (6, 0.33640266674595204),\n", + " (7, 0.040241620376066808),\n", + " (8, 0.030507189050582355),\n", + " (9, 0.28201509780410544)]\n", + "\n", + "Michael I. Jordan\n", + "Docs: [205]\n", + "[(1, 0.13235075631104087),\n", + " (2, 0.36064441736611164),\n", + " (4, 0.033312619049718932),\n", + " (8, 0.3828853340620974),\n", + " (9, 0.064817361491075531)]\n", + "\n", + "James M. Bower\n", + "Docs: [188, 251, 244]\n", + "[(2, 0.021041919799574347),\n", + " (4, 0.59368476827823236),\n", + " (5, 0.32772809273108228),\n", + " (8, 0.024300185703856069),\n", + " (9, 0.012102285218647514)]\n" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. 
Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))" + ] + }, { "cell_type": "code", "execution_count": 31, @@ -1071,18 +1163,30 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 151, "metadata": { "collapsed": true }, "outputs": [], "source": [ - "lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10)" + "lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10,\n", + " iterations=100, alpha='auto', eta='symmetric')" + ] + }, + { + "cell_type": "code", + "execution_count": 153, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "var_lambda = lda.state.get_lambda()" ] }, { "cell_type": "code", - "execution_count": 98, + "execution_count": 154, "metadata": { "collapsed": false }, @@ -1091,28 +1195,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.025*node + 0.015*processor + 0.012*constraint + 0.012*propagation + 0.010*activation + 0.009*back_propagation + 0.009*classifier + 0.009*update + 0.007*hidden + 0.007*energy'),\n", + " '0.020*memory + 0.015*chip + 0.010*synapse + 0.009*hidden + 0.009*energy + 0.008*activation + 0.008*bit + 0.007*analog + 0.007*associative + 0.007*circuit'),\n", " (1,\n", - " '0.022*image + 0.019*classifier + 0.010*classification + 0.009*noise + 0.008*region + 0.007*gaussian + 0.007*class + 0.007*node + 0.006*decision + 0.006*vector'),\n", + " '0.013*node + 0.010*vector + 0.008*dynamic + 0.007*role + 0.006*matrix + 0.006*temporal + 0.006*sequence + 0.005*propagation + 0.005*action + 0.004*noise'),\n", " (2,\n", - " '0.028*hidden + 0.020*speech + 0.012*hidden_unit + 0.011*chain + 0.010*region + 0.009*hidden_layer + 0.008*human + 0.007*propagation + 0.007*orientation + 0.007*acoustic'),\n", + " '0.022*processor + 0.017*activation + 0.013*cycle + 0.011*path + 0.009*machine + 0.008*cm + 0.007*letter + 0.007*array + 0.006*update + 0.006*string'),\n", " 
(3,\n", - " '0.028*memory + 0.024*vector + 0.010*capacity + 0.010*bit + 0.010*associative + 0.008*code + 0.008*associative_memory + 0.007*stored + 0.006*matrix + 0.006*threshold'),\n", + " '0.017*node + 0.010*circuit + 0.008*threshold + 0.007*classifier + 0.007*probability + 0.006*distribution + 0.005*bit + 0.005*vector + 0.005*let + 0.004*polynomial'),\n", " (4,\n", - " '0.023*circuit + 0.018*chip + 0.015*analog + 0.014*voltage + 0.013*current + 0.011*synapse + 0.008*pulse + 0.008*transistor + 0.007*vlsi + 0.007*synaptic'),\n", + " '0.017*memory + 0.010*vector + 0.010*fig + 0.009*matrix + 0.009*delay + 0.008*cell + 0.008*cortex + 0.007*associative + 0.007*map + 0.006*dynamic'),\n", " (5,\n", - " '0.009*node + 0.008*hidden + 0.007*activation + 0.005*generalization + 0.005*hidden_unit + 0.005*connectionist + 0.004*training_set + 0.004*trained + 0.004*word + 0.004*probability'),\n", + " '0.028*cell + 0.010*response + 0.010*firing + 0.009*stimulus + 0.008*activity + 0.007*frequency + 0.007*potential + 0.007*current + 0.006*synaptic + 0.006*spike'),\n", " (6,\n", - " '0.033*cell + 0.012*response + 0.011*stimulus + 0.011*firing + 0.008*synaptic + 0.008*activity + 0.008*potential + 0.007*spike + 0.006*fig + 0.006*cortex'),\n", + " '0.016*hidden + 0.012*recognition + 0.011*speech + 0.008*propagation + 0.007*classifier + 0.007*hidden_unit + 0.007*back_propagation + 0.006*trained + 0.006*hidden_layer + 0.005*training_set'),\n", " (7,\n", - " '0.009*energy + 0.007*gradient + 0.007*field + 0.007*hopfield + 0.006*matrix + 0.006*minimum + 0.006*convergence + 0.006*hidden + 0.005*vector + 0.004*equilibrium'),\n", + " '0.013*vector + 0.008*code + 0.008*region + 0.007*chain + 0.007*class + 0.006*matrix + 0.005*probability + 0.005*hopfield + 0.005*let + 0.005*domain'),\n", " (8,\n", - " '0.019*recognition + 0.009*joint + 0.009*visual + 0.009*speech + 0.008*field + 0.007*speaker + 0.007*object + 0.007*motion + 0.005*aspect + 0.005*control'),\n", + " '0.012*field + 0.007*constraint + 
0.007*analog + 0.006*noise + 0.006*line + 0.006*image + 0.006*energy + 0.005*gradient + 0.005*velocity + 0.005*minimum'),\n", " (9,\n", - " '0.016*map + 0.010*delay + 0.010*region + 0.008*cortex + 0.008*activity + 0.008*brain + 0.008*oscillation + 0.008*distribution + 0.007*phase + 0.007*cell')]" + " '0.032*image + 0.019*object + 0.008*visual + 0.008*vector + 0.008*joint + 0.007*fig + 0.006*pixel + 0.006*position + 0.006*region + 0.006*view')]" ] }, - "execution_count": 98, + "execution_count": 154, "metadata": {}, "output_type": "execute_result" } diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index dce231c276..40dc7421a2 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -170,6 +170,8 @@ def update_alpha(self, var_gamma, rho): """ N = float(len(var_gamma)) + # NOTE: there might be possibility for overflow if number + # of authors is very high. logphat = 0.0 for a in xrange(self.num_authors): logphat += dirichlet_expectation(var_gamma[a, :]) @@ -203,6 +205,12 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No logger.info('Starting inference. Training on %d documents.', len(corpus)) + if var_lambda is None: + optimize_lambda = True + else: + # We have topics from LDA, thus we do not train the topics. + optimize_lambda = False + # Initial value of gamma and lambda. # NOTE: parameters of gamma distribution same as in `ldamodel`. var_gamma = self.random_state.gamma(100., 1. / 100., @@ -212,6 +220,9 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No var_lambda = self.random_state.gamma(100., 1. / 100., (self.num_topics, self.num_terms)) + self.var_lambda = var_lambda + self.var_gamma = var_gamma + # Initialize mu. # mu is 1/|A_d| if a is in A_d, zero otherwise. 
# var_mu is essentially a (self.num_docs, self.num_terms, self.num_authors) sparse matrix, @@ -235,6 +246,9 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No # things are multiplied along the correct dimensions. Elogtheta = dirichlet_expectation(var_gamma) + # NOTE: computing the Dirichlet expectation of lambda may + # cause overflow when the vocabulary is very large, as it + # requires a sum over vocab words. Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) @@ -256,6 +270,11 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No for v in ids: for k in xrange(self.num_topics): # Average Elogtheta over authors a in document d. + # NOTE: avgElogtheta may become numerically unsable. If + # it is a large positive number, exponentiating it may + # cause overflow, which probably results in the value + # "inf". If it is a large negative number, exponentiating + # it may result in 0.0. avgElogtheta = 0.0 for a in authors_d: avgElogtheta += var_mu[(d, v, a)] * Elogtheta[a, k] @@ -265,7 +284,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No # TODO: avoid computing phi if possible. var_phi[d, v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi. - var_phi[d, v, :] = var_phi[d, v, :] / var_phi[d, v, :].sum() + var_phi[d, v, :] = var_phi[d, v, :] / (var_phi[d, v, :].sum() + 1e-100) # Update mu. for d, doc in enumerate(corpus): @@ -276,6 +295,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No mu_sum = 0.0 for a in authors_d: # Average Elogtheta over topics k. + # NOTE: we may have same problems as with phi update, above. 
avgElogtheta = 0.0 for k in xrange(self.num_topics): avgElogtheta += var_phi[d, v, k] * Elogtheta[a, k] @@ -286,7 +306,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No var_mu[(d, v, a)] = expavgElogtheta mu_sum += var_mu[(d, v, a)] - mu_norm_const = 1.0 / mu_sum + mu_norm_const = 1.0 / (mu_sum + 1e-100) for a in authors_d: var_mu[(d, v, a)] *= mu_norm_const @@ -317,31 +337,41 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No Elogtheta = dirichlet_expectation(var_gamma) # Update lambda. - #logger.info('Updating lambda.') - for k in xrange(self.num_topics): - #logger.info('k = %d.', k) - for v in xrange(self.num_terms): - #logger.info('v = %d.', v) - var_lambda[k, v] = self.eta[v] - sample_ratio = 1.0 # When sample_ratio is 1.0, the whole dataset is used. - nsamples = int(numpy.ceil(self.num_docs * sample_ratio)) - doc_idxs = sample(xrange(self.num_docs), nsamples) - for d in doc_idxs: - doc = corpus[d] - # Get the count of v in doc. If v is not in doc, return 0. - cnt = dict(doc).get(v, 0) - var_lambda[k, v] += cnt * var_phi[d, v, k] - - if self.optimize_eta: - stepsize = 1 - self.update_eta(var_lambda, stepsize) - - # Update Elogbeta, since lambda has been updated. - Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) + if optimize_lambda: + #logger.info('Updating lambda.') + for k in xrange(self.num_topics): + #logger.info('k = %d.', k) + for v in xrange(self.num_terms): + #logger.info('v = %d.', v) + var_lambda[k, v] = self.eta[v] + + # The following commented-out code is used for "sampling" documents when + # updating lambda: + # sample_ratio = 1.0 # When sample_ratio is 1.0, the whole dataset is used. 
+ # nsamples = int(numpy.ceil(self.num_docs * sample_ratio)) + # doc_idxs = sample(xrange(self.num_docs), nsamples) + + # TODO: this would be more efficient if there was a mapping from words + # to the documents that contain that word, although that mapping would be + # very large. + # NOTE: the below might cause overflow if number of documents is very large, + # although it seems somewhat unlikely. + for d, doc in enumerate(corpus): + # Get the count of v in doc. If v is not in doc, return 0. + cnt = dict(doc).get(v, 0) + var_lambda[k, v] += cnt * var_phi[d, v, k] + + if self.optimize_eta: + stepsize = 1 + self.update_eta(var_lambda, stepsize) + + # Update Elogbeta, since lambda has been updated. + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + self.var_lambda = var_lambda self.var_gamma = var_gamma - self.var_lambda = var_lambda # Print topics: #pprint(self.show_topics()) @@ -381,6 +411,8 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): else: docs = [self.corpus[d] for d in doc_ids] + # NOTE: computing the bound this way is very numerically unstable, which is why + # "logsumexp" is used in the LDA code. bound= 0.0 for d, doc in enumerate(docs): authors_d = self.doc2author[d] @@ -400,6 +432,18 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): # TODO: can I do something along the lines of (as in ldamodel): # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) + # If I computed the LDA bound the way I compute the author-topic bound above: + # bound = 0.0 + # for d, doc in enumerate(docs): + # ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + # cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ # bound_d = 0.0 + # for vi, v in enumerate(ids): + # bound_v = 0.0 + # for k in xrange(self.num_topics): + # bound_v += numpy.exp(Elogtheta[d, k] + Elogbeta[k, v]) + # bound_d += cts[vi] * numpy.log(bound_v) + # bound += bound_d return bound From b43d34431821cf2549e36471212eb6a133117368 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 19 Oct 2016 18:24:34 +0200 Subject: [PATCH 028/100] Updating notebook. --- docs/notebooks/at_with_nips.ipynb | 156 ++++++++++++++++-------------- 1 file changed, 84 insertions(+), 72 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index cdcec88c65..016b4f0969 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -101,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 115, + "execution_count": 185, "metadata": { "collapsed": false }, @@ -138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 116, + "execution_count": 186, "metadata": { "collapsed": false }, @@ -163,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 117, + "execution_count": 187, "metadata": { "collapsed": true }, @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 118, + "execution_count": 188, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 119, + "execution_count": 189, "metadata": { "collapsed": false }, @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 120, + "execution_count": 190, "metadata": { "collapsed": false }, @@ -245,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 121, + "execution_count": 191, "metadata": { "collapsed": true }, @@ -260,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 122, + "execution_count": 192, "metadata": { "collapsed": true }, @@ -279,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 123, + "execution_count": 193, "metadata": { "collapsed": true }, @@ -291,7 +291,7 @@ }, { 
"cell_type": "code", - "execution_count": 124, + "execution_count": 194, "metadata": { "collapsed": false }, @@ -300,7 +300,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv
5nU63Bp6dw+1N0+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0vH+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EAe01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9Fx
Kxc3z8lPUz6+fhFKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AApyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwK
b\nSnpz6Rh7YlpE3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSPirREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnSNPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8
C9oqI4qyQdtJt8ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpNmZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX
1FwBJw0hjfc7O8XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgAElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJT
oLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKRGt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBwL356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNgDuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzM
zOz1anWMUEASFpb0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5mS3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJz
leAp4DDImJqIeayfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZfOYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qvpnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpTC5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJ
S4nJCIeYxSfuQ\nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvAkfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0m
QmZlZE1qyBNZeu9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9QfVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmbWZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDLwGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3
AXZXvfgiY3+q4jCBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAtzqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3QtMBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4Lx8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eap
POrdZJ0rqRnJfVJ6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacBtle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 125, + "execution_count": 195, "metadata": { "collapsed": true }, @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 126, + "execution_count": 196, "metadata": { "collapsed": true }, @@ -358,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 127, + "execution_count": 197, "metadata": { "collapsed": false }, @@ -733,7 +733,7 @@ }, { "cell_type": "code", - "execution_count": 134, + "execution_count": 177, "metadata": { "collapsed": false }, @@ -742,8 +742,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 286 x 2245 x 10 (6420700 elements)\n", - "mu is 286 x 2245 x 578 (371116460 elements)\n" + "phi is 1740 x 8640 x 10 (150336000 elements)\n", + "mu is 1740 x 8640 x 3467 (52121491200 elements)\n" ] } ], @@ -756,7 +756,29 @@ }, { "cell_type": "code", - "execution_count": 157, + "execution_count": 207, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": 
[ + "CPU times: user 38.1 s, sys: 8 ms, total: 38.1 s\n", + "Wall time: 38.2 s\n" + ] + } + ], + "source": [ + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", + " iterations=100, alpha='symmetric', eta='auto', random_state=1)\n", + "#var_lambda = lda.state.get_lambda()" + ] + }, + { + "cell_type": "code", + "execution_count": 180, "metadata": { "collapsed": false }, @@ -768,7 +790,7 @@ }, { "cell_type": "code", - "execution_count": 159, + "execution_count": 181, "metadata": { "collapsed": false }, @@ -777,8 +799,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1min 30s, sys: 536 ms, total: 1min 31s\n", - "Wall time: 1min 30s\n" + "CPU times: user 13min 24s, sys: 5.46 s, total: 13min 29s\n", + "Wall time: 13min 24s\n" ] } ], @@ -791,7 +813,7 @@ }, { "cell_type": "code", - "execution_count": 162, + "execution_count": 182, "metadata": { "collapsed": false }, @@ -800,28 +822,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.019*memory + 0.016*chip + 0.012*synapse + 0.010*energy + 0.009*circuit + 0.009*analog + 0.009*bit + 0.008*hidden + 0.007*pulse + 0.007*associative'),\n", + " '0.009*net + 0.009*layer + 0.008*word + 0.008*hidden + 0.007*recognition + 0.006*speech + 0.006*node + 0.005*architecture + 0.005*signal + 0.005*memory'),\n", " (1,\n", - " '0.013*node + 0.009*vector + 0.009*role + 0.008*dynamic + 0.007*temporal + 0.006*propagation + 0.006*noise + 0.006*matrix + 0.006*transition + 0.006*variable'),\n", + " '0.012*classifier + 0.010*class + 0.008*classification + 0.007*rule + 0.006*recognition + 0.005*speech + 0.004*trained + 0.004*node + 0.004*rbf + 0.004*expert'),\n", " (2,\n", - " '0.019*activation + 0.015*processor + 0.011*cycle + 0.010*machine + 0.009*path + 0.007*update + 0.007*action + 0.007*element + 0.006*array + 0.006*operation'),\n", + " '0.009*neuron + 0.007*bound + 0.006*theorem + 0.006*let + 0.005*threshold + 0.004*matrix + 0.004*proof + 0.004*class + 0.004*solution + 
0.004*xi'),\n", " (3,\n", - " '0.017*node + 0.010*threshold + 0.009*circuit + 0.008*probability + 0.008*classifier + 0.007*distribution + 0.005*class + 0.005*let + 0.005*vector + 0.004*bit'),\n", + " '0.005*gaussian + 0.005*likelihood + 0.004*prior + 0.004*density + 0.004*approximation + 0.004*estimate + 0.004*mixture + 0.004*sample + 0.004*bayesian + 0.004*markov'),\n", " (4,\n", - " '0.016*memory + 0.010*cell + 0.010*fig + 0.010*delay + 0.010*vector + 0.009*cortex + 0.009*matrix + 0.008*map + 0.007*phase + 0.007*associative'),\n", + " '0.009*component + 0.007*kernel + 0.007*matrix + 0.006*distance + 0.005*image + 0.004*signal + 0.004*pca + 0.004*source + 0.004*independent + 0.004*noise'),\n", " (5,\n", - " '0.040*cell + 0.016*firing + 0.014*response + 0.012*stimulus + 0.010*frequency + 0.010*spike + 0.010*activity + 0.010*potential + 0.007*current + 0.007*synaptic'),\n", + " '0.013*object + 0.009*field + 0.006*layer + 0.005*recognition + 0.005*view + 0.005*map + 0.005*image + 0.005*net + 0.004*sequence + 0.004*code'),\n", " (6,\n", - " '0.020*hidden + 0.017*recognition + 0.017*speech + 0.010*propagation + 0.009*hidden_unit + 0.009*back_propagation + 0.009*trained + 0.009*classifier + 0.008*training_set + 0.007*hidden_layer'),\n", + " '0.016*circuit + 0.013*chip + 0.012*neuron + 0.011*analog + 0.010*voltage + 0.007*signal + 0.006*noise + 0.006*vlsi + 0.005*channel + 0.004*implementation'),\n", " (7,\n", - " '0.014*vector + 0.009*region + 0.009*code + 0.008*class + 0.008*chain + 0.006*human + 0.006*matrix + 0.006*domain + 0.006*probability + 0.006*equilibrium'),\n", + " '0.016*cell + 0.013*neuron + 0.008*control + 0.007*response + 0.006*stimulus + 0.006*spike + 0.006*activity + 0.005*synaptic + 0.005*action + 0.005*firing'),\n", " (8,\n", - " '0.013*field + 0.009*constraint + 0.007*line + 0.007*analog + 0.007*noise + 0.006*velocity + 0.006*gradient + 0.006*energy + 0.006*minimum + 0.005*optimization'),\n", + " '0.007*generalization + 0.006*hidden + 0.005*optimal + 
0.005*gradient + 0.005*noise + 0.004*solution + 0.004*hidden_unit + 0.003*training_set + 0.003*cost + 0.003*minimum'),\n", " (9,\n", - " '0.031*image + 0.018*object + 0.011*visual + 0.008*joint + 0.007*vector + 0.007*position + 0.007*region + 0.007*pixel + 0.006*view + 0.006*fig')]" + " '0.023*image + 0.011*visual + 0.008*motion + 0.007*map + 0.006*eye + 0.006*field + 0.005*object + 0.005*orientation + 0.005*pixel + 0.005*direction')]" ] }, - "execution_count": 162, + "execution_count": 182, "metadata": {}, "output_type": "execute_result" } @@ -832,7 +854,7 @@ }, { "cell_type": "code", - "execution_count": 163, + "execution_count": 183, "metadata": { "collapsed": false }, @@ -843,42 +865,43 @@ "text": [ "\n", "Yaser S.Abu-Mostafa\n", - "Docs: [21]\n", - "[(0, 0.037231992567152018),\n", - " (1, 0.016698200630964527),\n", - " (2, 0.0342011513633367),\n", - " (3, 0.67231870768323798),\n", - " (4, 0.013204120807888714),\n", - " (7, 0.18016218284618693),\n", - " (8, 0.015739022916034465),\n", - " (9, 0.02510090606398686)]\n", + "Docs: [1269]\n", + "[(0, 0.014276128036243068),\n", + " (1, 0.14997442204053549),\n", + " (2, 0.066977058012639326),\n", + " (3, 0.1005138681465144),\n", + " (4, 0.42617224612011045),\n", + " (5, 0.013753926706215542),\n", + " (6, 0.068383760611387165),\n", + " (8, 0.15630604619968017)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [276, 235, 270]\n", - "[(0, 0.05309541750092419),\n", - " (1, 0.19415617091685813),\n", - " (2, 0.031847363809780198),\n", - " (3, 0.019551957323076154),\n", - " (6, 0.33640266674595204),\n", - " (7, 0.040241620376066808),\n", - " (8, 0.030507189050582355),\n", - " (9, 0.28201509780410544)]\n", + "[(0, 0.19472153164068895),\n", + " (1, 0.10329311184348117),\n", + " (3, 0.025968463276070212),\n", + " (4, 0.017663281758177558),\n", + " (5, 0.54778614222038646),\n", + " (7, 0.013259175006857452),\n", + " (8, 0.050507870947931986),\n", + " (9, 0.034423560720921689)]\n", "\n", "Michael I. 
Jordan\n", "Docs: [205]\n", - "[(1, 0.13235075631104087),\n", - " (2, 0.36064441736611164),\n", - " (4, 0.033312619049718932),\n", - " (8, 0.3828853340620974),\n", - " (9, 0.064817361491075531)]\n", + "[(0, 0.029225477213953788),\n", + " (1, 0.013712801819294291),\n", + " (2, 0.019353402713854918),\n", + " (3, 0.087509201712253584),\n", + " (5, 0.030992806920687628),\n", + " (7, 0.64444478894908952),\n", + " (8, 0.1681356134963537)]\n", "\n", "James M. Bower\n", "Docs: [188, 251, 244]\n", - "[(2, 0.021041919799574347),\n", - " (4, 0.59368476827823236),\n", - " (5, 0.32772809273108228),\n", - " (8, 0.024300185703856069),\n", - " (9, 0.012102285218647514)]\n" + "[(5, 0.045995989778604321),\n", + " (6, 0.037665256830351566),\n", + " (7, 0.79540557768386366),\n", + " (9, 0.086156019081986016)]\n" ] } ], @@ -1173,17 +1196,6 @@ " iterations=100, alpha='auto', eta='symmetric')" ] }, - { - "cell_type": "code", - "execution_count": 153, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "var_lambda = lda.state.get_lambda()" - ] - }, { "cell_type": "code", "execution_count": 154, From 1dc7e6aea47106a0d246192a927660ec03e171ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Thu, 20 Oct 2016 14:12:12 +0200 Subject: [PATCH 029/100] Working on line search for hyperparam MLE. 
--- docs/notebooks/at_with_nips.ipynb | 112 +++++++++++++++++++----------- gensim/models/atvb.py | 77 ++++++++++++++++++-- 2 files changed, 141 insertions(+), 48 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 016b4f0969..038dbad55f 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -101,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 185, + "execution_count": 208, "metadata": { "collapsed": false }, @@ -116,7 +116,7 @@ "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00', '01', '02']\n", + "yrs = ['00']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 186, + "execution_count": 209, "metadata": { "collapsed": false }, @@ -163,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 187, + "execution_count": 210, "metadata": { "collapsed": true }, @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 188, + "execution_count": 211, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 189, + "execution_count": 212, "metadata": { "collapsed": false }, @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 190, + "execution_count": 213, "metadata": { "collapsed": false }, @@ -245,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 191, + "execution_count": 214, "metadata": { "collapsed": true }, @@ -260,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 192, + "execution_count": 215, "metadata": { "collapsed": true }, @@ -279,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 193, + "execution_count": 216, "metadata": { "collapsed": true }, @@ -291,16 +291,16 @@ }, { "cell_type": "code", - "execution_count": 194, + "execution_count": 217, 
"metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv5nU63Bp6dw+1N0+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0v
H+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EAe01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9FxKxc3z8lPUz6+fhFKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AA
pyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwKb\nSnpz6Rh7YlpE3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK
+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSPirREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnSNPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8C9oqI4qyQdtJt8ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp
3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpNmZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX1FwBJw0hjfc7O8XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY
8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgAElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKR
Gt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBwL356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNgDuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzMzOz1anWMUEASFpb0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9
IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5mS3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJzleAp4DDImJqIeayfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZf
OYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qvpnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpTC5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJS4nJCIeYxSfuQ\nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvA
kfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0mQmZlZE1qyBNZeu9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9Q
fVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmbWZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDLwGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3AXZXvfgiY3+q4jCBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAt
zqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3QtMBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4Lx8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eapPOrdZJ0rqRnJfVJ6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacB
tle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2td
bgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3em
LLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5Aun
P0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzU
mtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZ
mZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHx
EPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB
0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIw
ip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4
GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9
Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJ
z0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIk
Sa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 195, + "execution_count": 218, "metadata": { "collapsed": true }, @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 196, + "execution_count": 219, "metadata": { "collapsed": true }, @@ -358,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 197, + "execution_count": 220, "metadata": { "collapsed": false }, @@ -367,9 +367,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 578\n", - "Number of unique tokens: 2245\n", - "Number of documents: 286\n" + "Number of authors: 166\n", + "Number of unique tokens: 681\n", + "Number of documents: 90\n" ] } ], @@ -540,7 +540,7 @@ }, { "cell_type": "code", - "execution_count": 128, + "execution_count": 298, "metadata": { "collapsed": false }, @@ -558,7 +558,7 @@ }, { "cell_type": "code", - "execution_count": 129, + "execution_count": 299, "metadata": { "collapsed": false }, @@ -575,7 +575,7 @@ }, { "cell_type": "code", - "execution_count": 130, + "execution_count": 300, "metadata": { "collapsed": false }, @@ -588,7 +588,7 @@ }, { "cell_type": "code", - "execution_count": 131, + "execution_count": 301, "metadata": { "collapsed": false }, @@ -605,7 +605,7 @@ }, { "cell_type": "code", - "execution_count": 132, + "execution_count": 302, "metadata": { "collapsed": false }, @@ -733,7 +733,7 @@ }, { "cell_type": "code", - "execution_count": 177, + "execution_count": 221, "metadata": { "collapsed": false }, @@ -742,8 +742,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 1740 x 8640 x 10 (150336000 elements)\n", - "mu is 1740 x 8640 x 3467 (52121491200 elements)\n" + "phi is 90 x 681 x 10 (612900 elements)\n", + "mu is 90 x 681 x 166 (10174140 elements)\n" ] } ], @@ -756,7 +756,7 @@ }, { "cell_type": "code", - "execution_count": 207, + "execution_count": 238, "metadata": { "collapsed": false 
}, @@ -765,20 +765,20 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 38.1 s, sys: 8 ms, total: 38.1 s\n", - "Wall time: 38.2 s\n" + "CPU times: user 7.81 s, sys: 0 ns, total: 7.81 s\n", + "Wall time: 7.81 s\n" ] } ], "source": [ "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", - " iterations=100, alpha='symmetric', eta='auto', random_state=1)\n", - "#var_lambda = lda.state.get_lambda()" + " iterations=100, alpha='auto', eta='symmetric', random_state=1)\n", + "var_lambda = lda.state.get_lambda()" ] }, { "cell_type": "code", - "execution_count": 180, + "execution_count": 294, "metadata": { "collapsed": false }, @@ -790,7 +790,7 @@ }, { "cell_type": "code", - "execution_count": 181, + "execution_count": 295, "metadata": { "collapsed": false }, @@ -799,8 +799,36 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 13min 24s, sys: 5.46 s, total: 13min 29s\n", - "Wall time: 13min 24s\n" + "172.5246533\n", + "0\n", + "-769.539793625\n", + "0\n", + "-1356.47731679\n", + "-1811.24359951\n", + "-2124.29912007\n", + "-2290.57120099\n", + "-2341.26972092\n", + "-2121.30550289\n", + "0\n", + "-2198.33258722\n", + "-2096.70620026\n", + "0\n", + "-2136.86845538\n", + "-2147.51877939\n", + "0\n", + "-2171.73124093\n", + "-2228.3126994\n", + "0\n", + "-2351.10110539\n", + "0\n", + "-2482.9042994\n", + "0\n", + "-2639.09181461\n", + "0\n", + "-2814.24635543\n", + "0\n", + "CPU times: user 14.5 s, sys: 16 ms, total: 14.5 s\n", + "Wall time: 14.5 s\n" ] } ], @@ -1052,7 +1080,7 @@ }, { "cell_type": "code", - "execution_count": 77, + "execution_count": 304, "metadata": { "collapsed": true }, @@ -1064,7 +1092,7 @@ }, { "cell_type": "code", - "execution_count": 90, + "execution_count": 310, "metadata": { "collapsed": false }, @@ -1076,7 +1104,7 @@ }, { "cell_type": "code", - "execution_count": 92, + "execution_count": 312, "metadata": { "collapsed": false }, @@ -1085,15 +1113,15 @@ "name": 
"stdout", "output_type": "stream", "text": [ - "CPU times: user 3.43 s, sys: 4 ms, total: 3.43 s\n", - "Wall time: 3.44 s\n" + "CPU times: user 1.61 s, sys: 0 ns, total: 1.61 s\n", + "Wall time: 1.61 s\n" ] } ], "source": [ "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='auto', eta='auto', \\\n", + " iterations=10, alpha='symmetric', eta='symmetric', \\\n", " eval_every=1, random_state=1, var_lambda=var_lambda_init)" ] }, diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 40dc7421a2..5d9379875a 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -20,11 +20,13 @@ from gensim.models import LdaModel from scipy.special import gammaln, psi # gamma function utils from scipy.special import polygamma +from scipy.optimize import line_search from six.moves import xrange from pprint import pprint from random import sample +from copy import deepcopy # log(sum(exp(x))) that tries to avoid overflow try: @@ -62,6 +64,24 @@ def update_dir_prior(prior, N, logphat, rho): return prior +def dir_mle_search_direction(prior, N, logphat): + """ + Updates a given prior using Newton's method, described in + **Huang: Maximum Likelihood Estimation of Dirichlet Distribution Parameters.** + http://jonathan-huang.org/research/dirichlet/dirichlet.pdf + """ + dprior = numpy.copy(prior) + gradf = N * (psi(numpy.sum(prior)) - psi(prior) + logphat) + + c = N * polygamma(1, numpy.sum(prior)) + q = -N * polygamma(1, prior) + + b = numpy.sum(gradf / q) / (1 / c + numpy.sum(1 / q)) + + dprior = -(gradf - b) / q + + return dprior + class AtVb(LdaModel): """ Train the author-topic model using variational Bayes. 
@@ -163,12 +183,28 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, if corpus is not None: self.inference(corpus, author2doc, doc2author, var_lambda) - def update_alpha(self, var_gamma, rho): + def update_alpha(self, var_gamma): """ Update parameters for the Dirichlet prior on the per-document topic weights `alpha` given the last `var_gamma`. """ - N = float(len(var_gamma)) + N = float(var_gamma.shape[0]) + + # NOTE: there might be possibility for overflow if number + # of authors is very high. + logphat = 0.0 + for a in xrange(self.num_authors): + logphat += dirichlet_expectation(var_gamma[a, :]) + logphat *= 1 / N + + self.alpha = update_dir_prior(self.alpha, N, logphat, 1) + + def update_alpha_ls(self, var_gamma): + """ + Work in progress. + MLE of alpha with line search. + """ + N = float(var_gamma.shape[0]) # NOTE: there might be possibility for overflow if number # of authors is very high. @@ -177,12 +213,41 @@ def update_alpha(self, var_gamma, rho): logphat += dirichlet_expectation(var_gamma[a, :]) logphat *= 1 / N - self.alpha = update_dir_prior(self.alpha, N, logphat, rho) + def f(alpha): + '''Compute the Dirichlet likelihood.''' + return -N * (gammaln(numpy.sum(alpha)) - numpy.sum(gammaln(alpha)) + numpy.sum((alpha - 1) * logphat)) + + def g(alpha): + '''Compute the first derivative of the Dirichlet likelihood.''' + return -N * (psi(numpy.sum(alpha)) - psi(alpha) + logphat) + + + # TODO: consider what stopping criterion to use here, and + # how many maximum iterations to use. + # TODO: consider using line search. + f1 = f(self.alpha) + #print(f1) + #print(0) + for i in xrange(10): + # Obtain search direction for Newton step. + pk = dir_mle_search_direction(self.alpha, N, logphat) + # Obtain stepsize using Wolfe condition. + stepsize = line_search(f, g, self.alpha, pk)[0] + # Update alpha. + # NOTE: need to check that update is positive. 
+ self.alpha += stepsize * pk + f2 = f(self.alpha) + if (f2 - f1) / f1 < 0.01: + break + else: + f1 = f2 + #print(f2) + # logger.info("optimized eta %s", list(self.alpha)) return self.alpha - def update_eta(self, var_lambda, rho): + def update_eta(self, var_lambda): """ Update parameters for the Dirichlet prior on the per-document topic weights `eta` given the last `var_lambda`. @@ -331,7 +396,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No # very arbitrary; if a carefully chosen stepsize is needed, # linesearch would probably be better. stepsize = 1 - self.update_alpha(var_gamma, stepsize) + self.update_alpha(var_gamma) # Update Elogtheta, since gamma has been updated. Elogtheta = dirichlet_expectation(var_gamma) @@ -363,7 +428,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No if self.optimize_eta: stepsize = 1 - self.update_eta(var_lambda, stepsize) + self.update_eta(var_lambda) # Update Elogbeta, since lambda has been updated. Elogbeta = dirichlet_expectation(var_lambda) From 910c62613952caa6b69255d4528aa832c5f852ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Thu, 20 Oct 2016 14:59:14 +0200 Subject: [PATCH 030/100] Made some structural changes to bound and log probability computation. 
--- docs/notebooks/at_with_nips.ipynb | 38 ++-------- gensim/models/atvb.py | 112 ++++++++++++++++-------------- 2 files changed, 65 insertions(+), 85 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 038dbad55f..f4172f8217 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -778,7 +778,7 @@ }, { "cell_type": "code", - "execution_count": 294, + "execution_count": 338, "metadata": { "collapsed": false }, @@ -790,7 +790,7 @@ }, { "cell_type": "code", - "execution_count": 295, + "execution_count": 339, "metadata": { "collapsed": false }, @@ -799,43 +799,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "172.5246533\n", - "0\n", - "-769.539793625\n", - "0\n", - "-1356.47731679\n", - "-1811.24359951\n", - "-2124.29912007\n", - "-2290.57120099\n", - "-2341.26972092\n", - "-2121.30550289\n", - "0\n", - "-2198.33258722\n", - "-2096.70620026\n", - "0\n", - "-2136.86845538\n", - "-2147.51877939\n", - "0\n", - "-2171.73124093\n", - "-2228.3126994\n", - "0\n", - "-2351.10110539\n", - "0\n", - "-2482.9042994\n", - "0\n", - "-2639.09181461\n", - "0\n", - "-2814.24635543\n", - "0\n", - "CPU times: user 14.5 s, sys: 16 ms, total: 14.5 s\n", - "Wall time: 14.5 s\n" + "CPU times: user 16.2 s, sys: 32 ms, total: 16.2 s\n", + "Wall time: 16.2 s\n" ] } ], "source": [ "%time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='auto', eta='auto', var_lambda=var_lambda, \\\n", + " iterations=10, alpha='symmetric', eta='auto', var_lambda=var_lambda, \\\n", " eval_every=1, random_state=1)" ] }, diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 5d9379875a..38dccc5652 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -259,7 +259,7 @@ def update_eta(self, var_lambda): logphat += dirichlet_expectation(var_lambda[k, :]) 
logphat *= 1 / N - self.eta = update_dir_prior(self.eta, N, logphat, rho) + self.eta = update_dir_prior(self.eta, N, logphat, 1) # logger.info("optimized eta %s", list(self.eta)) return self.eta @@ -270,11 +270,15 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No logger.info('Starting inference. Training on %d documents.', len(corpus)) + # Whether or not to evaluate bound and log probability, respectively. + bound_eval = False + logprob_eval = True + if var_lambda is None: - optimize_lambda = True + self.optimize_lambda = True else: # We have topics from LDA, thus we do not train the topics. - optimize_lambda = False + self.optimize_lambda = False # Initial value of gamma and lambda. # NOTE: parameters of gamma distribution same as in `ldamodel`. @@ -284,6 +288,10 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No if var_lambda is None: var_lambda = self.random_state.gamma(100., 1. / 100., (self.num_topics, self.num_terms)) + else: + self.norm_lambda = var_lambda.copy() + for k in xrange(self.num_topics): + self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] self.var_lambda = var_lambda self.var_gamma = var_gamma @@ -317,12 +325,16 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta, var_gamma) - beta_bound = self.beta_bound(Elogbeta, var_lambda) - bound = word_bound + theta_bound + beta_bound - #likelihood = self.log_word_prob(var_gamma, var_lambda) - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if self.eval_every > 0: + if bound_eval: + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) for iteration in xrange(self.iterations): #logger.info('Starting iteration %d.', iteration) # Update phi. @@ -402,7 +414,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No Elogtheta = dirichlet_expectation(var_gamma) # Update lambda. - if optimize_lambda: + if self.optimize_lambda: #logger.info('Updating lambda.') for k in xrange(self.num_topics): #logger.info('k = %d.', k) @@ -441,27 +453,27 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No # Print topics: #pprint(self.show_topics()) - # Evaluate bound. if (iteration + 1) % self.eval_every == 0: - #logger.info('Computing bound.') - prev_bound = bound - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta, var_gamma) - beta_bound = self.beta_bound(Elogbeta, var_lambda) - bound = word_bound + theta_bound + beta_bound - #likelihood = self.log_word_prob(var_gamma, var_lambda) - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.threshold: - break + if bound_eval: + prev_bound = deepcopy(bound) + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + if bound_eval: + if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.threshold: + break # End of update loop (iterations). return var_gamma, var_lambda def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): """ - Note that this is not strictly speaking a likelihood. - Compute the expectation of the log conditional likelihood of the data, E_q[log p(w_d | theta, beta, A_d)], @@ -512,18 +524,10 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): return bound - def theta_bound(self, Elogtheta, var_gamma, doc_ids=None): - """ - """ - - if doc_ids is None: - docs = self.corpus - else: - docs = [self.corpus[d] for d in doc_ids] - + def theta_bound(self, Elogtheta): bound = 0.0 for a in xrange(self.num_authors): - var_gamma_a = var_gamma[a, :] + var_gamma_a = self.var_gamma[a, :] Elogtheta_a = Elogtheta[a, :] # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) @@ -532,15 +536,15 @@ def theta_bound(self, Elogtheta, var_gamma, doc_ids=None): return bound - def beta_bound(self, Elogbeta, var_lambda, doc_ids=None): + def beta_bound(self, Elogbeta): bound = 0.0 - bound += numpy.sum((self.eta - var_lambda) * Elogbeta) - bound += numpy.sum(gammaln(var_lambda) - gammaln(self.eta)) - bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(var_lambda, 1))) + bound += numpy.sum((self.eta - self.var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(self.var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(self.var_lambda, 1))) return bound - def log_word_prob(self, var_gamma, var_lambda, doc_ids=None): + def eval_logprob(self, doc_ids=None): """ Compute the liklihood of the corpus under the model, by first computing the conditional probabilities of the words in a @@ 
-551,35 +555,39 @@ def log_word_prob(self, var_gamma, var_lambda, doc_ids=None): summing over all documents, and dividing by the number of documents. """ - norm_gamma = var_gamma.copy() - norm_lambda = var_lambda.copy() + # TODO: if var_lambda is supplied from LDA, normalizing it every time + # is unnecessary. + norm_gamma = self.var_gamma.copy() for a in xrange(self.num_authors): - norm_gamma[a, :] = var_gamma[a, :] / var_gamma.sum(axis=1)[a] - for k in xrange(self.num_topics): - norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] + + if self.optimize_lambda: + norm_lambda = self.var_lambda.copy() + for k in xrange(self.num_topics): + norm_lambda[k, :] = self.var_lambda[k, :] / self.var_lambda.sum(axis=1)[k] + else: + norm_lambda = self.norm_lambda if doc_ids is None: docs = self.corpus else: docs = [self.corpus[d] for d in doc_ids] - log_word_prob = 0.0 + logprob = 0.0 for d, doc in enumerate(docs): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. authors_d = self.doc2author[d] - log_word_prob_d = 0.0 + logprob_d = 0.0 for vi, v in enumerate(ids): - log_word_prob_v = 0.0 + logprob_v = 0.0 for k in xrange(self.num_topics): for a in authors_d: - log_word_prob_v += norm_gamma[a, k] * norm_lambda[k, v] - log_word_prob_d += cts[vi] * numpy.log(log_word_prob_v) - log_word_prob += numpy.log(1.0 / len(authors_d)) + log_word_prob_d - #authors_idxs = [self.authorid2idx[aid] for aid in authors_d] - #likelihood += author_prior_prob * numpy.sum(cnt * numpy.log(numpy.sum(numpy.exp(logsumexp(Elogtheta[a, :] + Elogbeta[:, id])) for a in authors_idxs)) for id, cnt in doc) + logprob_v += norm_gamma[a, k] * norm_lambda[k, v] + logprob_d += cts[vi] * numpy.log(logprob_v) + logprob += numpy.log(1.0 / len(authors_d)) + logprob_d - return log_word_prob + return logprob # Overriding LdaModel.get_topic_terms. 
def get_topic_terms(self, topicid, topn=10): From 7dbd01f942f732be1d68aeee3c7c83164009580c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Thu, 20 Oct 2016 15:13:18 +0200 Subject: [PATCH 031/100] In process of updating online algo w.r.t. changes in offline algo. --- gensim/models/onlineatvb.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 9cefee43a2..fe281569c6 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -18,6 +18,7 @@ from gensim import utils, matutils from gensim.models.ldamodel import dirichlet_expectation, get_random_state from gensim.models import LdaModel +from gensim.models import AtVb from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. from six.moves import xrange from scipy.special import gammaln @@ -35,7 +36,7 @@ logger = logging.getLogger(__name__) -class OnlineAtVb(LdaModel): +class OnlineAtVb(AtVb): """ Train the author-topic model using online variational Bayes. """ @@ -46,12 +47,6 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, iterations=10, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, eval_every=1, random_state=None): - # TODO: allow for asymmetric priors. - if alpha is None: - alpha = 1.0 / num_topics - if eta is None: - eta = 1.0 / num_topics - self.id2word = id2word if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') @@ -117,14 +112,16 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, # Make the reverse mapping, from author names to author IDs. self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + # NOTE: I don't think this necessarily is a good way to initialize the topics. 
+ self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + self.corpus = corpus self.iterations = iterations self.passes = passes self.num_topics = num_topics self.threshold = threshold self.minimum_probability = minimum_probability - self.alpha = alpha - self.eta = eta self.decay = decay self.offset = offset self.num_docs = len(corpus) From 9a045334b41aa7f3c1a27a6fec0858e55a094f90 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 21 Oct 2016 14:54:00 +0200 Subject: [PATCH 032/100] Mostly updated the online algorithm according to changes that have been done to the offline. --- docs/notebooks/at_with_nips.ipynb | 141 +++++++++++++------- gensim/models/atvb.py | 1 + gensim/models/onlineatvb.py | 206 ++++++++++++++++++------------ 3 files changed, 215 insertions(+), 133 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index f4172f8217..f94feba2ef 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -73,7 +73,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -81,8 +81,8 @@ "source": [ "# Configure logging.\n", "\n", - "#log_dir = '../../../log_files/log.log' # On my own machine.\n", - "log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "log_dir = '../../../log_files/log.log' # On my own machine.\n", + "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", "\n", "logger = logging.getLogger()\n", "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", @@ -101,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 208, + "execution_count": 47, "metadata": { "collapsed": false }, @@ -111,8 +111,8 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "#data_dir = '../../../../data/nipstxt/' # On my own machine.\n", - "data_dir = 
'../../../nipstxt/' # On Hetzner.\n", + "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "#data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", @@ -138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 209, + "execution_count": 48, "metadata": { "collapsed": false }, @@ -163,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 210, + "execution_count": 49, "metadata": { "collapsed": true }, @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 211, + "execution_count": 50, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 212, + "execution_count": 51, "metadata": { "collapsed": false }, @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 213, + "execution_count": 52, "metadata": { "collapsed": false }, @@ -245,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 214, + "execution_count": 53, "metadata": { "collapsed": true }, @@ -260,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 215, + "execution_count": 54, "metadata": { "collapsed": true }, @@ -279,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 216, + "execution_count": 55, "metadata": { "collapsed": true }, @@ -291,7 +291,7 @@ }, { "cell_type": "code", - "execution_count": 217, + "execution_count": 56, "metadata": { "collapsed": false }, @@ -300,7 +300,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+
eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g
/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sC
Ddbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaS
RD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQ
L9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELSh
qTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVI
OgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs
5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMy
sST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp
5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVl
LchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 218, + "execution_count": 57, "metadata": { "collapsed": true }, @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 219, + "execution_count": 58, "metadata": { "collapsed": true }, @@ -358,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 220, + "execution_count": 59, "metadata": { "collapsed": false }, @@ -388,7 +388,7 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": 60, "metadata": { "collapsed": false }, @@ -397,16 +397,61 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 8min 11s, sys: 88 ms, total: 8min 11s\n", - "Wall time: 8min 12s\n" + "CPU times: user 15.2 s, sys: 16 ms, total: 15.3 
s\n", + "Wall time: 15.3 s\n" + ] + } + ], + "source": [ + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", + " iterations=100, alpha='auto', eta='symmetric', random_state=1)\n", + "var_lambda = lda.state.get_lambda()" + ] + }, + { + "cell_type": "code", + "execution_count": 71, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(onlineatvb)\n", + "OnlineAtVb = onlineatvb.OnlineAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'time model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, author2doc=author2doc, doc2author=doc2author, threshold=1e-10, iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=64.0, eval_every=1, random_state=1, var_lambda=var_lambda)'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' 
'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m 
\u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m 
\u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, id2author, author2doc, doc2author, threshold, minimum_probability, iterations, passes, alpha, eta, decay, offset, eval_every, random_state, var_lambda)\u001b[0m\n\u001b[1;32m 131\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 132\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 133\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvar_lambda\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 134\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 135\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mrho\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, 
var_lambda)\u001b[0m\n\u001b[1;32m 258\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0.0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 259\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 260\u001b[0;31m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcts\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_mu\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 261\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 262\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0malpha\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + 
"\u001b[0;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "%time model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-3, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=64.0, \\\n", - " eval_every=1, random_state=1)" + " eval_every=1, random_state=1, var_lambda=var_lambda)" ] }, { @@ -540,7 +585,7 @@ }, { "cell_type": "code", - "execution_count": 298, + "execution_count": 73, "metadata": { "collapsed": false }, @@ -558,7 +603,7 @@ }, { "cell_type": "code", - "execution_count": 299, + "execution_count": 74, "metadata": { "collapsed": false }, @@ -575,7 +620,7 @@ }, { "cell_type": "code", - "execution_count": 300, + "execution_count": 75, "metadata": { "collapsed": false }, @@ -588,7 +633,7 @@ }, { "cell_type": "code", - "execution_count": 301, + "execution_count": 76, "metadata": { "collapsed": false }, @@ -605,7 +650,7 @@ }, { "cell_type": "code", - "execution_count": 302, + "execution_count": 77, "metadata": { "collapsed": false }, @@ -621,7 +666,7 @@ }, { "cell_type": "code", - "execution_count": 133, + "execution_count": 78, "metadata": { "collapsed": false }, @@ -630,8 +675,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 10 x 2245 x 10 (224500 elements)\n", - "mu is 10 x 2245 x 21 (471450 elements)\n" + "phi is 10 x 681 x 10 (68100 elements)\n", + "mu is 10 x 681 x 22 (149820 elements)\n" ] } ], @@ -644,7 +689,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 92, "metadata": { "collapsed": false }, @@ -656,7 +701,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 93, "metadata": { "collapsed": false }, @@ -665,16 +710,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 7.46 s, sys: 0 ns, total: 7.46 s\n", - "Wall time: 7.48 s\n" + "CPU times: user 
14.1 s, sys: 0 ns, total: 14.1 s\n", + "Wall time: 14.1 s\n" ] } ], "source": [ "%time model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-3, \\\n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-1, \\\n", " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=64.0, \\\n", - " eval_every=1, random_state=1)" + " eval_every=1, random_state=1, var_lambda=None)" ] }, { @@ -790,7 +835,7 @@ }, { "cell_type": "code", - "execution_count": 339, + "execution_count": 33, "metadata": { "collapsed": false }, @@ -799,15 +844,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 16.2 s, sys: 32 ms, total: 16.2 s\n", - "Wall time: 16.2 s\n" + "CPU times: user 24 s, sys: 84 ms, total: 24.1 s\n", + "Wall time: 24 s\n" ] } ], "source": [ "%time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='symmetric', eta='auto', var_lambda=var_lambda, \\\n", + " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=var_lambda, \\\n", " eval_every=1, random_state=1)" ] }, @@ -1059,7 +1104,7 @@ "outputs": [], "source": [ "lda = LdaModel(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, passes=10)\n", - "var_lambda_init = lda.state.get_lambda()" + "var_lambda = lda.state.get_lambda()" ] }, { @@ -1076,7 +1121,7 @@ }, { "cell_type": "code", - "execution_count": 312, + "execution_count": 88, "metadata": { "collapsed": false }, @@ -1085,8 +1130,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1.61 s, sys: 0 ns, total: 1.61 s\n", - "Wall time: 1.61 s\n" + "CPU times: user 16.6 s, sys: 12 ms, total: 16.6 s\n", + "Wall time: 16.6 s\n" ] } ], @@ -1094,7 +1139,7 @@ "%time model = AtVb(corpus=small_corpus, 
num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", " iterations=10, alpha='symmetric', eta='symmetric', \\\n", - " eval_every=1, random_state=1, var_lambda=var_lambda_init)" + " eval_every=1, random_state=1, var_lambda=None)" ] }, { @@ -1281,7 +1326,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.4.3+" + "version": "3.5.2" } }, "nbformat": 4, diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 38dccc5652..8e5649a4b6 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -327,6 +327,7 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No if self.eval_every > 0: if bound_eval: + # TODO: compute per-word bound. word_bound = self.word_bound(Elogtheta, Elogbeta) theta_bound = self.theta_bound(Elogtheta) beta_bound = self.beta_bound(Elogbeta) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index fe281569c6..970cebe246 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -18,8 +18,6 @@ from gensim import utils, matutils from gensim.models.ldamodel import dirichlet_expectation, get_random_state from gensim.models import LdaModel -from gensim.models import AtVb -from gensim.models.hdpmodel import log_normalize # For efficient normalization of variational parameters. from six.moves import xrange from scipy.special import gammaln @@ -36,7 +34,7 @@ logger = logging.getLogger(__name__) -class OnlineAtVb(AtVb): +class OnlineAtVb(LdaModel): """ Train the author-topic model using online variational Bayes. 
""" @@ -45,7 +43,7 @@ class OnlineAtVb(AtVb): def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, iterations=10, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, - eval_every=1, random_state=None): + eval_every=1, random_state=None, var_lambda=None): self.id2word = id2word if corpus is None and self.id2word is None: @@ -112,10 +110,6 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, # Make the reverse mapping, from author names to author IDs. self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) - # NOTE: I don't think this necessarily is a good way to initialize the topics. - self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) - self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) - self.corpus = corpus self.iterations = iterations self.passes = passes @@ -129,15 +123,19 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.eval_every = eval_every self.random_state = random_state + # NOTE: I don't think this necessarily is a good way to initialize the topics. + self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + self.random_state = get_random_state(random_state) if corpus is not None: - self.inference(corpus) + self.inference(corpus, var_lambda=var_lambda) def rho(self, t): return pow(self.offset + t, -self.decay) - def inference(self, corpus=None): + def inference(self, corpus=None, var_lambda=None): if corpus is None: # TODO: I can't remember why I used "copy()" here. corpus = self.corpus.copy() @@ -146,37 +144,55 @@ def inference(self, corpus=None): logger.info('Starting inference. Training on %d documents.', len(corpus)) + # Whether or not to evaluate bound and log probability, respectively. 
+ bound_eval = False + logprob_eval = True + + if var_lambda is None: + self.optimize_lambda = True + else: + # We have topics from LDA, thus we do not train the topics. + self.optimize_lambda = False + # Initial values of gamma and lambda. # NOTE: parameters of gamma distribution same as in `ldamodel`. - init_gamma = self.random_state.gamma(100., 1. / 100., + var_gamma = self.random_state.gamma(100., 1. / 100., (self.num_authors, self.num_topics)) - init_lambda = self.random_state.gamma(100., 1. / 100., - (self.num_topics, self.num_terms)) + tilde_gamma = var_gamma.copy() + self.var_gamma = var_gamma + + if var_lambda is None: + var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + tilde_lambda = var_lambda.copy() + else: + self.norm_lambda = var_lambda.copy() + for k in xrange(self.num_topics): + self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + + self.var_lambda = var_lambda # TODO: consider making phi sparse. Each document does not contain all terms. var_phi = numpy.zeros((self.num_terms, self.num_topics)) - var_gamma = init_gamma.copy() - var_lambda = init_lambda.copy() - tilde_gamma = init_gamma.copy() - tilde_lambda = init_lambda.copy() - # Initialize dirichlet expectations. Elogtheta = dirichlet_expectation(var_gamma) Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - # Evaluate bound. - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta, var_gamma) - beta_bound = self.beta_bound(Elogbeta, var_lambda) - bound = word_bound + theta_bound + beta_bound - #likelihood = self.log_word_prob(var_gamma, var_lambda) - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) t = 0 + if self.eval_every > 0: + if bound_eval: + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) for _pass in xrange(self.passes): converged = 0 # Number of documents converged for current pass over corpus. - prev_bound = bound for d, doc in enumerate(corpus): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. @@ -195,7 +211,8 @@ def inference(self, corpus=None): #logger.info('iteration %i', iteration) lastgamma = tilde_gamma.copy() - lastlambda = tilde_lambda.copy() + if self.optimize_lambda: + lastlambda = tilde_lambda.copy() # Update phi. for v in ids: @@ -211,7 +228,7 @@ def inference(self, corpus=None): var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi over k. - var_phi[v, :] = var_phi[v, :] / var_phi[v, :].sum() + var_phi[v, :] = var_phi[v, :] / (var_phi[v, :].sum() + 1e-100) # Update mu. for v in ids: @@ -231,7 +248,7 @@ def inference(self, corpus=None): mu_sum += var_mu[(v, a)] # Normalize mu. - mu_norm_const = 1.0 / mu_sum + mu_norm_const = 1.0 / (mu_sum + 1e-100) for a in authors_d: var_mu[(v, a)] *= mu_norm_const @@ -242,24 +259,30 @@ def inference(self, corpus=None): for vi, v in enumerate(ids): tilde_gamma[a, k] += cts[vi] * var_mu[(v, a)] * var_phi[v, k] tilde_gamma[a, k] *= len(self.author2doc[a]) - tilde_gamma[a, k] += self.alpha + tilde_gamma[a, k] += self.alpha[k] - # Update lambda. 
- #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T - for k in xrange(self.num_topics): - for vi, v in enumerate(ids): - cnt = dict(doc).get(v, 0) - var_lambda[k, v] = self.eta + self.num_docs * cnt * var_phi[v, k] + if self.optimize_lambda: + # Update lambda. + #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T + for k in xrange(self.num_topics): + for vi, v in enumerate(ids): + cnt = dict(doc).get(v, 0) + var_lambda[k, v] = self.eta[v] + self.num_docs * cnt * var_phi[v, k] # Check for convergence. # Criterion is mean change in "local" gamma and lambda. # TODO: consider using separate thresholds for lambda and gamma. if iteration > 0: meanchange_gamma = numpy.mean(abs(tilde_gamma - lastgamma)) - meanchange_lambda = numpy.mean(abs(tilde_lambda - lastlambda)) + gamma_condition = meanchange_gamma < self.threshold + if self.optimize_lambda: + meanchange_lambda = numpy.mean(abs(tilde_lambda - lastlambda)) + lambda_condition = meanchange_lambda < self.threshold + else: + lambda_condition = True # logger.info('Mean change in gamma: %.3e', meanchange_gamma) # logger.info('Mean change in lambda: %.3e', meanchange_lambda) - if meanchange_gamma < self.threshold and meanchange_lambda < self.threshold: + if gamma_condition and lambda_condition: # logger.info('Converged after %d iterations.', iteration) converged += 1 break @@ -273,12 +296,14 @@ def inference(self, corpus=None): # and "global" gamma (var_gamma). Same goes for lambda. # TODO: I may need to be smarter about computing rho. In ldamodel, # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). - rhot = self.rho(t) + rhot = self.rho(iteration + _pass) t += 1 var_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma - # Note that we only changed the elements in lambda corresponding to - # the words in document d, hence the [:, ids] indexing. 
- var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] + + if self.optimize_lambda: + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. + var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. Elogtheta = dirichlet_expectation(var_gamma) @@ -286,19 +311,25 @@ def inference(self, corpus=None): expElogbeta = numpy.exp(Elogbeta) # Print topics: - # self.var_lambda = var_lambda # pprint(self.show_topics()) # End of corpus loop. - # Evaluate bound. + self.var_gamma = var_gamma + self.var_lambda = var_lambda + if _pass % self.eval_every == 0: - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta, var_gamma) - beta_bound = self.beta_bound(Elogbeta, var_lambda) - bound = word_bound + theta_bound + beta_bound - #likelihood = self.log_word_prob(var_gamma, var_lambda) - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if self.eval_every > 0: + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) logger.info('Converged documents: %d/%d', converged, self.num_docs) @@ -315,8 +346,6 @@ def inference(self, corpus=None): def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): """ - Note that this is not strictly speaking a likelihood. 
- Compute the expectation of the log conditional likelihood of the data, E_q[log p(w_d | theta, beta, A_d)], @@ -331,6 +360,8 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): else: docs = [self.corpus[d] for d in doc_ids] + # NOTE: computing the bound this way is very numerically unstable, which is why + # "logsumexp" is used in the LDA code. bound= 0.0 for d, doc in enumerate(docs): authors_d = self.doc2author[d] @@ -350,21 +381,25 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): # TODO: can I do something along the lines of (as in ldamodel): # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) + # If I computed the LDA bound the way I compute the author-topic bound above: + # bound = 0.0 + # for d, doc in enumerate(docs): + # ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + # cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + # bound_d = 0.0 + # for vi, v in enumerate(ids): + # bound_v = 0.0 + # for k in xrange(self.num_topics): + # bound_v += numpy.exp(Elogtheta[d, k] + Elogbeta[k, v]) + # bound_d += cts[vi] * numpy.log(bound_v) + # bound += bound_d return bound - def theta_bound(self, Elogtheta, var_gamma, doc_ids=None): - """ - """ - - if doc_ids is None: - docs = self.corpus - else: - docs = [self.corpus[d] for d in doc_ids] - + def theta_bound(self, Elogtheta): bound = 0.0 for a in xrange(self.num_authors): - var_gamma_a = var_gamma[a, :] + var_gamma_a = self.var_gamma[a, :] Elogtheta_a = Elogtheta[a, :] # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) @@ -373,15 +408,15 @@ def theta_bound(self, Elogtheta, var_gamma, doc_ids=None): return bound - def beta_bound(self, Elogbeta, var_lambda, doc_ids=None): + def beta_bound(self, Elogbeta): bound = 0.0 - bound += numpy.sum((self.eta - var_lambda) * Elogbeta) - bound += numpy.sum(gammaln(var_lambda) - gammaln(self.eta)) - bound += 
numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(var_lambda, 1))) + bound += numpy.sum((self.eta - self.var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(self.var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(self.var_lambda, 1))) return bound - def log_word_prob(self, var_gamma, var_lambda, doc_ids=None): + def eval_logprob(self, doc_ids=None): """ Compute the liklihood of the corpus under the model, by first computing the conditional probabilities of the words in a @@ -392,35 +427,39 @@ def log_word_prob(self, var_gamma, var_lambda, doc_ids=None): summing over all documents, and dividing by the number of documents. """ - norm_gamma = var_gamma.copy() - norm_lambda = var_lambda.copy() + # TODO: if var_lambda is supplied from LDA, normalizing it every time + # is unnecessary. + norm_gamma = self.var_gamma.copy() for a in xrange(self.num_authors): - norm_gamma[a, :] = var_gamma[a, :] / var_gamma.sum(axis=1)[a] - for k in xrange(self.num_topics): - norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] + + if self.optimize_lambda: + norm_lambda = self.var_lambda.copy() + for k in xrange(self.num_topics): + norm_lambda[k, :] = self.var_lambda[k, :] / self.var_lambda.sum(axis=1)[k] + else: + norm_lambda = self.norm_lambda if doc_ids is None: docs = self.corpus else: docs = [self.corpus[d] for d in doc_ids] - log_word_prob = 0.0 + logprob = 0.0 for d, doc in enumerate(docs): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
authors_d = self.doc2author[d] - log_word_prob_d = 0.0 + logprob_d = 0.0 for vi, v in enumerate(ids): - log_word_prob_v = 0.0 + logprob_v = 0.0 for k in xrange(self.num_topics): for a in authors_d: - log_word_prob_v += norm_gamma[a, k] * norm_lambda[k, v] - log_word_prob_d += cts[vi] * numpy.log(log_word_prob_v) - log_word_prob += numpy.log(1.0 / len(authors_d)) + log_word_prob_d - #authors_idxs = [self.authorid2idx[aid] for aid in authors_d] - #likelihood += author_prior_prob * numpy.sum(cnt * numpy.log(numpy.sum(numpy.exp(logsumexp(Elogtheta[a, :] + Elogbeta[:, id])) for a in authors_idxs)) for id, cnt in doc) + logprob_v += norm_gamma[a, k] * norm_lambda[k, v] + logprob_d += cts[vi] * numpy.log(logprob_v) + logprob += numpy.log(1.0 / len(authors_d)) + logprob_d - return log_word_prob + return logprob # Overriding LdaModel.get_topic_terms. def get_topic_terms(self, topicid, topn=10): @@ -434,6 +473,7 @@ def get_topic_terms(self, topicid, topn=10): bestn = matutils.argsort(topic, topn, reverse=True) return [(id, topic[id]) for id in bestn] + def get_author_topics(self, author_id, minimum_probability=None): """ Return topic distribution the given author, as a list of @@ -455,7 +495,3 @@ def get_author_topics(self, author_id, minimum_probability=None): - - - - From b450609adb1f5efc334760692f6e13e035708a6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 21 Oct 2016 16:34:03 +0200 Subject: [PATCH 033/100] Fixed a critical mistake in the online algorithm. 
--- docs/notebooks/at_with_nips.ipynb | 111 ++++++++++++++---------------- gensim/models/onlineatvb.py | 48 +++++++------ 2 files changed, 77 insertions(+), 82 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index f94feba2ef..0acd006618 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 5, "metadata": { "collapsed": false }, @@ -73,7 +73,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 6, "metadata": { "collapsed": false }, @@ -101,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 7, "metadata": { "collapsed": false }, @@ -138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 8, "metadata": { "collapsed": false }, @@ -163,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 9, "metadata": { "collapsed": true }, @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 10, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 11, "metadata": { "collapsed": false }, @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 52, + "execution_count": 12, "metadata": { "collapsed": false }, @@ -245,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": 13, "metadata": { "collapsed": true }, @@ -260,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 14, "metadata": { "collapsed": true }, @@ -279,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 15, "metadata": { "collapsed": true }, @@ -291,7 +291,7 @@ }, { "cell_type": "code", - 
"execution_count": 56, + "execution_count": 16, "metadata": { "collapsed": false }, @@ -300,7 +300,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n
35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5P
cAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/B
EYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYz
M7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2v
q+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuB
bYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6
jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrC
k5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZm
ZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP
67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZB
A6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 57, + "execution_count": 17, "metadata": { "collapsed": true }, @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 58, + "execution_count": 18, "metadata": { "collapsed": true }, @@ -358,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": 19, "metadata": { "collapsed": false }, @@ -388,7 +388,7 @@ }, { "cell_type": "code", - "execution_count": 60, + "execution_count": 38, "metadata": { "collapsed": false }, @@ -397,8 +397,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 
15.2 s, sys: 16 ms, total: 15.3 s\n", - "Wall time: 15.3 s\n" + "CPU times: user 13.6 s, sys: 16 ms, total: 13.6 s\n", + "Wall time: 13.6 s\n" ] } ], @@ -410,7 +410,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 41, "metadata": { "collapsed": true }, @@ -422,35 +422,24 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 42, "metadata": { "collapsed": false }, "outputs": [ { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'time model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, author2doc=author2doc, doc2author=doc2author, threshold=1e-10, iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=64.0, eval_every=1, random_state=1, var_lambda=var_lambda)'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' '\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - 
"\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, id2author, author2doc, doc2author, threshold, minimum_probability, iterations, passes, alpha, eta, decay, offset, eval_every, random_state, var_lambda)\u001b[0m\n\u001b[1;32m 131\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 132\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 133\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvar_lambda\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 134\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 135\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mrho\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, var_lambda)\u001b[0m\n\u001b[1;32m 258\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0.0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 259\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 260\u001b[0;31m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcts\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_mu\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 261\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 262\u001b[0m \u001b[0mtilde_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0malpha\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 4min 10s, sys: 28 ms, total: 4min 
10s\n", + "Wall time: 4min 10s\n" ] } ], "source": [ "%time model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=64.0, \\\n", + " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", " eval_every=1, random_state=1, var_lambda=var_lambda)" ] }, @@ -585,7 +574,7 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 20, "metadata": { "collapsed": false }, @@ -603,7 +592,7 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 21, "metadata": { "collapsed": false }, @@ -620,7 +609,7 @@ }, { "cell_type": "code", - "execution_count": 75, + "execution_count": 22, "metadata": { "collapsed": false }, @@ -633,7 +622,7 @@ }, { "cell_type": "code", - "execution_count": 76, + "execution_count": 23, "metadata": { "collapsed": false }, @@ -650,7 +639,7 @@ }, { "cell_type": "code", - "execution_count": 77, + "execution_count": 24, "metadata": { "collapsed": false }, @@ -666,7 +655,7 @@ }, { "cell_type": "code", - "execution_count": 78, + "execution_count": 25, "metadata": { "collapsed": false }, @@ -689,7 +678,7 @@ }, { "cell_type": "code", - "execution_count": 92, + "execution_count": 36, "metadata": { "collapsed": false }, @@ -701,7 +690,7 @@ }, { "cell_type": "code", - "execution_count": 93, + "execution_count": 37, "metadata": { "collapsed": false }, @@ -710,15 +699,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 14.1 s, sys: 0 ns, total: 14.1 s\n", - "Wall time: 14.1 s\n" + "CPU times: user 41.6 s, sys: 20 ms, total: 41.6 s\n", + "Wall time: 41.6 s\n" ] } ], "source": [ "%time model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-1, \\\n", - " 
iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=64.0, \\\n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-3, \\\n", + " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", " eval_every=1, random_state=1, var_lambda=None)" ] }, @@ -778,7 +767,7 @@ }, { "cell_type": "code", - "execution_count": 221, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -823,7 +812,7 @@ }, { "cell_type": "code", - "execution_count": 338, + "execution_count": 43, "metadata": { "collapsed": false }, @@ -835,7 +824,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 44, "metadata": { "collapsed": false }, @@ -844,8 +833,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 24 s, sys: 84 ms, total: 24.1 s\n", - "Wall time: 24 s\n" + "CPU times: user 24.1 s, sys: 156 ms, total: 24.3 s\n", + "Wall time: 24.1 s\n" ] } ], @@ -1097,7 +1086,7 @@ }, { "cell_type": "code", - "execution_count": 304, + "execution_count": 30, "metadata": { "collapsed": true }, @@ -1109,7 +1098,7 @@ }, { "cell_type": "code", - "execution_count": 310, + "execution_count": 31, "metadata": { "collapsed": false }, @@ -1121,7 +1110,7 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -1130,8 +1119,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 16.6 s, sys: 12 ms, total: 16.6 s\n", - "Wall time: 16.6 s\n" + "CPU times: user 16.7 s, sys: 4 ms, total: 16.7 s\n", + "Wall time: 16.8 s\n" ] } ], diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 970cebe246..09754d2b32 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -194,6 +194,7 @@ def inference(self, corpus=None, var_lambda=None): for _pass in xrange(self.passes): converged = 0 # Number of documents converged for current pass over corpus. 
for d, doc in enumerate(corpus): + rhot = self.rho(d + _pass) ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. authors_d = self.doc2author[d] # List of author IDs for document d. @@ -261,13 +262,35 @@ def inference(self, corpus=None, var_lambda=None): tilde_gamma[a, k] *= len(self.author2doc[a]) tilde_gamma[a, k] += self.alpha[k] + # TODO: see what happens if we put the lambda update outside this loop (i.e. + # only one update per document). if self.optimize_lambda: # Update lambda. #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T for k in xrange(self.num_topics): for vi, v in enumerate(ids): cnt = dict(doc).get(v, 0) - var_lambda[k, v] = self.eta[v] + self.num_docs * cnt * var_phi[v, k] + tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * var_phi[v, k] + + # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, + # corresponding to the authors in the document. The same goes for Elogtheta. + + # Update gamma and lambda. + # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). Same goes for lambda. + # TODO: I may need to be smarter about computing rho. In ldamodel, + # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). + tilde_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + Elogtheta = dirichlet_expectation(tilde_gamma) + + if self.optimize_lambda: + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. + tilde_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] + Elogbeta = dirichlet_expectation(tilde_lambda) + expElogbeta = numpy.exp(Elogbeta) # Check for convergence. # Criterion is mean change in "local" gamma and lambda. 
@@ -288,27 +311,10 @@ def inference(self, corpus=None, var_lambda=None): break # End of iterations loop. - # TODO: I don't need to update the entire gamma, as I only updated a few rows of it, - # corresponding to the authors in the document. The same goes for Elogtheta. - - # Update gamma and lambda. - # Interpolation between document d's "local" gamma (tilde_gamma), - # and "global" gamma (var_gamma). Same goes for lambda. - # TODO: I may need to be smarter about computing rho. In ldamodel, - # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). - rhot = self.rho(iteration + _pass) - t += 1 - var_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma - + var_gamma = tilde_gamma.copy() + if self.optimize_lambda: - # Note that we only changed the elements in lambda corresponding to - # the words in document d, hence the [:, ids] indexing. - var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] - - # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. - Elogtheta = dirichlet_expectation(var_gamma) - Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) + var_lambda = tilde_lambda.copy() # Print topics: # pprint(self.show_topics()) From d3ca91765bf2dc680544cad276520879d85cff01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 4 Nov 2016 15:40:15 +0100 Subject: [PATCH 034/100] Removed a redundancy in lambda update. Updated notebook. 
--- docs/notebooks/at_with_nips.ipynb | 98 +++++++++++++++---------------- gensim/models/onlineatvb.py | 7 ++- 2 files changed, 54 insertions(+), 51 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 0acd006618..7e75eb95c7 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 4, + "execution_count": 1, "metadata": { "collapsed": false }, @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -73,7 +73,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -101,7 +101,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 61, "metadata": { "collapsed": false }, @@ -116,7 +116,7 @@ "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00']\n", + "yrs = ['00', '01']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 62, "metadata": { "collapsed": false }, @@ -163,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 63, "metadata": { "collapsed": true }, @@ -178,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 64, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 65, "metadata": { "collapsed": false }, @@ -222,7 +222,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 66, "metadata": { "collapsed": false }, @@ -245,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 67, "metadata": { "collapsed": true }, @@ -260,7 
+260,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 68, "metadata": { "collapsed": true }, @@ -279,7 +279,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 69, "metadata": { "collapsed": true }, @@ -291,16 +291,16 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 70, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58Ddw
O7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhd
KOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfP
yTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH
0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE
+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irg
PYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM
+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcw
do6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyb
lIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwA
eku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDt
P1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcXFWZ//HPNwGiLGlQJgEFAUUQUJa0SFBZnEgii+gM\nKDbogODCpvxaEZTRgQFRQUlki6KoLGojwqDswYBAWBRMEAOEIPsiCYSlgUAISZ7fH+cUublUb5Xu\nrkrX9/161atT5z733HNvdaefPveccxURmJmZmTWbYfVugJmZmVk9OAkyMzOzpuQkyMzMzJqSkyAz\nMzNrSk6CzMzMrCk5CTIzM7Om5CTIzMzMmpKTIDMzM2tKToLMzMysKTkJMrN+J+lxST8rvB8naYmk\nDw7Csb8r6bXC++H52BMH+tj5eF/Ix3vbYByvVpK+KelBSYsk3Vbv9vSWpHfl67tvvdtiKz4nQTZk\nSNo//+dY7fW9erevyVR7Hk+fn9Ej6b8lfbyGYy/p67H6qpu2BTWc62CStBvwPeDPwAHAd+raILM6\nWaneDTDrZ0H6D/3hUvldg98Uq4iIayW9OSIW9nHXbwPnA5f1YZ9jgeP7eJxadNW2XwLn13Cug+kj\nwGvAF8IPkLQm5iTIhqKrI2JGb4MlCVglIl4dwDY1vYFOCiStGhEvR8QSBqEnqCs5qWjkBAhgNDC/\nERMg/zzaYPLtMGsqxfEhkj4n6W5gATAub5ekr0m6W9ICSU9KmixpZKkeSfqfPPblJUlTJb1H0mOl\nsTDLjE8plFcdNyJpd0nTcp2dki6V9J5SzK8lPSdpvbz9RUlPSfpBleNIUrukf0h6JcddKWnrvP1m\nSX/r4lo9IKnbHpiurkOVuDeMCZK0iaT/kzQnt+1RSb+RtFrlcwJWASrXaknl2ubruiTX8TtJz5Fu\n7XR5zfO2z0manY93W3mMUr62/6yy3+t19qJtXX22Xyl8Xz0h6bQq31c3SZohaQtJf5b0cr62X+vu\ncyjsv5KkY/Nnt0BpzM/xklYutX0/oCW3c7G6GF+Tv3dek7RaoezovN8PCmUr5c//+ELZ6pIm5Z+J\nBZJmSfp/pfp7+nlcS9J5kp6X9KykXwDLXLMct66kc/O1WiDpX5IukbReb66bNS/3BNlQ1CLprcWC\niHimFDMe+AxwJvAs8Ggu/yXQlr/+GHgn8BVgK0k75F4GSOMpjgYuBaYArcA1wJtLx+lqfMgbyiUd\nAPwCuBI4ClgNOBSYJmmbiHi8sO9K+XjTgK/n8/mGpH9GxC8K1Z5H+oV3GfAz0i/uHYHtgL/n7ZMl\nbRIR9xXasj2wEfCtKm0v6u11qLS7Uv+IHDeMdJ3nAusBHwdGRsR8SZ8FfgXclK8LwP2luv4PuBf4\nZqGsq2s+DtgXOI10K+gwYIqk90fE7B72fb08Ihb3om3lz/a7wDHA1aTvuc1In21r6fsqgLWBq4Df\nAxcAnwZ+KOnOiLi2StuKzsnneAHpe2Ms6bbdpsA+hbYfCmwFfAkQcHMX9U0jfUYfIn1eAB8GFgM7\nFOJaSZ/5jfl8BVyR9/s58A9gV2CipHUj4ujScd7w85jruIz0vToZmA3sRbru5c/oD8DGpM/2UVJP\n13jS99TjmHUlIvzya0i8gP1Jt0HKr8WFmOG5bCGwcWn/nfO2vUrlu+byvfP7UXn/i0txP8hxPyuU\nnQAsrNLWg0i/SN6W368BPA+cXoobncvPKJSdn/c9qhT7d+CWwvtdcntO7uaarQm8AhxfKj8zH/dN\n3ezbl+swLrf5g/l9a475eA+f6SvFekrXdQlwThfbFhbeVz7zRcB7C+UbkHodLihd2/t6qrOHtpU/\n29H5Ol1aivtqjtuvUDYtl326ULYKKUn8bQ/Xakw+zzNL5RNznR8qneezvfiZGg68CJxQKHuWlGQt\nqHx/AN/I57h6fr9XbsuRpfouJiWg7+jFz2Oljq8WyoaREs/Fw
L657C3lOL/86u3Lt8NsqAngEOCj\nhdcuVeKujYj7S2V7k/6Dv17SWysv4G+kX3gfyXETSP95n17a/8fL0e6PkRKhC0rHXgzcXjh20c9K\n728i9VxV7EX6xX9CVweNiOeBy0m9B0C6RQF8ipTcLOimzeOp/To8n7/uKulNvYivJoCf9iF+WkS8\nPkA+Ih4h9TR8rMbj99YupOtUvi5nAS8Du5fKOyPiwsqbSGOpbmfZz7aa3UjXpLwUwCmk3p7ycXoU\nEYuBW0m9h0jaEmgBvg+sTOqlgdQ7dGdEvJTf70pKbM4sVTmRdC3K17zaz+OuwKsUvs8j9Zidkc+n\n4mVSYvURSS19PEVrck6CbCi6PSKuK76qxDxcpezdpL8qny695gJvIvV8ALwjf13mP+2ImEP6q7kW\nG5P+Y59WOvZTwL8Xjl3xUk5gip4D1iq8fyfweET01KbzgI0kjc3vPwa8ldRb0J0N8tc+X4eIeAA4\nFfgy8IykqyQdImmNHo5Z9lAfYsu/ZAHuA9aQtFaVbf2lcp3uKxZGGvj7UGF7xWNV6ih/tl0dZ1G+\ntsXjPEH6PMrH6a2bgG3zuKIdgMci4k7SjMvKLbEPkb53i215PCJeKdU1q7C96OEqx90AeKJKIj67\n+CZvPwbYA3hK0vWSjpRU/pkxewOPCbJmVf7PGdIfBf8CPseyf2lWPJW/Vrb1ZmZNVzHDqxw7SOOR\n5lWJLw/0XdxFveri3925Kh/zs8Bf8tcnIuL6Hvbry3V4g4hozwNdP0HqVToDOFrS2JxI9Ua1z7Ev\nyteot5/X8hyjJ735bPu6va9tKJpGWnZgO1KPz7RC+Q6StiD98XDjchyv2ucoqn8eb6g7Ik6RdAnw\nSVJP7XeBb0naqdj7Z1bmniCzpR4gDUq9qdyTlF+V/0wfzl83Ke4saR3SLa2i54DhklYtlW9Y5dgA\nT3Vx7Gn03f3AeuUZSGURsYg8AFfSmqTByb/pRf0P56+9uQ5dHfuuiDgxInYCdiL1sn2pGNKbenrp\n3VXKNgFejIjn8vvnSOOkyjasUtbbtj2cv25aLJS0Sq73kV7W05vjrCTpXaXjvA1YfTmO8xfSbdUd\nST0/le/FG4EPkm7VBqnHqNiW9SSVB8hvlr/2pi2VOsq3SzetEktEPBgREyNiAvA+0kDtXs2qs+bl\nJMhsqQtJg1C/Xd6QpwBXkok/kf5a/0oprL1KnQ+Q/nLdsVDX6qTepqKrgJeA/85jcsrHX7uX51B0\nMam3tzerAZ9PSgDPIv3y6E0S1JfrsAxJIyWV//+5i/TLdEShbD7Vk5JafDiPaam0YUPSLZSrCzEP\nAG+VtFkh7u2kxLCst22rXKcjSuVfJs0AvLwXdfTGlaTvtf9XKv866bpeUUul+ZbWDNL37Los2xO0\nGnA4MDsiij2YV5J+lg4tVddOuhZX9eLQV5K+F75cKcg/G4ez7EzDN+fZhkUPkn6eRhTi1pG0aZXv\nO2tivh1mQ03N3f4RcV2+PfNtSWOAqaS/gDchDZo+hDTDZ66kScCRki4l/Yf+ftIg7GdL1V4FPAGc\nI+lHuexA4Eng9XVkIqJT0uGkqfkzJF1AukW1AWlA65/p41+1ETFVUgfwNaW1e64h3dbZAZgSEcUB\np3+TNIs0IPofvbmF0MfrAMt+NrsAkyT9HvgnaZDt/qTbfv9XiJsOjM/ryzwJPBARVdc16oW7gGsk\nnU76XA/NX/+3EPNb0rT/S3Pc6sDBpGn4W5Xq61Xb8nU6CThG0pWkpGezXO+tpF645RYRMyT9Bjg0\nD6qfBmxPur15YUR0NQ2+N6YBRwLPRMSsfLwnJT1A+vn4eSn+ElJP0UmSNmbpFPndgR9GRLVxT2WX\nkHqhfpR7typT5Mu9qpsDV0u6ELiHlGTtTRrX1lGI+xFpAsB6pNveZp4i79fQeZF+iS4GxnQTMzzH\nnNJNzBdJs3FeIt0euQM4E
RhVivsfUoLzEumv/U1Jg1p/VoobQ/pl9wrpL9TDKE2jLsTuTOqZeC7X\nOxs4G9i6EHM+6ZdRud0nAK+WykT65XVPPv4c0oyoLavs/83cpq/18bpXuw6PAmcVYspT5N+Zz+uf\npB6Vp/K+O5bqfg9wfa57ceXa5nNdTFpTqNvrUPzMSQnBffla3FZpT2n/8cBM0hTwu0nr9FSbIt9V\n27r6bA/L9S3I1+tUYI1SzDRgepU2nU/qbenpsxieP48H8nEeIiV5K1Wp7w3fQ93U+/F8TpeUyn9J\naZp/YdtqpNlgj+e23Asc0ZefR9Jg8PNIswmfIa3JtA3LTpFfmzRD8R7gBVICfjPwySrnvKj8ufjV\n3C9F9Octd7PmJukx4KqI+FKPwQ1G0tdJa/y8IyKerHd7zMwGmu+NmlnFgaT1WpwAmVlT8Jggsyam\n9EyoPUnjeN6DZ9OYWRNxEmTWv7p69lSjWoc0E+xZ0qMzptS5PWZmg8ZjgszMzKwpeUyQmZmZNSUn\nQWZmZtaUnASZ2YCS9F1J5WefDXYbhktaIqn8hPXlqXNcrnPP/qqzD8f+taR/DvZxzYYaJ0FmdSRp\n//yLtPJ6RdJsSacPoadgr2iDxfuiXucVwJI6HdtsyPDsMLP6C9LzvR4G3kR6UvchwK6S3hsRC+rY\nNuve8jydfXkcUMdjmw0ZToLMGsPVETEj//uXkp4lPWzyE8Dv6tesnklaNSJernc7mklELK7Hcf1Z\n21Dj22Fmjek60l/6G1UKJG0k6feSnpE0X9KtknYr7iTp6cKDWlHyvKTXJI0slB+dy1YtlG0q6aJc\n/yuSbpf08VL9ldt3O0qaLGku6XlpfSLpIEnXSpqbj3WXpC+WYk6VNKdU9pN8/IMLZW/LZQf28tif\ny7ccX5F0m6QPVol5u6RzJM2RtEDSTEn7V6kugGGSviPpcUkvS/qTpI1K9e2UP7tHc32PSPpR8enn\nkr4pabGkt5UPkmNfkbRGfv+GMUGSVpc0SdJj+Riz8sNdizHvytdq31J5ZczUMYWy7+ayTST9TtJz\npAf5mg0ZToLMGtPG+eszAHl80K2kp6+fARwDjAAuk/SJwn43AzsW3m8JVJKfDxXKPwzMqPxVL2kL\n0hO7NwW+T1o5+iXgD6X6KyaTVpj+X9LzxvrqENLDZE8Evk56oOhZpURoGvBvkjYptXsxsEOhbEdS\nMjKtF8cdB/wQOJf0oNFRwBRJm1YCJK1DerjqTsBpwBG5rb+SdGipPpFuZe4OnJRfHyQ99LPo06TP\n6wzgcNLDYo8gPYC04oJc36eqtHtv4MqIeDG/X2aclSQBVwBfIT2lvp30cNqJSk+wr0Wl/v8jPej0\nm6QHmJoNHfV+gqtffjXzi6VPvv8I8Fbg7cA+wNOkJGTdHDcpx21f2Hc10tPCHyiUfR1YCKyW3x9O\n+gV+K/C9QtyzwI8K76cCd/DGp43fBNxbau8S0tPT1ctzrPYE9hFV4v4EzCq8H52PdVB+v1a+BhcA\njxbizgDm9NCG4bmuRcB7C+UbkJ5wfkGh7BzgUaClVMeFwDxg5fx+XK7zTmB4Ia49t3OTHs73v3N7\n1i2U/RW4pRS3fT7Opwtl5wP3Fd7vlWOOLO17MfAa6aG4AO/Kcft2cX2OKX1uS4Bz6v1z4pdfA/Vy\nT5BZ/Qm4lpT4PAb8FngB+GQsfZjprsBtEXFrZaeImA/8DNhQ0ua5eBpprF/lFs8OuWxa/jeStgTW\nzGVIWouUhP0eaJH01soLuAZ4t6R1C+0N4OcRUfPMqIh49fWTl0bmY90AbCLpzTlmLnA/S3u2dgBe\nBU4B1pO0Qekce2NaRNxVaMcjwGXAx3JbBPwH8EdgpSrXYi1g61Kdv4hlx+hMI32m7+zifFfN9d2S\n44r1/Q7YTtI7CmX7AC+Teni6sisp+T2zVD6RlOB8rJt9uxPAT2vc16zhOQkyq78g3R76KLA
zsHlE\nvCsiphZiNgBmV9l3VmE7wAzSL8zK7aIPszQJer+kVfK2IPXyQLr1JtJf/k+XXsflmPJ0/YeLbySt\nLGl08dXdCUvaQdJ1kl4Cns/HOj5vbimE3lQ6l9uAvwGdwA6SWoD30vsk6P4qZfcBa+RkcB1gDeBQ\n3ngtfpbjy9eiPCbqufx1rUqBpA0knSfpGVIP39OkxBeWPd8L89dPF8r2Ai6P7gckbwA8HhGvlMrL\n3x+1eGg59jVraJ4dZtYYbo+ls8NqFhGLJP0V2FHSu4B1gRtJv3RXBrYjJROzIuKZvFvlj6EfAV09\nQLWcPJR/2e5Iup0VpIQqJK0fEf8qVyTp3Tn2LtKto8dIvRh7ksa0FP84mwbsL2l9UjI0NSJC0s35\nfSXhuLGLdvdGcap55djnAr/uIv7O0vuuZmoJ0qBj0u3GNYDvkZLZl4F3kMYEvX6+EfG4pFtJSdCP\nJO1AukV6QR/OoTtd9d4N72af8mdtNmQ4CTJbMTxCGrRctllhe8U04CjSIOqnI+I+AEl3k5KVHUi3\ngCoezF9fi4jramzfdFJPVtHTXcTuSUrIds+3vMjtm1AlttLDMwEYAxyb398IfJ6UBL3IGxOTrry7\nStkmwIsR8ZykF4D5wLDluBZlW5PG4rRFxOvLHUjq6hbVBcCpkt5JuhX2InBVD8d4GPiwpDeXeoPK\n3x+VpHHN0v7L01NktsLy7TCzFcOVwAckbVcpkLQa8CXgoYi4pxA7jbTo4hEsveVF/vfnSL1Dr98+\nioinSQOdv5xnRi1D0to9NS4ino+I60qvrh6VUek5ef3/n3wr6r+q1Hs/MJc04HsYaRxN5Rw3JY3f\nuaUP45M+nMdEVY67IbAHcHU+3mLgEuDTkjYr71zlWvTmuNXOV6TPp9r+vycPXibdCru0OKaoC1cC\nq5Bu4xVVBmlfBRARz5FuP+5Yiju8i7ZUJalFaUmF1Xu7j1kjck+QWf315lbGD4A24GpJp5Fmdx1A\n+gv+P0uxt5JmHW0CnFUov5E09qjadPLDctlMST8n9Q6NJs1MejuwTR/b250ppKnkV+ZjjQS+CDzJ\nG8fbQEre9iZN6X8pl91Ouk2zMWk2V2/dBVwj6XTSNTo0f/3fQsxRpCThtty+WcBbgPeTetGKiWJv\nrsXdpHE1P86DuV/K5zOyWnBEzJU0DfgGsDq9WyzzEtLne5KkjYF/kAZL7w78MCKK45bOBo6U1Eka\nQ7YzqaeqL5/rZ4Cf5K8X9hBr1rDcE2RWfz3+BR4RT5ESkmtIf7V/jzS1e4+IuLQU+zJpuntx8DOk\nJCdI08sfK+0zi/RL/nLSNPgzgC+TehGOZ1m1zAp7fZ98rL1J///8CPgCcDpp7aFqKu0u9l4tIk0n\n7+36QJU2XAscSTrH40i9TONzmyp1zwG2JY0L+s/ctq+Skpajuzqvrspzj9gepMTkGODbpMTo8920\n9XekBOh5uh6nVTxGkBKe04CPk5ZU2AT4WkR8s7TfsaSxSJ8mJaOLcvv6+oy3ofo8OGsiWo5ZrmZm\nZmYrrIbrCZL0rbxU+8RC2fVa9knbiyVNLu23vqQrlB4nMEfSyZKGlWJ2ljQ9Lyl/n6osgy/pMEkP\n5SXq/yJp24E7WzMzM6uXhkqCcsLxRd440yNIa3SMJt2PX5d0376y3zDSwMCVgLGkru4DKHTj5wGQ\nl5O6w7cCTgXOlrRLIWYf0kJsx5LGQNxJWlK/x4GhZmZmtmJpmCQozzL4NWl8wPNVQl6OiKcj4qn8\neqmwbQLpOUb7RcTMiJhCep7PYZIqg78PAR6MiKMiYnZEnAlcRJo9UdEOnBUR50XEvcDBpPU8evVg\nRjMzM1txNEwSRFru/bJu1ubYT+kJ2TMlfa+ytH42FpgZEfMKZVNIK7FuUYgprsBbidke0oq3QCtL\nV3GtDDacWokxMzOzoaMhpshL+gxpQbH3dxHyG9JiX/8
iPRX7ZNLMh73z9nVIszyK5ha23dlNzEhJ\nI0hTYId3EVNtkTozMzNbgdU9CZK0HvBjYJeuFleLiLMLb++WNAe4VtJGEdHTc226m/6mXsZU3Z4f\ngjiBtFrrgh7aYWZmZku9CdgQmFJ4jM+gqnsSRLoF9W/A9LyKKqQemR0lHQ6MqLIa7F/z141Ji5BV\n1vUoqjzAcU7ha/mhjqOAFyJioaR5pDVRqsWUe4cqJpB6qczMzKw2+wG/rceBGyEJmgq8r1R2DmmV\n1h90sRz+NqTemSfz+1uBYyStXRgXNJ70pOlZhZhdS/WMz+VExGuSpgPjgEvh9aXtx5EWIKvmYYBf\n//rXbLbZG1bYtwHS3t7OpEmT6t2MpuJrPvh8zQefr/ngmjVrFp/97Gch/y6th7onQRExHyg+9whJ\n84FnImJWfojgvqQp8M+QprdPBG6IiLvyLtfkOs6XdDRpCv0JwBmFW2w/BQ6XdBJptdRxpDFFuxUO\nPRE4NydDt5Fmi61K18vyLwDYbLPNGDNmTG0XwPqspaXF13uQ+ZoPPl/zwedrXjd1G05S9ySoC8Xe\nn4Wkp1MfAawGPEZ6wOCJrwdHLJG0B+lZNreQngJ9DkufOE1EPCxpd1Ki81XgceCgiJhaiLkwrwl0\nPOm22N+BCfkBk2ZmZjaENGQSFBH/Xvj346QH/PW0z2Ok5990F3MDaQxSdzGT6foZRmZmZjZENNI6\nQWZmZmaDxkmQrXDa2trq3YSm42s++HzNB5+vefPxU+SXg6QxwPTp06d7MJ2ZmVkfzJgxg9bWVoDW\niJhRjza4J8jMzMyakpMgMzMza0pOgszMzKwpOQkyMzOzpuQkyMzMzJqSkyAzMzNrSk6CzMzMrCk5\nCTIzM7Om5CTIzMzMmpKTIDMzM2tKToLMzMysKTkJMjMzs6bkJMjMzMyakpMgMzMza0pOgszMzKwp\nOQkyMzOzpuQkyMzMzJqSkyAzMzNrSk6CzMzMrCk5CTIzMxtkkyfDj35U71ZYwyVBkr4laYmkiYWy\nEZLOlDRP0ouSLpI0qrTf+pKukDRf0hxJJ0saVorZWdJ0SQsk3Sdp/yrHP0zSQ5JekfQXSdsO3Nma\nmVkzuv56+NOf6t0Ka6gkKCccXwTuLG36MbA7sBewI/A24OLCfsOAK4GVgLHA/sABwPGFmA2By4Fr\nga2AU4GzJe1SiNkHOAU4Ftgmt2OKpLX77STNzMysITRMEiRpdeDXwBeA5wvlI4EDgfaIuCEi7gA+\nD3xI0gdy2ATgPcB+ETEzIqYA3wEOk7RSjjkEeDAijoqI2RFxJnAR0F5oRjtwVkScFxH3AgcDL+fj\nm5mZ2RDSMEkQcCZwWURcVyp/P6mH59pKQUTMBh4Fts9FY4GZETGvsN8UoAXYohAztVT3lEodklYG\nWkvHibzP9piZmdmQslLPIQNP0meArUkJT9loYGFEvFAqnwusk/+9Tn5f3l7Zdmc3MSMljQDeAgzv\nImbT3p2JmZmZrSjqngRJWo805meXiHitL7sC0Yu47mLUy5jeHMfMzMxWIHVPgki3oP4NmC6pkpQM\nB3aUdDjwMWCEpJGl3qBRLO21mQOUZ3GNLmyrfB1dihkFvBARCyXNAxZ3EVPuHVpGe3s7LS0ty5S1\ntbXR1tbW3W5mZmZNoaOjg46OjmXKOjs769SapRohCZoKvK9Udg4wC/gB8ATwGjAOuARA0ibAO4Bb\ncvytwDGS1i6MCxoPdOZ6KjG7lo4zPpcTEa9Jmp6Pc2k+jvL707o7gUmTJjFmzJjena2ZmTW9aLL7\nC9U6BmbMmEFra2udWpTUPQmKiPnAPcUySfOBZyJiVn7/C2CipOeAF0lJyc0RcXve5Zpcx/mSjgbW\nBU4AzijcYvspcLikk4BfkpKbvYHdCoeeCJybk6HbSLPFViUlZWZmZv3m9XsfVjd1T4K6UM6R20m3\nqi4CRgBXA4e9Hhy
xRNIewE9IvUPzSYnLsYWYhyXtTkp0vgo8DhwUEVMLMRfmNYGOJ90W+zswISKe\n7u8TNDMzs/pqyCQoIv699P5V4Cv51dU+jwF79FDvDaQxSN3FTAYm97qxZmZmtkJqpHWCzMzMzAaN\nkyAzMzNrSk6CzMzMrCk5CTIzM7Om5CTIzMxskDXbOkGNykmQmZmZNSUnQWZmZnXgxRLrz0mQmZmZ\nNSUnQWZmZtaUnASZmZlZU3ISZGZmZk3JSZCZmZk1JSdBZmZm1pScBJmZmQ0yL5bYGJwEmZmZWVNy\nEmRmZlYHXiyx/pwEmZmZWVNyEmRmZmZNyUmQmZmZNSUnQWZmZtaUnASZmZlZU3ISZGZmNsi8TlBj\ncBJkZmZmTclJkJmZWR14naD6q3sSJOlgSXdK6syvWyR9rLD9eklLCq/FkiaX6lhf0hWS5kuaI+lk\nScNKMTtLmi5pgaT7JO1fpS2HSXpI0iuS/iJp24E7czMzM6unuidBwGPA0UBrfl0H/FHSZnl7AD8D\nRgPrAOsCR1V2zsnOlcBKwFhgf+AA4PhCzIbA5cC1wFbAqcDZknYpxOwDnAIcC2wD3AlMkbR2/56u\nmZmZNYK6J0ERcUVEXB0R9+fXt4GXSAlNxcsR8XREPJVfLxW2TQDeA+wXETMjYgrwHeAwSSvlmEOA\nByPiqIiYHRFnAhcB7YV62oGzIuK8iLgXOBh4GThwQE7czMzM6qruSVCRpGGSPgOsCtxS2LSfpKcl\nzZT0PUlvLmwbC8yMiHmFsilAC7BFIWZq6XBTgO3zcVcm9UJdW9kYEZH32X75z8zMzMwazUo9hww8\nSe8FbgXeBLwI/EdEzM6bfwM8AvwL2BI4GdgE2DtvXweYW6pybmHbnd3EjJQ0AngLMLyLmE1rPjEz\nMzNrWA2RBAH3ksbqrAnsBZwnaceIuDcizi7E3S1pDnCtpI0i4qEe6u1uJQb1MqbH1Rza29tpaWlZ\npqytrY22traedjUzsybUbOsEdXR00NHRsUxZZ2dnnVqzVEMkQRGxCHgwv50h6QPAEaSxPGV/zV83\nBh4C5gDlWVyj89c5ha+jSzGjgBciYqGkecDiLmLKvUNvMGnSJMaMGdNTmJmZWVOq1jEwY8YMWltb\n69SipKHGBBUMA0Z0sW0bUu/Mk/n9rcD7SrO4xgOdwKxCzLhSPeNzORHxGjC9GCNJ+f0tmJmZ2ZBT\n954gSScCV5Gmyq8B7AfsBIyX9E5gX9IU+GdIt8wmAjdExF25imuAe4DzJR1NmkJ/AnBGTm4Afgoc\nLukk4Jf/h1SYAAAgAElEQVSk5GZvYLdCUyYC50qaDtxGmi22KnDOAJy2mZk1OS+WWH91T4JIt6DO\nIyUvncA/gPERcZ2k9YCPkm6NrUZKlH4PnFjZOSKWSNoD+Amp12Y+KXE5thDzsKTdSYnOV4HHgYMi\nYmoh5sLcm3R8btPfgQkR8fQAnbeZmZnVUd2ToIj4QjfbHgd27kUdjwF79BBzA2kafHcxk4HJ3cWY\nmZnZ0NCoY4LMzMzMBpSTIDMzM2tKToLMzMwGWbOtE9SonASZmZlZU3ISZGZmZk3JSZCZmVkdeJ2g\n+nMSZGZmZk3JSZCZmZk1JSdBZmZm1pScBJmZmVlTchJkZmZmTclJkJmZ2SDzYomNwUmQmZmZNSUn\nQWZmZtaUnASZmZnVgRdLrD8nQWZmZtaUnASZmZlZU3ISZGZmZk3JSZCZmZk1JSdBZmZmg8zrBDUG\nJ0FmZmbWlJwEmZmZWVNyEmRmZlYHXieo/uqeBEk6WNKdkjrz6xZJHytsHyHpTEnzJL0o6SJJo0p1\nrC/pCknzJc2RdLKkYaWYnSVNl7RA0n2S9q/SlsMkPSTpFUl/kbTtwJ25mZmZ1VPdkyDgMeBooDW/\nrgP+KGmzvP3HwO7AXsCOwNuAiys752TnSmAlYCywP3AAcHwhZkPgcuBaYCvgVOBsS
bsUYvYBTgGO\nBbYB7gSmSFq7f0/XzMzMGkHdk6CIuCIiro6I+/Pr28BLwFhJI4EDgfaIuCEi7gA+D3xI0gdyFROA\n9wD7RcTMiJgCfAc4TNJKOeYQ4MGIOCoiZkfEmcBFQHuhKe3AWRFxXkTcCxwMvJyPb2ZmZkNM3ZOg\nIknDJH0GWBW4ldQztBKpBweAiJgNPApsn4vGAjMjYl6hqilAC7BFIWZq6XBTKnVIWjkfq3icyPts\nj5mZmQ05DZEESXqvpBeBV4HJwH/k3ph1gIUR8UJpl7l5G/nr3Crb6UXMSEkjgLWB4V3ErIOZmVk/\n8jpBjWGlnkMGxb2ksTprksb+nCdpx27iBfTmW6i7GPUyxt+qZmZmQ1BDJEERsQh4ML+dkcf7HAFc\nCKwiaWSpN2gUS3tt5gDlWVyjC9sqX0eXYkYBL0TEQknzgMVdxJR7h96gvb2dlpaWZcra2tpoa2vr\naVczM7Mhr6Ojg46OjmXKOjs769SapRoiCapiGDACmA4sAsYBlwBI2gR4B3BLjr0VOEbS2oVxQeOB\nTmBWIWbX0jHG53Ii4jVJ0/NxLs3HUX5/Wk+NnTRpEmPGjOn7WZqZmTWBah0DM2bMoLW1tU4tSuqe\nBEk6EbiKNFV+DWA/YCdgfES8IOkXwERJzwEvkpKSmyPi9lzFNcA9wPmSjgbWBU4AzoiI13LMT4HD\nJZ0E/JKU3OwN7FZoykTg3JwM3UaaLbYqcM6AnLiZmTU1L5ZYf3VPgki3oM4jJS+dwD9ICdB1eXs7\n6VbVRaTeoauBwyo7R8QSSXsAPyH1Ds0nJS7HFmIelrQ7KdH5KvA4cFBETC3EXJjXBDo+t+nvwISI\neHoAztnMzMzqrO5JUER8oYftrwJfya+uYh4D9uihnhtI0+C7i5lMmp1mZmZmQ1xDTJE3MzMzG2xO\ngszMzAaZ1wlqDE6CzMzMrCn1SxIkabikrSWt1R/1mZmZmQ20mpIgST+WdFD+93DgBmAG8Jiknfuv\neWZmZmYDo9aeoL2BO/O/Pw5sRHqS+yTgxH5ol5mZ2ZDmdYLqr9YkaG2WPpJiN+D3EXEfaSHC9/VH\nw8zMzMwGUq1J0Fxg83wr7GNAZdHBVUkLG5qZmZk1tFoXS/wV6eGmT5Kesv6nXL4d6YnwZmZmZg2t\npiQoIo6TdBewPulW2Kt502LgB/3VODMzM7OBUvNjMyLiIgBJbyqUndsfjTIzMxvKvFhiY6h1ivxw\nSd+R9ATwkqR35vITKlPnzczMzBpZrQOj/xs4ADgKWFgovwvo9oGoZmZmZo2g1iTov4AvRcRvWHY2\n2J2k9YLMzMzMGlqtSdDbgfu7qG/l2ptjZmbWHLxYYv3VmgTdA+xQpXxv4I7am2NmZmY2OGqdHXY8\ncK6kt5MSqf+UtCnpNtke/dU4MzMzs4FSU09QRPyRlOx8FJhPSoo2Az4eEX/qbl8zMzOzRrA86wTd\nBOzSj20xMzNrCl4nqDHUuk7QtpK2q1K+naT3L3+zzMzMzAZWrQOjzyQ9MqPs7XmbmZmZWUOrNQna\nHJhRpfyOvM3MzMysodWaBL0KjK5Svi6wqPbmmJmZNQevE1R/tSZB1wDfl9RSKZC0JvA9wLPDzMzM\nrOHVmgQdSRoT9IikP0v6M/AQsA7w9b5UJOlbkm6T9IKkuZIukbRJKeZ6SUsKr8WSJpdi1pd0haT5\nkuZIOlnSsFLMzpKmS1og6T5J+1dpz2GSHpL0iqS/SNq2L+djZmZmK4Za1wl6AtiS9ADVe4DpwBHA\n+yLisT5WtwNwOrAdad2hlYFrJL25eEjgZ6RbcOuQbrsdVdmYk50rSVP+xwL7kx7wenwhZkPgcuBa\nYCvgVOBsSbsUYvYBTgGOBbYhPQttiqS1+3hOZmZm1uCWZ52g+aTEZLlExG7F95IOAJ4CWoGbCpte\njoinu6hmAunBrR+JiHnATEnfAX4g6biIWAQcA
jwYEZXkabakDwPtLL2F1w6cFRHn5bYcDOwOHAic\nvHxnamZmlnidoMZQcxKUb1ntDIyi1KMUEcdX26eX1iT1/DxbKt9P0ueAOcBlwAkR8UreNhaYmROg\niinAT4AtSD06Y4GppTqnAJPy+axMSry+VziPkDQV2H45zsfMzMwaUE1JkKQvkhKMeaSkpJjTBoXb\nUH2sV8CPgZsi4p7Cpt8AjwD/It2GOxnYhPTAVki3yOaWqptb2HZnNzEjJY0A3gIM7yJm01rOx8zM\nzBpXrT1B3wb+OyJO6s/GAJNJ6wx9qFgYEWcX3t4taQ5wraSNIuKhHursrtNRvYzptuOyvb2dlpaW\nZcra2tpoa2vroWlmZmZDX0dHBx0dHcuUdXZ21qk1S9WaBK0F/L4/GyLpDGA3YIeIeLKH8L/mrxuT\nZqXNAcqzuCrrGM0pfC2vbTQKeCEiFkqaByzuIqbcO7SMSZMmMWbMmB6abGZm1pyqdQzMmDGD1tbW\nOrUoqXWK/O+B8f3ViJwAfYI0sPnRXuyyDal3ppIs3Qq8rzSLazzQCcwqxIwr1TM+lxMRr5Fmub0e\nk2/PjQNu6cv5mJmZ9cSLJdZfrT1B9wMnSBoLzAReK26MiNN6W1Fe76cN2BOYL6nSE9MZEQskvRPY\nlzQF/hnS9PaJwA0RcVeOvYY0Vf98SUeTptCfAJyRkxuAnwKHSzoJ+CUpudmb1PtUMRE4V9J04DbS\nbLFVgXN6ez5mZma2Yqg1CfoS8BKwU34VBdDrJAg4OO9zfan888B5wELS+kFHAKsBj5F6ok58/YAR\nSyTtQRqsfQswn5S4HFuIeVjS7qRE56vA48BBETG1EHNh7k06nnRb7O/AhG6m5puZmdkKqqYkKCI2\n6q8GRES3t+Qi4nHSVPye6nkM2KOHmBtI0+C7i5lMGqBtZmY2ILxOUGOodUwQAJJWkbSppJrXGzIz\nMzOrh5qSIEmrSvoF8DJwN/COXH66pG/2Y/vMzMzMBkStPUHfJw1Q3hlYUCifCuyznG0yMzMzG3C1\n3sb6JLBPRPxFUvHO5t3Au5a/WWZmZmYDq9aeoH8jPeS0bDV6WF3ZzMzMrBHUmgT9jfR09YpK4vMF\n8uKDZmZm1jUvllh/td4OOwa4StLmuY4jJG1Betp6ed0gMzMzs4ZTU09QRNxEGhi9EmnF6PGk52tt\nHxHT+695ZmZmZgOjzz1BeU2gfYEpEfHF/m+SmZnZ0ObFEhtDn3uCImIR6Tlcb+r/5piZmZkNjloH\nRt9GepK7mZmZ2Qqp1oHRk4FTJK0HTCc9sPR1EfGP5W2YmZmZ2UCqNQm6IH8tPi0+AOWvw5enUWZm\nZmYDrdYkqN+eIm9mZtaMvE5Q/dWUBEXEI/3dEDMzM7PBVFMSJOm/utseEefV1hwzMzOzwVHr7bBT\nS+9XBlYFFgIvA06CzMzMuuB1ghpDrbfD1iqXSXo38BPgh8vbKDMzM7OBVus6QW8QEf8Evskbe4nM\nzMzMGk6/JUHZIuBt/VynmZmZWb+rdWD0nuUiYF3gcODm5W2UmZmZ2UCrdWD0H0rvA3gauA74+nK1\nyMzMzGwQ1Dowur9vo5mZmTUVL5ZYf3VPZiR9S9Jtkl6QNFfSJZI2KcWMkHSmpHmSXpR0kaRRpZj1\nJV0hab6kOZJOljSsFLOzpOmSFki6T9L+VdpzmKSHJL0i6S+Sth2YMzczM7N6qikJyknIN6uUf0PS\n7/tY3Q7A6cB2wEdJaw5dI+nNhZgfA7sDewE7kgZfX1w47jDgSlLP1lhgf+AA4PhCzIbA5cC1wFak\nWWxnS9qlELMPcApwLLANcCcwRdLafTwnMzOzLnmdoMZQa0/QTsAVVcqvJiUpvRYRu0XE+RExKyJm\nkpKXdwCtAJJGAgcC7RFxQ0TcAXwe+JCkD+RqJgDvAfaLiJkRMQX4DnCYpMotv0OAByPiqIiYHRFn\nAhcB7YXmt
ANnRcR5EXEvcDBp8ccD+3JOZmZm1vhqTYJWJ60OXfYaMLL25gCwJmmg9bP5fSuph+fa\nSkBEzAYeBbbPRWOBmRExr1DPFKAF2KIQM7V0rCmVOiStnI9VPE7kfbbHzMzMhpRak6CZwD5Vyj8D\n3FNrYySJdOvrpoio1LMOsDAiXiiFz83bKjFzq2ynFzEjJY0A1gaGdxGzDmZmZjak1DpF/gTg/yS9\nizQtHmAc0AZ8ajnaMxnYHPhwL2JF6jHqSXcx6mWM796amZkNMbVOkb9M0ieBY4C9gVeAfwAfjYgb\naqlT0hnAbsAOEfGvwqY5wCqSRpZ6g0axtNdmDlCexTW6sK3ydXQpZhTwQkQslDQPWNxFTLl3aBnt\n7e20tLQsU9bW1kZbW1t3u5mZmTWFjo4OOjo6linr7OysU2uWqrUniIi4guqDo/ssJ0CfAHaKiEdL\nm6eTHscxDrgkx29CGjx9S465FThG0tqFcUHjgU5gViFm11Ld43M5EfGapOn5OJfm4yi/P6279k+a\nNIkxY8b0+nzNzMyaaZ2gah0DM2bMoLW1tU4tSmp9bMa2wLCI+GupfDtgcUT8rQ91TSbdRtsTmC+p\n0hPTGRELIuIFSb8AJkp6DniRlJTcHBG359hrSGORzpd0NOkRHicAZ0TEaznmp8Dhkk4CfklKbvYm\n9T5VTATOzcnQbaTZYqsC5/T2fMzMzGzFUOvA6DOB9auUvz1v64uDSTPKrgf+VXh9uhDTTlrj56JC\n3F6VjRGxBNiDdDvrFuA8UuJybCHmYdJaQx8F/p7rPCgiphZiLiQ99uN44A5gS2BCRDzdx3MyMzPr\nktcJagy13g7bHJhRpfyOvK3XevMIjoh4FfhKfnUV8xgpEequnhvI6w91EzOZNEDbzMzMhrBae4Je\n5Y0DiCHdhlpUe3PMzMzMBketSdA1wPclvT4lStKawPeAP/VHw8zMzMwGUq23w44EbgQekXRHLtua\nNJX8c/3RMDMzM7OBVOs6QU9I2hLYj/Qw0leAXwEdhdlYZmZmZg1redYJmg/8rB/bYmZmZjZoal0n\n6FOktX02IT1S4p/AbyPion5sm5mZ2ZDVTIslNqo+DYyWNEzS74DfkabC3w88SHpS+4WSLsirLJuZ\nmZk1tL72BB1BWmxwz4i4vLhB0p6kcUFHkJ4Eb2ZmZlV4scTG0Ncp8p8HvlFOgAAi4lLgKODA/miY\nmZmZ2UDqaxL0bmBqN9un5hgzMzOzhtbXJOgVYM1uto8EFtTeHDMzM7PB0dck6FbgkG62H5ZjzMzM\nzBpaXwdGnwhcL+mtwI+AewEBm5Gevv4J4CP92kIzMzOzAdCnJCgibpG0D2mRxL1Km58D2iLi5v5q\nnJmZ2VDlBWXqr8+LJUbEJZKmAONJiyUC3AdcExEv92fjzMzMzAZKrc8Oe1nSR4H/iYhn+7lNZmZm\nQ5rXCWoMfV0xer3C232B1XP5TEnr92fDzMzMzAZSX3uC7pX0DHAz8CZgfeBRYENg5f5tmpmZmdnA\n6esU+RbgU8D0vO+Vku4DRgATJK3Tz+0zMzMzGxB9TYJWjojbIuIU0sKJ25AepbGY9LiMByTN7uc2\nmpmZmfW7vt4Oe0HSHaTbYasAq0bEzZIWAfsAjwMf6Oc2mpmZmfW7vvYEvQ34LvAqKYH6m6RppIRo\nDBARcVP/NtHMzMys//UpCYqIeRFxWUR8C3gZ2BY4HQjSCtIvSLqh/5tpZmY2tHixxPrra09QWWdE\nXAi8Bvw7sBEwua+VSNpB0qWSnpC0RNKepe2/yuXF15WlmLUk/UZSp6TnJJ0tabVSzJaSbpT0iqRH\nJH2jSls+JWlWjrlT0q59PR8zM7PueJ2gxrA8SdCWpDFAAI8Ar0XEnIj4XQ11rQb8nfQA1q6+Na4C\nRgPr5FdbaftvSc8wGwfsDuwInFXZKGkNYArwEOnW3TeA4yR9oRCzfa7n58D
WwB+AP0javIZzMjMz\nswZW04rRABHxWOHf712eRkTE1cDVAFKXHYSvRsTT1TZIeg8wAWiNiDty2VeAKyQdGRFzgM+S1jI6\nKCIWAbMkbQN8DTg7V3UEcFVETMzvj5U0HjgcOHR5ztHMzMway/LeDhtMO0uaK+leSZMlvaWwbXvg\nuUoClE0l9Sptl9+PBW7MCVDFFGBTSS2FeqaWjjsll5uZmdkQsqIkQVcB/0Uad3QUsBNpocZKr9E6\nwFPFHSJiMfBs3laJmVuqd25hW3cxXgTSzMxsiKn5dthgyoOvK+6WNBN4ANgZ+HM3u4quxxhVtvcm\nxkPYzMzMhpgVIgkqi4iHJM0DNiYlQXOAUcUYScOBtfI28tfRpapGkRKcuT3ElHuHltHe3k5LS8sy\nZW1tbbS1lcdum5mZNZ+Ojg46OjqWKevs7KxTa5ZaIZOg/DT7twJP5qJbgTUlbVMYFzSO1ItzWyHm\nu5KG51tlAOOB2RHRWYgZB5xWONwuubxLkyZNYsyYMctzSmZm1mSaaZ2gah0DM2bMoLW1tU4tShpi\nTJCk1SRtJWnrXPTO/H79vO1kSdtJ2kDSONLU9ftIg5aJiHvzv38uaVtJHyIt4tiRZ4ZBmvq+EPil\npM0l7QN8FTil0JRTgV0lfU3SppKOA1qBMwb2CpiZWTPxOkGNoSGSIOD9wB2kp9MHKTGZAfwv6eGs\nWwJ/BGaT1vC5HdgxIl4r1LEvcC9pdtflwI3AlysbI+IF0jT6DYG/AT8EjouIXxRibiWtP/Ql0rpF\n/wl8IiLu6e8TNjMzs/pqiNthEXED3SdkH+tFHc+T1gLqLmYmaWZZdzEXAxf3dDwzMzNbsTVKT5CZ\nmZnZoHISZGZmZk3JSZCZmZk1JSdBZmZm1pScBJmZmVlTchJkZmZWB820WGKjchJkZmY2yLxYYmNw\nEmRmZmZNyUmQmZmZNSUnQWZmZtaUnASZmZlZU3ISZGZmZk3JSZCZmZk1JSdBZmZmdeB1gurPSZCZ\nmdkg8zpBjcFJkJmZmTUlJ0FmZmbWlJwEmZmZWVNyEmRmZmZNyUmQmZmZNSUnQWZmZtaUnASZmZlZ\nU3ISZGZmNsgivFhiI3ASZGZmZk2pIZIgSTtIulTSE5KWSNqzSszxkv4l6WVJf5K0cWn7WpJ+I6lT\n0nOSzpa0WilmS0k3SnpF0iOSvlHlOJ+SNCvH3Clp1/4/YzMzM6u3hkiCgNWAvwOHAW9YTFzS0cDh\nwJeBDwDzgSmSVimE/RbYDBgH7A7sCJxVqGMNYArwEDAG+AZwnKQvFGK2z/X8HNga+APwB0mb99eJ\nmpmZWWNYqd4NAIiIq4GrAaSqd0mPAE6IiMtyzH8Bc4FPAhdK2gyYALRGxB055ivAFZKOjIg5wGeB\nlYGDImIRMEvSNsDXgLMLx7kqIibm98dKGk9KwA7t7/M2MzOz+mmUnqAuSdoIWAe4tlIWES8AfwW2\nz0VjgecqCVA2ldSrtF0h5sacAFVMATaV1JLfb5/3oxSzPWZmZjakNHwSREqAgtTzUzQ3b6vEPFXc\nGBGLgWdLMdXqoBcx62BmZmZDyoqQBHVFVBk/1McY9TKmp+OYmZnZCqYhxgT1YA4pERnNsr00o4A7\nCjGjijtJGg6slbdVYkaX6h7Fsr1MXcWUe4eW0d7eTktLyzJlbW1ttLW1dbebmZk1qWiyP607Ojro\n6OhYpqyzs7NOrVmq4ZOgiHhI0hzSrK9/AEgaSRrrc2YOuxVYU9I2hXFB40jJ022FmO9KGp5vlQGM\nB2ZHRGchZhxwWqEJu+TyLk2aNIkxY8bUeopmZtaEmmmxxGodAzNmzKC1tbVOLUoa4naYpNUkbSVp\n61z0zvx+/fz+x8C3JX1c0vuA84DHgT8CRMS9pAHMP5e0raQPAacDHXlmGKSp7wuBX0raXNI+wFeB\nUwpNORXYVdLXJG0q6TigFThjoM7dzMz
M6qNReoLeD/yZdGsqWJqYnAscGBEnS1qVtO7PmsA0YNeI\nWFioY19SsjIVWAJcRJryDqQZZZIm5Ji/AfOA4yLiF4WYWyW1ASfm1z+BT0TEPf1/ymZmZlZPDZEE\nRcQN9NArFRHHAcd1s/150lpA3dUxE9iph5iLgYu7izEzM7MVX0PcDjMzMzMbbE6CzMzMrCk5CTIz\nM7Om5CTIzMzMmpKTIDMzs0EW0VzrBDUqJ0FmZmbWlJwEmZmZWVNyEmRmZmZNyUmQmZmZNSUnQWZm\nZtaUnASZmZlZU3ISZGZmZk3JSZCZmdkgi6h3CwycBJmZmdWFF0usPydBZmZm1pScBJmZmVlTchJk\nZmZmTclJkJmZmTUlJ0FmZmbWlJwEmZmZWVNyEmRmZjbIvE5QY3ASZGZmNsgivE5QI3ASZGZmNsic\nBDWGFSIJknSspCWl1z2F7SMknSlpnqQXJV0kaVSpjvUlXSFpvqQ5kk6WNKwUs7Ok6ZIWSLpP0v6D\ndY5mZtZcnATV3wqRBGV3AaOBdfLrw4VtPwZ2B/YCdgTeBlxc2ZiTnSuBlYCxwP7AAcDxhZgNgcuB\na4GtgFOBsyXtMjCnY2Zmzco9QY1hpXo3oA8WRcTT5UJJI4EDgc9ExA257PPALEkfiIjbgAnAe4CP\nRMQ8YKak7wA/kHRcRCwCDgEejIijctWzJX0YaAf+NOBnZ2ZmTcNJUGNYkXqC3i3pCUkPSPq1pPVz\neSspmbu2EhgRs4FHge1z0VhgZk6AKqYALcAWhZippWNOKdRhZmbWL5wENYYVJQn6C+n21QTgYGAj\n4EZJq5FujS2MiBdK+8zN28hf51bZTi9iRkoasbwnYGZmVuEkqDGsELfDImJK4e1dkm4DHgE+DSzo\nYjcBvVmJobsY9SLGzMysT5wENYYVIgkqi4hOSfcBG5NuYa0iaWSpN2gUS3t25gDblqoZXdhW+Tq6\nFDMKeCEiFnbXnvb2dlpaWpYpa2tro62trTenY2ZmTabZkqCOjg46OjqWKevs7KxTa5ZaIZMgSasD\n7wLOBaYDi4BxwCV5+ybAO4Bb8i63AsdIWrswLmg80AnMKsTsWjrU+FzerUmTJjFmzJiaz8fMzJpL\nsyVB1ToGZsyYQWtra51alKwQY4Ik/VDSjpI2kPRBUrKzCLgg9/78ApiY1/lpBX4F3BwRt+cqrgHu\nAc6XtKWkCcAJwBkR8VqO+SnwLkknSdpU0qHA3sDEwTtTMzNrBkuWwLAV4jfw0Lai9AStB/wWeCvw\nNHATMDYinsnb24HFwEXACOBq4LDKzvH/27v/IC/q+47jzxcHAoIgCRAmhgSDP9BGo0GjFAkQgj/q\njzT9oZmkmE7sxDSZZmqm2nYmMyRpJh01cZomkiZNYpNojbbTGqegRBS42miYoCLFEwxBfnoU5DwE\nDuTuPv3js1/Y27vjjoPb3bt9PWZ2vuzu57v72fd3b/fNZz+7G0K7pOuA7xJbh/YD/wIsTJV5VdK1\nxKTnC8A24JYQQvaOMTMzsxPS1gZDB8oZeBAbED9BCOGYnWtCCIeAv0iG7spsBa7rYTkribfcm5mZ\n9ZvWVidBZeDGODMzs5w5CSoHJ0FmZmY5a22Furqia2FOgszMzHLmlqBycBJkZmaWM3eMLgcnQWZm\nZjlzS1A5OAkyMzPLmZOgcnASZGZmljN3jC4HJ0FmZmY5c5+gcnASZGZmljNfDisHJ0FmZmY5CsFJ\nUFk4CTIzM8tRe3v8dJ+g4jkJMjMzy1Fra/x0S1DxnASZmZnlqK0tfjoJKp6TIDMzsxy5Jag8nASZ\nmZnl6PDh+Ok+QcVzEmRmZpajvXvj55gxxdbDnASZmZnlqqkpfo4bV2w9zEmQmZlZrpwElYeTIDMz\nsxw5CSoPJ0FmZmY5amoCCcaOLbom5iTIzMwsR01NMQEa4jNw4fwTmJmZ5WjLFnjnO4uuhYGTIDMz\ns1y
98gpMnVp0LQycBJmZmeVm/36or4fZs4uuiYGToC5J+rykTZJaJD0r6dKi62RHPfjgg0VXoXIc\n8/w55vnLI+bLlsHBg3DDDf2+KusFJ0EZkm4CvgksBC4G1gBLJY0vtGJ2hE8O+XPM8+eY56+/Y37g\nANx9N0ybBmef3a+rsl5yEtTZbcD3Qgg/CSG8DHwWOAB8uthqmZnZQBQC/OIXMG8evPACfP/7RdfI\navwO2xRJw4DpwNdr00IIQdIyYEZhFTMzs9I7dAhefx22b4dNm2DjRli9OvYB2rULLroIHn8crrii\n6JpajZOgjsYDdcDOzPSdwLndfWndOmhr690KQji+CpWtfB7r6Kn8nj3w9NP51SePdZR9G3btin0Z\n+mv5/VE+j3X0Z/mdO2Hx4nLVKY/yAO3tcQjh6Gf2332d19oaj9ddDQ0NcPvt8d+HD3ceDh2ClpbY\np6elJV7e2rcvDs3N8NZbHbdj7Fi48EL4zGfg6qth5sz4kEQrDydBvSOgqz/lEQA339yQb20qr5lZ\ns93FiicAAArSSURBVJ4ruhIV08z8+Y55vpq57jrHvC+krochQ6CuLv67ru7o+JAhcXjjjWYeeug5\n6upg6NDOw7BhMGIEjBwZE5zhw+HUU+MwahSMHh3fDD9hApxxBpx2Wsek5/nni4tJGTU0HDl3jiiq\nDk6COtoNtAHvyEyfSOfWIYAp8eNP+rNO1qXpRVegghzz/DnmfVFr9emLrVsd8wJMAX5ZxIqdBKWE\nEA5LWg3MAx4FkKRk/B+7+MpS4JPAq8DBnKppZmY2GIwgJkBLi6qAQl/T5UFK0o3Aj4FbgVXEu8X+\nCJgWQthVZN3MzMzs5HFLUEYI4eHkmUBfJV4WewG4ygmQmZnZ4OKWIDMzM6skPyzRzMzMKslJ0Anw\nO8b6RtJCSe2Z4aXU/OGS7pW0W9Kbkv5d0sTMMiZLWixpv6RGSXdJGpIpM0fSakkHJW2Q9Km8trFo\nkmZJelTS9iS+nd5UJOmrknZIOiDpCUlnZeaPk/SApGZJTZJ+IGlUpsyFkuqTv4HNkm7vYj1/LKkh\nKbNG0jUnf4uL11PMJd3XxX6/JFPGMT8Okv5W0ipJeyXtlPSfks7JlMnteDLYzwm9jPeKzD7eJmlR\npkx54h1C8NCHAbiJeEfYzcA04HvAHmB80XUr+0B8L9uLwATi4wcmAm9Lzf8u8Y672cT3t/0S+O/U\n/CHAWuIdBRcAVwH/B3wtVWYKsA+4i/igy88Dh4H5RW9/TjG+mtiv7feJj324ITP/r5P99XrgfcAj\nwEbglFSZx4DngEuA3wU2APen5p8GvEa8keA84EZgP/BnqTIzkrh/MfkdvgIcAs4vOkYFxPw+YHFm\nvx+bKeOYH1/MlwALklhcAPxXcuwYmSqTy/GECpwTehnv5cA/Zfbz0WWNd+FBHagD8CzwrdS4gG3A\nHUXXrewDMQl6rpt5Y5ID9sdS084F2oEPJuPXJH8Q41NlbgWagKHJ+J3Ai5llPwgsKXr7C4h3O51P\nyDuA2zJxbwFuTMbPS753carMVUArMCkZ/3Pis7WGpsr8PfBSavxnwKOZdT8DLCo6LgXE/D7gP47x\nnWmO+QnHfXwSwyuS8dyOJ1U8J2TjnUxbDtxzjO+UKt6+HNYHOvqOsSdr00L8FfyOsd47O7lssFHS\n/ZImJ9OnE+9aTMd2PbCFo7G9HFgbQtidWt5SYCzwO6ky2Rc9LMW/D5LOBCbRMcZ7gV/RMcZNIYT0\nM26XEZ+cflmqTH0IoTVVZilwrqSxyfgM/DukzUkuI7wsaZGkt6XmzcAxP1GnE+O1JxnP5XhS4XNC\nNt41n5S0S9JaSV+XNDI1r1TxdhLUN8d6x9ik/Ksz4DwL/Cnxf7mfBc4E6pO+D5OAt5KTclo6tpPo\nOvb0oswYScNPdAMGuEnEA9ex9t9JxCbqI0IIbcSD3cn4Har4d/IYs
en+w8AdxMszS6QjL1ZwzE9A\nEsd/AJ4OIdT6GOZ1PKncOaGbeAM8QHyNwhziy8gXAD9NzS9VvP2coJOru3eMWUoIIf100P+VtArY\nTOzf0N2Tt3sb22OVUS/KVFlvYtxTGfWyTOV+gxDCw6nRdZLWEvthzSFeQuiOY947i4Dzgd68oz2v\n48lgjnst3jPTE0MIP0iNrpPUCDwp6cwQwqYelpl7vN0S1DfH+44xO4YQQjOxA+hZQCNwiqQxmWLp\n2DbSOfbvSM3rrsxEYG8IIfOu58ppJB4sjrX/NibjR0iqA8bRc4zTrUzdlan830lyQthN3O/BMe8z\nSd8Bfg+YE0LYkZqV1/GkUueETLxf66H4r5LP9H5emng7CeqDEMJhoPaOMaDDO8YKeQncQCZpNDCV\n2Fl3NbEjaDq25wDv5mhsnwEuUHyyd82VQDPQkCozj46uTKZXWnLybaRjjMcQ+52kY3y6pItTX51H\nTJ5Wpcp8KDlR11wJrE8S21qZ7O8wH/8OSHoX8Hbi3V7gmPdJckL+KDA3hLAlMzuX40mVzgk9xLsr\nFxOT9PR+Xp54F927fKAOxEs3LXS8Pe91YELRdSv7ANwNfAh4D/E24CeI2fvbk/mLgE3EywTTgf+h\n8y2ta4h9LC4k9i3aCfxdqswU4i2WdxLvBvkc8BbwkaK3P6cYjwLeD1xEvHvjL5Pxycn8O5L99Xri\nbaqPAK/Q8Rb5JcCvgUuJTd7rgZ+m5o8hJq4/JjaL35TE/JZUmRlJ3Gu3a3+ZeMlzMN6u3W3Mk3l3\nERPN9xAP1r8mHvSHOeZ9jvki4l1Fs4itArVhRKZMvx9PqMA5oad4A+8FvgR8INnPbwB+AzxV1ngX\nHtSBPCQ/zKvJD/EMcEnRdRoIA/FWx21J3LYA/wqcmZo/HPg2scnzTeDfgImZZUwmPqNiX/IHdCcw\nJFNmNvF/Cy3EE/yCorc9xxjPJp6I2zLDj1Jlvkw8oR4g3nlxVmYZpwP3E/+H1gT8M3BqpswFwMpk\nGVuAv+qiLn8IvJz8Di8S38VXeIzyjDnxbdmPE1vgDgK/JT6/ZkJmGY758cW8q3i3ATenyuR2PGGQ\nnxN6ijfwLmAFsCvZP9cTH+EwOrOc0sTb7w4zMzOzSnKfIDMzM6skJ0FmZmZWSU6CzMzMrJKcBJmZ\nmVklOQkyMzOzSnISZGZmZpXkJMjMzMwqyUmQmZmZVZKTIDMzM6skJ0FmZglJyyXdU3Q9zCwfToLM\nrBQk3Sppr6QhqWmjJB2W9GSm7FxJ7ZKm5F1PMxs8nASZWVksJ75t/ZLUtFnAa8Dlkk5JTZ8NbA4h\nvHq8K5E09EQqaWaDh5MgMyuFEMIGYsIzJzV5DvAIsAm4PDN9OYCkyZJ+LulNSc2SHpI0sVZQ0kJJ\nz0u6RdJviW9xR9Kpkn6SfG+7pC9m6yTpc5I2SGqR1Cjp4ZO71WZWJCdBZlYmK4C5qfG5ybSVtemS\nhgOXAU8lZX4OnE5sNfoIMBX4WWa5ZwF/AHwMuCiZ9o3kO9cDVxITq+m1L0i6BPgW8CXgHOAqoP4E\nt8/MSsTNwmZWJiuAe5J+QaOICUs9cApwK/AVYGYyvkLSfOB9wJQQwg4ASQuAdZKmhxBWJ8sdBiwI\nIexJyowCPg18IoSwIpn2KWBbqi6TgX3A4hDCfmArsKafttvMCuCWIDMrk1q/oEuBK4ANIYTdxJag\ny5J+QXOAjSGEbcA0YGstAQIIITQAbwDnpZa7uZYAJaYSE6NVqe81AetTZZ4ANgObkstmn5A08qRt\nqZkVzkmQmZVGCGEjsJ146WsuMfkhhPAasSVmJqn+QICA0MWistP3dzGfbr5bq8s+4APAx4EdxFao\nNZLG9HqDzKzUnASZWdksJyZAc4iXx2rqgWuAD3I0CXoJeLekM2qFJJ0PjE3mdec3QCupztaSxhH7\n/hwRQmgPITwVQvgb4P3AFODDf
dgmMysh9wkys7JZDtxLPD6tTE2vB75DvIy1AiCEsEzSWuABSbcl\n8+4FlocQnu9uBSGE/ZJ+CNwtaQ+wC/ga0FYrI+la4L3JepuAa4ktSOs7L9HMBiInQWZWNsuBEUBD\nCGFXavpKYDTwcgihMTX9o8C3k/ntwGPAF3qxntuJ/Y8eBd4EvgmkL3W9QbyjbGFSn1eAjyd9jsxs\nEFAI3V4SNzMzMxu03CfIzMzMKslJkJmZmVWSkyAzMzOrJCdBZmZmVklOgszMzKySnASZmZlZJTkJ\nMjMzs0pyEmRmZmaV5CTIzMzMKslJkJmZmVWSkyAzMzOrJCdBZmZmVkn/D96lI6Vndy41AAAAAElF\nTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +326,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 71, "metadata": { "collapsed": true }, @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 72, "metadata": { "collapsed": true }, @@ -358,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 73, "metadata": { "collapsed": false }, @@ -367,9 +367,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 166\n", - "Number of unique tokens: 681\n", - "Number of documents: 90\n" + "Number of authors: 376\n", + "Number of unique tokens: 1524\n", + "Number of documents: 185\n" ] } ], @@ -410,7 +410,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 75, "metadata": { "collapsed": true }, @@ -422,7 +422,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 76, "metadata": { "collapsed": false }, @@ -431,16 +431,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 4min 10s, sys: 28 ms, total: 4min 10s\n", - "Wall time: 4min 10s\n" + "CPU times: user 9min 14s, sys: 52 ms, total: 9min 14s\n", + "Wall time: 9min 14s\n" ] } ], "source": [ "%time model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=var_lambda)" + " iterations=10, passes=5, alpha=None, eta=None, 
decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None)" ] }, { @@ -574,7 +574,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 36, "metadata": { "collapsed": false }, @@ -592,7 +592,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 37, "metadata": { "collapsed": false }, @@ -609,7 +609,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 38, "metadata": { "collapsed": false }, @@ -622,7 +622,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 39, "metadata": { "collapsed": false }, @@ -639,7 +639,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 40, "metadata": { "collapsed": false }, @@ -655,7 +655,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 41, "metadata": { "collapsed": false }, @@ -678,7 +678,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 42, "metadata": { "collapsed": false }, @@ -690,7 +690,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 43, "metadata": { "collapsed": false }, @@ -699,15 +699,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 41.6 s, sys: 20 ms, total: 41.6 s\n", - "Wall time: 41.6 s\n" + "CPU times: user 16.5 s, sys: 4 ms, total: 16.5 s\n", + "Wall time: 16.5 s\n" ] } ], "source": [ "%time model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-3, \\\n", - " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " iterations=10, passes=5, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", " eval_every=1, random_state=1, var_lambda=None)" ] }, @@ -812,7 +812,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 59, "metadata": { "collapsed": false }, @@ -824,7 +824,7 @@ }, { "cell_type": 
"code", - "execution_count": 44, + "execution_count": 74, "metadata": { "collapsed": false }, @@ -833,15 +833,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 24.1 s, sys: 156 ms, total: 24.3 s\n", - "Wall time: 24.1 s\n" + "CPU times: user 7min 52s, sys: 476 ms, total: 7min 52s\n", + "Wall time: 7min 52s\n" ] } ], "source": [ "%time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=var_lambda, \\\n", + " iterations=5, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", " eval_every=1, random_state=1)" ] }, @@ -1098,7 +1098,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 44, "metadata": { "collapsed": false }, @@ -1110,7 +1110,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 45, "metadata": { "collapsed": false }, @@ -1119,15 +1119,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 16.7 s, sys: 4 ms, total: 16.7 s\n", - "Wall time: 16.8 s\n" + "CPU times: user 8.61 s, sys: 4 ms, total: 8.61 s\n", + "Wall time: 8.62 s\n" ] } ], "source": [ "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='symmetric', eta='symmetric', \\\n", + " iterations=5, alpha='symmetric', eta='symmetric', \\\n", " eval_every=1, random_state=1, var_lambda=None)" ] }, diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 09754d2b32..477820e642 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -266,12 +266,15 @@ def inference(self, corpus=None, var_lambda=None): # only one update per document). if self.optimize_lambda: # Update lambda. 
- #tilde_lambda = self.eta + self.num_docs * cts * var_phi[ids, :].T for k in xrange(self.num_topics): for vi, v in enumerate(ids): - cnt = dict(doc).get(v, 0) + # cnt = dict(doc).get(v, 0) + cnt = cts[vi] tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * var_phi[v, k] + # This is a little bit faster: + # tilde_lambda[:, ids] = self.eta[ids] + self.num_docs * cts * var_phi[ids, :].T + # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, # corresponding to the authors in the document. The same goes for Elogtheta. From ba5ba63b8b39e2ecc884163cd8ab95792482f9d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Mon, 7 Nov 2016 16:53:15 +0100 Subject: [PATCH 035/100] Making sure that the model is evaluated after the last iteration, if eval_every is different from 0. Various comment changes. Updated notebook. --- docs/notebooks/at_with_nips.ipynb | 378 +++++++++++++++++++----------- gensim/models/atvb.py | 22 +- gensim/models/onlineatvb.py | 48 ++-- 3 files changed, 295 insertions(+), 153 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 7e75eb95c7..d2fcace683 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": 123, "metadata": { "collapsed": false }, @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 231, "metadata": { "collapsed": false }, @@ -68,6 +68,8 @@ "from gensim.models import OnlineAtVb\n", "from gensim.models import onlineatvb\n", "\n", + "from time import time\n", + "\n", "%matplotlib inline" ] }, @@ -81,8 +83,8 @@ "source": [ "# Configure logging.\n", "\n", - "log_dir = '../../../log_files/log.log' # On my own machine.\n", - "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "#log_dir = '../../../log_files/log.log' # On my own machine.\n", + "log_dir = 
'../../../../log_files/log.log' # On Hetzner\n", "\n", "logger = logging.getLogger()\n", "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", @@ -101,7 +103,7 @@ }, { "cell_type": "code", - "execution_count": 61, + "execution_count": 247, "metadata": { "collapsed": false }, @@ -111,12 +113,12 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", - "#data_dir = '../../../nipstxt/' # On Hetzner.\n", + "#data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", - "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00', '01']\n", + "yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", + "#yrs = ['00']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -138,7 +140,7 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": 248, "metadata": { "collapsed": false }, @@ -163,7 +165,7 @@ }, { "cell_type": "code", - "execution_count": 63, + "execution_count": 249, "metadata": { "collapsed": true }, @@ -178,7 +180,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": 250, "metadata": { "collapsed": false }, @@ -196,7 +198,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 251, "metadata": { "collapsed": false }, @@ -222,7 +224,7 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": 252, "metadata": { "collapsed": false }, @@ -245,7 +247,7 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 253, "metadata": { "collapsed": true }, @@ -260,7 +262,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 254, "metadata": { "collapsed": true }, @@ -279,7 +281,7 @@ }, { "cell_type": "code", - "execution_count": 
69, + "execution_count": 255, "metadata": { "collapsed": true }, @@ -291,16 +293,16 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": 256, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcXFWZ//HPNwGiLGlQJgEFAUUQUJa0SFBZnEgii+gM\nKDbogODCpvxaEZTRgQFRQUlki6KoLGojwqDswYBAWBRMEAOEIPsiCYSlgUAISZ7fH+cUublUb5Xu\nrkrX9/161atT5z733HNvdaefPveccxURmJmZmTWbYfVugJmZmVk9OAkyMzOzpuQkyMzMzJqSkyAz\nMzNrSk6CzMzMrCk5CTIzM7Om5CTIzMzMmpKTIDMzM2tKToLMzMysKTkJMrN+J+lxST8rvB8naYmk\nDw7Csb8r6bXC++H52BMH+tj5eF/Ix3vbYByvVpK+KelBSYsk3Vbv9vSWpHfl67tvvdtiKz4nQTZk\nSNo//+dY7fW9erevyVR7Hk+fn9Ej6b8lfbyGYy/p67H6qpu2BTWc62CStBvwPeDPwAHAd+raILM6\nWaneDTDrZ0H6D/3hUvldg98Uq4iIayW9OSIW9nHXbwPnA5f1YZ9jgeP7eJxadNW2XwLn13Cug+kj\nwGvAF8IPkLQm5iTIhqKrI2JGb4MlCVglIl4dwDY1vYFOCiStGhEvR8QSBqEnqCs5qWjkBAhgNDC/\nERMg/zzaYPLtMGsqxfEhkj4n6W5gATAub5ekr0m6W9ICSU9KmixpZKkeSfqfPPblJUlTJb1H0mOl\nsTDLjE8plFcdNyJpd0nTcp2dki6V9J5SzK8lPSdpvbz9RUlPSfpBleNIUrukf0h6JcddKWnrvP1m\nSX/r4lo9IKnbHpiurkOVuDeMCZK0iaT/kzQnt+1RSb+RtFrlcwJWASrXaknl2ubruiTX8TtJz5Fu\n7XR5zfO2z0manY93W3mMUr62/6yy3+t19qJtXX22Xyl8Xz0h6bQq31c3SZohaQtJf5b0cr62X+vu\ncyjsv5KkY/Nnt0BpzM/xklYutX0/oCW3c7G6GF+Tv3dek7RaoezovN8PCmUr5c//+ELZ6pIm5Z+J\nBZJmSfp/pfp7+nlcS9J5kp6X9KykXwDLXLMct66kc/O1WiDpX5IukbReb66bNS/3BNlQ1CLprcWC\niHimFDMe+AxwJvAs8Ggu/yXQlr/+GHgn8BVgK0k75F4GSOMpjgYuBaYArcA1wJtLx+lqfMgbyiUd\nAPwCuBI4ClgNOBSYJmmbiHi8sO9K+XjTgK/n8/mGpH9GxC8K1Z5H+oV3GfAz0i/uHYHtgL/n7ZMl\nbRIR9xXasj2wEfCtKm0v6u11qLS7Uv+IHDeMdJ3nAusBHwdGRsR8SZ8FfgXclK8LwP2luv4PuBf4\nZqGsq2s+DtgXOI10K+gwYIqk90fE7B72fb08Ihb3om3lz/a7wDHA1aTvuc1In21r6fsqgLWBq4Df\nAxcAnwZ+KOnOiLi2StuKzsnneAHpe2Ms6bbdpsA+hbYfCmwFfAkQcHMX9U0jfUYfIn1eAB8GFgM7\nFOJaSZ/5jfl8BVyR9/s58A9gV2CipHUj4ujScd7w85jruIz0vToZmA3sRbru5c/oD8DGpM/2UVJP\n13jS99TjmHUlIvzya0i8gP1Jt0HKr8WFmOG5bCGwcWn/nfO2vUrlu+byvfP7UXn/i0txP8hxPyuU\nnQAsrNLWg0i/SN6W368BPA+cXoobncvPKJSdn/c9qhT
7d+CWwvtdcntO7uaarQm8AhxfKj8zH/dN\n3ezbl+swLrf5g/l9a475eA+f6SvFekrXdQlwThfbFhbeVz7zRcB7C+UbkHodLihd2/t6qrOHtpU/\n29H5Ol1aivtqjtuvUDYtl326ULYKKUn8bQ/Xakw+zzNL5RNznR8qneezvfiZGg68CJxQKHuWlGQt\nqHx/AN/I57h6fr9XbsuRpfouJiWg7+jFz2Oljq8WyoaREs/FwL657C3lOL/86u3Lt8NsqAngEOCj\nhdcuVeKujYj7S2V7k/6Dv17SWysv4G+kX3gfyXETSP95n17a/8fL0e6PkRKhC0rHXgzcXjh20c9K\n728i9VxV7EX6xX9CVweNiOeBy0m9B0C6RQF8ipTcLOimzeOp/To8n7/uKulNvYivJoCf9iF+WkS8\nPkA+Ih4h9TR8rMbj99YupOtUvi5nAS8Du5fKOyPiwsqbSGOpbmfZz7aa3UjXpLwUwCmk3p7ycXoU\nEYuBW0m9h0jaEmgBvg+sTOqlgdQ7dGdEvJTf70pKbM4sVTmRdC3K17zaz+OuwKsUvs8j9Zidkc+n\n4mVSYvURSS19PEVrck6CbCi6PSKuK76qxDxcpezdpL8qny695gJvIvV8ALwjf13mP+2ImEP6q7kW\nG5P+Y59WOvZTwL8Xjl3xUk5gip4D1iq8fyfweET01KbzgI0kjc3vPwa8ldRb0J0N8tc+X4eIeAA4\nFfgy8IykqyQdImmNHo5Z9lAfYsu/ZAHuA9aQtFaVbf2lcp3uKxZGGvj7UGF7xWNV6ih/tl0dZ1G+\ntsXjPEH6PMrH6a2bgG3zuKIdgMci4k7SjMvKLbEPkb53i215PCJeKdU1q7C96OEqx90AeKJKIj67\n+CZvPwbYA3hK0vWSjpRU/pkxewOPCbJmVf7PGdIfBf8CPseyf2lWPJW/Vrb1ZmZNVzHDqxw7SOOR\n5lWJLw/0XdxFveri3925Kh/zs8Bf8tcnIuL6Hvbry3V4g4hozwNdP0HqVToDOFrS2JxI9Ua1z7Ev\nyteot5/X8hyjJ735bPu6va9tKJpGWnZgO1KPz7RC+Q6StiD98XDjchyv2ucoqn8eb6g7Ik6RdAnw\nSVJP7XeBb0naqdj7Z1bmniCzpR4gDUq9qdyTlF+V/0wfzl83Ke4saR3SLa2i54DhklYtlW9Y5dgA\nT3Vx7Gn03f3AeuUZSGURsYg8AFfSmqTByb/pRf0P56+9uQ5dHfuuiDgxInYCdiL1sn2pGNKbenrp\n3VXKNgFejIjn8vvnSOOkyjasUtbbtj2cv25aLJS0Sq73kV7W05vjrCTpXaXjvA1YfTmO8xfSbdUd\nST0/le/FG4EPkm7VBqnHqNiW9SSVB8hvlr/2pi2VOsq3SzetEktEPBgREyNiAvA+0kDtXs2qs+bl\nJMhsqQtJg1C/Xd6QpwBXkok/kf5a/0oprL1KnQ+Q/nLdsVDX6qTepqKrgJeA/85jcsrHX7uX51B0\nMam3tzerAZ9PSgDPIv3y6E0S1JfrsAxJIyWV//+5i/TLdEShbD7Vk5JafDiPaam0YUPSLZSrCzEP\nAG+VtFkh7u2kxLCst22rXKcjSuVfJs0AvLwXdfTGlaTvtf9XKv866bpeUUul+ZbWDNL37Los2xO0\nGnA4MDsiij2YV5J+lg4tVddOuhZX9eLQV5K+F75cKcg/G4ez7EzDN+fZhkUPkn6eRhTi1pG0aZXv\nO2tivh1mQ03N3f4RcV2+PfNtSWOAqaS/gDchDZo+hDTDZ66kScCRki4l/Yf+ftIg7GdL1V4FPAGc\nI+lHuexA4Eng9XVkIqJT0uGkqfkzJF1AukW1AWlA65/p41+1ETFVUgfwNaW1e64h3dbZAZgSEcUB\np3+TNIs0IPofvbmF0MfrAMt+NrsAkyT9HvgnaZDt/qTbfv9XiJsOjM/ryzwJPBARVdc16oW7gGsk\nnU76XA/NX/+3EPN
b0rT/S3Pc6sDBpGn4W5Xq61Xb8nU6CThG0pWkpGezXO+tpF645RYRMyT9Bjg0\nD6qfBmxPur15YUR0NQ2+N6YBRwLPRMSsfLwnJT1A+vn4eSn+ElJP0UmSNmbpFPndgR9GRLVxT2WX\nkHqhfpR7typT5Mu9qpsDV0u6ELiHlGTtTRrX1lGI+xFpAsB6pNveZp4i79fQeZF+iS4GxnQTMzzH\nnNJNzBdJs3FeIt0euQM4ERhVivsfUoLzEumv/U1Jg1p/VoobQ/pl9wrpL9TDKE2jLsTuTOqZeC7X\nOxs4G9i6EHM+6ZdRud0nAK+WykT65XVPPv4c0oyoLavs/83cpq/18bpXuw6PAmcVYspT5N+Zz+uf\npB6Vp/K+O5bqfg9wfa57ceXa5nNdTFpTqNvrUPzMSQnBffla3FZpT2n/8cBM0hTwu0nr9FSbIt9V\n27r6bA/L9S3I1+tUYI1SzDRgepU2nU/qbenpsxieP48H8nEeIiV5K1Wp7w3fQ93U+/F8TpeUyn9J\naZp/YdtqpNlgj+e23Asc0ZefR9Jg8PNIswmfIa3JtA3LTpFfmzRD8R7gBVICfjPwySrnvKj8ufjV\n3C9F9Octd7PmJukx4KqI+FKPwQ1G0tdJa/y8IyKerHd7zMwGmu+NmlnFgaT1WpwAmVlT8Jggsyam\n9EyoPUnjeN6DZ9OYWRNxEmTWv7p69lSjWoc0E+xZ0qMzptS5PWZmg8ZjgszMzKwpeUyQmZmZNSUn\nQWZmZtaUnASZ2YCS9F1J5WefDXYbhktaIqn8hPXlqXNcrnPP/qqzD8f+taR/DvZxzYYaJ0FmdSRp\n//yLtPJ6RdJsSacPoadgr2iDxfuiXucVwJI6HdtsyPDsMLP6C9LzvR4G3kR6UvchwK6S3hsRC+rY\nNuve8jydfXkcUMdjmw0ZToLMGsPVETEj//uXkp4lPWzyE8Dv6tesnklaNSJernc7mklELK7Hcf1Z\n21Dj22Fmjek60l/6G1UKJG0k6feSnpE0X9KtknYr7iTp6cKDWlHyvKTXJI0slB+dy1YtlG0q6aJc\n/yuSbpf08VL9ldt3O0qaLGku6XlpfSLpIEnXSpqbj3WXpC+WYk6VNKdU9pN8/IMLZW/LZQf28tif\ny7ccX5F0m6QPVol5u6RzJM2RtEDSTEn7V6kugGGSviPpcUkvS/qTpI1K9e2UP7tHc32PSPpR8enn\nkr4pabGkt5UPkmNfkbRGfv+GMUGSVpc0SdJj+Riz8sNdizHvytdq31J5ZczUMYWy7+ayTST9TtJz\npAf5mg0ZToLMGtPG+eszAHl80K2kp6+fARwDjAAuk/SJwn43AzsW3m8JVJKfDxXKPwzMqPxVL2kL\n0hO7NwW+T1o5+iXgD6X6KyaTVpj+X9LzxvrqENLDZE8Evk56oOhZpURoGvBvkjYptXsxsEOhbEdS\nMjKtF8cdB/wQOJf0oNFRwBRJm1YCJK1DerjqTsBpwBG5rb+SdGipPpFuZe4OnJRfHyQ99LPo06TP\n6wzgcNLDYo8gPYC04oJc36eqtHtv4MqIeDG/X2aclSQBVwBfIT2lvp30cNqJSk+wr0Wl/v8jPej0\nm6QHmJoNHfV+gqtffjXzi6VPvv8I8Fbg7cA+wNOkJGTdHDcpx21f2Hc10tPCHyiUfR1YCKyW3x9O\n+gV+K/C9QtyzwI8K76cCd/DGp43fBNxbau8S0tPT1ctzrPYE9hFV4v4EzCq8H52PdVB+v1a+BhcA\njxbizgDm9NCG4bmuRcB7C+UbkJ5wfkGh7BzgUaClVMeFwDxg5fx+XK7zTmB4Ia49t3OTHs73v3N7\n1i2U/RW4pRS3fT7Opwtl5wP3Fd7vlWOOLO17MfAa6aG4AO/Kcft2cX2OKX1uS4Bz6v1z4pdfA/Vy\nT5BZ/Qm4lpT4PAb8FngB+GQsfZjprsBtEXFrZaeImA/8DNhQ0ua5eBpprF/lFs8Ou
Wxa/jeStgTW\nzGVIWouUhP0eaJH01soLuAZ4t6R1C+0N4OcRUfPMqIh49fWTl0bmY90AbCLpzTlmLnA/S3u2dgBe\nBU4B1pO0Qekce2NaRNxVaMcjwGXAx3JbBPwH8EdgpSrXYi1g61Kdv4hlx+hMI32m7+zifFfN9d2S\n44r1/Q7YTtI7CmX7AC+Teni6sisp+T2zVD6RlOB8rJt9uxPAT2vc16zhOQkyq78g3R76KLAzsHlE\nvCsiphZiNgBmV9l3VmE7wAzSL8zK7aIPszQJer+kVfK2IPXyQLr1JtJf/k+XXsflmPJ0/YeLbySt\nLGl08dXdCUvaQdJ1kl4Cns/HOj5vbimE3lQ6l9uAvwGdwA6SWoD30vsk6P4qZfcBa+RkcB1gDeBQ\n3ngtfpbjy9eiPCbqufx1rUqBpA0knSfpGVIP39OkxBeWPd8L89dPF8r2Ai6P7gckbwA8HhGvlMrL\n3x+1eGg59jVraJ4dZtYYbo+ls8NqFhGLJP0V2FHSu4B1gRtJv3RXBrYjJROzIuKZvFvlj6EfAV09\nQLWcPJR/2e5Iup0VpIQqJK0fEf8qVyTp3Tn2LtKto8dIvRh7ksa0FP84mwbsL2l9UjI0NSJC0s35\nfSXhuLGLdvdGcap55djnAr/uIv7O0vuuZmoJ0qBj0u3GNYDvkZLZl4F3kMYEvX6+EfG4pFtJSdCP\nJO1AukV6QR/OoTtd9d4N72af8mdtNmQ4CTJbMTxCGrRctllhe8U04CjSIOqnI+I+AEl3k5KVHUi3\ngCoezF9fi4jramzfdFJPVtHTXcTuSUrIds+3vMjtm1AlttLDMwEYAxyb398IfJ6UBL3IGxOTrry7\nStkmwIsR8ZykF4D5wLDluBZlW5PG4rRFxOvLHUjq6hbVBcCpkt5JuhX2InBVD8d4GPiwpDeXeoPK\n3x+VpHHN0v7L01NktsLy7TCzFcOVwAckbVcpkLQa8CXgoYi4pxA7jbTo4hEsveVF/vfnSL1Dr98+\nioinSQOdv5xnRi1D0to9NS4ino+I60qvrh6VUek5ef3/n3wr6r+q1Hs/MJc04HsYaRxN5Rw3JY3f\nuaUP45M+nMdEVY67IbAHcHU+3mLgEuDTkjYr71zlWvTmuNXOV6TPp9r+vycPXibdCru0OKaoC1cC\nq5Bu4xVVBmlfBRARz5FuP+5Yiju8i7ZUJalFaUmF1Xu7j1kjck+QWf315lbGD4A24GpJp5Fmdx1A\n+gv+P0uxt5JmHW0CnFUov5E09qjadPLDctlMST8n9Q6NJs1MejuwTR/b250ppKnkV+ZjjQS+CDzJ\nG8fbQEre9iZN6X8pl91Ouk2zMWk2V2/dBVwj6XTSNTo0f/3fQsxRpCThtty+WcBbgPeTetGKiWJv\nrsXdpHE1P86DuV/K5zOyWnBEzJU0DfgGsDq9WyzzEtLne5KkjYF/kAZL7w78MCKK45bOBo6U1Eka\nQ7YzqaeqL5/rZ4Cf5K8X9hBr1rDcE2RWfz3+BR4RT5ESkmtIf7V/jzS1e4+IuLQU+zJpuntx8DOk\nJCdI08sfK+0zi/RL/nLSNPgzgC+TehGOZ1m1zAp7fZ98rL1J///8CPgCcDpp7aFqKu0u9l4tIk0n\n7+36QJU2XAscSTrH40i9TONzmyp1zwG2JY0L+s/ctq+Skpajuzqvrspzj9gepMTkGODbpMTo8920\n9XekBOh5uh6nVTxGkBKe04CPk5ZU2AT4WkR8s7TfsaSxSJ8mJaOLcvv6+oy3ofo8OGsiWo5ZrmZm\nZmYrrIbrCZL0rbxU+8RC2fVa9knbiyVNLu23vqQrlB4nMEfSyZKGlWJ2ljQ9Lyl/n6osgy/pMEkP\n5SXq/yJp24E7WzMzM6uXhkqCcsLxRd440yNIa3SMJt2PX5d0376y3zDSwMCVgLGkru4DKHTj5wGQ\nl5O6w7cCTgXOlrRLIWYf0kJsx5LGQNxJWlK/x
4GhZmZmtmJpmCQozzL4NWl8wPNVQl6OiKcj4qn8\neqmwbQLpOUb7RcTMiJhCep7PYZIqg78PAR6MiKMiYnZEnAlcRJo9UdEOnBUR50XEvcDBpPU8evVg\nRjMzM1txNEwSRFru/bJu1ubYT+kJ2TMlfa+ytH42FpgZEfMKZVNIK7FuUYgprsBbidke0oq3QCtL\nV3GtDDacWokxMzOzoaMhpshL+gxpQbH3dxHyG9JiX/8iPRX7ZNLMh73z9nVIszyK5ha23dlNzEhJ\nI0hTYId3EVNtkTozMzNbgdU9CZK0HvBjYJeuFleLiLMLb++WNAe4VtJGEdHTc226m/6mXsZU3Z4f\ngjiBtFrrgh7aYWZmZku9CdgQmFJ4jM+gqnsSRLoF9W/A9LyKKqQemR0lHQ6MqLIa7F/z141Ji5BV\n1vUoqjzAcU7ha/mhjqOAFyJioaR5pDVRqsWUe4cqJpB6qczMzKw2+wG/rceBGyEJmgq8r1R2DmmV\n1h90sRz+NqTemSfz+1uBYyStXRgXNJ70pOlZhZhdS/WMz+VExGuSpgPjgEvh9aXtx5EWIKvmYYBf\n//rXbLbZG1bYtwHS3t7OpEmT6t2MpuJrPvh8zQefr/ngmjVrFp/97Gch/y6th7onQRExHyg+9whJ\n84FnImJWfojgvqQp8M+QprdPBG6IiLvyLtfkOs6XdDRpCv0JwBmFW2w/BQ6XdBJptdRxpDFFuxUO\nPRE4NydDt5Fmi61K18vyLwDYbLPNGDNmTG0XwPqspaXF13uQ+ZoPPl/zwedrXjd1G05S9ySoC8Xe\nn4Wkp1MfAawGPEZ6wOCJrwdHLJG0B+lZNreQngJ9DkufOE1EPCxpd1Ki81XgceCgiJhaiLkwrwl0\nPOm22N+BCfkBk2ZmZjaENGQSFBH/Xvj346QH/PW0z2Ok5990F3MDaQxSdzGT6foZRmZmZjZENNI6\nQWZmZmaDxkmQrXDa2trq3YSm42s++HzNB5+vefPxU+SXg6QxwPTp06d7MJ2ZmVkfzJgxg9bWVoDW\niJhRjza4J8jMzMyakpMgMzMza0pOgszMzKwpOQkyMzOzpuQkyMzMzJqSkyAzMzNrSk6CzMzMrCk5\nCTIzM7Om5CTIzMzMmpKTIDMzM2tKToLMzMysKTkJMjMzs6bkJMjMzMyakpMgMzMza0pOgszMzKwp\nOQkyMzOzpuQkyMzMzJqSkyAzMzNrSk6CzMzMrCk5CTIzMxtkkyfDj35U71ZYwyVBkr4laYmkiYWy\nEZLOlDRP0ouSLpI0qrTf+pKukDRf0hxJJ0saVorZWdJ0SQsk3Sdp/yrHP0zSQ5JekfQXSdsO3Nma\nmVkzuv56+NOf6t0Ka6gkKCccXwTuLG36MbA7sBewI/A24OLCfsOAK4GVgLHA/sABwPGFmA2By4Fr\nga2AU4GzJe1SiNkHOAU4Ftgmt2OKpLX77STNzMysITRMEiRpdeDXwBeA5wvlI4EDgfaIuCEi7gA+\nD3xI0gdy2ATgPcB+ETEzIqYA3wEOk7RSjjkEeDAijoqI2RFxJnAR0F5oRjtwVkScFxH3AgcDL+fj\nm5mZ2RDSMEkQcCZwWURcVyp/P6mH59pKQUTMBh4Fts9FY4GZETGvsN8UoAXYohAztVT3lEodklYG\nWkvHibzP9piZmdmQslLPIQNP0meArUkJT9loYGFEvFAqnwusk/+9Tn5f3l7Zdmc3MSMljQDeAgzv\nImbT3p2JmZmZrSjqngRJWo805meXiHitL7sC0Yu47mLUy5jeHMfMzMxWIHVPgki3oP4NmC6pkpQM\nB3aUdDjwMWCEpJGl3qBRLO21mQOUZ3GNLmyrfB1dihkFvBARCyXNAxZ3EVPuHVpGe3s7LS0ty5S1\ntbXR1tbW3W5mZmZNoaOjg46OjmXKOjs769SapRohCZoKvK9Udg4wC/gB8ATwGjAOuARA0ibAO4Bb\ncvytwDGS1
i6MCxoPdOZ6KjG7lo4zPpcTEa9Jmp6Pc2k+jvL707o7gUmTJjFmzJjena2ZmTW9aLL7\nC9U6BmbMmEFra2udWpTUPQmKiPnAPcUySfOBZyJiVn7/C2CipOeAF0lJyc0RcXve5Zpcx/mSjgbW\nBU4AzijcYvspcLikk4BfkpKbvYHdCoeeCJybk6HbSLPFViUlZWZmZv3m9XsfVjd1T4K6UM6R20m3\nqi4CRgBXA4e9HhyxRNIewE9IvUPzSYnLsYWYhyXtTkp0vgo8DhwUEVMLMRfmNYGOJ90W+zswISKe\n7u8TNDMzs/pqyCQoIv699P5V4Cv51dU+jwF79FDvDaQxSN3FTAYm97qxZmZmtkJqpHWCzMzMzAaN\nkyAzMzNrSk6CzMzMrCk5CTIzM7Om5CTIzMxskDXbOkGNykmQmZmZNSUnQWZmZnXgxRLrz0mQmZmZ\nNSUnQWZmZtaUnASZmZlZU3ISZGZmZk3JSZCZmZk1JSdBZmZm1pScBJmZmQ0yL5bYGJwEmZmZWVNy\nEmRmZlYHXiyx/pwEmZmZWVNyEmRmZmZNyUmQmZmZNSUnQWZmZtaUnASZmZlZU3ISZGZmNsi8TlBj\ncBJkZmZmTclJkJmZWR14naD6q3sSJOlgSXdK6syvWyR9rLD9eklLCq/FkiaX6lhf0hWS5kuaI+lk\nScNKMTtLmi5pgaT7JO1fpS2HSXpI0iuS/iJp24E7czMzM6unuidBwGPA0UBrfl0H/FHSZnl7AD8D\nRgPrAOsCR1V2zsnOlcBKwFhgf+AA4PhCzIbA5cC1wFbAqcDZknYpxOwDnAIcC2wD3AlMkbR2/56u\nmZmZNYK6J0ERcUVEXB0R9+fXt4GXSAlNxcsR8XREPJVfLxW2TQDeA+wXETMjYgrwHeAwSSvlmEOA\nByPiqIiYHRFnAhcB7YV62oGzIuK8iLgXOBh4GThwQE7czMzM6qruSVCRpGGSPgOsCtxS2LSfpKcl\nzZT0PUlvLmwbC8yMiHmFsilAC7BFIWZq6XBTgO3zcVcm9UJdW9kYEZH32X75z8zMzMwazUo9hww8\nSe8FbgXeBLwI/EdEzM6bfwM8AvwL2BI4GdgE2DtvXweYW6pybmHbnd3EjJQ0AngLMLyLmE1rPjEz\nMzNrWA2RBAH3ksbqrAnsBZwnaceIuDcizi7E3S1pDnCtpI0i4qEe6u1uJQb1MqbH1Rza29tpaWlZ\npqytrY22traedjUzsybUbOsEdXR00NHRsUxZZ2dnnVqzVEMkQRGxCHgwv50h6QPAEaSxPGV/zV83\nBh4C5gDlWVyj89c5ha+jSzGjgBciYqGkecDiLmLKvUNvMGnSJMaMGdNTmJmZWVOq1jEwY8YMWltb\n69SipKHGBBUMA0Z0sW0bUu/Mk/n9rcD7SrO4xgOdwKxCzLhSPeNzORHxGjC9GCNJ+f0tmJmZ2ZBT\n954gSScCV5Gmyq8B7AfsBIyX9E5gX9IU+GdIt8wmAjdExF25imuAe4DzJR1NmkJ/AnBGTm4Afgoc\nLukk4Jf/h1SYAAAgAElEQVSk5GZvYLdCUyYC50qaDtxGmi22KnDOAJy2mZk1OS+WWH91T4JIt6DO\nIyUvncA/gPERcZ2k9YCPkm6NrUZKlH4PnFjZOSKWSNoD+Amp12Y+KXE5thDzsKTdSYnOV4HHgYMi\nYmoh5sLcm3R8btPfgQkR8fQAnbeZmZnVUd2ToIj4QjfbHgd27kUdjwF79BBzA2kafHcxk4HJ3cWY\nmZnZ0NCoY4LMzMzMBpSTIDMzM2tKToLMzMwGWbOtE9SonASZmZlZU3ISZGZmZk3JSZCZmVkdeJ2g\n+nMSZGZmZk3JSZCZmZk1JSdBZmZm1pScBJmZmVlTchJkZmZmTclJkJmZ2SDzYomNwUmQmZmZNSUn\nQWZmZtaUnASZmZnVgRdLrD8nQWZmZtaUnASZmZlZU3ISZGZmZk3JSZCZmZk
1JSdBZmZmg8zrBDUG\nJ0FmZmbWlJwEmZmZWVNyEmRmZlYHXieo/uqeBEk6WNKdkjrz6xZJHytsHyHpTEnzJL0o6SJJo0p1\nrC/pCknzJc2RdLKkYaWYnSVNl7RA0n2S9q/SlsMkPSTpFUl/kbTtwJ25mZmZ1VPdkyDgMeBooDW/\nrgP+KGmzvP3HwO7AXsCOwNuAiys752TnSmAlYCywP3AAcHwhZkPgcuBaYCvgVOBsSbsUYvYBTgGO\nBbYB7gSmSFq7f0/XzMzMGkHdk6CIuCIiro6I+/Pr28BLwFhJI4EDgfaIuCEi7gA+D3xI0gdyFROA\n9wD7RcTMiJgCfAc4TNJKOeYQ4MGIOCoiZkfEmcBFQHuhKe3AWRFxXkTcCxwMvJyPb2ZmZkNM3ZOg\nIknDJH0GWBW4ldQztBKpBweAiJgNPApsn4vGAjMjYl6hqilAC7BFIWZq6XBTKnVIWjkfq3icyPts\nj5mZmQ05DZEESXqvpBeBV4HJwH/k3ph1gIUR8UJpl7l5G/nr3Crb6UXMSEkjgLWB4V3ErIOZmVk/\n8jpBjWGlnkMGxb2ksTprksb+nCdpx27iBfTmW6i7GPUyxt+qZmZmQ1BDJEERsQh4ML+dkcf7HAFc\nCKwiaWSpN2gUS3tt5gDlWVyjC9sqX0eXYkYBL0TEQknzgMVdxJR7h96gvb2dlpaWZcra2tpoa2vr\naVczM7Mhr6Ojg46OjmXKOjs769SapRoiCapiGDACmA4sAsYBlwBI2gR4B3BLjr0VOEbS2oVxQeOB\nTmBWIWbX0jHG53Ii4jVJ0/NxLs3HUX5/Wk+NnTRpEmPGjOn7WZqZmTWBah0DM2bMoLW1tU4tSuqe\nBEk6EbiKNFV+DWA/YCdgfES8IOkXwERJzwEvkpKSmyPi9lzFNcA9wPmSjgbWBU4AzoiI13LMT4HD\nJZ0E/JKU3OwN7FZoykTg3JwM3UaaLbYqcM6AnLiZmTU1L5ZYf3VPgki3oM4jJS+dwD9ICdB1eXs7\n6VbVRaTeoauBwyo7R8QSSXsAPyH1Ds0nJS7HFmIelrQ7KdH5KvA4cFBETC3EXJjXBDo+t+nvwISI\neHoAztnMzMzqrO5JUER8oYftrwJfya+uYh4D9uihnhtI0+C7i5lMmp1mZmZmQ1xDTJE3MzMzG2xO\ngszMzAaZ1wlqDE6CzMzMrCn1SxIkabikrSWt1R/1mZmZmQ20mpIgST+WdFD+93DgBmAG8Jiknfuv\neWZmZmYDo9aeoL2BO/O/Pw5sRHqS+yTgxH5ol5mZ2ZDmdYLqr9YkaG2WPpJiN+D3EXEfaSHC9/VH\nw8zMzMwGUq1J0Fxg83wr7GNAZdHBVUkLG5qZmZk1tFoXS/wV6eGmT5Kesv6nXL4d6YnwZmZmZg2t\npiQoIo6TdBewPulW2Kt502LgB/3VODMzM7OBUvNjMyLiIgBJbyqUndsfjTIzMxvKvFhiY6h1ivxw\nSd+R9ATwkqR35vITKlPnzczMzBpZrQOj/xs4ADgKWFgovwvo9oGoZmZmZo2g1iTov4AvRcRvWHY2\n2J2k9YLMzMzMGlqtSdDbgfu7qG/l2ptjZmbWHLxYYv3VmgTdA+xQpXxv4I7am2NmZmY2OGqdHXY8\ncK6kt5MSqf+UtCnpNtke/dU4MzMzs4FSU09QRPyRlOx8FJhPSoo2Az4eEX/qbl8zMzOzRrA86wTd\nBOzSj20xMzNrCl4nqDHUuk7QtpK2q1K+naT3L3+zzMzMzAZWrQOjzyQ9MqPs7XmbmZmZWUOrNQna\nHJhRpfyOvM3MzMysodWaBL0KjK5Svi6wqPbmmJmZNQevE1R/tSZB1wDfl9RSKZC0JvA9wLPDzMzM\nrOHVmgQdSRoT9IikP0v6M/AQsA7w9b5UJOlbkm6T9IKkuZIukbRJKeZ6SUsKr8WSJpdi1pd0haT5\nkuZIOlnSsFLMzpKmS1og6T5J+1dpz2G
SHpL0iqS/SNq2L+djZmZmK4Za1wl6AtiS9ADVe4DpwBHA\n+yLisT5WtwNwOrAdad2hlYFrJL25eEjgZ6RbcOuQbrsdVdmYk50rSVP+xwL7kx7wenwhZkPgcuBa\nYCvgVOBsSbsUYvYBTgGOBbYhPQttiqS1+3hOZmZm1uCWZ52g+aTEZLlExG7F95IOAJ4CWoGbCpte\njoinu6hmAunBrR+JiHnATEnfAX4g6biIWAQcAjwYEZXkabakDwPtLL2F1w6cFRHn5bYcDOwOHAic\nvHxnamZmlnidoMZQcxKUb1ntDIyi1KMUEcdX26eX1iT1/DxbKt9P0ueAOcBlwAkR8UreNhaYmROg\niinAT4AtSD06Y4GppTqnAJPy+axMSry+VziPkDQV2H45zsfMzMwaUE1JkKQvkhKMeaSkpJjTBoXb\nUH2sV8CPgZsi4p7Cpt8AjwD/It2GOxnYhPTAVki3yOaWqptb2HZnNzEjJY0A3gIM7yJm01rOx8zM\nzBpXrT1B3wb+OyJO6s/GAJNJ6wx9qFgYEWcX3t4taQ5wraSNIuKhHursrtNRvYzptuOyvb2dlpaW\nZcra2tpoa2vroWlmZmZDX0dHBx0dHcuUdXZ21qk1S9WaBK0F/L4/GyLpDGA3YIeIeLKH8L/mrxuT\nZqXNAcqzuCrrGM0pfC2vbTQKeCEiFkqaByzuIqbcO7SMSZMmMWbMmB6abGZm1pyqdQzMmDGD1tbW\nOrUoqXWK/O+B8f3ViJwAfYI0sPnRXuyyDal3ppIs3Qq8rzSLazzQCcwqxIwr1TM+lxMRr5Fmub0e\nk2/PjQNu6cv5mJmZ9cSLJdZfrT1B9wMnSBoLzAReK26MiNN6W1Fe76cN2BOYL6nSE9MZEQskvRPY\nlzQF/hnS9PaJwA0RcVeOvYY0Vf98SUeTptCfAJyRkxuAnwKHSzoJ+CUpudmb1PtUMRE4V9J04DbS\nbLFVgXN6ez5mZma2Yqg1CfoS8BKwU34VBdDrJAg4OO9zfan888B5wELS+kFHAKsBj5F6ok58/YAR\nSyTtQRqsfQswn5S4HFuIeVjS7qRE56vA48BBETG1EHNh7k06nnRb7O/AhG6m5puZmdkKqqYkKCI2\n6q8GRES3t+Qi4nHSVPye6nkM2KOHmBtI0+C7i5lMGqBtZmY2ILxOUGOodUwQAJJWkbSppJrXGzIz\nMzOrh5qSIEmrSvoF8DJwN/COXH66pG/2Y/vMzMzMBkStPUHfJw1Q3hlYUCifCuyznG0yMzMzG3C1\n3sb6JLBPRPxFUvHO5t3Au5a/WWZmZmYDq9aeoH8jPeS0bDV6WF3ZzMzMrBHUmgT9jfR09YpK4vMF\n8uKDZmZm1jUvllh/td4OOwa4StLmuY4jJG1Betp6ed0gMzMzs4ZTU09QRNxEGhi9EmnF6PGk52tt\nHxHT+695ZmZmZgOjzz1BeU2gfYEpEfHF/m+SmZnZ0ObFEhtDn3uCImIR6Tlcb+r/5piZmZkNjloH\nRt9GepK7mZmZ2Qqp1oHRk4FTJK0HTCc9sPR1EfGP5W2YmZmZ2UCqNQm6IH8tPi0+AOWvw5enUWZm\nZmYDrdYkqN+eIm9mZtaMvE5Q/dWUBEXEI/3dEDMzM7PBVFMSJOm/utseEefV1hwzMzOzwVHr7bBT\nS+9XBlYFFgIvA06CzMzMuuB1ghpDrbfD1iqXSXo38BPgh8vbKDMzM7OBVus6QW8QEf8Evskbe4nM\nzMzMGk6/JUHZIuBt/VynmZmZWb+rdWD0nuUiYF3gcODm5W2UmZmZ2UCrdWD0H0rvA3gauA74+nK1\nyMzMzGwQ1Dowur9vo5mZmTUVL5ZYf3VPZiR9S9Jtkl6QNFfSJZI2KcWMkHSmpHmSXpR0kaRRpZj1\nJV0hab6kOZJOljSsFLOzpOmSFki6T9L+VdpzmKSHJL0i6S+Sth2YMzczM7N6qikJyknIN6uUf0PS\n7/t
Y3Q7A6cB2wEdJaw5dI+nNhZgfA7sDewE7kgZfX1w47jDgSlLP1lhgf+AA4PhCzIbA5cC1wFak\nWWxnS9qlELMPcApwLLANcCcwRdLafTwnMzOzLnmdoMZQa0/QTsAVVcqvJiUpvRYRu0XE+RExKyJm\nkpKXdwCtAJJGAgcC7RFxQ0TcAXwe+JCkD+RqJgDvAfaLiJkRMQX4DnCYpMotv0OAByPiqIiYHRFn\nAhcB7YXmtANnRcR5EXEvcDBp8ccD+3JOZmZm1vhqTYJWJ60OXfYaMLL25gCwJmmg9bP5fSuph+fa\nSkBEzAYeBbbPRWOBmRExr1DPFKAF2KIQM7V0rCmVOiStnI9VPE7kfbbHzMzMhpRak6CZwD5Vyj8D\n3FNrYySJdOvrpoio1LMOsDAiXiiFz83bKjFzq2ynFzEjJY0A1gaGdxGzDmZmZjak1DpF/gTg/yS9\nizQtHmAc0AZ8ajnaMxnYHPhwL2JF6jHqSXcx6mWM796amZkNMbVOkb9M0ieBY4C9gVeAfwAfjYgb\naqlT0hnAbsAOEfGvwqY5wCqSRpZ6g0axtNdmDlCexTW6sK3ydXQpZhTwQkQslDQPWNxFTLl3aBnt\n7e20tLQsU9bW1kZbW1t3u5mZmTWFjo4OOjo6linr7OysU2uWqrUniIi4guqDo/ssJ0CfAHaKiEdL\nm6eTHscxDrgkx29CGjx9S465FThG0tqFcUHjgU5gViFm11Ld43M5EfGapOn5OJfm4yi/P6279k+a\nNIkxY8b0+nzNzMyaaZ2gah0DM2bMoLW1tU4tSmp9bMa2wLCI+GupfDtgcUT8rQ91TSbdRtsTmC+p\n0hPTGRELIuIFSb8AJkp6DniRlJTcHBG359hrSGORzpd0NOkRHicAZ0TEaznmp8Dhkk4CfklKbvYm\n9T5VTATOzcnQbaTZYqsC5/T2fMzMzGzFUOvA6DOB9auUvz1v64uDSTPKrgf+VXh9uhDTTlrj56JC\n3F6VjRGxBNiDdDvrFuA8UuJybCHmYdJaQx8F/p7rPCgiphZiLiQ99uN44A5gS2BCRDzdx3MyMzPr\nktcJagy13g7bHJhRpfyOvK3XevMIjoh4FfhKfnUV8xgpEequnhvI6w91EzOZNEDbzMzMhrBae4Je\n5Y0DiCHdhlpUe3PMzMzMBketSdA1wPclvT4lStKawPeAP/VHw8zMzMwGUq23w44EbgQekXRHLtua\nNJX8c/3RMDMzM7OBVOs6QU9I2hLYj/Qw0leAXwEdhdlYZmZmZg1redYJmg/8rB/bYmZmZjZoal0n\n6FOktX02IT1S4p/AbyPion5sm5mZ2ZDVTIslNqo+DYyWNEzS74DfkabC3w88SHpS+4WSLsirLJuZ\nmZk1tL72BB1BWmxwz4i4vLhB0p6kcUFHkJ4Eb2ZmZlV4scTG0Ncp8p8HvlFOgAAi4lLgKODA/miY\nmZmZ2UDqaxL0bmBqN9un5hgzMzOzhtbXJOgVYM1uto8EFtTeHDMzM7PB0dck6FbgkG62H5ZjzMzM\nzBpaXwdGnwhcL+mtwI+AewEBm5Gevv4J4CP92kIzMzOzAdCnJCgibpG0D2mRxL1Km58D2iLi5v5q\nnJmZ2VDlBWXqr8+LJUbEJZKmAONJiyUC3AdcExEv92fjzMzMzAZKrc8Oe1nSR4H/iYhn+7lNZmZm\nQ5rXCWoMfV0xer3C232B1XP5TEnr92fDzMzMzAZSX3uC7pX0DHAz8CZgfeBRYENg5f5tmpmZmdnA\n6esU+RbgU8D0vO+Vku4DRgATJK3Tz+0zMzMzGxB9TYJWjojbIuIU0sKJ25AepbGY9LiMByTN7uc2\nmpmZmfW7vt4Oe0HSHaTbYasAq0bEzZIWAfsAjwMf6Oc2mpmZmfW7vvYEvQ34LvAqKYH6m6RppIRo\nDBARcVP/NtHMzMys//UpCYqIeRFxWUR8C3gZ2BY4HQjSCtIvSLqh/
5tpZmY2tHixxPrra09QWWdE\nXAi8Bvw7sBEwua+VSNpB0qWSnpC0RNKepe2/yuXF15WlmLUk/UZSp6TnJJ0tabVSzJaSbpT0iqRH\nJH2jSls+JWlWjrlT0q59PR8zM7PueJ2gxrA8SdCWpDFAAI8Ar0XEnIj4XQ11rQb8nfQA1q6+Na4C\nRgPr5FdbaftvSc8wGwfsDuwInFXZKGkNYArwEOnW3TeA4yR9oRCzfa7n58DWwB+AP0javIZzMjMz\nswZW04rRABHxWOHf712eRkTE1cDVAFKXHYSvRsTT1TZIeg8wAWiNiDty2VeAKyQdGRFzgM+S1jI6\nKCIWAbMkbQN8DTg7V3UEcFVETMzvj5U0HjgcOHR5ztHMzMway/LeDhtMO0uaK+leSZMlvaWwbXvg\nuUoClE0l9Sptl9+PBW7MCVDFFGBTSS2FeqaWjjsll5uZmdkQsqIkQVcB/0Uad3QUsBNpocZKr9E6\nwFPFHSJiMfBs3laJmVuqd25hW3cxXgTSzMxsiKn5dthgyoOvK+6WNBN4ANgZ+HM3u4quxxhVtvcm\nxkPYzMzMhpgVIgkqi4iHJM0DNiYlQXOAUcUYScOBtfI28tfRpapGkRKcuT3ElHuHltHe3k5LS8sy\nZW1tbbS1lcdum5mZNZ+Ojg46OjqWKevs7KxTa5ZaIZOg/DT7twJP5qJbgTUlbVMYFzSO1ItzWyHm\nu5KG51tlAOOB2RHRWYgZB5xWONwuubxLkyZNYsyYMctzSmZm1mSaaZ2gah0DM2bMoLW1tU4tShpi\nTJCk1SRtJWnrXPTO/H79vO1kSdtJ2kDSONLU9ftIg5aJiHvzv38uaVtJHyIt4tiRZ4ZBmvq+EPil\npM0l7QN8FTil0JRTgV0lfU3SppKOA1qBMwb2CpiZWTPxOkGNoSGSIOD9wB2kp9MHKTGZAfwv6eGs\nWwJ/BGaT1vC5HdgxIl4r1LEvcC9pdtflwI3AlysbI+IF0jT6DYG/AT8EjouIXxRibiWtP/Ql0rpF\n/wl8IiLu6e8TNjMzs/pqiNthEXED3SdkH+tFHc+T1gLqLmYmaWZZdzEXAxf3dDwzMzNbsTVKT5CZ\nmZnZoHISZGZmZk3JSZCZmZk1JSdBZmZm1pScBJmZmVlTchJkZmZWB820WGKjchJkZmY2yLxYYmNw\nEmRmZmZNyUmQmZmZNSUnQWZmZtaUnASZmZlZU3ISZGZmZk3JSZCZmZk1JSdBZmZmdeB1gurPSZCZ\nmdkg8zpBjcFJkJmZmTUlJ0FmZmbWlJwEmZmZWVNyEmRmZmZNyUmQmZmZNSUnQWZmZtaUnASZmZlZ\nU3ISZGZmNsgivFhiI3ASZGZmZk2pIZIgSTtIulTSE5KWSNqzSszxkv4l6WVJf5K0cWn7WpJ+I6lT\n0nOSzpa0WilmS0k3SnpF0iOSvlHlOJ+SNCvH3Clp1/4/YzMzM6u3hkiCgNWAvwOHAW9YTFzS0cDh\nwJeBDwDzgSmSVimE/RbYDBgH7A7sCJxVqGMNYArwEDAG+AZwnKQvFGK2z/X8HNga+APwB0mb99eJ\nmpmZWWNYqd4NAIiIq4GrAaSqd0mPAE6IiMtyzH8Bc4FPAhdK2gyYALRGxB055ivAFZKOjIg5wGeB\nlYGDImIRMEvSNsDXgLMLx7kqIibm98dKGk9KwA7t7/M2MzOz+mmUnqAuSdoIWAe4tlIWES8AfwW2\nz0VjgecqCVA2ldSrtF0h5sacAFVMATaV1JLfb5/3oxSzPWZmZjakNHwSREqAgtTzUzQ3b6vEPFXc\nGBGLgWdLMdXqoBcx62BmZmZDyoqQBHVFVBk/1McY9TKmp+OYmZnZCqYhxgT1YA4pERnNsr00o4A7\nCjGjijtJGg6slbdVYkaX6h7Fsr1MXcWUe4eW0d7eTktLyzJlbW1ttLW1dbebmZk1qWiyP607Ojro\n6OhYpqyzs7NOrVmq4ZOgiHhI0
hzSrK9/AEgaSRrrc2YOuxVYU9I2hXFB40jJ022FmO9KGp5vlQGM\nB2ZHRGchZhxwWqEJu+TyLk2aNIkxY8bUeopmZtaEmmmxxGodAzNmzKC1tbVOLUoa4naYpNUkbSVp\n61z0zvx+/fz+x8C3JX1c0vuA84DHgT8CRMS9pAHMP5e0raQPAacDHXlmGKSp7wuBX0raXNI+wFeB\nUwpNORXYVdLXJG0q6TigFThjoM7dzMzM6qNReoLeD/yZdGsqWJqYnAscGBEnS1qVtO7PmsA0YNeI\nWFioY19SsjIVWAJcRJryDqQZZZIm5Ji/AfOA4yLiF4WYWyW1ASfm1z+BT0TEPf1/ymZmZlZPDZEE\nRcQN9NArFRHHAcd1s/150lpA3dUxE9iph5iLgYu7izEzM7MVX0PcDjMzMzMbbE6CzMzMrCk5CTIz\nM7Om5CTIzMzMmpKTIDMzs0EW0VzrBDUqJ0FmZmbWlJwEmZmZWVNyEmRmZmZNyUmQmZmZNSUnQWZm\nZtaUnASZmZlZU3ISZGZmZk3JSZCZmdkgi6h3CwycBJmZmdWFF0usPydBZmZm1pScBJmZmVlTchJk\nZmZmTclJkJmZmTUlJ0FmZmbWlJwEmZmZWVNyEmRmZjbIvE5QY3ASZGZmNsgivE5QI3ASZGZmNsic\nBDWGFSIJknSspCWl1z2F7SMknSlpnqQXJV0kaVSpjvUlXSFpvqQ5kk6WNKwUs7Ok6ZIWSLpP0v6D\ndY5mZtZcnATV3wqRBGV3AaOBdfLrw4VtPwZ2B/YCdgTeBlxc2ZiTnSuBlYCxwP7AAcDxhZgNgcuB\na4GtgFOBsyXtMjCnY2Zmzco9QY1hpXo3oA8WRcTT5UJJI4EDgc9ExA257PPALEkfiIjbgAnAe4CP\nRMQ8YKak7wA/kHRcRCwCDgEejIijctWzJX0YaAf+NOBnZ2ZmTcNJUGNYkXqC3i3pCUkPSPq1pPVz\neSspmbu2EhgRs4FHge1z0VhgZk6AKqYALcAWhZippWNOKdRhZmbWL5wENYYVJQn6C+n21QTgYGAj\n4EZJq5FujS2MiBdK+8zN28hf51bZTi9iRkoasbwnYGZmVuEkqDGsELfDImJK4e1dkm4DHgE+DSzo\nYjcBvVmJobsY9SLGzMysT5wENYYVIgkqi4hOSfcBG5NuYa0iaWSpN2gUS3t25gDblqoZXdhW+Tq6\nFDMKeCEiFnbXnvb2dlpaWpYpa2tro62trTenY2ZmTabZkqCOjg46OjqWKevs7KxTa5ZaIZMgSasD\n7wLOBaYDi4BxwCV5+ybAO4Bb8i63AsdIWrswLmg80AnMKsTsWjrU+FzerUmTJjFmzJiaz8fMzJpL\nsyVB1ToGZsyYQWtra51alKwQY4Ik/VDSjpI2kPRBUrKzCLgg9/78ApiY1/lpBX4F3BwRt+cqrgHu\nAc6XtKWkCcAJwBkR8VqO+SnwLkknSdpU0qHA3sDEwTtTMzNrBkuWwLAV4jfw0Lai9AStB/wWeCvw\nNHATMDYinsnb24HFwEXACOBq4LDKzvH/27v/IC/q+47jzxcHAoIgCRAmhgSDP9BGo0GjFAkQgj/q\njzT9oZmkmE7sxDSZZmqm2nYmMyRpJh01cZomkiZNYpNojbbTGqegRBS42miYoCLFEwxBfnoU5DwE\nDuTuPv3js1/Y27vjjoPb3bt9PWZ2vuzu57v72fd3b/fNZz+7G0K7pOuA7xJbh/YD/wIsTJV5VdK1\nxKTnC8A24JYQQvaOMTMzsxPS1gZDB8oZeBAbED9BCOGYnWtCCIeAv0iG7spsBa7rYTkribfcm5mZ\n9ZvWVidBZeDGODMzs5w5CSoHJ0FmZmY5a22Furqia2FOgszMzHLmlqBycBJkZmaWM3eMLgcnQWZm\nZjlzS1A5OAkyMzPLmZOgcnASZGZmljN3jC4HJ0FmZmY5c5+gcnASZGZmljNfDisHJ0FmZmY5CsF
J\nUFk4CTIzM8tRe3v8dJ+g4jkJMjMzy1Fra/x0S1DxnASZmZnlqK0tfjoJKp6TIDMzsxy5Jag8nASZ\nmZnl6PDh+Ok+QcVzEmRmZpajvXvj55gxxdbDnASZmZnlqqkpfo4bV2w9zEmQmZlZrpwElYeTIDMz\nsxw5CSoPJ0FmZmY5amoCCcaOLbom5iTIzMwsR01NMQEa4jNw4fwTmJmZ5WjLFnjnO4uuhYGTIDMz\ns1y98gpMnVp0LQycBJmZmeVm/36or4fZs4uuiYGToC5J+rykTZJaJD0r6dKi62RHPfjgg0VXoXIc\n8/w55vnLI+bLlsHBg3DDDf2+KusFJ0EZkm4CvgksBC4G1gBLJY0vtGJ2hE8O+XPM8+eY56+/Y37g\nANx9N0ybBmef3a+rsl5yEtTZbcD3Qgg/CSG8DHwWOAB8uthqmZnZQBQC/OIXMG8evPACfP/7RdfI\navwO2xRJw4DpwNdr00IIQdIyYEZhFTMzs9I7dAhefx22b4dNm2DjRli9OvYB2rULLroIHn8crrii\n6JpajZOgjsYDdcDOzPSdwLndfWndOmhr690KQji+CpWtfB7r6Kn8nj3w9NP51SePdZR9G3btin0Z\n+mv5/VE+j3X0Z/mdO2Hx4nLVKY/yAO3tcQjh6Gf2332d19oaj9ddDQ0NcPvt8d+HD3ceDh2ClpbY\np6elJV7e2rcvDs3N8NZbHbdj7Fi48EL4zGfg6qth5sz4kEQrDydBvSOgqz/lEQA339yQb20qr5lZ\ns93FiicAAArSSURBVJ4ruhIV08z8+Y55vpq57jrHvC+krochQ6CuLv67ru7o+JAhcXjjjWYeeug5\n6upg6NDOw7BhMGIEjBwZE5zhw+HUU+MwahSMHh3fDD9hApxxBpx2Wsek5/nni4tJGTU0HDl3jiiq\nDk6COtoNtAHvyEyfSOfWIYAp8eNP+rNO1qXpRVegghzz/DnmfVFr9emLrVsd8wJMAX5ZxIqdBKWE\nEA5LWg3MAx4FkKRk/B+7+MpS4JPAq8DBnKppZmY2GIwgJkBLi6qAQl/T5UFK0o3Aj4FbgVXEu8X+\nCJgWQthVZN3MzMzs5HFLUEYI4eHkmUBfJV4WewG4ygmQmZnZ4OKWIDMzM6skPyzRzMzMKslJ0Anw\nO8b6RtJCSe2Z4aXU/OGS7pW0W9Kbkv5d0sTMMiZLWixpv6RGSXdJGpIpM0fSakkHJW2Q9Km8trFo\nkmZJelTS9iS+nd5UJOmrknZIOiDpCUlnZeaPk/SApGZJTZJ+IGlUpsyFkuqTv4HNkm7vYj1/LKkh\nKbNG0jUnf4uL11PMJd3XxX6/JFPGMT8Okv5W0ipJeyXtlPSfks7JlMnteDLYzwm9jPeKzD7eJmlR\npkx54h1C8NCHAbiJeEfYzcA04HvAHmB80XUr+0B8L9uLwATi4wcmAm9Lzf8u8Y672cT3t/0S+O/U\n/CHAWuIdBRcAVwH/B3wtVWYKsA+4i/igy88Dh4H5RW9/TjG+mtiv7feJj324ITP/r5P99XrgfcAj\nwEbglFSZx4DngEuA3wU2APen5p8GvEa8keA84EZgP/BnqTIzkrh/MfkdvgIcAs4vOkYFxPw+YHFm\nvx+bKeOYH1/MlwALklhcAPxXcuwYmSqTy/GECpwTehnv5cA/Zfbz0WWNd+FBHagD8CzwrdS4gG3A\nHUXXrewDMQl6rpt5Y5ID9sdS084F2oEPJuPXJH8Q41NlbgWagKHJ+J3Ai5llPwgsKXr7C4h3O51P\nyDuA2zJxbwFuTMbPS753carMVUArMCkZ/3Pis7WGpsr8PfBSavxnwKOZdT8DLCo6LgXE/D7gP47x\nnWmO+QnHfXwSwyuS8dyOJ1U8J2TjnUxbDtxzjO+UKt6+HNYHOvqOsSdr00L8FfyOsd47O7lssFHS\n/ZImJ9OnE+9aTMd2PbCFo7G9HFgbQtidWt5SYCzwO6ky2Rc
9LMW/D5LOBCbRMcZ7gV/RMcZNIYT0\nM26XEZ+cflmqTH0IoTVVZilwrqSxyfgM/DukzUkuI7wsaZGkt6XmzcAxP1GnE+O1JxnP5XhS4XNC\nNt41n5S0S9JaSV+XNDI1r1TxdhLUN8d6x9ik/Ksz4DwL/Cnxf7mfBc4E6pO+D5OAt5KTclo6tpPo\nOvb0oswYScNPdAMGuEnEA9ex9t9JxCbqI0IIbcSD3cn4Har4d/IYsen+w8AdxMszS6QjL1ZwzE9A\nEsd/AJ4OIdT6GOZ1PKncOaGbeAM8QHyNwhziy8gXAD9NzS9VvP2coJOru3eMWUoIIf100P+VtArY\nTOzf0N2Tt3sb22OVUS/KVFlvYtxTGfWyTOV+gxDCw6nRdZLWEvthzSFeQuiOY947i4Dzgd68oz2v\n48lgjnst3jPTE0MIP0iNrpPUCDwp6cwQwqYelpl7vN0S1DfH+44xO4YQQjOxA+hZQCNwiqQxmWLp\n2DbSOfbvSM3rrsxEYG8IIfOu58ppJB4sjrX/NibjR0iqA8bRc4zTrUzdlan830lyQthN3O/BMe8z\nSd8Bfg+YE0LYkZqV1/GkUueETLxf66H4r5LP9H5emng7CeqDEMJhoPaOMaDDO8YKeQncQCZpNDCV\n2Fl3NbEjaDq25wDv5mhsnwEuUHyyd82VQDPQkCozj46uTKZXWnLybaRjjMcQ+52kY3y6pItTX51H\nTJ5Wpcp8KDlR11wJrE8S21qZ7O8wH/8OSHoX8Hbi3V7gmPdJckL+KDA3hLAlMzuX40mVzgk9xLsr\nFxOT9PR+Xp54F927fKAOxEs3LXS8Pe91YELRdSv7ANwNfAh4D/E24CeI2fvbk/mLgE3EywTTgf+h\n8y2ta4h9LC4k9i3aCfxdqswU4i2WdxLvBvkc8BbwkaK3P6cYjwLeD1xEvHvjL5Pxycn8O5L99Xri\nbaqPAK/Q8Rb5JcCvgUuJTd7rgZ+m5o8hJq4/JjaL35TE/JZUmRlJ3Gu3a3+ZeMlzMN6u3W3Mk3l3\nERPN9xAP1r8mHvSHOeZ9jvki4l1Fs4itArVhRKZMvx9PqMA5oad4A+8FvgR8INnPbwB+AzxV1ngX\nHtSBPCQ/zKvJD/EMcEnRdRoIA/FWx21J3LYA/wqcmZo/HPg2scnzTeDfgImZZUwmPqNiX/IHdCcw\nJFNmNvF/Cy3EE/yCorc9xxjPJp6I2zLDj1Jlvkw8oR4g3nlxVmYZpwP3E/+H1gT8M3BqpswFwMpk\nGVuAv+qiLn8IvJz8Di8S38VXeIzyjDnxbdmPE1vgDgK/JT6/ZkJmGY758cW8q3i3ATenyuR2PGGQ\nnxN6ijfwLmAFsCvZP9cTH+EwOrOc0sTb7w4zMzOzSnKfIDMzM6skJ0FmZmZWSU6CzMzMrJKcBJmZ\nmVklOQkyMzOzSnISZGZmZpXkJMjMzMwqyUmQmZmZVZKTIDMzM6skJ0FmZglJyyXdU3Q9zCwfToLM\nrBQk3Sppr6QhqWmjJB2W9GSm7FxJ7ZKm5F1PMxs8nASZWVksJ75t/ZLUtFnAa8Dlkk5JTZ8NbA4h\nvHq8K5E09EQqaWaDh5MgMyuFEMIGYsIzJzV5DvAIsAm4PDN9OYCkyZJ+LulNSc2SHpI0sVZQ0kJJ\nz0u6RdJviW9xR9Kpkn6SfG+7pC9m6yTpc5I2SGqR1Cjp4ZO71WZWJCdBZlYmK4C5qfG5ybSVtemS\nhgOXAU8lZX4OnE5sNfoIMBX4WWa5ZwF/AHwMuCiZ9o3kO9cDVxITq+m1L0i6BPgW8CXgHOAqoP4E\nt8/MSsTNwmZWJiuAe5J+QaOICUs9cApwK/AVYGYyvkLSfOB9wJQQwg4ASQuAdZKmhxBWJ8sdBiwI\nIexJyowCPg18IoSwIpn2KWBbqi6TgX3A4hDCfmArsKafttvMCuCWIDMrk1q/oEuBK4ANIYTdxJag\ny5J+QXOAjSGEbcA0YGs
tAQIIITQAbwDnpZa7uZYAJaYSE6NVqe81AetTZZ4ANgObkstmn5A08qRt\nqZkVzkmQmZVGCGEjsJ146WsuMfkhhPAasSVmJqn+QICA0MWistP3dzGfbr5bq8s+4APAx4EdxFao\nNZLG9HqDzKzUnASZWdksJyZAc4iXx2rqgWuAD3I0CXoJeLekM2qFJJ0PjE3mdec3QCupztaSxhH7\n/hwRQmgPITwVQvgb4P3AFODDfdgmMysh9wkys7JZDtxLPD6tTE2vB75DvIy1AiCEsEzSWuABSbcl\n8+4FlocQnu9uBSGE/ZJ+CNwtaQ+wC/ga0FYrI+la4L3JepuAa4ktSOs7L9HMBiInQWZWNsuBEUBD\nCGFXavpKYDTwcgihMTX9o8C3k/ntwGPAF3qxntuJ/Y8eBd4EvgmkL3W9QbyjbGFSn1eAjyd9jsxs\nEFAI3V4SNzMzMxu03CfIzMzMKslJkJmZmVWSkyAzMzOrJCdBZmZmVklOgszMzKySnASZmZlZJTkJ\nMjMzs0pyEmRmZmaV5CTIzMzMKslJkJmZmVWSkyAzMzOrJCdBZmZmVkn/D96lI6Vndy41AAAAAElF\nTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkoAAAGcCAYAAAAmrI82AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHFW5//HPNwGiLAlcuQkgKCAGQWVJQMhFAt5IkEXU\nC4oDekFwYZU7iqAIPxAUBSVhVxCQRR1kuSh7MHCFAMFAgggSguyLJBDABMISkjy/P85pqBRdMz2d\nkZ6ZfN+vV786feqpc05VTzJPTp06pYjAzMzMzN5uQKs7YGZmZtZbOVEyMzMzq+BEyczMzKyCEyUz\nMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMrOWkPSUpLMLn8dIWiTp\nP96Btn8o6Y3C54G57XH/6rZze1/N7a3xTrTXLEnflfSIpAWSprS6P42S9IF8fvdodV+s73OiZEsV\nSXvlf0DrvY5vdf+WMvWen9TtZypJ+r6kTzfR9qLuttVdnfQtaOJY30mSdgSOB/4P2Bs4qqUdMmuR\nZVrdAbMWCNI/+o+Vyu9757tiNRFxo6R3R8T8bu56JHARcFU39jkaOLab7TSjqm/nARc1cazvpE8A\nbwBfDT8U1JZiTpRsaXV9RExrNFiSgOUi4vV/YZ+Wev/qxEHS8hHxSkQs4h0YUaqSE4/enCQBDAPm\n9cYkyX8f7Z3kS29mJcX5KpK+LOlvwGvAmLxdkr4l6W+SXpP0jKQzJQ0u1SNJ/y/PxXlZ0kRJH5L0\nZGluzmLzZQrldeexSNpJ0qRc5xxJV0r6UCnm15JelLRm3v6SpGcl/aROO5LULumvkl7NcddK2iRv\nv03SXRXn6mFJnY7kVJ2HOnFvm6Mkabik/5U0M/ftCUm/kbRC7XsClgNq52pR7dzm87oo1/E7SS+S\nLiNVnvO87cuSZuT2ppTnTOVz+/c6+71ZZwN9q/puDy78XD0t6dQ6P1e3Spom6cOS/k/SK/ncfquz\n76Gw/zKSjs7f3WtKc5COlbRsqe97AkNyPxeqYr5P/tl5Q9IKhbLD834/KZQtk7//YwtlK0oan/9O\nvCZpuqT/KdXf1d/HVSRdKOmfkl6QdC6w2DnLcatLuiCfq9ck/UPSFZLWbOS82dLLI0q2tBoi6T3F\ngoh4vhQzFvgicAbwAvBELj8PaMvvJwPrAgcDG0vaOo9WQJrfcThwJTABGAncALy71E7VfJW3lUva\nGzgXuBY4DFgBOACYJGnTiHiqsO8yub1JwLfz8XxH0t8j4txCtReSfileBZxN+uU+GtgC+Evefqak\n4RHxYKEvo4B1gO/V6XtRo+eh1u9a/YNy3ADSeZ4F
rAl8GhgcEfMkfQn4FXBrPi8AD5Xq+l/gAeC7\nhbKqcz4G2AM4lXTZ6UBggqTNImJGF/u+WR4RCxvoW/m7/SFwBHA96WduA9J3O7L0cxXAqsB1wKXA\nxcAXgJ9KuicibqzTt6Lz8zFeTPrZ2JJ0iXB9YPdC3w8ANga+Dgi4raK+SaTvaCvS9wXwcWAhsHUh\nbiTpO78lH6+Aa/J+vwT+CuwAjJO0ekQcXmrnbX8fcx1XkX5WzwRmALuSznv5O/o9sB7pu32CNGI2\nlvQz9RRmVSLCL7+WmhewF+mSS/m1sBAzMJfNB9Yr7b9t3rZrqXyHXL5b/jw07395Ke4nOe7sQtlx\nwPw6fd2X9Mtmjfx5JeCfwGmluGG5/PRC2UV538NKsX8Bbi983i7358ROztnKwKvAsaXyM3K77+pk\n3+6chzG5z/+RP4/MMZ/u4jt9tVhP6bwuAs6v2Da/8Ln2nS8APlIofz9p9OLi0rl9sKs6u+hb+bsd\nls/TlaW4b+a4PQtlk3LZFwply5ESyd92ca5G5OM8o1Q+Lte5Vek4X2jg79RA4CXguELZC6RE7LXa\nzwfwnXyMK+bPu+a+HFqq73JSkvq+Bv4+1ur4ZqFsACk5XQjskcv+rRznl1+NvnzpzZZGAewPfLLw\n2q5O3I0R8VCpbDfSL4E/SXpP7QXcRfql+Ikctz3pH/jTSvufvAT9/hQpWbq41PZC4M5C20Vnlz7f\nShoBq9mVlBwcV9VoRPwTuJo0CgGkyyHA50kJ0Gud9HkszZ+Hf+b3HSS9q4H4egL4RTfiJ0XEm5P6\nI+Jx0ojFp5psv1Hbkc5T+bycBbwC7FQqnxMRl9Q+RJrbdSeLf7f17Eg6J+VlEE4ijRqV2+lSRCwE\nJpNGIZG0ETAE+DGwLGm0B9Io0z0R8XL+vAMp+TmjVOU40rkon/N6fx93AF6n8HMeaeTt9Hw8Na+Q\nkq9PSBrSzUO0pZwTJVta3RkRNxVfdWIeq1P2QdL/Tp8rvWYB7yKNoAC8L78v9g97RMwk/e+7GeuR\n/vGfVGr7WeA/C23XvJyTnKIXgVUKn9cFnoqIrvp0IbCOpC3z508B7yGNOnTm/fm92+chIh4GTgG+\nATwv6TpJ+0taqYs2yx7tRmz5FzHAg8BKklaps62n1M7Tg8XCSJOVHy1sr3myTh3l77aqnQX53Bbb\neZr0fZTbadStwOZ5ntPWwJMRcQ/pTtLa5betSD+7xb48FRGvluqaXthe9Fiddt8PPF0nWZ9R/JC3\nHwHsDDwr6U+SDpVU/jtj9jaeo2RWrfwPOKT/XPwD+DKL/4+15tn8XtvWyB1DVTED67QdpPlRs+vE\nlycnL6yoVxV/7sx1uc0vAXfk96cj4k9d7Ned8/A2EdGeJ+d+hjQ6dTpwuKQtc7LViHrfY3eUz1Gj\n39eStNGVRr7b7m7vbh+KJpGWXNiCNHI0qVC+taQPk/6DccsStFfvexT1v4+31R0RJ0m6AvgsacT3\nh8D3JG1THEU0K/OIkln3PEyaSHtreUQqv2r/4D6W34cXd5a0GunyWdGLwEBJy5fK167TNsCzFW1P\novseAtYs31lVFhELyJOGJa1MmlD9mwbqfyy/N3Ieqtq+LyJ+FBHbANuQRuu+XgxppJ4GfbBO2XDg\npYh4MX9+kTRvq2ztOmWN9u2x/L5+sVDScrnexxusp5F2lpH0gVI7awArLkE7d5Au4Y4mjSDVfhZv\nAf6DdFk4SCNPxb6sKak8qX+D/N5IX2p1lC/Nrl8nloh4JCLGRcT2wEdJk8sbulvQll5OlMy65xLS\nxNkjyxvy7c+1hOOPpP/1H1wKa69T58Ok/wGPLtS1ImnUqug64GXg+3mOULn9VRs8hqLLSSPLjay6\nfBEpSTyL9AumkUSpO+dhMZIGSyr/G3Uf6RfuoELZPOonLs34eJ5jU+vD2qTLNdcXYh4G3iNpg0Lc\ne0nJY1mjfaud
p0NK5d8g3dl4dQN1NOJa0s/a/5TKv006r9c0U2m+fDaN9DO7OouPKK0AHATMiIji\nSOi1pL9LB5Sqayedi+saaPpa0s/CN2oF+e/GQSx+B+W7812URY+Q/j4NKsStJmn9Oj93thTzpTdb\nGjV9iSEibsqXgo6UNAKYSPqf9HDSRO/9SXcuzZI0HjhU0pWkf/Q3I00cf6FU7XXA08D5kn6Wy/YB\nngHeXGcnIuZIOoi0LME0SReTLoe9nzQJ9//o5v+OI2KipA7gW0prG91AuoS0NTAhIoqTZO+SNJ00\nifuvjVyu6OZ5gMW/m+2A8ZIuBf5Omhi8F+kS4/8W4qYCY/P6O88AD0dE3XWfGnAfcIOk00jf6wH5\n/QeFmN+Sljy4MsetCOxHWoJg41J9DfUtn6cTgCMkXUtKjDbI9U4mjeYtsYiYJuk3wAH5RoBJwCjS\npdRLIqJqCYBGTAIOBZ6PiOm5vWckPUz6+/HLUvwVpBGnEyStx1vLA+wE/DQi6s3DKruCNJr1szxK\nVlseoDw6uyFwvaRLgPtJidhupHl2HYW4n5FuWliTdIndzMsD+LV0vUi/aBcCIzqJGZhjTuok5muk\nu4xeJl2KuRv4ETC0FPf/SEnQy6RRg/VJE3HPLsWNIP1CfJX0P90DKd1CXojdljTC8WKudwZwDrBJ\nIeYi0i+scr+PA14vlYn0C+7+3P5M0p1eG9XZ/7u5T9/q5nmvdx6eAM4qxJSXB1g3H9ffSSMzz+Z9\nR5fq/hDwp1z3wtq5zce6kLTmUqfnofidk5KGB/O5mFLrT2n/scC9pNvf/0Zax6je8gBVfav6bg/M\n9b2Wz9cpwEqlmEnA1Dp9uog0atPVdzEwfx8P53YeJSWCy9Sp720/Q53U++l8TFeUys+jtMRBYdsK\npLvcnsp9eQA4pDt/H0kT2C8k3SX5PGnNqk1ZfHmAVUl3Xt4PzCUl6bcBn61zzAvK34tfS/dLET15\ned/MuiLpSeC6iPh6l8G9jKRvk9ZAel9EPNPq/piZ/av5OqyZdcc+pPVsnCSZ2VLBc5TMrFNKz/Da\nhTSv6EP4LiEzW4o4UTJ751U9K6y3Wo10h9sLpMeYTGhxf8zM3jGeo2RmZmZWwXOUzMzMzCo4UTIz\nMzOr4ETJzFpO0g8llZ9V9073YaCkRZLG9WCdY3Kdu/RUnd1o+9eS/v5Ot2vW3zhRMuvlJO2Vf9nW\nXq9KmiHptH709PO+NsG9O1p1XAEsalHbZv2G73oz6xuC9Dy2x4B3kZ7Qvj+wg6SPRMRrLeybda7p\nR+Ysob1b2LZZv+FEyazvuD4ipuU/nyfpBdIDRD8D/K513eqapOUj4pVW92NpEhELW9Guv2vrb3zp\nzazvuok0YrBOrUDSOpIulfS8pHmSJkvasbiTpOcKD99FyT8lvSFpcKH88Fy2fKFsfUmX5fpflXSn\npE+X6q9dKhwt6UxJs0jPt+sWSftKulHSrNzWfZK+Voo5RdLMUtnPc/v7FcrWyGX7NNj2l/PlzVcl\nTZH0H3Vi3ivpfEkzJb0m6V5Je9WpLoABko6S9JSkVyT9UdI6pfq2yd/dE7m+xyX9rPjUe0nflbRQ\n0hrlRnLsq5JWyp/fNkdJ0oqSxkt6MrcxPT+wtxjzgXyu9iiV1+ZwHVEo+2EuGy7pd5JeJD2c2azf\ncKJk1netl9+fB8jzlSYD2wGnA0cAg4CrJH2msN9twOjC542AWoK0VaH848C02uiApA+TntS+PvBj\n0grdLwO/L9VfcyZpJe8fkJ4P1137kx4Q/CPg26SHxJ5VSpYmAf8uaXip3wuBrQtlo0kJy6QG2h0D\n/BS4gPTw2KHABEnr1wIkrUZ6YO42wKnAIbmvv5J0QKk+kS6b7gSckF//QXqQa9EXSN/X6cBBpAcA\nH0J6qGzNxbm+z9fp927AtRHxUv682LwvSQKuAQ4GriaNRv4dGCfphM5OSCdq9f
8v6eG13yU9lNas\n/2j1U3n98suvzl/AXqRf/J8A3gO8F9gdeI6UqKye48bnuFGFfVcgPSX+4ULZt4H5wAr580GkX/KT\ngeMLcS8APyt8ngjczdufMn8r8ECpv4uAP5EXtW3gGI8D5pfKBtWJ+yMwvfB5WG5r3/x5lXwOLgae\nKMSdDszsog8Dc10LgI8Uyt9PerL9xYWy84EngCGlOi4BZgPL5s9jcp33AAMLce25n8O7ON7v5/6s\nXij7M3B7KW5UbucLhbKLgAcLn3fNMYeW9r0ceIP0oGOAD+S4PSrOzxGl720RcH6r/5745de/6uUR\nJbO+QcCNpOToSeC3wFzgs/HWA2p3AKZExOTaThExDzgbWFvShrl4Eml+Yu1y0ta5bFL+M5I2AlbO\nZUhahZSoXQoMkfSe2gu4AfigpNUL/Q3glxHR9B1fEfH6mwcvDc5t3QwMl/TuHDMLeIi3Rsi2Bl4H\nTgLWlPT+0jE2YlJE3Ffox+PAVcCncl8EfA74A7BMnXOxCrBJqc5zY/E5Q5NI3+m6Fce7fK7v9hxX\nrO93wBaS3lco2x14hTRSVGUHUoJ8Rql8HCkJ+lQn+3YmgF80ua9Zr+dEyaxvCNKlqE8C2wIbRsQH\nImJiIeb9wIw6+04vbAeYRvqlWrs09XHeSpQ2k7Rc3hak0SJIl/lEGkF4rvQ6JseUlyp4rPhB0rKS\nhhVfnR2wpK0l3STpZeCfua1j8+YhhdBbS8cyBbgLmANsLWkI8BEaT5QeqlP2ILBSThhXA1YCDuDt\n5+LsHF8+F+U5Wi/m91VqBZLeL+lCSc+TRgqfIyXHsPjxXpLfv1Ao2xW4OjqfRP1+4KmIeLVUXv75\naMajS7CvWa/mu97M+o4746273poWEQsk/RkYLekDwOrALaRfzMsCW5ASjukR8Xzerfafqp8BVQ/F\nLScY5V/Io0mXzoKUdIWktSLiH+WKJH0wx95Hukz1JGk0ZBfSHJvif/ImAXtJWouUME2MiJB0W/5c\nS0puqeh3I4q32dfavgD4dUX8PaXPVXegCdJEadKlzZWA40kJ7yvA+0hzlN483oh4StJkUqL0M0lb\nky7HXtyNY+hM1SjgwE72KX/XZv2GEyWz/uNx0kTrsg0K22smAYeRJn4/FxEPAkj6Gymh2Zp0uanm\nkfz+RkTc1GT/ppJGxIqeq4jdhZS07ZQvr5H7t32d2NpI0fbACODo/PkW4CukROkl3p68VPlgnbLh\nwEsR8aKkucA8YMASnIuyTUhzg9oi4s2lHiRVXQ67GDhF0rqky24vAdd10cZjwMclvbs0qlT++agl\nliuX9l+SESezPsuX3sz6j2uBj0naolYgaQXg68CjEXF/IXYSaeHKQ3jr8hr5z18mjTK9eakqIp4j\nTc7+Rr7jazGSVu2qcxHxz4i4qfSqemxJbQTmzX+j8mWv/65T70PALNIk9QGkeT21Y1yfNJ/o9m7M\nl/p4nqNVa3dtYGfg+tzeQuAK4AuSNijvXOdcNNJuveMV6fupt/+l5AnXpMtuVxbnOFW4FliOdMmw\nqDax/DqAiHiRdKlzdCnuoIq+1CVpiNJyEis2uo9Zb+QRJbO+oZHLJj8B2oDrJZ1Kumttb9JIwH+V\nYieT7qYaDpxVKL+FNBeq3q30B+ayeyX9kjTKNIx0x9V7gU272d/OTCDdRn9tbmsw8DXgGd4+/wdS\ngrcbaTmDl3PZnaRLQuuR7lJr1H3ADZJOI52jA/L7Dwoxh5ESiSm5f9OBfwM2I43GFZPJRs7F30jz\nfE7OE9BfzsczuF5wRMySNAn4DrAijS04egXp+z1B0nrAX0kTvHcCfhoRxXlU5wCHSppDmtO2LWnE\nqzvf6xeBn+f3S7qINeu1PKJk1jd0+T/5iHiWlLTcQPrf//Gk29p3jogrS7GvkG71L07YhpQIBenW\n+idL+0wnJQJXk5YAOB34Bmk04lgW18zdbm
/uk9vajfRv1M+ArwKnkdZmqqfW7+Io2ALSrfSNrp9U\n68ONwKGkYzyGNFo1NvepVvdMYHPSPKX/yn37JimxObzquKrK88jazqTk5QjgSFLy9JVO+vo7UpL0\nT6rnjRXbCFJSdCrwadJyEsOBb0XEd0v7HU2aG/UFUsK6IPevu8/k66/P77OliJbg7l0zMzOzfq3X\njShJ+l5eEn9coWyQpDMkzZb0ktIjFIaW9ltL0jVKj22YKelESQNKMdtKmpqX7n9QdR43IOlASY/m\nRwHcIWnz0vYu+2JmZmb9Q69KlHJS8jXefnfKyaQh411J8wLWIK0mW9tvAGmi4jLAlqQh870pXA7I\nEzKvJg2rbwycApwjabtCzO6kheqOJs23uIf06ILi5MxO+2JmZmb9R6+59JbvjJhKmkh6FHB3RHxL\n6SGdzwFfjIgrcuz6pMmTW0bEFEk7AFeSlvmfnWO+QZrc+u953ZgTgB0iong3SwfpEQQ75s93AH+O\niEPyZ5HWbzk1Ik5spC//0pNkZmZm76jeNKJ0BnBVnXVJNiONFNVWqCUiZpCeszQqF20J3FtLkrIJ\npNVsP1yIKa5iXIsZBWnVYGBkqZ3I+9TaaaQvZmZm1k/0ikRJ0hdJC659r87mYaSHZc4tlc/irVtw\nV8ufy9tpIGawpEHAqqSVZ+vF1OpopC+Lyc9sGiFp+XrbzczMrL7e8Du05esoSVqTNO9nu04Wn6u7\nK43detpZjBqM6aqdzmI2AW4DpuVnVhVdT/VtvWZmZkuT7Xn7w5lXJK24vxVvLSb7jmp5okS63PXv\nwNQ8JwjSyM5oSQeRTtogSYNLIzlDeWv0p7amSdGwwrbae/khnEOBuRExX9Js0now9WKK7SzXRV/K\n1s7vI+psG01a68bMzMyqrc1SnChNBD5aKjufNEH6J8DTwBvAGNLKskgaTnpYZO2kTQaOkLRqYZ7S\nWNLTw6cXYnYotTM2lxMRb0iamtu5Mrej/PnUHD+VtPBavb5Mrji+xwB+/etfs8EGb3vaQb/S3t7O\n+PHjW92Nf7ml5Thh6TlWH2f/4uPsP6ZPn86XvvQlyL9LW6HliVJEzAOKz6BC0jzg+dpKuJLOBcZJ\nqj3c8lTgtoi4M+9yQ67jIkmHk55TdRxweuFy3i+Ag/Ldb+eRkp3dgB0LTY8DLsgJ0xTSM5CWJz/+\nICLmdtKXqjveXgPYYIMNGDGi3qBS/zFkyJB+f4yw9BwnLD3H6uPsX3yc/dJrrWq45YlShfJ8n9pD\nGy8DBpHm9hz4ZnDEIkk7k54rdDvpyd7n89ZTxImIxyTtREqGvgk8BewbERMLMZfkNZOOJV2C+wuw\nfX4gaEN9MTMzs/6jVyZKEfGfpc+vAwfnV9U+T5KeRdRZvTeT5kR1FnMm1c+TaqgvZmZm1j/0iuUB\nzMzMzHojJ0rWY9ra2lrdhXfE0nKcsPQcq4+zf/FxWk/qNY8w6a8kjQCmTp06dWmadGdmZrbEpk2b\nxsiRIwFGRsS0VvTBI0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCi\nZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVw\nomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlV\ncKJkZmZmvcr//A987Wut7kWyTKs7YGZmZlb01FPw8sut7kXS8hElSftJukfSnPy6XdKnCtv/JGlR\n4bVQ0pmlOtaSdI2keZJmSjpR0oBSzLaSpkp6TdKDkvaq05cDJT0q6VVJd0javLR9kKQzJM2W9JKk\nyyQN7elzYmZmZr1DyxMl4EngcGBkft0E/EHSBnl7AGcDw4DVgNWBw2o754ToWtLo2JbAXsDewLGF\nmLWBq4
EbgY2BU4BzJG1XiNkdOAk4GtgUuAeYIGnVQl9PBnYCdgVGA2sAly/pCTAzM7PeqeWJUkRc\nExHXR8RD+XUk8DIp6al5JSKei4hn86s4ILc98CFgz4i4NyImAEcBB0qqXVrcH3gkIg6LiBkRcQZw\nGdBeqKcdOCsiLoyIB4D9gFeAfQAkDc5/bo+ImyPibuArwFaSPtbDp8XMzMx6gZYnSkWSBkj6IrA8\ncHth056SnpN0r6TjJb27sG1L4N6ImF0omwAMAT5ciJlYam4CMCq3uyxpNOvG2saIiLzPqFy0GWnU\nqhgzA3iiEGNmZmb9SK+YzC3pI8Bk4F3AS8DnchIC8BvgceAfwEbAicBwYLe8fTVgVqnKWYVt93QS\nM1jSIODfgIEVMevnPw8D5kfE3DoxqzV0oGZmZtan9IpECXiANHdoZdL8nwsljY6IByLinELc3yTN\nBG6UtE5EPNpFvdHJNjUY09n2RmPMzMysD+oViVJELAAeyR+n5Tk/h5DmFpX9Ob+vBzwKzAQ2L8UM\ny+8zC+/DSjFDgbkRMV/SbGBhRUxtlGkmsJykwaVRpWJMpfb2doYMGbJYWVtbG21tbV3tamZm1u91\ndHTQ0dEBwF13wYIF0N4+p8W96iWJUh0DgEEV2zYljeA8kz9PBo6QtGphntJYYA4wvRCzQ6mesbmc\niHhD0lRgDHAlgCTlz6fm+KnAglx2RY4ZDryvVk9nxo8fz4gRI7oKMzMzWyoVBw922y2to3T88dMY\nOXJkS/vV8kRJ0o+A60jLBKwE7AlsA4yVtC6wB+n2/+dJl+fGATdHxH25ihuA+4GLJB1OWj7gOOD0\niHgjx/wCOEjSCcB5pGRnN2DHQlfGARfkhGkK6S645YHzASJirqRzgXGSXiTNpToVuC0ipvToSTEz\nM7NeoeWJEuly14WkBGcO8FdgbETcJGlN4JOky3ArkJKpS4Ef1XaOiEWSdgZ+TrpTbh4puTm6EPOY\npJ1IydA3gaeAfSNiYiHmkrxm0rG5T38Bto+I5wp9bSddoruMNOJ1PXBgj50JMzMz61VanihFxFc7\n2fYUsG0DdTwJ7NxFzM2kJQA6izkTOLOT7a8DB+eXmZmZ9XO9ah0lMzMzs97EiZKZmZlZBSdKZmZm\nZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZm\nZtbrSK3uQeJEyczMzHqViFb34C1OlMzMzMwqOFEyMzMzq+BEyczMzKyCEyUzMzOzCk6UzMzMzCo4\nUTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMjMzM6vgRMnMzMysghMlMzMzswpOlMzMzMwq\nOFEyMzMzq+BEyczMzKyCEyUzMzOzCi1PlCTtJ+keSXPy63ZJnypsHyTpDEmzJb0k6TJJQ0t1rCXp\nGknzJM2UdKKkAaWYbSVNlfSapAcl7VWnLwdKelTSq5LukLR5aXuXfTEzM7P+o+WJEvAkcDgwMr9u\nAv4gaYO8/WRgJ2BXYDSwBnB5beecEF0LLANsCewF7A0cW4hZG7gauBHYGDgFOEfSdoWY3YGTgKOB\nTYF7gAmSVi30tdO+mJmZWf/S8kQpIq6JiOsj4qH8OhJ4GdhS0mBgH6A9Im6OiLuBrwBbSfpYrmJ7\n4EPAnhFxb0RMAI4CDpS0TI7ZH3gkIg6LiBkRcQZwGdBe6Eo7cFZEXBgRDwD7Aa/k9mmwL2ZmZtaP\ntDxRKpI0QNIXgeWByaQRpmVII0EARMQM4AlgVC7aErg3ImYXqpoADAE+XIiZWGpuQq0OScvmtort\nRN6n1s5mDfTFzMzM+pFekShJ+oikl4DXgTOBz+VRndWA+RExt7TLrLyN/D6rznYaiBksaRCwKjCw\nIqZWx7B7GWMFAAAgAElEQVQG+mJmZmY9QGp1D5Jlug55RzxAmju0Mmn+
z4WSRncSLyAaqLezGDUY\n01U7jfbFzMzM+phekShFxALgkfxxWp7zcwhwCbCcpMGlkZyhvDX6MxNY7O400uhPbVvtfVgpZigw\nNyLmS5oNLKyIKbbTVV8qtbe3M2TIkMXK2traaGtr62pXMzOzfq+jo4OOjg4A7rwTFi2C9vY5Le5V\nL0mU6hgADAKmAguAMcAVAJKGA+8Dbs+xk4EjJK1amKc0FpgDTC/E7FBqY2wuJyLekDQ1t3Nlbkf5\n86k5vrO+TO7qgMaPH8+IESMaO3ozM7OlTHHwYNdd4dVX4Yc/nMbIkSNb2q+WJ0qSfgRcR1omYCVg\nT2AbYGxEzJV0LjBO0ovAS6TE5baIuDNXcQNwP3CRpMOB1YHjgNMj4o0c8wvgIEknAOeRkp3dgB0L\nXRkHXJATpimku+CWB84H6KIvU3r4tJiZmS21ohdNaGl5okS63HUhKcGZA/yVlCTdlLe3ky6LXUYa\nZboeOLC2c0QskrQz8HPSKNM8UnJzdCHmMUk7kZKhbwJPAftGxMRCzCV5zaRjc5/+AmwfEc8V+tpp\nX8zMzKx/aXmiFBFf7WL768DB+VUV8ySwcxf13ExaAqCzmDNJd9013RczMzPrP3rF8gBmZmZmvZET\nJTMzM7MKTpTMzMzMKjhRMjMzM6vgRMnMzMysghMlMzMzswpOlMzMzMwqOFEyMzMzq+BEyczMzKyC\nEyUzMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMjMzs15HanUPEidK\nZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZWa8S0eoevMWJkpmZmVkFJ0pm\nZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVWp4oSfqepCmS5kqaJekKScNLMX+StKjw\nWijpzFLMWpKukTRP0kxJJ0oaUIrZVtJUSa9JelDSXnX6c6CkRyW9KukOSZuXtg+SdIak2ZJeknSZ\npKE9eU7MzMysd+iRREnSQEmbSFqlid23Bk4DtgA+CSwL3CDp3YWYAM4GhgGrAasDhxXaHwBcCywD\nbAnsBewNHFuIWRu4GrgR2Bg4BThH0naFmN2Bk4CjgU2Be4AJklYt9OVkYCdgV2A0sAZweRPHbWZm\nZr1cU4mSpJMl7Zv/PBC4GZgGPClp2+7UFRE7RsRFETE9Iu4lJTjvA0aWQl+JiOci4tn8ermwbXvg\nQ8CeEXFvREwAjgIOlLRMjtkfeCQiDouIGRFxBnAZ0F6opx04KyIujIgHgP2AV4B98rEOzn9uj4ib\nI+Ju4CvAVpI+1p3jNjMzs96v2RGl3UijLQCfBtYhJSrjgR8tYZ9WJo0gvVAq31PSc5LulXR8acRp\nS+DeiJhdKJsADAE+XIiZWKpzAjAKQNKypOTsxtrGiIi8z6hctBlp1KoYMwN4ohBjZmZm/USzidKq\nwMz85x2BSyPiQeA84KPNdkaSSJe2bo2I+wubfgN8CdgWOB74MnBRYftqwKxSdbMK2zqLGSxpEOmY\nBlbE1OoYBsyPiLmdxJiZmVk/sUzXIXXNAjaU9AzwKeCAXL48sHAJ+nMmsCGwVbEwIs4pfPybpJnA\njZLWiYhHu6izsyfGqMGYrp4600iMmZmZ9THNJkq/Ai4BniElCH/M5VsADzRToaTTSaNTW0fEM12E\n/zm/rwc8Shrd2rwUMyy/zyy8DyvFDAXmRsR8SbNJSV69mNoo00xgOUmDS6NKxZi62tvbGTJkyGJl\nbW1ttLW1dbabmZnZUqGjo4OOjg4A7rwTFi2C9vY5Le5Vk4lSRBwj6T5gLdJlt9fzpoXAT7pbX06S\nPgNsExFPNLDLpqQErZZQTQaOkLRqYZ7SWGAOML0Qs0OpnrG5nIh4Q9JUYAxwZe6X8udTc/xUYEEu\nuyLHDCdNPp/cWYfHjx/PiBEjGjg0
MzOzpU9x8OBzn4P58+G446YxcmT53q53VrMjSkTEZQCS3lUo\nu6C79eT1kNqAXYB5kmojOnMi4jVJ6wJ7kG7/f550a/844OaIuC/H3gDcD1wk6XDS8gHHAadHxBs5\n5hfAQZJOIM2lGkOalL5joTvjgAtywjSFdBfc8sD5+fjmSjoXGCfpReAlUhJ1W0RM6e6xm5mZWe/W\n7PIAAyUdJelp4OWczCDpuNqyAd2wHzAY+BPwj8LrC3n7fNL6ShNIo0M/BS4lJVYARMQiYGfSiNbt\nwIWk5OboQsxjpPWPPgn8hZQE7RsREwsxlwDfJq2/dDewEbB9RDxX6G87aT2mywp93rWbx2xmZmZ9\nQLMjSt8nLep4GPDLQvl9wP8A5zZaUUR0mqxFxFOku926qudJUrLUWczNvH19pnLMmaRJ5VXbXwcO\nzi8zMzP7F5C6jnknNLs8wH8DX4+I37D4XW73kNZTMjMzM+vzmk2U3gs8VFHfss13x8zMzKz3aDZR\nup/0jLay3Uhze8zMzMz6vGbnKB1LujvsvaRk678krU+6JNfpPCEzMzOzvqKpEaWI+AMpIfokMI+U\nOG0AfDoi/tjZvmZmZmZ9xZKso3QrsF0P9sXMzMysV2l2HaXNJW1Rp3wLSZstebfMzMzMWq/Zydxn\nkB5fUvbevM3MzMysKdGLHjPfbKK0ITCtTvndeZuZmZlZn9dsovQ6MKxO+eqkh8aamZmZ9XnNJko3\nAD+WNKRWIGll4HjAd72ZmZlZv9DsXW+HArcAj0uqLTC5CTAL+HJPdMzMzMys1ZpKlCLiaUkbAXsC\nGwOvAr8COiLijR7sn5mZmVnLLMk6SvOAs3uwL2ZmZma9StOJkqThwLbAUEpznSLi2CXrlpmZmVnr\nNZUoSfoa8HNgNjATKK54EKRHmpiZmZn1ac2OKB0JfD8iTujJzpiZmZn1Js0uD7AKcGlPdsTMzMys\nt2k2UboUGNuTHTEzMzPrbZq99PYQcJykLYF7gcWWBIiIU5e0Y2ZmZmat1myi9HXgZWCb/CoKwImS\nmZmZ9XnNLji5Tk93xMzMzKxGanUPkmbnKAEgaTlJ60tqej0mMzMzs96qqURJ0vKSzgVeAf4GvC+X\nnybpuz3YPzMzM7OWaXZE6cekZ7xtC7xWKJ8I7L6EfTIzMzPrFZq9ZPZZYPeIuENScVXuvwEfWPJu\nmZmZmbVesyNK/w48W6d8BRZ/nImZmZlZn9VsonQXsFPhcy05+ioweYl6ZGZmZtZLNJsoHQEcL+nn\npMt3h0j6I/AV4PvdqUjS9yRNkTRX0ixJV0gaXooZJOkMSbMlvSTpMklDSzFrSbpG0jxJMyWdKGlA\nKWZbSVMlvSbpQUl71enPgZIelfSqpDskbd7dvpiZmVn/0FSiFBG3kiZzL0NamXssMAsYFRFTu1nd\n1sBpwBbAJ4FlgRskvbsQczJpBGtXYDSwBnB5bWNOiK7N/dkS2AvYGzi2ELM2cDVwY+77KcA5krYr\nxOwOnAQcDWwK3ANMkLRqo30xMzOz/qPbk7nzmkl7ABMi4mtL2oGI2LFU/96k+U8jgVslDQb2Ab4Y\nETfnmK8A0yV9LCKmANsDHwI+ERGzgXslHQX8RNIxEbEA2B94JCIOy03NkPRxoB34Yy5rB86KiAtz\nO/uRkqJ9gBMb7IuZmZn1E90eUcpJxy+Ad/V8dwBYmTTn6YX8eSQpobux0IcZwBPAqFy0JXBvTpJq\nJgBDgA8XYiaW2ppQq0PSsrmtYjuR96m1s1kDfTEzM7MlEL3otrBm5yhNIV2a6lGSRLq0dWtE3J+L\nVwPmR8TcUvisvK0WM6vOdhqIGSxpELAqMLAiplbHsAb6YmZmZv1Es+sonQmcJGlNYCowr7gxIv66\nBPVuCHy8gVjR2FIEncWowZiu2ukypr29nSFDhixW1tbWRltbWxdVm5mZ9X8dHR10dHQAMCVPZGlv\n
n9PCHiXNJkoX5/dTC2XBWwnDwO5WKOl0YEdg64j4R2HTTGA5SYNLIzlDeWv0Zyaw2N1ppNGf2rba\n+7BSzFBgbkTMlzQbWFgRU2ynq77UNX78eEaMGNFZiJmZ2VKrOHjwmc/AokXwgx9MY+TIkS3tV7OX\n3tap81q38N4tOUn6DGky9hOlzVOBBcCYQvxw0vPlbs9Fk4GPlu5OGwvMAaYXYsawuLG5nIh4I7dV\nbEf5c62dzvri9aPMzMz6maZGlCLi8Z7qgKQzgTZgF2CepNqIzpyIeC0i5uYH8I6T9CLwEmkk67aI\nuDPH3gDcD1wk6XBgdeA44PScAEGagH6QpBOA80jJzm6kUayaccAFkqaS5mG1A8sD5+fj7qwvvuPN\nzMysn2kqUZL0351tr91e36D9SJfr/lQq/wpQq6eddFnsMmAQcD1wYKG9RZJ2Bn5OGv2ZR0puji7E\nPCZpJ1Iy9E3gKWDfiJhYiLkkj0odS7oE9xdg+4h4rtCvTvtiZmZm/Uezc5ROKX1eljTyMh94hbcS\nnC5FRJeX/yLideDg/KqKeRLYuYt6biYtAdBZzJmkSeVN98XMzMz6h2Yvva1SLpP0QdKIzk+XtFNm\nZmZmvUGzk7nfJiL+DnyXt482mZmZmfVJPZYoZQtIzz4zMzMz6/Oancy9S7mIdKfZQcBtS9opMzMz\ns96g2cncvy99DuA54Cbg20vUIzMzM1vqSV3HvBOanczd05fszMzMzHodJzxmZmZmFZpKlCRdJum7\ndcq/I+nSJe+WmZmZWes1O6K0DXBNnfLrgdHNd8fMzMys92g2UVqRtAp32RvA4Oa7Y2ZmZtZ7NJso\n3QvsXqf8i6SH05qZmZn1ec0uD3Ac8L+SPkBaEgBgDNAGfL4nOmZmZmbWas0uD3CVpM8CRwC7Aa8C\nfwU+mR88a2ZmZtbnNTuiRERcQ/0J3WZmZmb9QrPLA2wuaYs65VtI2mzJu2VmZmbWes1O5j4DWKtO\n+XvzNjMzM7M+r9lEaUNgWp3yu/M2MzMzsz6v2UTpdWBYnfLVgQXNd8fMzMyWdhGt7sFbmk2UbgB+\nLGlIrUDSysDxwB97omNmZmZmrdbsXW+HArcAj0u6O5dtAswCvtwTHTMzMzNrtWbXUXpa0kbAnsDG\npHWUfgV0RMQbPdg/MzMzs5ZZknWU5gFn92BfzMzMzHqVphIlSZ8nPa5kOBDA34HfRsRlPdg3MzMz\ns5bq1mRuSQMk/Q74HWkZgIeAR4APA5dIuliSer6bZmZmZu+87o4oHQJ8EtglIq4ubpC0C2me0iHA\nyT3TPTMzM7PW6e7yAF8BvlNOkgAi4krgMGCfnuiYmZmZWat1N1H6IDCxk+0Tc4yZmZlZn9fdROlV\nYOVOtg8GXutuJyRtLelKSU9LWpQv4xW3/yqXF1/XlmJWkfQbSXMkvSjpHEkrlGI2knSLpFclPS7p\nO3X68nlJ03PMPZJ2qBNzrKR/SHpF0h8lrdfdYzYzM7NqvWXGc3cTpcnA/p1sPzDHdNcKwF/y/lUL\nl19HemzKavnVVtr+W2ADYAywEzAaOKu2UdJKwATgUWAE8B3gGElfLcSMyvX8krSA5u+B30vasBBz\nOHAQ8A3gY8A8YIKk5Zo4bjMzM+vFujuZ+0fAnyS9B/gZ8AAgUoLybeAzwCe624mIuB64HqCTu+Ze\nj4jn6m2Q9CFge2BkRNydyw4GrpF0aETMBL4ELAvsGxELgOmSNgW+BZyTqzoEuC4ixuXPR0saS0qM\nDijEHBcRV+V2/pu0IvlngUu6e+xmZmbWe3VrRCkibgd2JyVDk4EXgReA23JZW0Tc1tOdzLaVNEvS\nA5LOlPRvhW2jgBdrSVI2kTQ6tUX+vCVwS06SaiYA6xeeWTeKt8/BmpDLkbQuaTTrxtrGiJgL/LkW\nY2ZmZv1HtxecjIgrJE0AxpIWnAR4ELghIl7pyc4VXAdcTrps9g
Hgx8C1kkZFRJCSl2dL/Vwo6YW8\njfz+SKneWYVtc/L7rDoxtTqGkZKvzmLMzMysn2j2WW+vSPok8P8i4oUe7lO99oqXtP4m6V7gYWBb\n4P862VVUz3mqbW8kprPtjcaYmZlZH9OtREnSmhHxVP64B3Ai8EJOXHaMiCd7uoP1RMSjkmYD65ES\npZnA0FJfBwKr5G3k92Glqoay+AhRVUxxu3LMrFLM3XSivb2dIUOGLFbW1tZGW1t5TrqZmdnSp6Oj\ng46ODgCmTEll7e1zWtijpLsjSg9Iep40J+ldwFrAE8DapInS7whJawLvAZ7JRZOBlSVtWpinNIaU\n1EwpxPxQ0sCIWJjLxgIzImJOIWYMcGqhue1yeS1Bm5lj/pr7Mpg0D+qMzvo8fvx4RowY0czhmpmZ\n9XvFwYNddknLAxx99DRGjhzZ0n51d3mAIcDngal532slPQgMAraX1NQ8HUkrSNpY0ia5aN38ea28\n7URJW0h6v6QxpNv2HyRNtCYiHsh//qWkzSVtBZwGdOQ73iDd9j8fOE/ShpJ2B74JnFToyinADpK+\nJWl9SccAI4HTCzEnA0dK+rSkjwIXAk8Bf2jm2M3MzKz36m6itGxETImIk0iLT25KeqzJQtKjSx6W\nNKOJfmxGunQ1lXQp7CRgGvCDXPdGpERkBmmNozuB0RHxRqGOPUjLFUwErgZuIa11BLx5d9r2pNGv\nu4CfAsdExLmFmMmk9Zm+TlrX6b+Az0TE/YWYE0lJ2Fmku93eDewQEfObOG4zMzPrxbp76W2upLtJ\nl96WA5aPiNskLSAtG/AUaRHGbomIm+k8aftUA3X8k7RWUmcx9wLbdBFzOekOu85ijgGO6apPZmZm\n1rd1d0RpDeCHwOukJOsuSZNISdMIICLi1p7topmZmVlrdHfBydkRcVVEfA94BdicdBkqSCt1z5V0\nc89308zMzOyd190RpbI5eY2jN4D/BNYBzlziXpmZmZn1Ak0tOJltBDyd//w48Ea+w+x3S9wrMzMz\nW2pFpOUBeoOmE6Xi4pIR8ZGe6Y6ZmZlZ77Gkl97MzMzM+i0nSmZmZmYVnCiZmZmZVXCiZGZmZlbB\niZKZmZlZBSdKZmZmZhWcKJmZmVmv01vWUXKiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJk\nZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCi\nZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlV6BWJkqStJV0p6WlJiyTtUifmWEn/kPSKpD9KWq+0\nfRVJv5E0R9KLks6RtEIpZiNJt0h6VdLjkr5Tp53PS5qeY+6RtEN3+2JmZmbNi2h1D97SKxIlYAXg\nL8CBwNtOj6TDgYOAbwAfA+YBEyQtVwj7LbABMAbYCRgNnFWoYyVgAvAoMAL4DnCMpK8WYkblen4J\nbAL8Hvi9pA272RczMzPrB5ZpdQcAIuJ64HoASaoTcghwXERclWP+G5gFfBa4RNIGwPbAyIi4O8cc\nDFwj6dCImAl8CVgW2DciFgDTJW0KfAs4p9DOdRExLn8+WtJYUmJ0QCN96ZETYmZmZr1CbxlRqiRp\nHWA14MZaWUTMBf4MjMpFWwIv1pKkbCJpdGqLQswtOUmqmQCsL2lI/jwq70cpZlTuy7oN9MXMzMz6\niV6fKJESkyCN2hTNyttqMc8WN0bEQuCFUky9OmggprZ9WAN9MTMzs36iLyRKVUSd+UzdjFGDMUva\njpmZmXVD3Yk4LdAr5ih1YSYpERnG4iM5Q4G7CzFDiztJGgiskrfVYoaV6h7K4iNEVTHF7V31pa72\n9naGDBmyWFlbWxttbW2d7WZmZrZU6OjooKOjA4A774QBA6C9fU6Le9UHEqWIeFTSTNLdbH8FkDSY\nNPfojBw2GVhZ0qaFeUpjSE
nNlELMDyUNzJflAMYCMyJiTiFmDHBqoQvb5fJG+1LX+PHjGTFiRHcP\n38zMbKlQHDzYeWdYdlk46qhpjBw5sqX96hWX3iStIGljSZvkonXz57Xy55OBIyV9WtJHgQuBp4A/\nAETEA6RJ17+UtLmkrYDTgI58xxuk2/7nA+dJ2lDS7sA3gZMKXTkF2EHStyStL+kYYCRweiGm076Y\nmZlZ/9FbRpQ2A/6PdBkseCt5uQDYJyJOlLQ8aV2klYFJwA4RMb9Qxx6khGYisAi4jHQrP5DuTpO0\nfY65C5gNHBMR5xZiJktqA36UX38HPhMR9xdiGumLmZmZ9QO9IlGKiJvpYnQrIo4Bjulk+z9JayV1\nVse9wDZdxFwOXL4kfTEzM7P+oVdcejMzMzPrjZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYV\nnCiZmZmZVXCiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZm\nFZwomZmZWa8S0eoevMWJkpmZmVkFJ0pmZmZmFZwomZmZWa8jtboHiRMlMzMzswpOlMzMzMwqOFEy\nMzMzq+BEyczMzKyCEyUzMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhR\nMjMzM6vQJxIlSUdLWlR63V/YPkjSGZJmS3pJ0mWShpbqWEvSNZLmSZop6URJA0ox20qaKuk1SQ9K\n2qtOXw6U9KikVyXdIWnzf92Rm5mZWSv1iUQpuw8YBqyWXx8vbDsZ2AnYFRgNrAFcXtuYE6JrgWWA\nLYG9gL2BYwsxawNXAzcCGwOnAOdI2q4QsztwEnA0sClwDzBB0qo9eJxmZmbWS/SlRGlBRDwXEc/m\n1wsAkgYD+wDtEXFzRNwNfAXYStLH8r7bAx8C9oyIeyNiAnAUcKCkZXLM/sAjEXFYRMyIiDOAy4D2\nQh/agbMi4sKIeADYD3glt29mZmb9TF9KlD4o6WlJD0v6taS1cvlI0kjRjbXAiJgBPAGMykVbAvdG\nxOxCfROAIcCHCzETS21OqNUhadncVrGdyPuMwszMzPqdvpIo3UG6VLY9aRRnHeAWSSuQLsPNj4i5\npX1m5W3k91l1ttNAzGBJg4BVgYEVMathZmZm/c4yXYe0Xr5UVnOfpCnA48AXgNcqdhMQjVTfyTY1\nGNNIO2ZmZtbH9IlEqSwi5kh6EFiPdOlrOUmDS6NKQ3lr9GcmUL47bVhhW+19WClmKDA3IuZLmg0s\nrIgpjzK9TXt7O0OGDFmsrK2tjba2tq52NTMz6/c6Ojro6OgA4K67YMAAaG+f0+Je9dFESdKKwAeA\nC4CpwAJgDHBF3j4ceB9we95lMnCEpFUL85TGAnOA6YWYHUpNjc3lRMQbkqbmdq7M7Sh/PrWrPo8f\nP54RI0Z0+1jNzMyWBsXBg512gkGD4MgjpzFy5MiW9qtPJEqSfgpcRbrc9l7gB6Tk6OKImCvpXGCc\npBeBl0iJy20RcWeu4gb4/+3de7QdZXnH8e8vYBIIDaFACK2BIJGbIGCCBLmFWwLBaFELLARtwSUU\nLBZX1dLaFW4WwZqKQERRW26CyFpFRCgFCUkVbZYhRgohwRDCJSQkBEJuwMk5T/94300mO3tyPefs\ns+f8PmvtleyZd2ae58ycOc9+553ZPA3cJumrwO7AlcANEdGW29wEfEHSNcCPSAXQp4BxhVAmArfk\ngmka6S647YH/6JLEzczMrKlaolAC3gv8GNgZWAz8ChgVEa/l+ZeQLovdA/QD/gu4qLZwRHRI+ijw\nXVIv00pScTOh0OZ5SaeSiqGLgZeA8yLikUKbu/Mzk64gXYL7PTA2IhZ3Qc5mZmbWZC1RKEXEBgfy\nRMTbwN/mV1mbF4GPbmQ9U0iPANhQm0nApA21MTMzsy0XPegWqVZ5PICZmZn1Eh0daTB3T9BD
wjAz\nMzNLIkDaeLvu4ELJzMzMepQI9yiZmZmZNdTR4R4lMzMzs4Z86c3MzMyshAslMzMzsxK+683MzMys\nhHuUzMzMzEp4MLeZmZlZCT8ewMzMzKyEL72ZmZmZlfClNzMzM7MSvvRmZmZmVsKX3szMzMxK+NKb\nmZmZWQlfejMzMzMr4R4lMzMzsxIeo2RmZmZWwpfezMzMzEr40puZmZlZCV96MzMzMyvhS29mZmZm\nJdrbXSiZmZmZNbR6NWy3XbOjSFwomZmZWY+yYgUMGNDsKBIXSmZmZtajrFwJO+zQ7CgSF0pmZmbW\nY7S3w1tvuUfJzMzMbD0rV6Z/XSi1OEkXSZonabWk30o6rNkxNdudd97Z7BC6RW/JE3pPrs6zWpxn\na1uwIP27++7NjaPGhdIWkHQG8C1gAnAoMBN4SNIuTQ2syar6S1uvt+QJvSdX51ktzrO1zZuX/t1z\nz+bGUeNCactcAnwvIm6NiGeAC4BVwLnNDcvMzKy13Xtv6k3aY49mR5K4UNpMkt4DjAB+WZsWEQE8\nAhzRrLjMzMxa2dKlcOGF8P3vw8UX95wHTm7b7ABa0C7ANsCiuumLgH3LFpo1Kz2SfXNtyTLNWv6N\nN2DatOZtv7OXL1t26VJ4/PGu3XZPWf6112Dq1O7ffnfnvmQJTJ7cvO131/KLF8PDD3ft9ntC7q++\nCg8+2Lztd9fyixbB/fc3Z9tly3d0pLvW1qxJr7a2tf9fswbefhtWrUqvFSvSOebVV2H+fJg7F/r1\ng5tugs9/futi6UwulDqPgEaHWX+As8+e1b3RNMUyDj/8iWYH0Q2WceSRvSFPgGUce2xvyHUZxx/f\nO/IcM6Z35DluXO/Ic/z41shzm21SD1HfvtC/f3rqdv/+MGhQeo0aBeeck/7ddVeYMSMtN2vWu387\n+zcrdhdKm28J0A7sVjd9MOv3MgEMS/+c3ZUx9SAjmh1AN+kteULvydV5Vovz7Ena29OrrW3t7f+b\naRiwCX35nc+F0maKiDZJ04ETgPsAJCm//06DRR4CPg08D7zVTWGamZlVQX9SkfRQswJQbO1FyV5I\n0unALcD5wDTSXXCfAvaLiMXNjM3MzMw6j3uUtkBE3J2fmXQF6RLc74GxLpLMzMyqxT1KZmZmZiV6\nyCSLcAIAAAsFSURBVFMKzMzMzHoeF0pdrCd/J5ykoyXdJ+llSR2SPtagzRWSFkhaJelhScPr5u8k\n6Q5JyyS9LukHkgbUtfmgpKn5ZzBf0pcbbOcvJc3KbWZKOqWTcrxU0jRJb0paJOk/Je1T16afpBsl\nLZG0XNI9kgbXtRkq6ReSVkpaKOlaSX3q2oyWNF3SW5LmSPpsg3i67HiQdEH+2S3Lr8clnVy1POu2\nc2k+didWLU9JE3JuxdfTVcszr//PJN2Wc1mVj+MP1bVp9XPRvAb7s0PS9Xl+JfanpD6SrpT0XN5X\nf5T0tQbtWmd/RoRfXfQCziDd6fYZYD/ge8BSYJdmx5bjO5k0zuovSI88+Fjd/K/meMcDBwL3AnOB\nvoU2DwJPACOBjwBzgNsL8/8EeIU0+H1/4HRgJfC5QpsjgDbgS6SHdl4OvA0c0Ak5PgCck7d9EHA/\n6Q7E7QptvpunHUv67r7Hgf8pzO8DPEm66+IgYCzwKnBVoc0wYAVwbc7hopzTSd11PACn5n06PL+u\nyj/H/auUZ2E7hwHPATOAiRXcnxOAPwC7kh4/Mhj40wrmOQiYB/yAdK/7nsCJwF4VOxftXNiPg0l3\nSrcDR1dsf/5jjutkYA/gE8CbwBdadX922knLr4YHzG+B6wrvBbwEfKXZsTWItYP1C6UFwCWF9wOB\n1cDp+f3+eblDC23GAmuAIfn935CePbVtoc3VwNOF93cB99Vt+zfApC7Ic5cc81GFnN4GTiu02Te3\n+XB+f0r+Zdul0OZ84PVaXsA1wB/qtnUn8EAzjwfgNeCv
q5YnsAMwGzgemEwulKqUJ6lQeqJkXpXy\n/AYwZSNtqngu+jYwp4L78+fAzXXT7gFubdX96UtvXUQt/p1wkvYChrBu/G8C/8va+EcBr0fEjMKi\nj5CeUH54oc3UiFhTaPMQsK+kHfP7I/Jy1LXpip/ToBzf0vx+BOnuz2Kes4EXWDfPJyNiSV18OwIf\nKLQpzaG7j4fc/X0msD3pxFC1PG8Efh4Rj9ZNH0m18ny/0qXxuZJulzQ0T6/S/hwP/E7S3UqXx5+Q\n9LnazCqei/LP9dPAD/OkKh23jwMnSHp/3ubBwJGk3v2W3J8ulLrOhr4Tbkj3h7PZhpAOyg3FP4TU\nxfquiGgnFSHFNo3WwSa06dSfkySRPsX9KiJqYz2GAO/kX9Sy7W9NDgMl9aObjgdJB0paTvp0Oon0\nCfUZKpRnLgAPAS5tMHs3KpIn6ZP/X5E+SV8A7AVMzeM0KrM/gfeRegdmA2OAm4DvSKp9nUHlzkXA\naaQC55b8vkrH7TeAnwDPSHoHmA58OyLuKsTYUvvTz1HqfmXfCdcqNiX+jbXRJrbp7J/TJOAA4KhN\naLup299YDpvSpjPzfAY4mNRz9kngVknHdML2e0Sekt5LKnZPioi2zVl0E7ffI/IEiIjik4j/T9I0\nYD5pLEbZU/5bLk/SB/ZpEfHP+f1MSR8gFU+3b2UMPfVcdC7wYEQs3Ei7VtyfZwBnAWcCT5M+1Fwn\naUFE3LaVMTRlf7pHqets7nfC9TQLSQfUhuJfmN+/S9I2wE55Xq1No3UUP1GUtem0n5OkG4BxwOiI\nWFCYtRDoK2ngBrbfKL7dCvPK2gwG3oyId+im4yEi1kTEcxHxRET8EzAT+CLVyXMEaXDzdEltktpI\ng1+/mD+9LgL6VSDP9UTEMtKA1uFUZ39CGpBb/63hs0gDgWsxVulctAdpsPrNhclV2p/XAldHxE8j\n4qmIuAP4N9b2ALfc/nSh1EXyp93ad8IB63wnXFO+2G9zRMQ80kFWjH8g6fpwLf7fAIMkHVpY9ATS\nL8G0Qptj8kFeMwaYnU/8tTYnsK6T8vStloukjwPHRcQLdbOnkwYIFvPch3SSLuZ5kNLT2Is5LGPt\nCb5RDmNqOTTxeOgD9KM6eT5CuuPnEFLP2cHA70g9D7X/t9H6ea5H0g7A3qSBsFXZnwC/Jg1cLtqX\n1HtWqXNRdi7pD/UDhWlV2p/bs36PTQe53mjJ/dkZo9z9Kh39fzppJH/xNszXgF2bHVuObwDpj8sh\n+UD+u/x+aJ7/lRzveNIfp3uBZ1n3Fs4HSH+cDiMN2JsN3FaYP5B0Yr+FdNnrDNLtq+cV2hwBvMPa\nWzgvI11a6IxbcieR7go5mvTJovbqX9dmHjCa1GPxa9a/LXcm6XbVD5LGjCwCriy0GZbzuibncGHO\n6cTuOh6Ar5MuK+5JuuX2atLJ9/gq5dkg73fveqtSnsA3gWPy/vwI8HCOc+eK5TmSNKbuUlIheBaw\nHDiz0Kblz0V5/SI9AuDrDeZVZX/+O2kQ+rh87J5GGm/0L626Pzv9pOXXegfNhfkXYzWpih3Z7JgK\nsR1LKpDa614/KrS5LB+Mq0h3CwyvW8cg0qf5ZaSC5GZg+7o2BwFT8jpeAP6+QSyfJI2vWU16dszY\nTsqxUX7twGcKbfoB15O6pZcDPwUG161nKOkZTCvyyekaoE+Dn+f0nMOzwDndeTyQnkPzXF73QuC/\nyUVSlfJssK1HWbdQqkSepNu6X8rrfgH4Mes+W6gSeeb1jyP93q8CngLObdDmMlr4XJTXfRLp/DO8\nwbxK7E/SB/CJpKJvZY7hcgq38bfa/vR3vZmZmZmV8BglMzMzsxIulMzMzMxKuFAyMzMzK+FCyczM\nzKyECyUzMzOzEi6UzMzMzEq4UDIzMzMr4ULJzMzMrIQLJTMzM7MSLpTMzDaDpMmSJjY7DjPrHi6U\nzKxlSDpf0puS+hSm
DZDUJumXdW2Pk9QhaVh3x2lm1eFCycxayWTSl26OLEw7GngFGCWpb2H6scD8\niHh+czciadutCdLMqsOFkpm1jIiYQyqKRhcmjwbuJX1b+ai66ZMBJA2V9DNJyyUtk/QTSYNrDSVN\nkDRD0nmSngPeytO3l3RrXu5lSV+qj0nShZLmSFotaaGkuzs3azNrJhdKZtZqHgOOK7w/Lk+bUpsu\nqR9wOPBobvMzYBCp9+lEYG/grrr1Dgc+AZwGHJKn/WteZjwwhlR8jagtIGkkcB3wNWAfYCwwdSvz\nM7MexN3LZtZqHgMm5nFKA0hFzVSgL3A+cDlwZH7/mKSTgAOBYRGxAEDSOcBTkkZExPS83vcA50TE\n0txmAHAucFZEPJanfRZ4qRDLUGAF8IuIWAm8CMzsorzNrAnco2RmraY2Tukw4ChgTkQsIfUoHZ7H\nKY0G5kbES8B+wIu1IgkgImYBbwD7F9Y7v1YkZXuTiqdpheVeB2YX2jwMzAfm5Ut0Z0nartMyNbOm\nc6FkZi0lIuYCL5Musx1HKpCIiFdIPTpHUhifBAiIBquqn76ywXxKlq3FsgL4EHAmsIDUmzVT0sBN\nTsjMejQXSmbWiiaTiqTRpEtxNVOBU4APs7ZQehrYQ9Kf1xpJOgDYMc8r80dgDYUB4pJ2Io1FeldE\ndETEoxHxD8DBwDDg+C3Iycx6II9RMrNWNBm4kXQOm1KYPhW4gXTJ7DGAiHhE0pPAHZIuyfNuBCZH\nxIyyDUTESkk/BL4paSmwGLgKaK+1kXQq8L683deBU0k9UbPXX6OZtSIXSmbWiiYD/YFZEbG4MH0K\nsAPwTEQsLEz/OHB9nt8BPAhcvAnb+TJpPNR9wHLgW0DxstobpDvlJuR4ngXOzGOgzKwCFFF6+d3M\nzMysV/MYJTMzM7MSLpTMzMzMSrhQMjMzMyvhQsnMzMyshAslMzMzsxIulMzMzMxKuFAyMzMzK+FC\nyczMzKyECyUzMzOzEi6UzMzMzEq4UDIzMzMr4ULJzMzMrMT/A3OQOlsNsogaAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -326,7 +328,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 257, "metadata": { "collapsed": true }, @@ -344,7 +346,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 258, "metadata": { "collapsed": true }, @@ -358,7 +360,7 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 259, "metadata": { "collapsed": false }, @@ -367,9 +369,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 376\n", - "Number of unique tokens: 1524\n", - "Number of documents: 185\n" + "Number of authors: 3467\n", + "Number of unique tokens: 8640\n", + "Number of documents: 1740\n" ] } ], @@ -379,6 +381,70 @@ "print('Number of documents: %d' % len(corpus))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Disjoint set stuff" + ] + }, + { + "cell_type": 
"code", + "execution_count": 260, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def find_disjoint_sets(d):\n", + " while True:\n", + " for tuple_, set1 in d.items():\n", + " try:\n", + " match = next(k for k, set2 in d.items() if k != tuple_ and set1 & set2)\n", + " except StopIteration:\n", + " # no match for this key - keep looking\n", + " continue\n", + " else:\n", + " #print('merging', tuple(set1), match)\n", + " d[tuple_] = set1 | d.pop(match)\n", + " break\n", + " else:\n", + " # no match for any key - we are done!\n", + " break\n", + "\n", + " output = sorted(tuple(s) for s in d.values())\n", + " \n", + " return output" + ] + }, + { + "cell_type": "code", + "execution_count": 261, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0,), (1, 27), (2,), (3,), (4,), (5,), (7,), (8,), (9,), (10,), (11, 44, 77, 54), (12,), (15,), (16,), (17,), (18,), (20,), (21,), (22,), (23,), (24,), (25,), (26,), (28,), (29,), (30,), (31,), (32,), (33,), (34, 13), (35, 19), (36,), (37,), (38,), (39,), (40,), (41,), (42,), (45,), (46,), (47,), (48,), (49,), (50,), (51,), (52,), (53,), (55,), (56,), (57,), (58,), (59,), (60,), (61,), (62,), (63,), (64,), (65,), (66,), (67,), (68,), (69,), (70,), (71,), (72,), (73,), (74,), (75, 43), (76,), (78,), (79,), (80,), (81,), (82, 14, 6), (83,), (84,), (85,), (86,), (87,), (88,), (89,), (90,), (92,), (94,), (95,), (96,), (97,), (98,), (100,), (102,), (103,), (104,), (105,), (106,), (107, 166), (108,), (109,), (110,), (111,), (112,), (113,), (114,), (115,), (116,), (117,), (118,), (119,), (120, 93), (121, 148), (122,), (123,), (124,), (125,), (126,), (127,), (128, 162, 99, 163, 101, 150), (129,), (130,), (131,), (133,), (134,), (135,), (136,), (137,), (138,), (139, 181), (140,), (141,), (142,), (143,), (144, 132), (145,), (146,), (147,), (149,), (151,), (152,), (153,), (154,), (155,), (156,), (157,), (158,), (159,), (160,), (161,), 
(164,), (165,), (167,), (168,), (169,), (170, 91), (171,), (172,), (173,), (174,), (175,), (176,), (177,), (178,), (179,), (180,), (182,), (183,), (184,), (185, 231), (186, 207), (187,), (189,), (190, 271), (191,), (192, 236, 268), (193,), (194,), (196,), (197,), (200,), (201, 210), (202, 237), (203,), (204,), (205,), (206,), (208,), (209,), (211, 252), (213,), (214,), (215,), (216,), (217, 281), (218, 228), (219,), (220,), (221,), (224,), (225,), (226, 242), (227,), (229,), (230,), (232,), (233,), (234, 198), (235, 212, 276, 270), (238,), (240,), (241,), (243,), (245,), (246,), (247,), (248, 284, 222, 239), (249,), (250,), (251, 188, 244), (253,), (254,), (255,), (256,), (257, 223), (258, 195), (259,), (260,), (261,), (262,), (263,), (264,), (265,), (266,), (267,), (269,), (272,), (273,), (274,), (275, 199), (277,), (278,), (279,), (280,), (282,), (283,), (285,), (286,), (287,), (288,), (289,), (290,), (291,), (292,), (294,), (296,), (297,), (298, 325), (299,), (300,), (301,), (302,), (303,), (304,), (305,), (306, 359), (307,), (310,), (311,), (312,), (313,), (314,), (315,), (316,), (317,), (318,), (319,), (320,), (321,), (322,), (323,), (328,), (329, 383), (330,), (331,), (332,), (333,), (335,), (336, 295), (337,), (338,), (340,), (341,), (345,), (346,), (347,), (348,), (349,), (350, 326), (351,), (352, 358, 327, 393, 367, 342, 406, 344), (353,), (354, 339), (357,), (360, 293), (361,), (362,), (363, 374), (364,), (365,), (366,), (368,), (369,), (370,), (371,), (372,), (373,), (375,), (376,), (378,), (379,), (380,), (381,), (382, 334), (384,), (385,), (386,), (387, 356, 414), (388,), (389, 343), (390,), (391,), (392,), (394,), (395,), (396,), (398,), (399,), (400,), (401,), (402,), (403,), (404,), (405,), (407,), (408, 308), (409,), (410, 355, 324), (411,), (412,), (413,), (415,), (416,), (417, 309), (418,), (419,), (420,), (421,), (422,), (423,), (424,), (425, 377, 397), (426,), (427,), (428,), (429,), (430,), (431, 515, 549, 535), (432, 489, 508, 510), (433,), 
(434,), (437,), (439,), (440, 435, 556), (441,), (442, 501), (443,), (448,), (449,), (450, 459), (451,), (452,), (453,), (454,), (455,), (456,), (457, 469, 566, 471), (458,), (460,), (462,), (463,), (464,), (465,), (466,), (467,), (468, 487), (470,), (472,), (473, 546, 509), (474,), (475,), (476,), (477,), (479,), (480,), (481, 524), (482,), (484,), (486,), (488,), (490,), (491, 461), (492,), (494,), (495,), (496,), (497,), (498, 478), (499,), (500,), (502,), (503,), (504,), (505, 444), (506,), (507,), (511,), (512,), (513,), (514, 541, 551), (516,), (517,), (518,), (520,), (521,), (522,), (523,), (525,), (526,), (527,), (528,), (529,), (530,), (531,), (532, 485, 565), (533,), (534,), (536, 562, 447), (537,), (538,), (539,), (540,), (542,), (543, 438, 446), (544,), (545,), (547,), (548,), (550,), (552,), (553,), (554, 436), (555,), (557,), (558,), (559,), (560,), (561,), (563, 445), (564,), (567,), (568,), (569, 493), (570,), (571, 483), (572, 519), (573,), (574,), (575,), (576, 674, 642, 666, 621), (577,), (578,), (579,), (581,), (582,), (583,), (584,), (585,), (586,), (587,), (588,), (590,), (592, 606), (593,), (594,), (595,), (597,), (599, 661, 639), (600,), (601,), (602,), (603,), (604,), (605,), (608, 641), (609,), (610, 580, 596), (611,), (614,), (615,), (618,), (619,), (620,), (622,), (624,), (625,), (626,), (627,), (628,), (629,), (630,), (632,), (633, 685), (634,), (636,), (637,), (638,), (640,), (643,), (644,), (645, 678), (646,), (647,), (648,), (649,), (650,), (651,), (652, 684), (653, 631), (654,), (656,), (657, 598), (658,), (659,), (660,), (663,), (665, 662), (667,), (668,), (669,), (670,), (671,), (672, 681, 617, 635, 607), (673,), (675, 613), (676,), (677,), (679,), (680,), (682,), (683,), (686,), (687, 655), (688,), (689, 612), (690,), (691,), (692,), (693,), (694,), (695, 591), (696, 664, 616), (697,), (698, 623), (699, 589), (700,), (704,), (705,), (706,), (707, 799), (708,), (709,), (710,), (711,), (713,), (714,), (715,), (717,), (720, 793, 
701), (721,), (722,), (723,), (724,), (725,), (728,), (729, 788), (731,), (732,), (733,), (735, 822, 791), (736,), (737,), (738, 730, 758), (739,), (740,), (742,), (743,), (744, 786), (745,), (746,), (747, 798, 775), (748,), (749,), (750,), (751,), (752,), (753,), (754,), (755,), (756,), (757, 741), (759,), (760,), (761,), (762,), (763, 726), (764,), (765,), (766,), (768,), (769, 702), (772,), (773,), (774,), (776, 771, 767), (777, 831), (778,), (779,), (780,), (781,), (782,), (783,), (784,), (785,), (787, 805, 727, 734), (789,), (792,), (794,), (795, 716), (797, 790), (800,), (801, 712, 827, 718, 719), (802,), (803, 796), (807,), (808, 804, 703), (809,), (810,), (811,), (812, 806), (813,), (815,), (816,), (817,), (818, 814), (819,), (820,), (821,), (823,), (824, 825, 826), (829,), (830,), (832,), (833,), (834, 828), (835,), (836,), (837,), (838,), (839,), (840, 770), (841,), (842,), (843,), (845,), (846,), (847,), (848,), (850, 851), (853,), (855,), (856,), (857,), (859, 927), (861, 894, 869), (863,), (864,), (865,), (866,), (867,), (871,), (872,), (873, 858, 970, 973), (874, 958), (875,), (876,), (877, 862), (880,), (882,), (884, 967), (885,), (886,), (887,), (888,), (889,), (892,), (893,), (895,), (896,), (897, 849, 860), (898, 868), (900,), (902,), (904, 971), (905,), (906,), (907, 911), (908,), (909,), (910,), (912,), (913,), (915,), (916,), (917,), (918,), (919,), (920,), (921, 891), (922, 943), (923,), (924,), (925,), (926,), (929,), (930,), (931, 901, 903, 878, 942, 881, 946, 852, 980, 854, 890), (932,), (934,), (935,), (936,), (937,), (938,), (939,), (940,), (941,), (944,), (945, 914), (947, 870), (948,), (949,), (950,), (951,), (952,), (953,), (954,), (955,), (956,), (957,), (959,), (960, 928, 899, 883, 983, 974, 975), (961,), (962,), (963,), (964,), (965,), (966,), (968, 844, 933), (969,), (972,), (976, 977), (978,), (979,), (981, 879), (982,), (985, 1068), (986,), (987, 1111), (988,), (991,), (992, 1122), (993,), (994, 1007), (995,), (996,), (997, 
1078), (999,), (1000,), (1001,), (1002,), (1003,), (1004,), (1006,), (1008, 1053), (1009,), (1010,), (1011, 1037), (1012,), (1014,), (1015,), (1016,), (1017,), (1018,), (1019,), (1022,), (1023,), (1024,), (1025,), (1027,), (1028,), (1029,), (1030, 1102, 1103, 1074, 1079, 984, 1020, 1055), (1031,), (1032, 1107, 1115), (1033,), (1034,), (1035,), (1036,), (1039,), (1040, 1089, 998, 1130), (1041,), (1042,), (1043,), (1044,), (1045,), (1046,), (1048,), (1049,), (1050, 1047), (1051,), (1054,), (1056,), (1057, 1021), (1058, 1099), (1059,), (1060,), (1061,), (1062,), (1064,), (1065, 1052), (1066, 1077, 1085), (1067,), (1069,), (1071,), (1072, 1026, 1063, 1005, 1070), (1073, 1125), (1075,), (1076,), (1080,), (1081, 1114), (1082,), (1083,), (1084,), (1086,), (1088,), (1090, 990), (1091,), (1092,), (1094,), (1095,), (1096,), (1097,), (1098, 1013, 1119), (1100,), (1101,), (1104,), (1105,), (1106,), (1108,), (1109,), (1110,), (1112,), (1113,), (1116, 1093, 989, 1038), (1117, 1087), (1118,), (1120,), (1121,), (1123,), (1124,), (1126,), (1127,), (1128,), (1129,), (1131,), (1132,), (1133,), (1134,), (1135,), (1136, 1186), (1137,), (1138, 1181), (1139,), (1140,), (1141,), (1142,), (1145,), (1146,), (1147,), (1148,), (1150,), (1151,), (1152,), (1155,), (1156,), (1157,), (1158,), (1160,), (1161,), (1162,), (1163,), (1166,), (1167,), (1169, 1211, 1279), (1170, 1172, 1189, 1159), (1171,), (1173,), (1175,), (1176,), (1177,), (1178,), (1179,), (1180,), (1182,), (1183, 1215), (1184, 1257), (1185,), (1187, 1285), (1188,), (1192,), (1193, 1164, 1230), (1194,), (1195,), (1196, 1254), (1197,), (1198,), (1200,), (1201,), (1202,), (1203, 1174, 1262), (1204,), (1205,), (1208,), (1209,), (1210, 1244, 1206), (1213,), (1216,), (1217,), (1219, 1259, 1227, 1144, 1212, 1214), (1220,), (1221,), (1222,), (1223,), (1225,), (1226,), (1228,), (1229,), (1231,), (1232,), (1233,), (1234,), (1235,), (1236, 1190, 1207), (1238,), (1239,), (1240,), (1241,), (1243,), (1245,), (1246,), (1247,), (1248,), (1250,), 
(1251, 1199), (1253, 1278), (1256,), (1258,), (1260,), (1261,), (1263,), (1264,), (1265,), (1266,), (1267,), (1268, 1143), (1269,), (1270,), (1271,), (1272, 1149), (1273,), (1274,), (1275,), (1277,), (1280, 1153, 1218, 1282, 1284, 1154, 1224, 1165, 1168, 1237, 1242, 1249, 1252, 1255, 1191, 1276), (1281,), (1283,), (1286,), (1287,), (1288,), (1290,), (1291,), (1292,), (1293,), (1294,), (1296,), (1297,), (1300,), (1301,), (1302,), (1303,), (1304, 1410, 1346), (1305, 1406), (1306,), (1307,), (1308,), (1309, 1327), (1311,), (1312,), (1313,), (1314, 1379, 1357, 1298, 1332, 1430, 1432, 1373, 1343), (1316,), (1318,), (1319,), (1320,), (1321,), (1322,), (1323,), (1324,), (1326,), (1328, 1333), (1329, 1364), (1330, 1334), (1331,), (1335,), (1336,), (1337,), (1338,), (1339,), (1341,), (1344,), (1345, 1402), (1347, 1355, 1366), (1348, 1349, 1317, 1325, 1425, 1363, 1428, 1374), (1350,), (1351, 1375), (1352, 1398), (1353,), (1354, 1365), (1356,), (1358,), (1359, 1362, 1407), (1360, 1340), (1361,), (1367,), (1368,), (1369, 1315), (1370,), (1371, 1422), (1372,), (1376,), (1377,), (1378, 1295), (1380,), (1381,), (1382,), (1383,), (1384,), (1385,), (1386, 1390), (1387, 1391), (1388,), (1389, 1405), (1392,), (1393, 1289), (1395,), (1396,), (1397,), (1399,), (1400, 1299), (1401,), (1403,), (1404,), (1408,), (1409, 1438), (1411,), (1412,), (1413,), (1414,), (1415,), (1416,), (1417,), (1418,), (1419,), (1420,), (1421,), (1423,), (1424, 1394), (1426,), (1427, 1310, 1342), (1429,), (1431,), (1433,), (1434,), (1435,), (1436,), (1437,), (1440, 1462, 1464, 1452, 1581, 1518, 1455), (1441, 1476), (1443,), (1444,), (1445,), (1446,), (1448,), (1450,), (1456,), (1457,), (1458, 1451, 1439), (1459, 1540, 1571), (1461,), (1465,), (1466,), (1467, 1516), (1468,), (1469,), (1470,), (1471,), (1472,), (1473,), (1474, 1579, 1565), (1475, 1580), (1477, 1463, 1449, 1482, 1548, 1454, 1519), (1478,), (1480, 1490), (1481,), (1483,), (1484,), (1485,), (1486,), (1487,), (1488, 1542, 1526), (1489,), (1491, 1532, 
1589), (1493,), (1496,), (1497,), (1498,), (1500,), (1501,), (1502,), (1504, 1479), (1505,), (1506,), (1507, 1460, 1453), (1509,), (1510,), (1512,), (1513,), (1514,), (1517,), (1520,), (1521,), (1523,), (1525,), (1528,), (1529,), (1530,), (1531,), (1533,), (1536, 1572, 1508, 1559, 1545, 1535), (1537, 1442, 1515, 1524), (1538, 1499, 1527), (1539,), (1541,), (1543,), (1544,), (1546,), (1547,), (1549,), (1550,), (1551,), (1552, 1568, 1570, 1492, 1574, 1494), (1553,), (1554,), (1555,), (1557,), (1558,), (1560, 1503), (1561, 1495), (1562,), (1563,), (1564,), (1566, 1534), (1567,), (1569,), (1573,), (1575,), (1576,), (1577, 1522, 1511), (1578,), (1582, 1447), (1583,), (1584, 1556), (1585,), (1586,), (1587,), (1588,), (1590, 1695), (1591, 1598, 1711), (1592,), (1593,), (1594, 1653), (1595,), (1596,), (1597,), (1599,), (1600, 1721), (1601,), (1602, 1702), (1603, 1691, 1710), (1605,), (1606,), (1607,), (1608,), (1610,), (1611,), (1612,), (1613,), (1616,), (1617, 1679), (1621,), (1622,), (1623,), (1624, 1642, 1635, 1650), (1625,), (1627,), (1628,), (1629,), (1630,), (1631,), (1632,), (1633,), (1634, 1643, 1618, 1683, 1714, 1654, 1688, 1722, 1725, 1663), (1637,), (1639,), (1640, 1708), (1641,), (1644,), (1646,), (1647,), (1648,), (1649,), (1651,), (1655,), (1656, 1614), (1657,), (1658,), (1659,), (1660,), (1661, 1669), (1664, 1718), (1665, 1693), (1666,), (1667,), (1668, 1604, 1685), (1670,), (1672,), (1673, 1732, 1615), (1674, 1715), (1675,), (1676, 1694, 1678), (1677,), (1680, 1645), (1681,), (1682,), (1684,), (1686,), (1687,), (1689, 1739, 1735), (1690,), (1692,), (1696,), (1697,), (1698,), (1699,), (1701,), (1703,), (1704, 1652), (1705,), (1707,), (1709,), (1712, 1734), (1713, 1619, 1700, 1733, 1620, 1609, 1706), (1716, 1636), (1719, 1717, 1662, 1638), (1720,), (1723,), (1724,), (1726,), (1727,), (1728,), (1729,), (1730, 1626), (1731,), (1736, 1671), (1737,), (1738,)]\n", + "1312\n", + "278.3766186237335\n" + ] + } + ], + "source": [ + "start = time()\n", + "\n", + "thing 
= {a: set(_list) for a, _list in author2doc.items()}\n", + "disjoint_authors = find_disjoint_sets(thing)\n", + "print(disjoint_authors)\n", + "print(len(disjoint_authors))\n", + "\n", + "print(time() - start)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -410,7 +476,7 @@ }, { "cell_type": "code", - "execution_count": 75, + "execution_count": 98, "metadata": { "collapsed": true }, @@ -422,7 +488,7 @@ }, { "cell_type": "code", - "execution_count": 76, + "execution_count": 138, "metadata": { "collapsed": false }, @@ -431,21 +497,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 9min 14s, sys: 52 ms, total: 9min 14s\n", - "Wall time: 9min 14s\n" + "CPU times: user 2min 35s, sys: 0 ns, total: 2min 35s\n", + "Wall time: 2min 35s\n" ] } ], "source": [ - "%time model = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + "%time model_online = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=10, passes=5, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None)" + " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1000, random_state=1, var_lambda=None)" ] }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 139, "metadata": { "collapsed": false }, @@ -454,39 +520,39 @@ "data": { "text/plain": [ "[(0,\n", - " '0.098*object + 0.029*frequency + 0.022*tree + 0.016*structured + 0.015*intrinsic + 0.013*impulse + 0.012*time_step + 0.012*induced + 0.010*bar + 0.009*experiment'),\n", + " '0.041*training + 0.040*hidden + 0.037*representation + 0.024*role + 0.023*gradient + 0.021*hidden_unit + 0.021*procedure + 0.020*back_propagation + 0.020*node + 0.018*connectionist'),\n", " (1,\n", - " '0.047*potential + 0.042*synapsis + 0.037*firing_rate + 0.037*cerebral + 
0.034*hebbian + 0.034*synapse + 0.034*hebb + 0.029*ii + 0.026*dt + 0.024*expression'),\n", + " '0.039*differential + 0.035*search + 0.031*strategy + 0.023*control + 0.020*he + 0.018*goal + 0.017*target + 0.015*start + 0.014*question + 0.014*influence'),\n", " (2,\n", - " '0.041*environment + 0.039*visual + 0.039*reconstruction + 0.035*orientation + 0.029*spatial + 0.028*action + 0.016*image + 0.016*receptive + 0.016*filter + 0.016*receptive_field'),\n", + " '0.062*code + 0.056*node + 0.051*activation + 0.044*adaptive + 0.029*sequence + 0.022*update + 0.022*learned + 0.014*past + 0.013*summation + 0.013*machine'),\n", " (3,\n", - " '0.078*class + 0.078*competitive + 0.026*block + 0.026*competition + 0.024*field + 0.024*winner + 0.021*square + 0.019*operation + 0.019*column + 0.017*ideal'),\n", + " '0.070*cell + 0.043*stimulus + 0.030*synapsis + 0.030*current + 0.029*firing + 0.028*activity + 0.023*synaptic + 0.022*spatial + 0.019*classification + 0.015*channel'),\n", " (4,\n", - " '0.042*propagation + 0.036*machine + 0.034*update + 0.034*back_propagation + 0.031*hidden + 0.027*hidden_unit + 0.025*bp + 0.025*classifier + 0.024*test_set + 0.022*variance'),\n", + " '0.047*processor + 0.037*dynamic + 0.035*interconnection + 0.032*group + 0.029*iv + 0.023*temporal + 0.022*delay + 0.019*learning_rule + 0.018*vol + 0.017*sigmoid'),\n", " (5,\n", - " '0.044*implementation + 0.030*dimension + 0.028*polynomial + 0.019*measure + 0.017*find + 0.017*recurrent + 0.015*forward + 0.015*stanford + 0.013*sum + 0.013*per'),\n", + " '0.058*image + 0.025*convergence + 0.025*energy + 0.024*matrix + 0.018*hopfield + 0.018*minimum + 0.015*recall + 0.015*recognition + 0.015*associative_memory + 0.013*field'),\n", " (6,\n", - " '0.089*processor + 0.051*cm + 0.035*code + 0.022*communication + 0.018*generator + 0.018*asynchronous + 0.014*connected + 0.014*transfer + 0.014*reduction + 0.012*compute'),\n", + " '0.050*capacity + 0.042*bit + 0.025*stored + 0.024*analog + 0.023*bound + 
0.016*definition + 0.015*off + 0.014*binary + 0.014*word + 0.013*correct'),\n", " (7,\n", - " '0.073*node + 0.035*perceptron + 0.035*likelihood + 0.024*robot + 0.022*perceptton + 0.020*accuracy + 0.016*fast + 0.015*testing + 0.015*speech + 0.015*multi'),\n", + " '0.058*cell + 0.040*probability + 0.035*firing + 0.025*cycle + 0.019*phase + 0.019*active + 0.018*specific + 0.017*shape + 0.017*region + 0.015*action'),\n", " (8,\n", - " '0.050*power + 0.030*uniform + 0.021*capacitor + 0.020*transistor + 0.018*curve + 0.018*maximum + 0.014*next + 0.014*formed + 0.011*every + 0.011*implement'),\n", + " '0.057*visual + 0.048*constraint + 0.037*map + 0.034*noise + 0.027*optimization + 0.025*gain + 0.022*mapping + 0.020*field + 0.020*device + 0.019*stage'),\n", " (9,\n", - " '0.137*cell + 0.059*modulation + 0.052*fiber + 0.037*cortex + 0.033*plasticity + 0.033*consequently + 0.033*chemical + 0.030*visual_cortex + 0.026*action_potential + 0.026*neurosci')]" + " '0.074*loop + 0.044*path + 0.044*product + 0.037*circuit + 0.036*edge + 0.025*magnitude + 0.025*eq + 0.021*direction + 0.020*interaction + 0.017*higher')]" ] }, - "execution_count": 54, + "execution_count": 139, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "model.show_topics()" + "model_online.show_topics()" ] }, { "cell_type": "code", - "execution_count": 52, + "execution_count": 122, "metadata": { "collapsed": false }, @@ -498,48 +564,49 @@ "\n", "Yaser S.Abu-Mostafa\n", "Docs: [21]\n", - "[(0, 0.21583496794863521),\n", - " (1, 0.097266339574133789),\n", - " (2, 0.046104075223616918),\n", - " (3, 0.082806188712471071),\n", - " (4, 0.038965292793156206),\n", - " (5, 0.087321487508770779),\n", - " (6, 0.27427875618494268),\n", - " (8, 0.11559328603434335),\n", - " (9, 0.033930683980366742)]\n", + "[(0, 0.43981727821822292),\n", + " (1, 0.028347213089721844),\n", + " (3, 0.096034791617892343),\n", + " (5, 0.11974213992896583),\n", + " (6, 0.04818530676877044),\n", + " (7, 0.052015356949023761),\n", 
+ " (8, 0.19358105735922765),\n", + " (9, 0.012210592598702002)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [276, 235, 270]\n", - "[(0, 0.21478874086777811),\n", - " (2, 0.045025956135116882),\n", - " (3, 0.018058748789869943),\n", - " (4, 0.035433940765978195),\n", - " (5, 0.017238697764781256),\n", - " (6, 0.55389866230226381),\n", - " (7, 0.052574483064497073),\n", - " (8, 0.059424559853111729)]\n", + "[(0, 0.23709584775467316),\n", + " (1, 0.036278840277891584),\n", + " (2, 0.060881928460912567),\n", + " (3, 0.040860020890840953),\n", + " (4, 0.22120874865101597),\n", + " (5, 0.17881352536707981),\n", + " (6, 0.011552682298532534),\n", + " (7, 0.17862704317305195),\n", + " (8, 0.034587306660400441)]\n", "\n", "Michael I. Jordan\n", "Docs: [205]\n", - "[(0, 0.28200285471582809),\n", - " (1, 0.019207440913240986),\n", - " (2, 0.036697731732562668),\n", - " (3, 0.028229183886206974),\n", - " (4, 0.047970907798814945),\n", - " (5, 0.049451568961465901),\n", - " (6, 0.41516092316824699),\n", - " (7, 0.040312982014292426),\n", - " (8, 0.079229887840147561)]\n", + "[(0, 0.2657244791505019),\n", + " (1, 0.042233864299392278),\n", + " (2, 0.056208047768936259),\n", + " (3, 0.020408371858599395),\n", + " (4, 0.075285256015344873),\n", + " (5, 0.44939042793717449),\n", + " (7, 0.064270462578477641),\n", + " (8, 0.014524432524677481),\n", + " (9, 0.011664709291004252)]\n", "\n", "James M. 
Bower\n", "Docs: [188, 251, 244]\n", - "[(0, 0.26698792921714365),\n", - " (1, 0.15878442632165649),\n", - " (2, 0.060474251888253387),\n", - " (3, 0.010249883539547755),\n", - " (6, 0.40223568615538446),\n", - " (8, 0.052211994055734137),\n", - " (9, 0.033610105811001288)]\n" + "[(0, 0.35369888348382267),\n", + " (1, 0.097782509316364244),\n", + " (2, 0.11873783156273017),\n", + " (3, 0.02244484445927224),\n", + " (5, 0.11510615687752906),\n", + " (6, 0.01642214092725941),\n", + " (7, 0.056016631498195003),\n", + " (9, 0.21370723667506888)]\n" ] } ], @@ -547,22 +614,22 @@ "name = 'Yaser S.Abu-Mostafa'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", + "pprint(model_online.get_author_topics(author2id[name]))\n", "\n", "name = 'Geoffrey E. Hinton'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", + "pprint(model_online.get_author_topics(author2id[name]))\n", "\n", "name = 'Michael I. Jordan'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", + "pprint(model_online.get_author_topics(author2id[name]))\n", "\n", "name = 'James M. 
Bower'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))" + "pprint(model_online.get_author_topics(author2id[name]))" ] }, { @@ -767,7 +834,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 356, "metadata": { "collapsed": false }, @@ -776,8 +843,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "phi is 90 x 681 x 10 (612900 elements)\n", - "mu is 90 x 681 x 166 (10174140 elements)\n" + "phi is 286 x 2245 x 10 (6420700 elements)\n", + "mu is 286 x 2245 x 578 (371116460 elements)\n" ] } ], @@ -812,7 +879,7 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": 140, "metadata": { "collapsed": false }, @@ -824,7 +891,7 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 144, "metadata": { "collapsed": false }, @@ -833,21 +900,62 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 7min 52s, sys: 476 ms, total: 7min 52s\n", - "Wall time: 7min 52s\n" + "CPU times: user 1min 19s, sys: 28 ms, total: 1min 19s\n", + "Wall time: 1min 20s\n" ] } ], "source": [ - "%time model = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + "%time model_offline = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=5, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", - " eval_every=1, random_state=1)" + " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", + " eval_every=10, random_state=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 143, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.017*cell + 0.008*activity + 0.008*training + 0.007*matrix + 0.007*field + 0.007*representation + 0.006*hopfield + 0.006*probability + 0.006*stimulus + 
0.006*current'),\n", + " (1,\n", + " '0.015*cell + 0.009*matrix + 0.007*probability + 0.007*hopfield + 0.007*training + 0.006*activity + 0.006*feature + 0.006*stimulus + 0.006*hidden + 0.005*representation'),\n", + " (2,\n", + " '0.015*cell + 0.008*matrix + 0.008*training + 0.007*representation + 0.006*image + 0.006*field + 0.006*dynamic + 0.006*probability + 0.006*activity + 0.006*sequence'),\n", + " (3,\n", + " '0.013*cell + 0.009*training + 0.007*activity + 0.006*hidden + 0.006*node + 0.005*matrix + 0.005*probability + 0.005*hopfield + 0.005*representation + 0.005*sequence'),\n", + " (4,\n", + " '0.016*cell + 0.007*activity + 0.007*training + 0.007*matrix + 0.007*representation + 0.006*hidden + 0.006*feature + 0.006*synaptic + 0.006*rate + 0.005*field'),\n", + " (5,\n", + " '0.015*cell + 0.008*training + 0.006*matrix + 0.006*sequence + 0.006*field + 0.006*bit + 0.006*stimulus + 0.006*hopfield + 0.006*noise + 0.005*firing'),\n", + " (6,\n", + " '0.018*cell + 0.008*representation + 0.008*matrix + 0.006*hopfield + 0.006*hidden + 0.005*rate + 0.005*firing + 0.005*training + 0.005*capacity + 0.005*node'),\n", + " (7,\n", + " '0.014*cell + 0.007*activity + 0.007*matrix + 0.007*field + 0.007*training + 0.007*node + 0.006*hopfield + 0.006*representation + 0.006*rate + 0.005*synaptic'),\n", + " (8,\n", + " '0.013*cell + 0.008*activity + 0.007*hidden + 0.007*training + 0.007*matrix + 0.006*feature + 0.006*capacity + 0.006*hopfield + 0.006*synaptic + 0.005*rate'),\n", + " (9,\n", + " '0.014*cell + 0.009*matrix + 0.008*representation + 0.007*image + 0.007*activity + 0.007*hidden + 0.006*stimulus + 0.006*training + 0.006*hopfield + 0.006*firing')]" + ] + }, + "execution_count": 143, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_offline.show_topics()" ] }, { "cell_type": "code", - "execution_count": 182, + "execution_count": 142, "metadata": { "collapsed": false }, @@ -856,28 +964,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.009*net + 
0.009*layer + 0.008*word + 0.008*hidden + 0.007*recognition + 0.006*speech + 0.006*node + 0.005*architecture + 0.005*signal + 0.005*memory'),\n", + " '0.015*dynamic + 0.014*delay + 0.012*frequency + 0.011*phase + 0.010*noise + 0.008*temporal + 0.007*filter + 0.007*oscillation + 0.007*target + 0.007*controller'),\n", " (1,\n", - " '0.012*classifier + 0.010*class + 0.008*classification + 0.007*rule + 0.006*recognition + 0.005*speech + 0.004*trained + 0.004*node + 0.004*rbf + 0.004*expert'),\n", + " '0.017*memory + 0.017*vector + 0.015*matrix + 0.013*hopfield + 0.011*probability + 0.008*capacity + 0.008*let + 0.008*fig + 0.007*code + 0.007*distribution'),\n", " (2,\n", - " '0.009*neuron + 0.007*bound + 0.006*theorem + 0.006*let + 0.005*threshold + 0.004*matrix + 0.004*proof + 0.004*class + 0.004*solution + 0.004*xi'),\n", + " '0.035*cell + 0.018*response + 0.012*region + 0.012*stimulus + 0.011*cortex + 0.009*fig + 0.009*sensory + 0.009*motor + 0.009*control + 0.009*velocity'),\n", " (3,\n", - " '0.005*gaussian + 0.005*likelihood + 0.004*prior + 0.004*density + 0.004*approximation + 0.004*estimate + 0.004*mixture + 0.004*sample + 0.004*bayesian + 0.004*markov'),\n", + " '0.041*image + 0.038*field + 0.023*visual + 0.016*map + 0.015*receptive + 0.014*receptive_field + 0.014*motion + 0.012*eye + 0.011*direction + 0.008*vision'),\n", " (4,\n", - " '0.009*component + 0.007*kernel + 0.007*matrix + 0.006*distance + 0.005*image + 0.004*signal + 0.004*pca + 0.004*source + 0.004*independent + 0.004*noise'),\n", + " '0.030*hidden + 0.017*hidden_unit + 0.016*activation + 0.012*propagation + 0.010*processor + 0.009*back_propagation + 0.008*gradient + 0.007*hidden_layer + 0.007*bit + 0.006*internal'),\n", " (5,\n", - " '0.013*object + 0.009*field + 0.006*layer + 0.005*recognition + 0.005*view + 0.005*map + 0.005*image + 0.005*net + 0.004*sequence + 0.004*code'),\n", + " '0.018*vector + 0.016*sequence + 0.016*object + 0.014*memory + 0.009*adaptive + 0.009*matrix + 0.008*recurrent + 
0.008*action + 0.008*self + 0.008*view'),\n", " (6,\n", - " '0.016*circuit + 0.013*chip + 0.012*neuron + 0.011*analog + 0.010*voltage + 0.007*signal + 0.006*noise + 0.006*vlsi + 0.005*channel + 0.004*implementation'),\n", + " '0.025*classifier + 0.024*recognition + 0.023*speech + 0.014*classification + 0.013*trained + 0.011*class + 0.010*test + 0.010*noise + 0.010*hidden + 0.009*word'),\n", " (7,\n", - " '0.016*cell + 0.013*neuron + 0.008*control + 0.007*response + 0.006*stimulus + 0.006*spike + 0.006*activity + 0.005*synaptic + 0.005*action + 0.005*firing'),\n", + " '0.033*node + 0.008*position + 0.007*connectionist + 0.005*neural_net + 0.005*tree + 0.005*character + 0.004*move + 0.004*generalization + 0.004*search + 0.004*human'),\n", " (8,\n", - " '0.007*generalization + 0.006*hidden + 0.005*optimal + 0.005*gradient + 0.005*noise + 0.004*solution + 0.004*hidden_unit + 0.003*training_set + 0.003*cost + 0.003*minimum'),\n", + " '0.036*circuit + 0.024*analog + 0.024*chip + 0.020*voltage + 0.020*current + 0.014*synapse + 0.010*transistor + 0.010*vlsi + 0.009*device + 0.009*implementation'),\n", " (9,\n", - " '0.023*image + 0.011*visual + 0.008*motion + 0.007*map + 0.006*eye + 0.006*field + 0.005*object + 0.005*orientation + 0.005*pixel + 0.005*direction')]" + " '0.030*cell + 0.021*firing + 0.019*synaptic + 0.017*activity + 0.016*potential + 0.010*synapsis + 0.010*spike + 0.009*stimulus + 0.009*memory + 0.009*membrane')]" ] }, - "execution_count": 182, + "execution_count": 142, "metadata": {}, "output_type": "execute_result" } @@ -888,7 +996,7 @@ }, { "cell_type": "code", - "execution_count": 183, + "execution_count": 119, "metadata": { "collapsed": false }, @@ -899,43 +1007,41 @@ "text": [ "\n", "Yaser S.Abu-Mostafa\n", - "Docs: [1269]\n", - "[(0, 0.014276128036243068),\n", - " (1, 0.14997442204053549),\n", - " (2, 0.066977058012639326),\n", - " (3, 0.1005138681465144),\n", - " (4, 0.42617224612011045),\n", - " (5, 0.013753926706215542),\n", - " (6, 
0.068383760611387165),\n", - " (8, 0.15630604619968017)]\n", + "Docs: [21]\n", + "[(0, 0.13509546636836239),\n", + " (1, 0.44987514305413251),\n", + " (3, 0.015628876899866424),\n", + " (4, 0.17133899219205551),\n", + " (5, 0.12622125049769653),\n", + " (7, 0.038299020391926251),\n", + " (8, 0.060545623938452663)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [276, 235, 270]\n", - "[(0, 0.19472153164068895),\n", - " (1, 0.10329311184348117),\n", - " (3, 0.025968463276070212),\n", - " (4, 0.017663281758177558),\n", - " (5, 0.54778614222038646),\n", - " (7, 0.013259175006857452),\n", - " (8, 0.050507870947931986),\n", - " (9, 0.034423560720921689)]\n", + "[(0, 0.051128741737399293),\n", + " (2, 0.031947903095827117),\n", + " (3, 0.12717064775550488),\n", + " (4, 0.14970319940657217),\n", + " (5, 0.22650616321963829),\n", + " (6, 0.27680468289365207),\n", + " (7, 0.13302071076542252)]\n", "\n", "Michael I. Jordan\n", "Docs: [205]\n", - "[(0, 0.029225477213953788),\n", - " (1, 0.013712801819294291),\n", - " (2, 0.019353402713854918),\n", - " (3, 0.087509201712253584),\n", - " (5, 0.030992806920687628),\n", - " (7, 0.64444478894908952),\n", - " (8, 0.1681356134963537)]\n", + "[(0, 0.62535055104859583),\n", + " (4, 0.087850011456332838),\n", + " (5, 0.18252069201813775),\n", + " (7, 0.094925833198552231)]\n", "\n", "James M. 
Bower\n", "Docs: [188, 251, 244]\n", - "[(5, 0.045995989778604321),\n", - " (6, 0.037665256830351566),\n", - " (7, 0.79540557768386366),\n", - " (9, 0.086156019081986016)]\n" + "[(0, 0.066667129031022732),\n", + " (1, 0.01033213561317742),\n", + " (2, 0.60401298021861427),\n", + " (3, 0.073436683842966574),\n", + " (4, 0.024716090603344801),\n", + " (8, 0.023197011324340159),\n", + " (9, 0.19741318615356793)]\n" ] } ], @@ -1315,7 +1421,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.5.2" + "version": "3.4.3+" } }, "nbformat": 4, diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 8e5649a4b6..954f468c42 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py @@ -1,7 +1,8 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (C) 2011 Radim Rehurek +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html """ @@ -271,8 +272,8 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No logger.info('Starting inference. Training on %d documents.', len(corpus)) # Whether or not to evaluate bound and log probability, respectively. - bound_eval = False - logprob_eval = True + bound_eval = True + logprob_eval = False if var_lambda is None: self.optimize_lambda = True @@ -360,6 +361,8 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No # Compute phi. # TODO: avoid computing phi if possible. + # NOTE: computation can be made more stable by adding the maximal value + # inside the exponential, which will disappear in the normalization. var_phi[d, v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi. var_phi[d, v, :] = var_phi[d, v, :] / (var_phi[d, v, :].sum() + 1e-100) @@ -471,6 +474,19 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No break # End of update loop (iterations). 
+ # Ensure that the bound (or log probabilities) is computed after the last iteration. + if self.eval_every != 0 and not (iteration + 1) % self.eval_every == 0: + if bound_eval: + prev_bound = deepcopy(bound) + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + return var_gamma, var_lambda def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 477820e642..378242bfa2 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -1,13 +1,16 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (C) 2011 Radim Rehurek +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html """ Author-topic model. """ +# NOTE: from what I understand, my name as well as Radim's should be attributed copyright above? + import pdb from pdb import set_trace as st @@ -145,8 +148,8 @@ def inference(self, corpus=None, var_lambda=None): logger.info('Starting inference. Training on %d documents.', len(corpus)) # Whether or not to evaluate bound and log probability, respectively. - bound_eval = False - logprob_eval = True + bound_eval = True + logprob_eval = False if var_lambda is None: self.optimize_lambda = True @@ -155,7 +158,7 @@ def inference(self, corpus=None, var_lambda=None): self.optimize_lambda = False # Initial values of gamma and lambda. - # NOTE: parameters of gamma distribution same as in `ldamodel`. + # Parameters of gamma distribution same as in `ldamodel`. var_gamma = self.random_state.gamma(100., 1. 
/ 100., (self.num_authors, self.num_topics)) tilde_gamma = var_gamma.copy() @@ -202,7 +205,6 @@ def inference(self, corpus=None, var_lambda=None): # Initialize mu. # mu is 1/|A_d| if a is in A_d, zero otherwise. # TODO: consider doing random initialization instead. - # TODO: consider making mu a sparse matrix instead of a dictionary. var_mu = dict() for v in ids: for a in authors_d: @@ -225,7 +227,6 @@ def inference(self, corpus=None, var_lambda=None): expavgElogtheta = numpy.exp(avgElogtheta) # Compute phi. - # TODO: avoid computing phi if possible. var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] # Normalize phi over k. @@ -244,7 +245,6 @@ def inference(self, corpus=None, var_lambda=None): expavgElogtheta = numpy.exp(avgElogtheta) # Compute mu over a. - # TODO: avoid computing mu if possible. var_mu[(v, a)] = expavgElogtheta mu_sum += var_mu[(v, a)] @@ -275,17 +275,18 @@ def inference(self, corpus=None, var_lambda=None): # This is a little bit faster: # tilde_lambda[:, ids] = self.eta[ids] + self.num_docs * cts * var_phi[ids, :].T - # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, - # corresponding to the authors in the document. The same goes for Elogtheta. - # Update gamma and lambda. # Interpolation between document d's "local" gamma (tilde_gamma), # and "global" gamma (var_gamma). Same goes for lambda. # TODO: I may need to be smarter about computing rho. In ldamodel, # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). + # FIXME: if tilde_gamma is computed like this in every iteration, then I can't compare + # lastgamma to it for convergence test. FIXME. tilde_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, + # corresponding to the authors in the document. The same goes for Elogtheta. 
Elogtheta = dirichlet_expectation(tilde_gamma) if self.optimize_lambda: @@ -324,10 +325,9 @@ def inference(self, corpus=None, var_lambda=None): # End of corpus loop. - self.var_gamma = var_gamma - self.var_lambda = var_lambda - if _pass % self.eval_every == 0: + self.var_gamma = var_gamma + self.var_lambda = var_lambda if self.eval_every > 0: if bound_eval: prev_bound = bound @@ -340,13 +340,31 @@ def inference(self, corpus=None, var_lambda=None): logprob = self.eval_logprob() logger.info('Log prob: %.3e.', logprob) - logger.info('Converged documents: %d/%d', converged, self.num_docs) + #logger.info('Converged documents: %d/%d', converged, self.num_docs) # TODO: consider whether to include somthing like this: #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold: # break # End of pass over corpus loop. + # Ensure that the bound (or log probabilities) is computed at the very last pass. + if self.eval_every != 0 and not _pass % self.eval_every == 0: + # If the bound should be computed, and it wasn't computed at the last pass, + # then compute the bound. + self.var_gamma = var_gamma + self.var_lambda = var_lambda + if self.eval_every > 0: + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + self.var_lambda = var_lambda self.var_gamma = var_gamma @@ -371,6 +389,8 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): # NOTE: computing the bound this way is very numerically unstable, which is why # "logsumexp" is used in the LDA code. + # NOTE: computing bound is very very computationally intensive. 
I could, for example, + # only use a portion of the data to do that (even a held-out set). bound= 0.0 for d, doc in enumerate(docs): authors_d = self.doc2author[d] From afa747de219b4676d0588e09ba100fbd7e077c15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 8 Nov 2016 11:41:30 +0100 Subject: [PATCH 036/100] Updated notebook. --- docs/notebooks/at_with_nips.ipynb | 126 +++++++++++++++--------------- 1 file changed, 64 insertions(+), 62 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index d2fcace683..783f53d98a 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 123, + "execution_count": 262, "metadata": { "collapsed": false }, @@ -103,7 +103,7 @@ }, { "cell_type": "code", - "execution_count": 247, + "execution_count": 334, "metadata": { "collapsed": false }, @@ -117,8 +117,8 @@ "data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", - "yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "#yrs = ['00']\n", + "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", + "yrs = ['00']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -140,7 +140,7 @@ }, { "cell_type": "code", - "execution_count": 248, + "execution_count": 335, "metadata": { "collapsed": false }, @@ -149,38 +149,38 @@ "filenames = [data_dir + 'idx/a' + yr + '.txt' for yr in yrs] # Using the years defined in previous cell.\n", "\n", "# Get all author names and their corresponding document IDs.\n", - "authors_names = []\n", - "author2doc = {}\n", - "author_id = 0\n", + "author2id = dict()\n", + "author2doc = dict()\n", + "i = 0\n", "for yr in yrs:\n", " filename = data_dir + 'idx/a' + yr + '.txt'\n", " for line in open(filename, errors='ignore', 
encoding='utf-8'):\n", " contents = re.split(',', line)\n", " author_name = (contents[1] + contents[0]).strip()\n", " ids = [c.strip() for c in contents[2:]]\n", - " authors_names.append(author_name)\n", - " author2doc[author_id] = [yr + '_' + id for id in ids]\n", - " author_id += 1" + " if not author2id.get(author_name):\n", + " author2id[author_name] = i\n", + " i += 1\n", + "\n", + " author_id = author2id[author_name]\n", + " author2doc[author_id] = [yr + '_' + id for id in ids]" ] }, { "cell_type": "code", - "execution_count": 249, + "execution_count": 336, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Make a mapping from author ID to author name.\n", - "id2author = dict(zip(range(len(authors_names)), authors_names))\n", - "\n", - "# Also the reverse mapping.\n", - "author2id = dict(zip(authors_names, range(len(authors_names))))" + "id2author = dict(zip(authors_names.values(), authors_names.keys()))" ] }, { "cell_type": "code", - "execution_count": 250, + "execution_count": 337, "metadata": { "collapsed": false }, @@ -198,7 +198,7 @@ }, { "cell_type": "code", - "execution_count": 251, + "execution_count": 338, "metadata": { "collapsed": false }, @@ -224,7 +224,7 @@ }, { "cell_type": "code", - "execution_count": 252, + "execution_count": 339, "metadata": { "collapsed": false }, @@ -247,7 +247,7 @@ }, { "cell_type": "code", - "execution_count": 253, + "execution_count": 340, "metadata": { "collapsed": true }, @@ -262,7 +262,7 @@ }, { "cell_type": "code", - "execution_count": 254, + "execution_count": 341, "metadata": { "collapsed": true }, @@ -281,7 +281,7 @@ }, { "cell_type": "code", - "execution_count": 255, + "execution_count": 342, "metadata": { "collapsed": true }, @@ -293,16 +293,16 @@ }, { "cell_type": "code", - "execution_count": 256, + "execution_count": 343, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkoAAAGcCAYAAAAmrI82AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHFW5//HPNwGiLAlcuQkgKCAGQWVJQMhFAt5IkEXU\nC4oDekFwYZU7iqAIPxAUBSVhVxCQRR1kuSh7MHCFAMFAgggSguyLJBDABMISkjy/P85pqBRdMz2d\nkZ6ZfN+vV786feqpc05VTzJPTp06pYjAzMzMzN5uQKs7YGZmZtZbOVEyMzMzq+BEyczMzKyCEyUz\nMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMrOWkPSUpLMLn8dIWiTp\nP96Btn8o6Y3C54G57XH/6rZze1/N7a3xTrTXLEnflfSIpAWSprS6P42S9IF8fvdodV+s73OiZEsV\nSXvlf0DrvY5vdf+WMvWen9TtZypJ+r6kTzfR9qLuttVdnfQtaOJY30mSdgSOB/4P2Bs4qqUdMmuR\nZVrdAbMWCNI/+o+Vyu9757tiNRFxo6R3R8T8bu56JHARcFU39jkaOLab7TSjqm/nARc1cazvpE8A\nbwBfDT8U1JZiTpRsaXV9RExrNFiSgOUi4vV/YZ+Wev/qxEHS8hHxSkQs4h0YUaqSE4/enCQBDAPm\n9cYkyX8f7Z3kS29mJcX5KpK+LOlvwGvAmLxdkr4l6W+SXpP0jKQzJQ0u1SNJ/y/PxXlZ0kRJH5L0\nZGluzmLzZQrldeexSNpJ0qRc5xxJV0r6UCnm15JelLRm3v6SpGcl/aROO5LULumvkl7NcddK2iRv\nv03SXRXn6mFJnY7kVJ2HOnFvm6Mkabik/5U0M/ftCUm/kbRC7XsClgNq52pR7dzm87oo1/E7SS+S\nLiNVnvO87cuSZuT2ppTnTOVz+/c6+71ZZwN9q/puDy78XD0t6dQ6P1e3Spom6cOS/k/SK/ncfquz\n76Gw/zKSjs7f3WtKc5COlbRsqe97AkNyPxeqYr5P/tl5Q9IKhbLD834/KZQtk7//YwtlK0oan/9O\nvCZpuqT/KdXf1d/HVSRdKOmfkl6QdC6w2DnLcatLuiCfq9ck/UPSFZLWbOS82dLLI0q2tBoi6T3F\ngoh4vhQzFvgicAbwAvBELj8PaMvvJwPrAgcDG0vaOo9WQJrfcThwJTABGAncALy71E7VfJW3lUva\nGzgXuBY4DFgBOACYJGnTiHiqsO8yub1JwLfz8XxH0t8j4txCtReSfileBZxN+uU+GtgC+Evefqak\n4RHxYKEvo4B1gO/V6XtRo+eh1u9a/YNy3ADSeZ4FrAl8GhgcEfMkfQn4FXBrPi8AD5Xq+l/gAeC7\nhbKqcz4G2AM4lXTZ6UBggqTNImJGF/u+WR4RCxvoW/m7/SFwBHA96WduA9J3O7L0cxXAqsB1wKXA\nxcAXgJ9KuicibqzTt6Lz8zFeTPrZ2JJ0iXB9YPdC3w8ANga+Dgi4raK+SaTvaCvS9wXwcWAhsHUh\nbiTpO78lH6+Aa/J+vwT+CuwAjJO0ekQcXmrnbX8fcx1XkX5WzwRmALuSznv5O/o9sB7pu32CNGI2\nlvQz9RRmVSLCL7+WmhewF+mSS/m1sBAzMJfNB9Yr7b9t3rZrqXyHXL5b/jw07395Ke4nOe7sQtlx\nwPw6fd2X9Mtmjfx5JeCfwGmluGG5/PRC2UV538NKsX8Bbi983i7358ROztnKwKvAsaXyM3K77+pk\n3+6chzG5z/+RP4/MMZ/u4jt9tVhP6bwuAs6v2Da/8Ln2nS8APlIofz9p9OLi0rl9sKs6u+hb+bsd\nls/TlaW4b+a4PQtlk3LZFwply5ESyd92ca5G5OM8o1Q+Lte5Vek4X2jg79RA4CXguELZC6RE7LXa\nzwfwnXyMK+bPu+a+HFqq73JSkvq+Bv4+1ur4ZqFsACk5XQjsk
cv+rRznl1+NvnzpzZZGAewPfLLw\n2q5O3I0R8VCpbDfSL4E/SXpP7QXcRfql+Ikctz3pH/jTSvufvAT9/hQpWbq41PZC4M5C20Vnlz7f\nShoBq9mVlBwcV9VoRPwTuJo0CgGkyyHA50kJ0Gud9HkszZ+Hf+b3HSS9q4H4egL4RTfiJ0XEm5P6\nI+Jx0ojFp5psv1Hbkc5T+bycBbwC7FQqnxMRl9Q+RJrbdSeLf7f17Eg6J+VlEE4ijRqV2+lSRCwE\nJpNGIZG0ETAE+DGwLGm0B9Io0z0R8XL+vAMp+TmjVOU40rkon/N6fx93AF6n8HMeaeTt9Hw8Na+Q\nkq9PSBrSzUO0pZwTJVta3RkRNxVfdWIeq1P2QdL/Tp8rvWYB7yKNoAC8L78v9g97RMwk/e+7GeuR\n/vGfVGr7WeA/C23XvJyTnKIXgVUKn9cFnoqIrvp0IbCOpC3z508B7yGNOnTm/fm92+chIh4GTgG+\nATwv6TpJ+0taqYs2yx7tRmz5FzHAg8BKklaps62n1M7Tg8XCSJOVHy1sr3myTh3l77aqnQX53Bbb\neZr0fZTbadStwOZ5ntPWwJMRcQ/pTtLa5betSD+7xb48FRGvluqaXthe9Fiddt8PPF0nWZ9R/JC3\nHwHsDDwr6U+SDpVU/jtj9jaeo2RWrfwPOKT/XPwD+DKL/4+15tn8XtvWyB1DVTED67QdpPlRs+vE\nlycnL6yoVxV/7sx1uc0vAXfk96cj4k9d7Ned8/A2EdGeJ+d+hjQ6dTpwuKQtc7LViHrfY3eUz1Gj\n39eStNGVRr7b7m7vbh+KJpGWXNiCNHI0qVC+taQPk/6DccsStFfvexT1v4+31R0RJ0m6AvgsacT3\nh8D3JG1THEU0K/OIkln3PEyaSHtreUQqv2r/4D6W34cXd5a0GunyWdGLwEBJy5fK167TNsCzFW1P\novseAtYs31lVFhELyJOGJa1MmlD9mwbqfyy/N3Ieqtq+LyJ+FBHbANuQRuu+XgxppJ4GfbBO2XDg\npYh4MX9+kTRvq2ztOmWN9u2x/L5+sVDScrnexxusp5F2lpH0gVI7awArLkE7d5Au4Y4mjSDVfhZv\nAf6DdFk4SCNPxb6sKak8qX+D/N5IX2p1lC/Nrl8nloh4JCLGRcT2wEdJk8sbulvQll5OlMy65xLS\nxNkjyxvy7c+1hOOPpP/1H1wKa69T58Ok/wGPLtS1ImnUqug64GXg+3mOULn9VRs8hqLLSSPLjay6\nfBEpSTyL9AumkUSpO+dhMZIGSyr/G3Uf6RfuoELZPOonLs34eJ5jU+vD2qTLNdcXYh4G3iNpg0Lc\ne0nJY1mjfaudp0NK5d8g3dl4dQN1NOJa0s/a/5TKv006r9c0U2m+fDaN9DO7OouPKK0AHATMiIji\nSOi1pL9LB5Sqayedi+saaPpa0s/CN2oF+e/GQSx+B+W7812URY+Q/j4NKsStJmn9Oj93thTzpTdb\nGjV9iSEibsqXgo6UNAKYSPqf9HDSRO/9SXcuzZI0HjhU0pWkf/Q3I00cf6FU7XXA08D5kn6Wy/YB\nngHeXGcnIuZIOoi0LME0SReTLoe9nzQJ9//o5v+OI2KipA7gW0prG91AuoS0NTAhIoqTZO+SNJ00\nifuvjVyu6OZ5gMW/m+2A8ZIuBf5Omhi8F+kS4/8W4qYCY/P6O88AD0dE3XWfGnAfcIOk00jf6wH5\n/QeFmN+Sljy4MsetCOxHWoJg41J9DfUtn6cTgCMkXUtKjDbI9U4mjeYtsYiYJuk3wAH5RoBJwCjS\npdRLIqJqCYBGTAIOBZ6PiOm5vWckPUz6+/HLUvwVpBGnEyStx1vLA+wE/DQi6s3DKruCNJr1szxK\nVlseoDw6uyFwvaRLgPtJidhupHl2HYW4n5FuWliTdIndzMsD+LV0vUi/aBcCIzqJGZhjTuok5muk\nu4xeJl2KuRv4ETC0FPf/S
EnQy6RRg/VJE3HPLsWNIP1CfJX0P90DKd1CXojdljTC8WKudwZwDrBJ\nIeYi0i+scr+PA14vlYn0C+7+3P5M0p1eG9XZ/7u5T9/q5nmvdx6eAM4qxJSXB1g3H9ffSSMzz+Z9\nR5fq/hDwp1z3wtq5zce6kLTmUqfnofidk5KGB/O5mFLrT2n/scC9pNvf/0Zax6je8gBVfav6bg/M\n9b2Wz9cpwEqlmEnA1Dp9uog0atPVdzEwfx8P53YeJSWCy9Sp720/Q53U++l8TFeUys+jtMRBYdsK\npLvcnsp9eQA4pDt/H0kT2C8k3SX5PGnNqk1ZfHmAVUl3Xt4PzCUl6bcBn61zzAvK34tfS/dLET15\ned/MuiLpSeC6iPh6l8G9jKRvk9ZAel9EPNPq/piZ/av5OqyZdcc+pPVsnCSZ2VLBc5TMrFNKz/Da\nhTSv6EP4LiEzW4o4UTJ751U9K6y3Wo10h9sLpMeYTGhxf8zM3jGeo2RmZmZWwXOUzMzMzCo4UTIz\nMzOr4ETJzFpO0g8llZ9V9073YaCkRZLG9WCdY3Kdu/RUnd1o+9eS/v5Ot2vW3zhRMuvlJO2Vf9nW\nXq9KmiHptH709PO+NsG9O1p1XAEsalHbZv2G73oz6xuC9Dy2x4B3kZ7Qvj+wg6SPRMRrLeybda7p\nR+Ysob1b2LZZv+FEyazvuD4ipuU/nyfpBdIDRD8D/K513eqapOUj4pVW92NpEhELW9Guv2vrb3zp\nzazvuok0YrBOrUDSOpIulfS8pHmSJkvasbiTpOcKD99FyT8lvSFpcKH88Fy2fKFsfUmX5fpflXSn\npE+X6q9dKhwt6UxJs0jPt+sWSftKulHSrNzWfZK+Voo5RdLMUtnPc/v7FcrWyGX7NNj2l/PlzVcl\nTZH0H3Vi3ivpfEkzJb0m6V5Je9WpLoABko6S9JSkVyT9UdI6pfq2yd/dE7m+xyX9rPjUe0nflbRQ\n0hrlRnLsq5JWyp/fNkdJ0oqSxkt6MrcxPT+wtxjzgXyu9iiV1+ZwHVEo+2EuGy7pd5JeJD2c2azf\ncKJk1netl9+fB8jzlSYD2wGnA0cAg4CrJH2msN9twOjC542AWoK0VaH848C02uiApA+TntS+PvBj\n0grdLwO/L9VfcyZpJe8fkJ4P1137kx4Q/CPg26SHxJ5VSpYmAf8uaXip3wuBrQtlo0kJy6QG2h0D\n/BS4gPTw2KHABEnr1wIkrUZ6YO42wKnAIbmvv5J0QKk+kS6b7gSckF//QXqQa9EXSN/X6cBBpAcA\nH0J6qGzNxbm+z9fp927AtRHxUv682LwvSQKuAQ4GriaNRv4dGCfphM5OSCdq9f8v6eG13yU9lNas\n/2j1U3n98suvzl/AXqRf/J8A3gO8F9gdeI6UqKye48bnuFGFfVcgPSX+4ULZt4H5wAr580GkX/KT\ngeMLcS8APyt8ngjczdufMn8r8ECpv4uAP5EXtW3gGI8D5pfKBtWJ+yMwvfB5WG5r3/x5lXwOLgae\nKMSdDszsog8Dc10LgI8Uyt9PerL9xYWy84EngCGlOi4BZgPL5s9jcp33AAMLce25n8O7ON7v5/6s\nXij7M3B7KW5UbucLhbKLgAcLn3fNMYeW9r0ceIP0oGOAD+S4PSrOzxGl720RcH6r/5745de/6uUR\nJbO+QcCNpOToSeC3wFzgs/HWA2p3AKZExOTaThExDzgbWFvShrl4Eml+Yu1y0ta5bFL+M5I2AlbO\nZUhahZSoXQoMkfSe2gu4AfigpNUL/Q3glxHR9B1fEfH6mwcvDc5t3QwMl/TuHDMLeIi3Rsi2Bl4H\nTgLWlPT+0jE2YlJE3Ffox+PAVcCncl8EfA74A7BMnXOxCrBJqc5zY/E5Q5NI3+m6Fce7fK7v9hxX\nrO93wBaS3lco2x14hTRSVGUHUoJ8Rql8HCkJ+lQn+3YmgF80ua9Zr+dEyaxvCNKlqE8C2wI
bRsQH\nImJiIeb9wIw6+04vbAeYRvqlWrs09XHeSpQ2k7Rc3hak0SJIl/lEGkF4rvQ6JseUlyp4rPhB0rKS\nhhVfnR2wpK0l3STpZeCfua1j8+YhhdBbS8cyBbgLmANsLWkI8BEaT5QeqlP2ILBSThhXA1YCDuDt\n5+LsHF8+F+U5Wi/m91VqBZLeL+lCSc+TRgqfIyXHsPjxXpLfv1Ao2xW4OjqfRP1+4KmIeLVUXv75\naMajS7CvWa/mu97M+o4746273poWEQsk/RkYLekDwOrALaRfzMsCW5ASjukR8Xzerfafqp8BVQ/F\nLScY5V/Io0mXzoKUdIWktSLiH+WKJH0wx95Hukz1JGk0ZBfSHJvif/ImAXtJWouUME2MiJB0W/5c\nS0puqeh3I4q32dfavgD4dUX8PaXPVXegCdJEadKlzZWA40kJ7yvA+0hzlN483oh4StJkUqL0M0lb\nky7HXtyNY+hM1SjgwE72KX/XZv2GEyWz/uNx0kTrsg0K22smAYeRJn4/FxEPAkj6Gymh2Zp0uanm\nkfz+RkTc1GT/ppJGxIqeq4jdhZS07ZQvr5H7t32d2NpI0fbACODo/PkW4CukROkl3p68VPlgnbLh\nwEsR8aKkucA8YMASnIuyTUhzg9oi4s2lHiRVXQ67GDhF0rqky24vAdd10cZjwMclvbs0qlT++agl\nliuX9l+SESezPsuX3sz6j2uBj0naolYgaQXg68CjEXF/IXYSaeHKQ3jr8hr5z18mjTK9eakqIp4j\nTc7+Rr7jazGSVu2qcxHxz4i4qfSqemxJbQTmzX+j8mWv/65T70PALNIk9QGkeT21Y1yfNJ/o9m7M\nl/p4nqNVa3dtYGfg+tzeQuAK4AuSNijvXOdcNNJuveMV6fupt/+l5AnXpMtuVxbnOFW4FliOdMmw\nqDax/DqAiHiRdKlzdCnuoIq+1CVpiNJyEis2uo9Zb+QRJbO+oZHLJj8B2oDrJZ1Kumttb9JIwH+V\nYieT7qYaDpxVKL+FNBeq3q30B+ayeyX9kjTKNIx0x9V7gU272d/OTCDdRn9tbmsw8DXgGd4+/wdS\ngrcbaTmDl3PZnaRLQuuR7lJr1H3ADZJOI52jA/L7Dwoxh5ESiSm5f9OBfwM2I43GFZPJRs7F30jz\nfE7OE9BfzsczuF5wRMySNAn4DrAijS04egXp+z1B0nrAX0kTvHcCfhoRxXlU5wCHSppDmtO2LWnE\nqzvf6xeBn+f3S7qINeu1PKJk1jd0+T/5iHiWlLTcQPrf//Gk29p3jogrS7GvkG71L07YhpQIBenW\n+idL+0wnJQJXk5YAOB34Bmk04lgW18zdbm/uk9vajfRv1M+ArwKnkdZmqqfW7+Io2ALSrfSNrp9U\n68ONwKGkYzyGNFo1NvepVvdMYHPSPKX/yn37JimxObzquKrK88jazqTk5QjgSFLy9JVO+vo7UpL0\nT6rnjRXbCFJSdCrwadJyEsOBb0XEd0v7HU2aG/UFUsK6IPevu8/k66/P77OliJbg7l0zMzOzfq3X\njShJ+l5eEn9coWyQpDMkzZb0ktIjFIaW9ltL0jVKj22YKelESQNKMdtKmpqX7n9QdR43IOlASY/m\nRwHcIWnz0vYu+2JmZmb9Q69KlHJS8jXefnfKyaQh411J8wLWIK0mW9tvAGmi4jLAlqQh870pXA7I\nEzKvJg2rbwycApwjabtCzO6kheqOJs23uIf06ILi5MxO+2JmZmb9R6+59JbvjJhKmkh6FHB3RHxL\n6SGdzwFfjIgrcuz6pMmTW0bEFEk7AFeSlvmfnWO+QZrc+u953ZgTgB0iong3SwfpEQQ75s93AH+O\niEPyZ5HWbzk1Ik5spC//0pNkZmZm76jeNKJ0BnBVnXVJNiONFNVWqCUiZpCeszQqF20J3FtLkrIJ\npNVsP1yIKa5iXIsZBWnVYGBkqZ3I+9TaaaQvZmZm1k/
0ikRJ0hdJC659r87mYaSHZc4tlc/irVtw\nV8ufy9tpIGawpEHAqqSVZ+vF1OpopC+Lyc9sGiFp+XrbzczMrL7e8Du05esoSVqTNO9nu04Wn6u7\nK43detpZjBqM6aqdzmI2AW4DpuVnVhVdT/VtvWZmZkuT7Xn7w5lXJK24vxVvLSb7jmp5okS63PXv\nwNQ8JwjSyM5oSQeRTtogSYNLIzlDeWv0p7amSdGwwrbae/khnEOBuRExX9Js0now9WKK7SzXRV/K\n1s7vI+psG01a68bMzMyqrc1SnChNBD5aKjufNEH6J8DTwBvAGNLKskgaTnpYZO2kTQaOkLRqYZ7S\nWNLTw6cXYnYotTM2lxMRb0iamtu5Mrej/PnUHD+VtPBavb5Mrji+xwB+/etfs8EGb3vaQb/S3t7O\n+PHjW92Nf7ml5Thh6TlWH2f/4uPsP6ZPn86XvvQlyL9LW6HliVJEzAOKz6BC0jzg+dpKuJLOBcZJ\nqj3c8lTgtoi4M+9yQ67jIkmHk55TdRxweuFy3i+Ag/Ldb+eRkp3dgB0LTY8DLsgJ0xTSM5CWJz/+\nICLmdtKXqjveXgPYYIMNGDGi3qBS/zFkyJB+f4yw9BwnLD3H6uPsX3yc/dJrrWq45YlShfJ8n9pD\nGy8DBpHm9hz4ZnDEIkk7k54rdDvpyd7n89ZTxImIxyTtREqGvgk8BewbERMLMZfkNZOOJV2C+wuw\nfX4gaEN9MTMzs/6jVyZKEfGfpc+vAwfnV9U+T5KeRdRZvTeT5kR1FnMm1c+TaqgvZmZm1j/0iuUB\nzMzMzHojJ0rWY9ra2lrdhXfE0nKcsPQcq4+zf/FxWk/qNY8w6a8kjQCmTp06dWmadGdmZrbEpk2b\nxsiRIwFGRsS0VvTBI0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCi\nZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVw\nomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlV\ncKJkZmZmvcr//A987Wut7kWyTKs7YGZmZlb01FPw8sut7kXS8hElSftJukfSnPy6XdKnCtv/JGlR\n4bVQ0pmlOtaSdI2keZJmSjpR0oBSzLaSpkp6TdKDkvaq05cDJT0q6VVJd0javLR9kKQzJM2W9JKk\nyyQN7elzYmZmZr1DyxMl4EngcGBkft0E/EHSBnl7AGcDw4DVgNWBw2o754ToWtLo2JbAXsDewLGF\nmLWBq4EbgY2BU4BzJG1XiNkdOAk4GtgUuAeYIGnVQl9PBnYCdgVGA2sAly/pCTAzM7PeqeWJUkRc\nExHXR8RD+XUk8DIp6al5JSKei4hn86s4ILc98CFgz4i4NyImAEcBB0qqXVrcH3gkIg6LiBkRcQZw\nGdBeqKcdOCsiLoyIB4D9gFeAfQAkDc5/bo+ImyPibuArwFaSPtbDp8XMzMx6gZYnSkWSBkj6IrA8\ncHth056SnpN0r6TjJb27sG1L4N6ImF0omwAMAT5ciJlYam4CMCq3uyxpNOvG2saIiLzPqFy0GWnU\nqhgzA3iiEGNmZmb9SK+YzC3pI8Bk4F3AS8DnchIC8BvgceAfwEbAicBwYLe8fTVgVqnKWYVt93QS\nM1jSIODfgIEVMevnPw8D5kfE3DoxqzV0oGZmZtan9IpECXiANHdoZdL8nwsljY6IByLinELc3yTN\nBG6UtE5EPNpFvdHJNjUY09n2RmPMzMysD+oViVJELAAeyR+n5Tk/h5DmFpX9Ob+vBzwKzAQ2L8UM\ny+8zC+/DSjFDgbkRMV/SbGBhRUxtlGkmsJykwaVRpWJMpfb2doYMGbJYWVtbG21tbV3tamZm1u91\ndHTQ0dEBwF13wYI
F0N4+p8W96iWJUh0DgEEV2zYljeA8kz9PBo6QtGphntJYYA4wvRCzQ6mesbmc\niHhD0lRgDHAlgCTlz6fm+KnAglx2RY4ZDryvVk9nxo8fz4gRI7oKMzMzWyoVBw922y2to3T88dMY\nOXJkS/vV8kRJ0o+A60jLBKwE7AlsA4yVtC6wB+n2/+dJl+fGATdHxH25ihuA+4GLJB1OWj7gOOD0\niHgjx/wCOEjSCcB5pGRnN2DHQlfGARfkhGkK6S645YHzASJirqRzgXGSXiTNpToVuC0ipvToSTEz\nM7NeoeWJEuly14WkBGcO8FdgbETcJGlN4JOky3ArkJKpS4Ef1XaOiEWSdgZ+TrpTbh4puTm6EPOY\npJ1IydA3gaeAfSNiYiHmkrxm0rG5T38Bto+I5wp9bSddoruMNOJ1PXBgj50JMzMz61VanihFxFc7\n2fYUsG0DdTwJ7NxFzM2kJQA6izkTOLOT7a8DB+eXmZmZ9XO9ah0lMzMzs97EiZKZmZlZBSdKZmZm\nZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZm\nZtbrSK3uQeJEyczMzHqViFb34C1OlMzMzMwqOFEyMzMzq+BEyczMzKyCEyUzMzOzCk6UzMzMzCo4\nUTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMjMzM6vgRMnMzMysghMlMzMzswpOlMzMzMwq\nOFEyMzMzq+BEyczMzKyCEyUzMzOzCi1PlCTtJ+keSXPy63ZJnypsHyTpDEmzJb0k6TJJQ0t1rCXp\nGknzJM2UdKKkAaWYbSVNlfSapAcl7VWnLwdKelTSq5LukLR5aXuXfTEzM7P+o+WJEvAkcDgwMr9u\nAv4gaYO8/WRgJ2BXYDSwBnB5beecEF0LLANsCewF7A0cW4hZG7gauBHYGDgFOEfSdoWY3YGTgKOB\nTYF7gAmSVi30tdO+mJmZWf/S8kQpIq6JiOsj4qH8OhJ4GdhS0mBgH6A9Im6OiLuBrwBbSfpYrmJ7\n4EPAnhFxb0RMAI4CDpS0TI7ZH3gkIg6LiBkRcQZwGdBe6Eo7cFZEXBgRDwD7Aa/k9mmwL2ZmZtaP\ntDxRKpI0QNIXgeWByaQRpmVII0EARMQM4AlgVC7aErg3ImYXqpoADAE+XIiZWGpuQq0OScvmtort\nRN6n1s5mDfTFzMzM+pFekShJ+oikl4DXgTOBz+VRndWA+RExt7TLrLyN/D6rznYaiBksaRCwKjCw\nIqZWx7B7GWMFAAAgAElEQVQG+mJmZmY9QGp1D5Jlug55RzxAmju0Mmn+z4WSRncSLyAaqLezGDUY\n01U7jfbFzMzM+phekShFxALgkfxxWp7zcwhwCbCcpMGlkZyhvDX6MxNY7O400uhPbVvtfVgpZigw\nNyLmS5oNLKyIKbbTVV8qtbe3M2TIkMXK2traaGtr62pXMzOzfq+jo4OOjg4A7rwTFi2C9vY5Le5V\nL0mU6hgADAKmAguAMcAVAJKGA+8Dbs+xk4EjJK1amKc0FpgDTC/E7FBqY2wuJyLekDQ1t3Nlbkf5\n86k5vrO+TO7qgMaPH8+IESMaO3ozM7OlTHHwYNdd4dVX4Yc/nMbIkSNb2q+WJ0qSfgRcR1omYCVg\nT2AbYGxEzJV0LjBO0ovAS6TE5baIuDNXcQNwP3CRpMOB1YHjgNMj4o0c8wvgIEknAOeRkp3dgB0L\nXRkHXJATpimku+CWB84H6KIvU3r4tJiZmS21ohdNaGl5okS63HUhKcGZA/yVlCTdlLe3ky6LXUYa\nZboeOLC2c0QskrQz8HPSKNM8UnJzdCHmMUk7kZKhbwJPAftGxMRCzCV5zaRjc5/+AmwfEc8V+tpp\nX8zMzKx/aXmiFBFf7WL768DB+VUV8ySwcxf13ExaAqCzmDNJd9013RczMzPrP3rF8
gBmZmZmvZET\nJTMzM7MKTpTMzMzMKjhRMjMzM6vgRMnMzMysghMlMzMzswpOlMzMzMwqOFEyMzMzq+BEyczMzKyC\nEyUzMzOzCk6UzMzMzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhRMjMzs15HanUPEidK\nZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZWa8S0eoevMWJkpmZmVkFJ0pm\nZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVWp4oSfqepCmS5kqaJekKScNLMX+StKjw\nWijpzFLMWpKukTRP0kxJJ0oaUIrZVtJUSa9JelDSXnX6c6CkRyW9KukOSZuXtg+SdIak2ZJeknSZ\npKE9eU7MzMysd+iRREnSQEmbSFqlid23Bk4DtgA+CSwL3CDp3YWYAM4GhgGrAasDhxXaHwBcCywD\nbAnsBewNHFuIWRu4GrgR2Bg4BThH0naFmN2Bk4CjgU2Be4AJklYt9OVkYCdgV2A0sAZweRPHbWZm\nZr1cU4mSpJMl7Zv/PBC4GZgGPClp2+7UFRE7RsRFETE9Iu4lJTjvA0aWQl+JiOci4tn8ermwbXvg\nQ8CeEXFvREwAjgIOlLRMjtkfeCQiDouIGRFxBnAZ0F6opx04KyIujIgHgP2AV4B98rEOzn9uj4ib\nI+Ju4CvAVpI+1p3jNjMzs96v2RGl3UijLQCfBtYhJSrjgR8tYZ9WJo0gvVAq31PSc5LulXR8acRp\nS+DeiJhdKJsADAE+XIiZWKpzAjAKQNKypOTsxtrGiIi8z6hctBlp1KoYMwN4ohBjZmZm/USzidKq\nwMz85x2BSyPiQeA84KPNdkaSSJe2bo2I+wubfgN8CdgWOB74MnBRYftqwKxSdbMK2zqLGSxpEOmY\nBlbE1OoYBsyPiLmdxJiZmVk/sUzXIXXNAjaU9AzwKeCAXL48sHAJ+nMmsCGwVbEwIs4pfPybpJnA\njZLWiYhHu6izsyfGqMGYrp4600iMmZmZ9THNJkq/Ai4BniElCH/M5VsADzRToaTTSaNTW0fEM12E\n/zm/rwc8Shrd2rwUMyy/zyy8DyvFDAXmRsR8SbNJSV69mNoo00xgOUmDS6NKxZi62tvbGTJkyGJl\nbW1ttLW1dbabmZnZUqGjo4OOjg4A7rwTFi2C9vY5Le5Vk4lSRBwj6T5gLdJlt9fzpoXAT7pbX06S\nPgNsExFPNLDLpqQErZZQTQaOkLRqYZ7SWGAOML0Qs0OpnrG5nIh4Q9JUYAxwZe6X8udTc/xUYEEu\nuyLHDCdNPp/cWYfHjx/PiBEjGjg0MzOzpU9x8OBzn4P58+G446YxcmT53q53VrMjSkTEZQCS3lUo\nu6C79eT1kNqAXYB5kmojOnMi4jVJ6wJ7kG7/f550a/844OaIuC/H3gDcD1wk6XDS8gHHAadHxBs5\n5hfAQZJOIM2lGkOalL5joTvjgAtywjSFdBfc8sD5+fjmSjoXGCfpReAlUhJ1W0RM6e6xm5mZWe/W\n7PIAAyUdJelp4OWczCDpuNqyAd2wHzAY+BPwj8LrC3n7fNL6ShNIo0M/BS4lJVYARMQiYGfSiNbt\nwIWk5OboQsxjpPWPPgn8hZQE7RsREwsxlwDfJq2/dDewEbB9RDxX6G87aT2mywp93rWbx2xmZmZ9\nQLMjSt8nLep4GPDLQvl9wP8A5zZaUUR0mqxFxFOku926qudJUrLUWczNvH19pnLMmaRJ5VXbXwcO\nzi8zMzP7F5C6jnknNLs8wH8DX4+I37D4XW73kNZTMjMzM+vzmk2U3gs8VFHfss13x8zMzKz3aDZR\nup/0jLay3Uhze8zMzMz6vGbnKB1LujvsvaRk678krU+6JNfpPCEzMzOzvqKpEaWI+AMpIfokMI+U\nOG0AfDoi/tjZvmZmZmZ9xZKso3QrsF0P9sXMz
MysV2l2HaXNJW1Rp3wLSZstebfMzMzMWq/Zydxn\nkB5fUvbevM3MzMysKdGLHjPfbKK0ITCtTvndeZuZmZlZn9dsovQ6MKxO+eqkh8aamZmZ9XnNJko3\nAD+WNKRWIGll4HjAd72ZmZlZv9DsXW+HArcAj0uqLTC5CTAL+HJPdMzMzMys1ZpKlCLiaUkbAXsC\nGwOvAr8COiLijR7sn5mZmVnLLMk6SvOAs3uwL2ZmZma9StOJkqThwLbAUEpznSLi2CXrlpmZmVnr\nNZUoSfoa8HNgNjATKK54EKRHmpiZmZn1ac2OKB0JfD8iTujJzpiZmZn1Js0uD7AKcGlPdsTMzMys\nt2k2UboUGNuTHTEzMzPrbZq99PYQcJykLYF7gcWWBIiIU5e0Y2ZmZmat1myi9HXgZWCb/CoKwImS\nmZmZ9XnNLji5Tk93xMzMzKxGanUPkmbnKAEgaTlJ60tqej0mMzMzs96qqURJ0vKSzgVeAf4GvC+X\nnybpuz3YPzMzM7OWaXZE6cekZ7xtC7xWKJ8I7L6EfTIzMzPrFZq9ZPZZYPeIuENScVXuvwEfWPJu\nmZmZmbVesyNK/w48W6d8BRZ/nImZmZlZn9VsonQXsFPhcy05+ioweYl6ZGZmZtZLNJsoHQEcL+nn\npMt3h0j6I/AV4PvdqUjS9yRNkTRX0ixJV0gaXooZJOkMSbMlvSTpMklDSzFrSbpG0jxJMyWdKGlA\nKWZbSVMlvSbpQUl71enPgZIelfSqpDskbd7dvpiZmVn/0FSiFBG3kiZzL0NamXssMAsYFRFTu1nd\n1sBpwBbAJ4FlgRskvbsQczJpBGtXYDSwBnB5bWNOiK7N/dkS2AvYGzi2ELM2cDVwY+77KcA5krYr\nxOwOnAQcDWwK3ANMkLRqo30xMzOz/qPbk7nzmkl7ABMi4mtL2oGI2LFU/96k+U8jgVslDQb2Ab4Y\nETfnmK8A0yV9LCKmANsDHwI+ERGzgXslHQX8RNIxEbEA2B94JCIOy03NkPRxoB34Yy5rB86KiAtz\nO/uRkqJ9gBMb7IuZmZn1E90eUcpJxy+Ad/V8dwBYmTTn6YX8eSQpobux0IcZwBPAqFy0JXBvTpJq\nJgBDgA8XYiaW2ppQq0PSsrmtYjuR96m1s1kDfTEzM7MlEL3otrBm5yhNIV2a6lGSRLq0dWtE3J+L\nVwPmR8TcUvisvK0WM6vOdhqIGSxpELAqMLAiplbHsAb6YmZmZv1Es+sonQmcJGlNYCowr7gxIv66\nBPVuCHy8gVjR2FIEncWowZiu2ukypr29nSFDhixW1tbWRltbWxdVm5mZ9X8dHR10dHQAMCVPZGlv\nn9PCHiXNJkoX5/dTC2XBWwnDwO5WKOl0YEdg64j4R2HTTGA5SYNLIzlDeWv0Zyaw2N1ppNGf2rba\n+7BSzFBgbkTMlzQbWFgRU2ynq77UNX78eEaMGNFZiJmZ2VKrOHjwmc/AokXwgx9MY+TIkS3tV7OX\n3tap81q38N4tOUn6DGky9hOlzVOBBcCYQvxw0vPlbs9Fk4GPlu5OGwvMAaYXYsawuLG5nIh4I7dV\nbEf5c62dzvri9aPMzMz6maZGlCLi8Z7qgKQzgTZgF2CepNqIzpyIeC0i5uYH8I6T9CLwEmkk67aI\nuDPH3gDcD1wk6XBgdeA44PScAEGagH6QpBOA80jJzm6kUayaccAFkqaS5mG1A8sD5+fj7qwvvuPN\nzMysn2kqUZL0351tr91e36D9SJfr/lQq/wpQq6eddFnsMmAQcD1wYKG9RZJ2Bn5OGv2ZR0puji7E\nPCZpJ1Iy9E3gKWDfiJhYiLkkj0odS7oE9xdg+4h4rtCvTvtiZmZm/Uezc5ROKX1eljTyMh94hbcS\nnC5FRJeX/yLideDg/KqKeRLYuYt6biYtAdBZzJmkSeVN98XMzMz6h2Yvva1SLpP0QdKIzk+XtFNm\nZmZmvUGzk
7nfJiL+DnyXt482mZmZmfVJPZYoZQtIzz4zMzMz6/Oancy9S7mIdKfZQcBtS9opMzMz\ns96g2cncvy99DuA54Cbg20vUIzMzM1vqSV3HvBOanczd05fszMzMzHodJzxmZmZmFZpKlCRdJum7\ndcq/I+nSJe+WmZmZWes1O6K0DXBNnfLrgdHNd8fMzMys92g2UVqRtAp32RvA4Oa7Y2ZmZtZ7NJso\n3QvsXqf8i6SH05qZmZn1ec0uD3Ac8L+SPkBaEgBgDNAGfL4nOmZmZmbWas0uD3CVpM8CRwC7Aa8C\nfwU+mR88a2ZmZtbnNTuiRERcQ/0J3WZmZmb9QrPLA2wuaYs65VtI2mzJu2VmZmbWes1O5j4DWKtO\n+XvzNjMzM7M+r9lEaUNgWp3yu/M2MzMzsz6v2UTpdWBYnfLVgQXNd8fMzMyWdhGt7sFbmk2UbgB+\nLGlIrUDSysDxwB97omNmZmZmrdbsXW+HArcAj0u6O5dtAswCvtwTHTMzMzNrtWbXUXpa0kbAnsDG\npHWUfgV0RMQbPdg/MzMzs5ZZknWU5gFn92BfzMzMzHqVphIlSZ8nPa5kOBDA34HfRsRlPdg3MzMz\ns5bq1mRuSQMk/Q74HWkZgIeAR4APA5dIuliSer6bZmZmZu+87o4oHQJ8EtglIq4ubpC0C2me0iHA\nyT3TPTMzM7PW6e7yAF8BvlNOkgAi4krgMGCfnuiYmZmZWat1N1H6IDCxk+0Tc4yZmZlZn9fdROlV\nYOVOtg8GXutuJyRtLelKSU9LWpQv4xW3/yqXF1/XlmJWkfQbSXMkvSjpHEkrlGI2knSLpFclPS7p\nO3X68nlJ03PMPZJ2qBNzrKR/SHpF0h8lrdfdYzYzM7NqvWXGc3cTpcnA/p1sPzDHdNcKwF/y/lUL\nl19HemzKavnVVtr+W2ADYAywEzAaOKu2UdJKwATgUWAE8B3gGElfLcSMyvX8krSA5u+B30vasBBz\nOHAQ8A3gY8A8YIKk5Zo4bjMzM+vFujuZ+0fAnyS9B/gZ8AAgUoLybeAzwCe624mIuB64HqCTu+Ze\nj4jn6m2Q9CFge2BkRNydyw4GrpF0aETMBL4ELAvsGxELgOmSNgW+BZyTqzoEuC4ixuXPR0saS0qM\nDijEHBcRV+V2/pu0IvlngUu6e+xmZmbWe3VrRCkibgd2JyVDk4EXgReA23JZW0Tc1tOdzLaVNEvS\nA5LOlPRvhW2jgBdrSVI2kTQ6tUX+vCVwS06SaiYA6xeeWTeKt8/BmpDLkbQuaTTrxtrGiJgL/LkW\nY2ZmZv1HtxecjIgrJE0AxpIWnAR4ELghIl7pyc4VXAdcTrps9gHgx8C1kkZFRJCSl2dL/Vwo6YW8\njfz+SKneWYVtc/L7rDoxtTqGkZKvzmLMzMysn2j2WW+vSPok8P8i4oUe7lO99oqXtP4m6V7gYWBb\n4P862VVUz3mqbW8kprPtjcaYmZlZH9OtREnSmhHxVP64B3Ai8EJOXHaMiCd7uoP1RMSjkmYD65ES\npZnA0FJfBwKr5G3k92Glqoay+AhRVUxxu3LMrFLM3XSivb2dIUOGLFbW1tZGW1t5TrqZmdnSp6Oj\ng46ODgCmTEll7e1zWtijpLsjSg9Iep40J+ldwFrAE8DapInS7whJawLvAZ7JRZOBlSVtWpinNIaU\n1EwpxPxQ0sCIWJjLxgIzImJOIWYMcGqhue1yeS1Bm5lj/pr7Mpg0D+qMzvo8fvx4RowY0czhmpmZ\n9XvFwYNddknLAxx99DRGjhzZ0n51d3mAIcDngal532slPQgMAraX1NQ8HUkrSNpY0ia5aN38ea28\n7URJW0h6v6QxpNv2HyRNtCYiHsh//qWkzSVtBZwGdOQ73iDd9j8fOE/ShpJ2B74JnFToyinADpK+\nJWl9SccAI4HTCzEnA0dK+rSkjwIXAk8Bf2jm2M3MzKz36m6itGxETImIk0i
LT25KeqzJQtKjSx6W\nNKOJfmxGunQ1lXQp7CRgGvCDXPdGpERkBmmNozuB0RHxRqGOPUjLFUwErgZuIa11BLx5d9r2pNGv\nu4CfAsdExLmFmMmk9Zm+TlrX6b+Az0TE/YWYE0lJ2Fmku93eDewQEfObOG4zMzPrxbp76W2upLtJ\nl96WA5aPiNskLSAtG/AUaRHGbomIm+k8aftUA3X8k7RWUmcx9wLbdBFzOekOu85ijgGO6apPZmZm\n1rd1d0RpDeCHwOukJOsuSZNISdMIICLi1p7topmZmVlrdHfBydkRcVVEfA94BdicdBkqSCt1z5V0\nc89308zMzOyd190RpbI5eY2jN4D/BNYBzlziXpmZmZn1Ak0tOJltBDyd//w48Ea+w+x3S9wrMzMz\nW2pFpOUBeoOmE6Xi4pIR8ZGe6Y6ZmZlZ77Gkl97MzMzM+i0nSmZmZmYVnCiZmZmZVXCiZGZmZlbB\niZKZmZlZBSdKZmZmZhWcKJmZmVmv01vWUXKiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJk\nZmZmVsGJkpmZmVkFJ0pmZmZmFZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYVnCiZmZmZVXCi\nZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlV6BWJkqStJV0p6WlJiyTtUifmWEn/kPSKpD9KWq+0\nfRVJv5E0R9KLks6RtEIpZiNJt0h6VdLjkr5Tp53PS5qeY+6RtEN3+2JmZmbNi2h1D97SKxIlYAXg\nL8CBwNtOj6TDgYOAbwAfA+YBEyQtVwj7LbABMAbYCRgNnFWoYyVgAvAoMAL4DnCMpK8WYkblen4J\nbAL8Hvi9pA272RczMzPrB5ZpdQcAIuJ64HoASaoTcghwXERclWP+G5gFfBa4RNIGwPbAyIi4O8cc\nDFwj6dCImAl8CVgW2DciFgDTJW0KfAs4p9DOdRExLn8+WtJYUmJ0QCN96ZETYmZmZr1CbxlRqiRp\nHWA14MZaWUTMBf4MjMpFWwIv1pKkbCJpdGqLQswtOUmqmQCsL2lI/jwq70cpZlTuy7oN9MXMzMz6\niV6fKJESkyCN2hTNyttqMc8WN0bEQuCFUky9OmggprZ9WAN9MTMzs36iLyRKVUSd+UzdjFGDMUva\njpmZmXVD3Yk4LdAr5ih1YSYpERnG4iM5Q4G7CzFDiztJGgiskrfVYoaV6h7K4iNEVTHF7V31pa72\n9naGDBmyWFlbWxttbW2d7WZmZrZU6OjooKOjA4A774QBA6C9fU6Le9UHEqWIeFTSTNLdbH8FkDSY\nNPfojBw2GVhZ0qaFeUpjSEnNlELMDyUNzJflAMYCMyJiTiFmDHBqoQvb5fJG+1LX+PHjGTFiRHcP\n38zMbKlQHDzYeWdYdlk46qhpjBw5sqX96hWX3iStIGljSZvkonXz57Xy55OBIyV9WtJHgQuBp4A/\nAETEA6RJ17+UtLmkrYDTgI58xxuk2/7nA+dJ2lDS7sA3gZMKXTkF2EHStyStL+kYYCRweiGm076Y\nmZlZ/9FbRpQ2A/6PdBkseCt5uQDYJyJOlLQ8aV2klYFJwA4RMb9Qxx6khGYisAi4jHQrP5DuTpO0\nfY65C5gNHBMR5xZiJktqA36UX38HPhMR9xdiGumLmZmZ9QO9IlGKiJvpYnQrIo4Bjulk+z9JayV1\nVse9wDZdxFwOXL4kfTEzM7P+oVdcejMzMzPrjZwomZmZmVVwomRmZmZWwYmSmZmZWQUnSmZmZmYV\nnCiZmZmZVXCiZGZmZlbBiZKZmZlZBSdKZmZmZhWcKJmZmZlVcKJkZmZmVsGJkpmZmVkFJ0pmZmZm\nFZwomZmZWa8S0eoevMWJkpmZmVkFJ0pmZmZmFZwomZmZWa8jtboHiRMlMzMzswpOlMzMzMwqOFEy\nMzMzq+BEyczMzKyCEyUzMzOzCk6UzMz
MzCo4UTIzMzOr4ETJzMzMrIITJTMzM7MKTpTMzMzMKjhR\nMjMzM6vQJxIlSUdLWlR63V/YPkjSGZJmS3pJ0mWShpbqWEvSNZLmSZop6URJA0ox20qaKuk1SQ9K\n2qtOXw6U9KikVyXdIWnzf92Rm5mZWSv1iUQpuw8YBqyWXx8vbDsZ2AnYFRgNrAFcXtuYE6JrgWWA\nLYG9gL2BYwsxawNXAzcCGwOnAOdI2q4QsztwEnA0sClwDzBB0qo9eJxmZmbWS/SlRGlBRDwXEc/m\n1wsAkgYD+wDtEXFzRNwNfAXYStLH8r7bAx8C9oyIeyNiAnAUcKCkZXLM/sAjEXFYRMyIiDOAy4D2\nQh/agbMi4sKIeADYD3glt29mZmb9TF9KlD4o6WlJD0v6taS1cvlI0kjRjbXAiJgBPAGMykVbAvdG\nxOxCfROAIcCHCzETS21OqNUhadncVrGdyPuMwszMzPqdvpIo3UG6VLY9aRRnHeAWSSuQLsPNj4i5\npX1m5W3k91l1ttNAzGBJg4BVgYEVMathZmZm/c4yXYe0Xr5UVnOfpCnA48AXgNcqdhMQjVTfyTY1\nGNNIO2ZmZtbH9IlEqSwi5kh6EFiPdOlrOUmDS6NKQ3lr9GcmUL47bVhhW+19WClmKDA3IuZLmg0s\nrIgpjzK9TXt7O0OGDFmsrK2tjba2tq52NTMz6/c6Ojro6OgA4K67YMAAaG+f0+Je9dFESdKKwAeA\nC4CpwAJgDHBF3j4ceB9we95lMnCEpFUL85TGAnOA6YWYHUpNjc3lRMQbkqbmdq7M7Sh/PrWrPo8f\nP54RI0Z0+1jNzMyWBsXBg512gkGD4MgjpzFy5MiW9qtPJEqSfgpcRbrc9l7gB6Tk6OKImCvpXGCc\npBeBl0iJy20RcWeu4gb4/+3de7QdZXnH8e8vYBIIDaFACK2BIJGbIGCCBLmFWwLBaFELLARtwSUU\nLBZX1dLaFW4WwZqKQERRW26CyFpFRCgFCUkVbZYhRgohwRDCJSQkBEJuwMk5T/94300mO3tyPefs\ns+f8PmvtleyZd2ae58ycOc9+553ZPA3cJumrwO7AlcANEdGW29wEfEHSNcCPSAXQp4BxhVAmArfk\ngmka6S647YH/6JLEzczMrKlaolAC3gv8GNgZWAz8ChgVEa/l+ZeQLovdA/QD/gu4qLZwRHRI+ijw\nXVIv00pScTOh0OZ5SaeSiqGLgZeA8yLikUKbu/Mzk64gXYL7PTA2IhZ3Qc5mZmbWZC1RKEXEBgfy\nRMTbwN/mV1mbF4GPbmQ9U0iPANhQm0nApA21MTMzsy0XPegWqVZ5PICZmZn1Eh0daTB3T9BDwjAz\nMzNLIkDaeLvu4ELJzMzMepQI9yiZmZmZNdTR4R4lMzMzs4Z86c3MzMyshAslMzMzsxK+683MzMys\nhHuUzMzMzEp4MLeZmZlZCT8ewMzMzKyEL72ZmZmZlfClNzMzM7MSvvRmZmZmVsKX3szMzMxK+NKb\nmZmZWQlfejMzMzMr4R4lMzMzsxIeo2RmZmZWwpfezMzMzEr40puZmZlZCV96MzMzMyvhS29mZmZm\nJdrbXSiZmZmZNbR6NWy3XbOjSFwomZmZWY+yYgUMGNDsKBIXSmZmZtajrFwJO+zQ7CgSF0pmZmbW\nY7S3w1tvuUfJzMzMbD0rV6Z/XSi1OEkXSZonabWk30o6rNkxNdudd97Z7BC6RW/JE3pPrs6zWpxn\na1uwIP27++7NjaPGhdIWkHQG8C1gAnAoMBN4SNIuTQ2syar6S1uvt+QJvSdX51ktzrO1zZuX/t1z\nz+bGUeNCactcAnwvIm6NiGeAC4BVwLnNDcvMzKy13Xtv6k3aY49mR5K4UNpMkt4DjAB+WZsWEQE8\nAhzRrLjMzMxa2dKlcOGF8P3vw8UX95wHTm7b7ABa0C7ANsCiuumLgH3LFpo1Kz2SfXNtyTLNWv6N\nN2D
atOZtv7OXL1t26VJ4/PGu3XZPWf6112Dq1O7ffnfnvmQJTJ7cvO131/KLF8PDD3ft9ntC7q++\nCg8+2Lztd9fyixbB/fc3Z9tly3d0pLvW1qxJr7a2tf9fswbefhtWrUqvFSvSOebVV2H+fJg7F/r1\ng5tugs9/futi6UwulDqPgEaHWX+As8+e1b3RNMUyDj/8iWYH0Q2WceSRvSFPgGUce2xvyHUZxx/f\nO/IcM6Z35DluXO/Ic/z41shzm21SD1HfvtC/f3rqdv/+MGhQeo0aBeeck/7ddVeYMSMtN2vWu387\n+zcrdhdKm28J0A7sVjd9MOv3MgEMS/+c3ZUx9SAjmh1AN+kteULvydV5Vovz7Ena29OrrW3t7f+b\naRiwCX35nc+F0maKiDZJ04ETgPsAJCm//06DRR4CPg08D7zVTWGamZlVQX9SkfRQswJQbO1FyV5I\n0unALcD5wDTSXXCfAvaLiMXNjM3MzMw6j3uUtkBE3J2fmXQF6RLc74GxLpLMzMyqxT1KZmZmZiV6\nyCSLcAIAAAsFSURBVFMKzMzMzHoeF0pdrCd/J5ykoyXdJ+llSR2SPtagzRWSFkhaJelhScPr5u8k\n6Q5JyyS9LukHkgbUtfmgpKn5ZzBf0pcbbOcvJc3KbWZKOqWTcrxU0jRJb0paJOk/Je1T16afpBsl\nLZG0XNI9kgbXtRkq6ReSVkpaKOlaSX3q2oyWNF3SW5LmSPpsg3i67HiQdEH+2S3Lr8clnVy1POu2\nc2k+didWLU9JE3JuxdfTVcszr//PJN2Wc1mVj+MP1bVp9XPRvAb7s0PS9Xl+JfanpD6SrpT0XN5X\nf5T0tQbtWmd/RoRfXfQCziDd6fYZYD/ge8BSYJdmx5bjO5k0zuovSI88+Fjd/K/meMcDBwL3AnOB\nvoU2DwJPACOBjwBzgNsL8/8EeIU0+H1/4HRgJfC5QpsjgDbgS6SHdl4OvA0c0Ak5PgCck7d9EHA/\n6Q7E7QptvpunHUv67r7Hgf8pzO8DPEm66+IgYCzwKnBVoc0wYAVwbc7hopzTSd11PACn5n06PL+u\nyj/H/auUZ2E7hwHPATOAiRXcnxOAPwC7kh4/Mhj40wrmOQiYB/yAdK/7nsCJwF4VOxftXNiPg0l3\nSrcDR1dsf/5jjutkYA/gE8CbwBdadX922knLr4YHzG+B6wrvBbwEfKXZsTWItYP1C6UFwCWF9wOB\n1cDp+f3+eblDC23GAmuAIfn935CePbVtoc3VwNOF93cB99Vt+zfApC7Ic5cc81GFnN4GTiu02Te3\n+XB+f0r+Zdul0OZ84PVaXsA1wB/qtnUn8EAzjwfgNeCvq5YnsAMwGzgemEwulKqUJ6lQeqJkXpXy\n/AYwZSNtqngu+jYwp4L78+fAzXXT7gFubdX96UtvXUQt/p1wkvYChrBu/G8C/8va+EcBr0fEjMKi\nj5CeUH54oc3UiFhTaPMQsK+kHfP7I/Jy1LXpip/ToBzf0vx+BOnuz2Kes4EXWDfPJyNiSV18OwIf\nKLQpzaG7j4fc/X0msD3pxFC1PG8Efh4Rj9ZNH0m18ny/0qXxuZJulzQ0T6/S/hwP/E7S3UqXx5+Q\n9LnazCqei/LP9dPAD/OkKh23jwMnSHp/3ubBwJGk3v2W3J8ulLrOhr4Tbkj3h7PZhpAOyg3FP4TU\nxfquiGgnFSHFNo3WwSa06dSfkySRPsX9KiJqYz2GAO/kX9Sy7W9NDgMl9aObjgdJB0paTvp0Oon0\nCfUZKpRnLgAPAS5tMHs3KpIn6ZP/X5E+SV8A7AVMzeM0KrM/gfeRegdmA2OAm4DvSKp9nUHlzkXA\naaQC55b8vkrH7TeAnwDPSHoHmA58OyLuKsTYUvvTz1HqfmXfCdcqNiX+jbXRJrbp7J/TJOAA4KhN\naLup299YDpvSpjPzfAY4mNRz9kngVknHdML2e0Sekt5LKnZPioi2z
Vl0E7ffI/IEiIjik4j/T9I0\nYD5pLEbZU/5bLk/SB/ZpEfHP+f1MSR8gFU+3b2UMPfVcdC7wYEQs3Ei7VtyfZwBnAWcCT5M+1Fwn\naUFE3LaVMTRlf7pHqets7nfC9TQLSQfUhuJfmN+/S9I2wE55Xq1No3UUP1GUtem0n5OkG4BxwOiI\nWFCYtRDoK2ngBrbfKL7dCvPK2gwG3oyId+im4yEi1kTEcxHxRET8EzAT+CLVyXMEaXDzdEltktpI\ng1+/mD+9LgL6VSDP9UTEMtKA1uFUZ39CGpBb/63hs0gDgWsxVulctAdpsPrNhclV2p/XAldHxE8j\n4qmIuAP4N9b2ALfc/nSh1EXyp93ad8IB63wnXFO+2G9zRMQ80kFWjH8g6fpwLf7fAIMkHVpY9ATS\nL8G0Qptj8kFeMwaYnU/8tTYnsK6T8vStloukjwPHRcQLdbOnkwYIFvPch3SSLuZ5kNLT2Is5LGPt\nCb5RDmNqOTTxeOgD9KM6eT5CuuPnEFLP2cHA70g9D7X/t9H6ea5H0g7A3qSBsFXZnwC/Jg1cLtqX\n1HtWqXNRdi7pD/UDhWlV2p/bs36PTQe53mjJ/dkZo9z9Kh39fzppJH/xNszXgF2bHVuObwDpj8sh\n+UD+u/x+aJ7/lRzveNIfp3uBZ1n3Fs4HSH+cDiMN2JsN3FaYP5B0Yr+FdNnrDNLtq+cV2hwBvMPa\nWzgvI11a6IxbcieR7go5mvTJovbqX9dmHjCa1GPxa9a/LXcm6XbVD5LGjCwCriy0GZbzuibncGHO\n6cTuOh6Ar5MuK+5JuuX2atLJ9/gq5dkg73fveqtSnsA3gWPy/vwI8HCOc+eK5TmSNKbuUlIheBaw\nHDiz0Kblz0V5/SI9AuDrDeZVZX/+O2kQ+rh87J5GGm/0L626Pzv9pOXXegfNhfkXYzWpih3Z7JgK\nsR1LKpDa614/KrS5LB+Mq0h3CwyvW8cg0qf5ZaSC5GZg+7o2BwFT8jpeAP6+QSyfJI2vWU16dszY\nTsqxUX7twGcKbfoB15O6pZcDPwUG161nKOkZTCvyyekaoE+Dn+f0nMOzwDndeTyQnkPzXF73QuC/\nyUVSlfJssK1HWbdQqkSepNu6X8rrfgH4Mes+W6gSeeb1jyP93q8CngLObdDmMlr4XJTXfRLp/DO8\nwbxK7E/SB/CJpKJvZY7hcgq38bfa/vR3vZmZmZmV8BglMzMzsxIulMzMzMxKuFAyMzMzK+FCyczM\nzKyECyUzMzOzEi6UzMzMzEq4UDIzMzMr4ULJzMzMrIQLJTMzM7MSLpTMzDaDpMmSJjY7DjPrHi6U\nzKxlSDpf0puS+hSmDZDUJumXdW2Pk9QhaVh3x2lm1eFCycxayWTSl26OLEw7GngFGCWpb2H6scD8\niHh+czciadutCdLMqsOFkpm1jIiYQyqKRhcmjwbuJX1b+ai66ZMBJA2V9DNJyyUtk/QTSYNrDSVN\nkDRD0nmSngPeytO3l3RrXu5lSV+qj0nShZLmSFotaaGkuzs3azNrJhdKZtZqHgOOK7w/Lk+bUpsu\nqR9wOPBobvMzYBCp9+lEYG/grrr1Dgc+AZwGHJKn/WteZjwwhlR8jagtIGkkcB3wNWAfYCwwdSvz\nM7MexN3LZtZqHgMm5nFKA0hFzVSgL3A+cDlwZH7/mKSTgAOBYRGxAEDSOcBTkkZExPS83vcA50TE\n0txmAHAucFZEPJanfRZ4qRDLUGAF8IuIWAm8CMzsorzNrAnco2RmraY2Tukw4ChgTkQsIfUoHZ7H\nKY0G5kbES8B+wIu1IgkgImYBbwD7F9Y7v1YkZXuTiqdpheVeB2YX2jwMzAfm5Ut0Z0nartMyNbOm\nc6FkZi0lIuYCL5Musx1HKpCIiFdIPTpHUhifBAiIBquqn76ywXxKlq3FsgL4EHAmsIDUmzVT0sBN\nTsjMejQXSmbWiiaTiqTRpEtxN
VOBU4APs7ZQehrYQ9Kf1xpJOgDYMc8r80dgDYUB4pJ2Io1FeldE\ndETEoxHxD8DBwDDg+C3Iycx6II9RMrNWNBm4kXQOm1KYPhW4gXTJ7DGAiHhE0pPAHZIuyfNuBCZH\nxIyyDUTESkk/BL4paSmwGLgKaK+1kXQq8L683deBU0k9UbPXX6OZtSIXSmbWiiYD/YFZEbG4MH0K\nsAPwTEQsLEz/OHB9nt8BPAhcvAnb+TJpPNR9wHLgW0DxstobpDvlJuR4ngXOzGOgzKwCFFF6+d3M\nzMysV/MYJTMzM7MSLpTMzMzMSrhQMjMzMyvhQsnMzMyshAslMzMzsxIulMzMzMxKuFAyMzMzK+FC\nyczMzKyECyUzMzOzEi6UzMzMzEq4UDIzMzMr4ULJzMzMrMT/A3OQOlsNsogaAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0
egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+F
NMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\n
ecD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM
3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/
D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+
F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh
4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6
OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgs
zMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd
1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI
6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -328,7 +328,7 @@ }, { "cell_type": "code", - "execution_count": 257, + "execution_count": 344, "metadata": { "collapsed": true }, @@ -346,7 +346,7 @@ }, { "cell_type": "code", - "execution_count": 258, + "execution_count": 345, "metadata": { "collapsed": true }, @@ -360,7 +360,7 
@@ }, { "cell_type": "code", - "execution_count": 259, + "execution_count": 346, "metadata": { "collapsed": false }, @@ -369,9 +369,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 3467\n", - "Number of unique tokens: 8640\n", - "Number of documents: 1740\n" + "Number of authors: 166\n", + "Number of unique tokens: 681\n", + "Number of documents: 90\n" ] } ], @@ -390,7 +390,7 @@ }, { "cell_type": "code", - "execution_count": 260, + "execution_count": 314, "metadata": { "collapsed": false }, @@ -419,18 +419,19 @@ }, { "cell_type": "code", - "execution_count": 261, + "execution_count": 315, "metadata": { - "collapsed": false + "collapsed": false, + "scrolled": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[(0,), (1, 27), (2,), (3,), (4,), (5,), (7,), (8,), (9,), (10,), (11, 44, 77, 54), (12,), (15,), (16,), (17,), (18,), (20,), (21,), (22,), (23,), (24,), (25,), (26,), (28,), (29,), (30,), (31,), (32,), (33,), (34, 13), (35, 19), (36,), (37,), (38,), (39,), (40,), (41,), (42,), (45,), (46,), (47,), (48,), (49,), (50,), (51,), (52,), (53,), (55,), (56,), (57,), (58,), (59,), (60,), (61,), (62,), (63,), (64,), (65,), (66,), (67,), (68,), (69,), (70,), (71,), (72,), (73,), (74,), (75, 43), (76,), (78,), (79,), (80,), (81,), (82, 14, 6), (83,), (84,), (85,), (86,), (87,), (88,), (89,), (90,), (92,), (94,), (95,), (96,), (97,), (98,), (100,), (102,), (103,), (104,), (105,), (106,), (107, 166), (108,), (109,), (110,), (111,), (112,), (113,), (114,), (115,), (116,), (117,), (118,), (119,), (120, 93), (121, 148), (122,), (123,), (124,), (125,), (126,), (127,), (128, 162, 99, 163, 101, 150), (129,), (130,), (131,), (133,), (134,), (135,), (136,), (137,), (138,), (139, 181), (140,), (141,), (142,), (143,), (144, 132), (145,), (146,), (147,), (149,), (151,), (152,), (153,), (154,), (155,), (156,), (157,), (158,), (159,), (160,), (161,), (164,), (165,), (167,), (168,), (169,), (170, 91), (171,), (172,), (173,), 
(174,), (175,), (176,), (177,), (178,), (179,), (180,), (182,), (183,), (184,), (185, 231), (186, 207), (187,), (189,), (190, 271), (191,), (192, 236, 268), (193,), (194,), (196,), (197,), (200,), (201, 210), (202, 237), (203,), (204,), (205,), (206,), (208,), (209,), (211, 252), (213,), (214,), (215,), (216,), (217, 281), (218, 228), (219,), (220,), (221,), (224,), (225,), (226, 242), (227,), (229,), (230,), (232,), (233,), (234, 198), (235, 212, 276, 270), (238,), (240,), (241,), (243,), (245,), (246,), (247,), (248, 284, 222, 239), (249,), (250,), (251, 188, 244), (253,), (254,), (255,), (256,), (257, 223), (258, 195), (259,), (260,), (261,), (262,), (263,), (264,), (265,), (266,), (267,), (269,), (272,), (273,), (274,), (275, 199), (277,), (278,), (279,), (280,), (282,), (283,), (285,), (286,), (287,), (288,), (289,), (290,), (291,), (292,), (294,), (296,), (297,), (298, 325), (299,), (300,), (301,), (302,), (303,), (304,), (305,), (306, 359), (307,), (310,), (311,), (312,), (313,), (314,), (315,), (316,), (317,), (318,), (319,), (320,), (321,), (322,), (323,), (328,), (329, 383), (330,), (331,), (332,), (333,), (335,), (336, 295), (337,), (338,), (340,), (341,), (345,), (346,), (347,), (348,), (349,), (350, 326), (351,), (352, 358, 327, 393, 367, 342, 406, 344), (353,), (354, 339), (357,), (360, 293), (361,), (362,), (363, 374), (364,), (365,), (366,), (368,), (369,), (370,), (371,), (372,), (373,), (375,), (376,), (378,), (379,), (380,), (381,), (382, 334), (384,), (385,), (386,), (387, 356, 414), (388,), (389, 343), (390,), (391,), (392,), (394,), (395,), (396,), (398,), (399,), (400,), (401,), (402,), (403,), (404,), (405,), (407,), (408, 308), (409,), (410, 355, 324), (411,), (412,), (413,), (415,), (416,), (417, 309), (418,), (419,), (420,), (421,), (422,), (423,), (424,), (425, 377, 397), (426,), (427,), (428,), (429,), (430,), (431, 515, 549, 535), (432, 489, 508, 510), (433,), (434,), (437,), (439,), (440, 435, 556), (441,), (442, 501), (443,), (448,), 
(449,), (450, 459), (451,), (452,), (453,), (454,), (455,), (456,), (457, 469, 566, 471), (458,), (460,), (462,), (463,), (464,), (465,), (466,), (467,), (468, 487), (470,), (472,), (473, 546, 509), (474,), (475,), (476,), (477,), (479,), (480,), (481, 524), (482,), (484,), (486,), (488,), (490,), (491, 461), (492,), (494,), (495,), (496,), (497,), (498, 478), (499,), (500,), (502,), (503,), (504,), (505, 444), (506,), (507,), (511,), (512,), (513,), (514, 541, 551), (516,), (517,), (518,), (520,), (521,), (522,), (523,), (525,), (526,), (527,), (528,), (529,), (530,), (531,), (532, 485, 565), (533,), (534,), (536, 562, 447), (537,), (538,), (539,), (540,), (542,), (543, 438, 446), (544,), (545,), (547,), (548,), (550,), (552,), (553,), (554, 436), (555,), (557,), (558,), (559,), (560,), (561,), (563, 445), (564,), (567,), (568,), (569, 493), (570,), (571, 483), (572, 519), (573,), (574,), (575,), (576, 674, 642, 666, 621), (577,), (578,), (579,), (581,), (582,), (583,), (584,), (585,), (586,), (587,), (588,), (590,), (592, 606), (593,), (594,), (595,), (597,), (599, 661, 639), (600,), (601,), (602,), (603,), (604,), (605,), (608, 641), (609,), (610, 580, 596), (611,), (614,), (615,), (618,), (619,), (620,), (622,), (624,), (625,), (626,), (627,), (628,), (629,), (630,), (632,), (633, 685), (634,), (636,), (637,), (638,), (640,), (643,), (644,), (645, 678), (646,), (647,), (648,), (649,), (650,), (651,), (652, 684), (653, 631), (654,), (656,), (657, 598), (658,), (659,), (660,), (663,), (665, 662), (667,), (668,), (669,), (670,), (671,), (672, 681, 617, 635, 607), (673,), (675, 613), (676,), (677,), (679,), (680,), (682,), (683,), (686,), (687, 655), (688,), (689, 612), (690,), (691,), (692,), (693,), (694,), (695, 591), (696, 664, 616), (697,), (698, 623), (699, 589), (700,), (704,), (705,), (706,), (707, 799), (708,), (709,), (710,), (711,), (713,), (714,), (715,), (717,), (720, 793, 701), (721,), (722,), (723,), (724,), (725,), (728,), (729, 788), (731,), 
(732,), (733,), (735, 822, 791), (736,), (737,), (738, 730, 758), (739,), (740,), (742,), (743,), (744, 786), (745,), (746,), (747, 798, 775), (748,), (749,), (750,), (751,), (752,), (753,), (754,), (755,), (756,), (757, 741), (759,), (760,), (761,), (762,), (763, 726), (764,), (765,), (766,), (768,), (769, 702), (772,), (773,), (774,), (776, 771, 767), (777, 831), (778,), (779,), (780,), (781,), (782,), (783,), (784,), (785,), (787, 805, 727, 734), (789,), (792,), (794,), (795, 716), (797, 790), (800,), (801, 712, 827, 718, 719), (802,), (803, 796), (807,), (808, 804, 703), (809,), (810,), (811,), (812, 806), (813,), (815,), (816,), (817,), (818, 814), (819,), (820,), (821,), (823,), (824, 825, 826), (829,), (830,), (832,), (833,), (834, 828), (835,), (836,), (837,), (838,), (839,), (840, 770), (841,), (842,), (843,), (845,), (846,), (847,), (848,), (850, 851), (853,), (855,), (856,), (857,), (859, 927), (861, 894, 869), (863,), (864,), (865,), (866,), (867,), (871,), (872,), (873, 858, 970, 973), (874, 958), (875,), (876,), (877, 862), (880,), (882,), (884, 967), (885,), (886,), (887,), (888,), (889,), (892,), (893,), (895,), (896,), (897, 849, 860), (898, 868), (900,), (902,), (904, 971), (905,), (906,), (907, 911), (908,), (909,), (910,), (912,), (913,), (915,), (916,), (917,), (918,), (919,), (920,), (921, 891), (922, 943), (923,), (924,), (925,), (926,), (929,), (930,), (931, 901, 903, 878, 942, 881, 946, 852, 980, 854, 890), (932,), (934,), (935,), (936,), (937,), (938,), (939,), (940,), (941,), (944,), (945, 914), (947, 870), (948,), (949,), (950,), (951,), (952,), (953,), (954,), (955,), (956,), (957,), (959,), (960, 928, 899, 883, 983, 974, 975), (961,), (962,), (963,), (964,), (965,), (966,), (968, 844, 933), (969,), (972,), (976, 977), (978,), (979,), (981, 879), (982,), (985, 1068), (986,), (987, 1111), (988,), (991,), (992, 1122), (993,), (994, 1007), (995,), (996,), (997, 1078), (999,), (1000,), (1001,), (1002,), (1003,), (1004,), (1006,), (1008, 
1053), (1009,), (1010,), (1011, 1037), (1012,), (1014,), (1015,), (1016,), (1017,), (1018,), (1019,), (1022,), (1023,), (1024,), (1025,), (1027,), (1028,), (1029,), (1030, 1102, 1103, 1074, 1079, 984, 1020, 1055), (1031,), (1032, 1107, 1115), (1033,), (1034,), (1035,), (1036,), (1039,), (1040, 1089, 998, 1130), (1041,), (1042,), (1043,), (1044,), (1045,), (1046,), (1048,), (1049,), (1050, 1047), (1051,), (1054,), (1056,), (1057, 1021), (1058, 1099), (1059,), (1060,), (1061,), (1062,), (1064,), (1065, 1052), (1066, 1077, 1085), (1067,), (1069,), (1071,), (1072, 1026, 1063, 1005, 1070), (1073, 1125), (1075,), (1076,), (1080,), (1081, 1114), (1082,), (1083,), (1084,), (1086,), (1088,), (1090, 990), (1091,), (1092,), (1094,), (1095,), (1096,), (1097,), (1098, 1013, 1119), (1100,), (1101,), (1104,), (1105,), (1106,), (1108,), (1109,), (1110,), (1112,), (1113,), (1116, 1093, 989, 1038), (1117, 1087), (1118,), (1120,), (1121,), (1123,), (1124,), (1126,), (1127,), (1128,), (1129,), (1131,), (1132,), (1133,), (1134,), (1135,), (1136, 1186), (1137,), (1138, 1181), (1139,), (1140,), (1141,), (1142,), (1145,), (1146,), (1147,), (1148,), (1150,), (1151,), (1152,), (1155,), (1156,), (1157,), (1158,), (1160,), (1161,), (1162,), (1163,), (1166,), (1167,), (1169, 1211, 1279), (1170, 1172, 1189, 1159), (1171,), (1173,), (1175,), (1176,), (1177,), (1178,), (1179,), (1180,), (1182,), (1183, 1215), (1184, 1257), (1185,), (1187, 1285), (1188,), (1192,), (1193, 1164, 1230), (1194,), (1195,), (1196, 1254), (1197,), (1198,), (1200,), (1201,), (1202,), (1203, 1174, 1262), (1204,), (1205,), (1208,), (1209,), (1210, 1244, 1206), (1213,), (1216,), (1217,), (1219, 1259, 1227, 1144, 1212, 1214), (1220,), (1221,), (1222,), (1223,), (1225,), (1226,), (1228,), (1229,), (1231,), (1232,), (1233,), (1234,), (1235,), (1236, 1190, 1207), (1238,), (1239,), (1240,), (1241,), (1243,), (1245,), (1246,), (1247,), (1248,), (1250,), (1251, 1199), (1253, 1278), (1256,), (1258,), (1260,), (1261,), (1263,), 
(1264,), (1265,), (1266,), (1267,), (1268, 1143), (1269,), (1270,), (1271,), (1272, 1149), (1273,), (1274,), (1275,), (1277,), (1280, 1153, 1218, 1282, 1284, 1154, 1224, 1165, 1168, 1237, 1242, 1249, 1252, 1255, 1191, 1276), (1281,), (1283,), (1286,), (1287,), (1288,), (1290,), (1291,), (1292,), (1293,), (1294,), (1296,), (1297,), (1300,), (1301,), (1302,), (1303,), (1304, 1410, 1346), (1305, 1406), (1306,), (1307,), (1308,), (1309, 1327), (1311,), (1312,), (1313,), (1314, 1379, 1357, 1298, 1332, 1430, 1432, 1373, 1343), (1316,), (1318,), (1319,), (1320,), (1321,), (1322,), (1323,), (1324,), (1326,), (1328, 1333), (1329, 1364), (1330, 1334), (1331,), (1335,), (1336,), (1337,), (1338,), (1339,), (1341,), (1344,), (1345, 1402), (1347, 1355, 1366), (1348, 1349, 1317, 1325, 1425, 1363, 1428, 1374), (1350,), (1351, 1375), (1352, 1398), (1353,), (1354, 1365), (1356,), (1358,), (1359, 1362, 1407), (1360, 1340), (1361,), (1367,), (1368,), (1369, 1315), (1370,), (1371, 1422), (1372,), (1376,), (1377,), (1378, 1295), (1380,), (1381,), (1382,), (1383,), (1384,), (1385,), (1386, 1390), (1387, 1391), (1388,), (1389, 1405), (1392,), (1393, 1289), (1395,), (1396,), (1397,), (1399,), (1400, 1299), (1401,), (1403,), (1404,), (1408,), (1409, 1438), (1411,), (1412,), (1413,), (1414,), (1415,), (1416,), (1417,), (1418,), (1419,), (1420,), (1421,), (1423,), (1424, 1394), (1426,), (1427, 1310, 1342), (1429,), (1431,), (1433,), (1434,), (1435,), (1436,), (1437,), (1440, 1462, 1464, 1452, 1581, 1518, 1455), (1441, 1476), (1443,), (1444,), (1445,), (1446,), (1448,), (1450,), (1456,), (1457,), (1458, 1451, 1439), (1459, 1540, 1571), (1461,), (1465,), (1466,), (1467, 1516), (1468,), (1469,), (1470,), (1471,), (1472,), (1473,), (1474, 1579, 1565), (1475, 1580), (1477, 1463, 1449, 1482, 1548, 1454, 1519), (1478,), (1480, 1490), (1481,), (1483,), (1484,), (1485,), (1486,), (1487,), (1488, 1542, 1526), (1489,), (1491, 1532, 1589), (1493,), (1496,), (1497,), (1498,), (1500,), (1501,), (1502,), 
(1504, 1479), (1505,), (1506,), (1507, 1460, 1453), (1509,), (1510,), (1512,), (1513,), (1514,), (1517,), (1520,), (1521,), (1523,), (1525,), (1528,), (1529,), (1530,), (1531,), (1533,), (1536, 1572, 1508, 1559, 1545, 1535), (1537, 1442, 1515, 1524), (1538, 1499, 1527), (1539,), (1541,), (1543,), (1544,), (1546,), (1547,), (1549,), (1550,), (1551,), (1552, 1568, 1570, 1492, 1574, 1494), (1553,), (1554,), (1555,), (1557,), (1558,), (1560, 1503), (1561, 1495), (1562,), (1563,), (1564,), (1566, 1534), (1567,), (1569,), (1573,), (1575,), (1576,), (1577, 1522, 1511), (1578,), (1582, 1447), (1583,), (1584, 1556), (1585,), (1586,), (1587,), (1588,), (1590, 1695), (1591, 1598, 1711), (1592,), (1593,), (1594, 1653), (1595,), (1596,), (1597,), (1599,), (1600, 1721), (1601,), (1602, 1702), (1603, 1691, 1710), (1605,), (1606,), (1607,), (1608,), (1610,), (1611,), (1612,), (1613,), (1616,), (1617, 1679), (1621,), (1622,), (1623,), (1624, 1642, 1635, 1650), (1625,), (1627,), (1628,), (1629,), (1630,), (1631,), (1632,), (1633,), (1634, 1643, 1618, 1683, 1714, 1654, 1688, 1722, 1725, 1663), (1637,), (1639,), (1640, 1708), (1641,), (1644,), (1646,), (1647,), (1648,), (1649,), (1651,), (1655,), (1656, 1614), (1657,), (1658,), (1659,), (1660,), (1661, 1669), (1664, 1718), (1665, 1693), (1666,), (1667,), (1668, 1604, 1685), (1670,), (1672,), (1673, 1732, 1615), (1674, 1715), (1675,), (1676, 1694, 1678), (1677,), (1680, 1645), (1681,), (1682,), (1684,), (1686,), (1687,), (1689, 1739, 1735), (1690,), (1692,), (1696,), (1697,), (1698,), (1699,), (1701,), (1703,), (1704, 1652), (1705,), (1707,), (1709,), (1712, 1734), (1713, 1619, 1700, 1733, 1620, 1609, 1706), (1716, 1636), (1719, 1717, 1662, 1638), (1720,), (1723,), (1724,), (1726,), (1727,), (1728,), (1729,), (1730, 1626), (1731,), (1736, 1671), (1737,), (1738,)]\n", - "1312\n", - "278.3766186237335\n" + "[(0,), (1, 27), (2,), (3,), (4,), (5,), (8,), (9,), (10,), (11, 44, 77, 54), (12,), (14,), (15,), (16,), (17,), (18,), (20,), (21,), 
(22,), (23,), (24,), (25,), (26,), (28,), (29,), (30,), (32,), (33,), (34, 13), (35, 19), (37,), (38,), (39,), (40,), (41,), (42,), (45,), (46,), (48,), (49,), (50,), (51,), (52,), (53,), (55,), (56,), (57,), (60,), (61,), (62,), (63,), (64,), (65,), (67,), (68,), (69,), (70,), (71,), (72,), (73,), (74,), (75, 43), (76,), (78,), (79,), (81,), (82, 6), (83,), (84,), (86,), (87,), (89,), (90,), (92,), (94,), (95,), (96,), (97,), (98,), (100,), (101,), (102,), (103,), (104,), (106,), (107, 166), (108,), (109,), (110,), (111,), (112,), (113,), (114,), (115,), (116,), (117,), (118,), (119,), (120, 93), (121, 148), (122,), (123,), (124,), (125,), (126,), (127,), (128,), (131,), (133,), (134,), (135,), (136,), (137,), (139,), (140,), (141,), (143,), (145,), (146,), (149,), (151,), (152,), (153,), (154,), (156,), (157,), (160,), (161,), (162,), (163, 99, 150), (164,), (165,), (167,), (168,), (169,), (170, 91), (171,), (172,), (174,), (176,), (177,), (178,), (179,), (181,), (182,), (183,), (184,), (185, 231), (186, 207), (187,), (189,), (190,), (191,), (192, 268, 236), (193,), (194,), (196,), (197,), (200,), (201, 210), (202, 237), (203,), (204,), (205,), (206,), (208,), (209,), (211, 252), (213,), (214,), (215,), (216,), (217, 281), (218, 228), (219,), (220,), (221,), (224,), (225,), (226, 242), (227,), (229,), (230,), (232,), (233,), (234, 198), (235, 276, 212, 270), (238,), (240,), (241,), (243,), (245,), (246,), (247,), (248, 284, 222, 239), (249,), (250,), (251, 188, 244), (253,), (254,), (255,), (256,), (257, 223), (258, 195), (259,), (260,), (261,), (262,), (263,), (264,), (265,), (266,), (267,), (269,), (271,), (272,), (273,), (274,), (275, 199), (277,), (278,), (279,), (280,), (282,), (283,), (285,), (286,), (287,), (288,), (289,), (290,), (291,), (292,), (294,), (296,), (297,), (298, 325), (299,), (300,), (301,), (302,), (303,), (304,), (305,), (306, 359), (307,), (310,), (311,), (312,), (313,), (314,), (315,), (316,), (317,), (318,), (319,), (320,), (321,), 
(322,), (323,), (328,), (329, 383), (330,), (331,), (332,), (333,), (335,), (336, 295), (337,), (338,), (340,), (341,), (345,), (346,), (347,), (348,), (349,), (350, 326), (351,), (352, 358, 327, 393, 367, 342, 406, 344), (353,), (354, 339), (357,), (360, 293), (361,), (362,), (363, 374), (364,), (365,), (366,), (368,), (369,), (370,), (371,), (372,), (373,), (375,), (376,), (378,), (379,), (380,), (381,), (382, 334), (384,), (385,), (386,), (387, 356, 414), (388,), (389, 343), (390,), (391,), (392,), (394,), (395,), (396,), (398,), (399,), (400,), (401,), (402,), (403,), (404,), (405,), (407,), (408, 308), (409,), (410, 355, 324), (411,), (412,), (413,), (415,), (416,), (417, 309), (418,), (419,), (420,), (421,), (422,), (423,), (424,), (425, 377, 397), (426,), (427,), (428,), (430,), (431,), (433,), (434,), (435,), (436,), (440, 556), (441,), (442,), (446,), (448,), (449,), (450, 459), (452,), (453,), (454,), (455,), (457,), (458,), (460,), (461,), (462,), (463,), (465,), (466,), (467,), (468, 487), (471,), (472,), (473,), (474,), (475,), (477,), (480,), (484,), (485,), (486,), (489,), (490,), (491,), (492,), (494,), (495,), (497,), (498, 478), (499,), (500,), (501,), (502,), (503,), (505, 444), (506,), (507,), (508,), (510,), (511,), (512,), (513,), (515,), (516,), (518,), (519,), (520,), (521,), (522,), (523,), (524,), (526,), (527,), (528,), (529,), (531,), (532,), (534,), (538,), (539,), (541,), (544,), (546,), (547,), (548,), (549,), (550,), (553,), (555,), (557,), (558,), (560,), (561,), (562, 447), (563,), (567,), (569, 493), (570,), (572,), (574,), (576,), (577,), (578,), (579,), (580,), (581,), (582,), (583,), (584,), (585,), (586,), (588,), (589,), (590,), (593,), (594,), (595,), (597,), (598,), (600,), (601,), (604,), (605,), (606,), (607,), (608, 641), (609,), (610, 596), (614,), (616,), (617,), (618,), (619,), (620,), (622,), (623,), (624,), (626,), (627,), (628,), (629,), (632,), (636,), (637,), (638,), (639,), (640,), (643,), (644,), (645, 678), 
(646,), (649,), (652, 684), (653, 631), (656,), (657,), (658,), (659,), (660,), (661,), (662,), (663,), (664, 696), (666, 642), (667,), (669,), (671,), (673,), (674,), (675, 613), (679,), (680,), (682,), (683,), (687, 655), (688,), (689, 612), (691,), (693,), (694,), (695, 591), (698,), (700,), (702,), (704,), (705,), (708,), (709,), (710,), (711,), (712,), (713,), (714,), (715,), (717,), (718,), (720, 793, 701), (721,), (724,), (728,), (729,), (730,), (731,), (732,), (736,), (737,), (738,), (739,), (740,), (742,), (743,), (744,), (746,), (747,), (748,), (749,), (750,), (751,), (753,), (754,), (756,), (758,), (760,), (761,), (762,), (764,), (765,), (766,), (767,), (768,), (773,), (775,), (776,), (777,), (779,), (780,), (781,), (782,), (783,), (785,), (787, 734, 727), (788,), (789,), (792,), (794,), (795, 716), (799,), (801,), (805,), (807,), (808,), (809,), (810,), (811,), (812, 806), (813,), (815,), (816,), (817,), (818, 814), (819,), (820,), (821,), (822, 735), (823,), (825, 826), (829,), (830,), (833,), (834, 828), (835,), (836,), (837,), (838,), (839,), (841,), (842,), (843,), (846,), (847,), (848,), (850, 851), (853,), (855,), (856,), (857,), (859, 927), (862,), (863,), (864,), (865,), (866,), (867,), (868,), (871,), (872,), (873, 858), (874,), (875,), (876,), (879,), (880,), (881, 946), (882,), (883,), (884, 967), (885,), (886,), (887,), (888,), (889,), (891,), (892,), (893,), (894,), (895,), (896,), (897, 860), (900,), (904,), (905,), (906,), (907, 911), (908,), (909,), (910,), (912,), (913,), (915,), (917,), (918,), (919,), (920,), (921,), (923,), (924,), (925,), (926,), (929,), (930,), (931,), (932,), (934,), (935,), (936,), (937,), (938,), (939,), (940,), (941,), (942,), (943,), (944,), (945,), (947, 870), (948,), (949,), (950,), (952,), (953,), (954,), (956,), (957,), (958,), (959,), (960,), (961,), (962,), (963,), (964,), (965,), (966,), (968, 844, 933), (969,), (973,), (974,), (976, 977), (978,), (979,), (980, 854), (981,), (982,), (983,), (985, 1068), 
(986,), (987, 1111), (988,), (991,), (992, 1122), (993,), (994, 1007), (995,), (996,), (997, 1078), (999,), (1000,), (1001,), (1002,), (1003,), (1004,), (1006,), (1008, 1053), (1009,), (1010,), (1011,), (1012,), (1014,), (1015,), (1016,), (1017,), (1018,), (1019,), (1022,), (1023,), (1024,), (1025,), (1027,), (1028,), (1029,), (1030, 1102, 1103, 1074, 1079, 984, 1020, 1055), (1031,), (1032, 1107, 1115), (1033,), (1034,), (1035,), (1036,), (1037,), (1039,), (1040, 1089, 1130, 998), (1041,), (1042,), (1043,), (1044,), (1045,), (1046,), (1048,), (1049,), (1050, 1047), (1051,), (1054,), (1056,), (1057, 1021), (1058, 1099), (1059,), (1060,), (1061,), (1062,), (1064,), (1065, 1052), (1066, 1077, 1085), (1067,), (1069,), (1071,), (1072, 1026, 1063, 1005, 1070), (1073, 1125), (1075,), (1076,), (1080,), (1081, 1114), (1082,), (1083,), (1084,), (1086,), (1088,), (1090, 990), (1091,), (1092,), (1094,), (1095,), (1096,), (1097,), (1098, 1013, 1119), (1100,), (1101,), (1104,), (1105,), (1106,), (1108,), (1109,), (1110,), (1112,), (1113,), (1116, 1093, 989, 1038), (1117, 1087), (1118,), (1120,), (1121,), (1123,), (1124,), (1126,), (1127,), (1128,), (1129,), (1131,), (1132,), (1133,), (1134,), (1135,), (1136,), (1137,), (1139,), (1140,), (1141,), (1142,), (1145,), (1147,), (1148,), (1152,), (1153,), (1154, 1191), (1155,), (1156,), (1157,), (1158,), (1160,), (1162,), (1163,), (1164,), (1167,), (1168,), (1169,), (1170, 1159), (1172,), (1174,), (1175,), (1176,), (1178,), (1179,), (1180,), (1182,), (1184,), (1185,), (1187,), (1188,), (1190,), (1192,), (1193, 1230), (1195,), (1196,), (1200,), (1201,), (1202,), (1203,), (1204,), (1206,), (1208,), (1210,), (1211, 1279), (1212,), (1214,), (1216,), (1217,), (1220,), (1221,), (1222,), (1223,), (1225,), (1226,), (1228,), (1231,), (1232,), (1233,), (1234,), (1235,), (1236,), (1237,), (1238,), (1240,), (1241,), (1243,), (1244,), (1246,), (1248,), (1249, 1255), (1250,), (1251,), (1252,), (1253, 1278), (1254,), (1257,), (1258,), (1261,), 
(1262,), (1263,), (1264,), (1266,), (1267,), (1268, 1143), (1269,), (1270,), (1271,), (1272, 1149), (1273,), (1274,), (1275,), (1276,), (1277,), (1280, 1218), (1283,), (1285,), (1286,), (1287,), (1288,), (1290,), (1291,), (1292,), (1293,), (1294,), (1296,), (1297,), (1298,), (1299,), (1300,), (1301,), (1302,), (1303,), (1305,), (1307,), (1308,), (1311,), (1312,), (1313,), (1314,), (1316,), (1317,), (1318,), (1319,), (1321,), (1323,), (1324,), (1326,), (1327,), (1329, 1364), (1333,), (1336,), (1337,), (1339,), (1340,), (1341,), (1344,), (1347,), (1350,), (1351,), (1352, 1398), (1353,), (1354, 1365), (1355,), (1356,), (1357,), (1358,), (1359, 1362, 1407), (1361,), (1363, 1374), (1366,), (1367,), (1368,), (1369,), (1370,), (1371,), (1376,), (1377,), (1378,), (1379,), (1381,), (1382,), (1383,), (1384,), (1385,), (1386,), (1387, 1391), (1388,), (1389,), (1390,), (1392,), (1393, 1289), (1394,), (1395,), (1396,), (1397,), (1399,), (1401,), (1403,), (1406,), (1409, 1438), (1410,), (1411,), (1412,), (1413,), (1414,), (1415,), (1416,), (1417,), (1418,), (1419,), (1420,), (1421,), (1422,), (1423,), (1424,), (1426,), (1427, 1310), (1428, 1349), (1429,), (1431,), (1434,), (1435,), (1436,), (1437,), (1440,), (1441,), (1443,), (1444,), (1446,), (1448,), (1449, 1477, 1463), (1450,), (1451, 1439), (1452,), (1453,), (1454, 1519), (1455,), (1456,), (1457,), (1458,), (1461,), (1462,), (1465,), (1466,), (1468,), (1469,), (1470,), (1471,), (1475, 1580), (1476,), (1478,), (1483,), (1484,), (1485,), (1486,), (1487,), (1488,), (1489,), (1490,), (1491, 1532, 1589), (1493,), (1494,), (1496,), (1497,), (1498,), (1500,), (1501,), (1502,), (1503,), (1504, 1479), (1505,), (1506,), (1507, 1460), (1509,), (1510,), (1513,), (1514,), (1515,), (1516,), (1517,), (1520,), (1521,), (1523,), (1524,), (1525,), (1527,), (1528,), (1529,), (1530,), (1531,), (1533,), (1535,), (1536, 1508), (1537,), (1538, 1499), (1543,), (1544,), (1546,), (1547,), (1548,), (1549,), (1550,), (1551,), (1553,), (1554,), (1555,), 
(1557,), (1558,), (1559,), (1560,), (1561, 1495), (1562,), (1563,), (1564,), (1566, 1534), (1567,), (1569,), (1570,), (1571, 1459, 1540), (1572,), (1573,), (1574,), (1575,), (1576,), (1577, 1522, 1511), (1578,), (1581, 1518), (1582, 1447), (1583,), (1584, 1556), (1585,), (1586,), (1587,), (1588,), (1590, 1695), (1591, 1598, 1711), (1592,), (1593,), (1594, 1653), (1595,), (1596,), (1597,), (1599,), (1600, 1721), (1601,), (1602, 1702), (1603, 1691, 1710), (1605,), (1606,), (1607,), (1608,), (1610,), (1611,), (1612,), (1613,), (1616,), (1617, 1679), (1621,), (1622,), (1623,), (1624, 1642, 1650, 1635), (1625,), (1627,), (1628,), (1629,), (1630,), (1631,), (1632,), (1633,), (1634, 1643, 1714, 1618, 1683, 1654, 1688, 1722, 1725, 1663), (1637,), (1638, 1717, 1662, 1719), (1639,), (1640, 1708), (1641,), (1644,), (1646,), (1647,), (1648,), (1649,), (1651,), (1655,), (1656, 1614), (1657,), (1658,), (1659,), (1660,), (1664, 1718), (1665, 1693), (1666,), (1667,), (1668, 1604, 1685), (1669, 1661), (1670,), (1672,), (1673, 1732, 1615), (1674, 1715), (1675,), (1676, 1694, 1678), (1677,), (1680, 1645), (1681,), (1682,), (1684,), (1686,), (1687,), (1689, 1739, 1735), (1690,), (1692,), (1696,), (1697,), (1698,), (1699,), (1701,), (1703,), (1704, 1652), (1705,), (1707,), (1709,), (1712, 1734), (1713, 1619, 1700, 1733, 1620, 1609, 1706), (1716, 1636), (1720,), (1723,), (1724,), (1726,), (1727,), (1728,), (1729,), (1730, 1626), (1731,), (1736, 1671), (1737,), (1738,)]\n", + "1258\n", + "128.62311506271362\n" ] } ], @@ -476,7 +477,7 @@ }, { "cell_type": "code", - "execution_count": 98, + "execution_count": 351, "metadata": { "collapsed": true }, @@ -488,7 +489,7 @@ }, { "cell_type": "code", - "execution_count": 138, + "execution_count": 352, "metadata": { "collapsed": false }, @@ -497,51 +498,52 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 2min 35s, sys: 0 ns, total: 2min 35s\n", - "Wall time: 2min 35s\n" + "CPU times: user 58.1 s, sys: 4 ms, total: 58.1 
s\n", + "Wall time: 58.1 s\n" ] } ], "source": [ "%time model_online = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1000, random_state=1, var_lambda=None)" + " iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None)" ] }, { "cell_type": "code", - "execution_count": 139, + "execution_count": 347, "metadata": { - "collapsed": false + "collapsed": false, + "scrolled": false }, "outputs": [ { "data": { "text/plain": [ "[(0,\n", - " '0.041*training + 0.040*hidden + 0.037*representation + 0.024*role + 0.023*gradient + 0.021*hidden_unit + 0.021*procedure + 0.020*back_propagation + 0.020*node + 0.018*connectionist'),\n", + " '0.027*node + 0.026*classifier + 0.025*speech + 0.024*hidden + 0.016*training + 0.015*hidden_layer + 0.012*hidden_unit + 0.011*propagation + 0.011*back_propagation + 0.010*table'),\n", " (1,\n", - " '0.039*differential + 0.035*search + 0.031*strategy + 0.023*control + 0.020*he + 0.018*goal + 0.017*target + 0.015*start + 0.014*question + 0.014*influence'),\n", + " '0.020*capacity + 0.017*theorem + 0.013*associative + 0.012*optimization + 0.011*probability + 0.010*orientation + 0.010*bound + 0.010*proof + 0.010*bit + 0.010*address'),\n", " (2,\n", - " '0.062*code + 0.056*node + 0.051*activation + 0.044*adaptive + 0.029*sequence + 0.022*update + 0.022*learned + 0.014*past + 0.013*summation + 0.013*machine'),\n", + " '0.041*chip + 0.038*pulse + 0.031*analog + 0.031*voltage + 0.028*delay + 0.020*activation + 0.018*vlsi + 0.018*annealing + 0.018*temperature + 0.017*digital'),\n", " (3,\n", - " '0.070*cell + 0.043*stimulus + 0.030*synapsis + 0.030*current + 0.029*firing + 0.028*activity + 0.023*synaptic + 0.022*spatial + 0.019*classification + 0.015*channel'),\n", + " 
'0.016*energy + 0.014*recognition + 0.012*field + 0.010*frequency + 0.009*animal + 0.008*surface + 0.008*transition + 0.008*class + 0.007*test + 0.007*classification'),\n", " (4,\n", - " '0.047*processor + 0.037*dynamic + 0.035*interconnection + 0.032*group + 0.029*iv + 0.023*temporal + 0.022*delay + 0.019*learning_rule + 0.018*vol + 0.017*sigmoid'),\n", + " '0.013*sensory + 0.013*dynamic + 0.013*motor + 0.012*phase + 0.011*movement + 0.011*device + 0.011*receptor + 0.011*control + 0.010*prediction + 0.009*resolution'),\n", " (5,\n", - " '0.058*image + 0.025*convergence + 0.025*energy + 0.024*matrix + 0.018*hopfield + 0.018*minimum + 0.015*recall + 0.015*recognition + 0.015*associative_memory + 0.013*field'),\n", + " '0.047*image + 0.028*fixed_point + 0.026*eye + 0.023*winner + 0.022*attractor + 0.021*gain + 0.020*equilibrium + 0.019*optical + 0.016*light + 0.016*oscillation'),\n", " (6,\n", - " '0.050*capacity + 0.042*bit + 0.025*stored + 0.024*analog + 0.023*bound + 0.016*definition + 0.015*off + 0.014*binary + 0.014*word + 0.013*correct'),\n", + " '0.017*stability + 0.015*curve + 0.014*rule + 0.012*nonlinear + 0.012*mode + 0.011*speed + 0.011*gradient + 0.011*plane + 0.010*attention + 0.009*tank'),\n", " (7,\n", - " '0.058*cell + 0.040*probability + 0.035*firing + 0.025*cycle + 0.019*phase + 0.019*active + 0.018*specific + 0.017*shape + 0.017*region + 0.015*action'),\n", + " '0.038*cell + 0.031*velocity + 0.024*stimulus + 0.023*membrane + 0.023*synapsis + 0.022*synaptic + 0.022*cortical + 0.022*stimulation + 0.021*spike + 0.021*cortex'),\n", " (8,\n", - " '0.057*visual + 0.048*constraint + 0.037*map + 0.034*noise + 0.027*optimization + 0.025*gain + 0.022*mapping + 0.020*field + 0.020*device + 0.019*stage'),\n", + " '0.029*joint + 0.024*motion + 0.017*visual + 0.016*role + 0.014*receptive_field + 0.014*receptive + 0.014*position + 0.011*array + 0.010*angle + 0.009*noise'),\n", " (9,\n", - " '0.074*loop + 0.044*path + 0.044*product + 0.037*circuit + 0.036*edge + 
0.025*magnitude + 0.025*eq + 0.021*direction + 0.020*interaction + 0.017*higher')]" + " '0.031*circuit + 0.027*firing + 0.019*match + 0.015*trajectory + 0.015*gaussian + 0.014*peak + 0.014*link + 0.013*path + 0.013*direction + 0.012*fire')]" ] }, - "execution_count": 139, + "execution_count": 347, "metadata": {}, "output_type": "execute_result" } @@ -879,7 +881,7 @@ }, { "cell_type": "code", - "execution_count": 140, + "execution_count": 348, "metadata": { "collapsed": false }, @@ -891,7 +893,7 @@ }, { "cell_type": "code", - "execution_count": 144, + "execution_count": 349, "metadata": { "collapsed": false }, @@ -900,16 +902,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1min 19s, sys: 28 ms, total: 1min 19s\n", - "Wall time: 1min 20s\n" + "CPU times: user 25.7 s, sys: 20 ms, total: 25.7 s\n", + "Wall time: 25.7 s\n" ] } ], "source": [ "%time model_offline = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", - " eval_every=10, random_state=1)" + " iterations=3, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", + " eval_every=1, random_state=1)" ] }, { From 693b70b687046d2f8ea85722d6843ef8a00c9662 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 8 Nov 2016 15:41:43 +0100 Subject: [PATCH 037/100] Fixed mistake in interpolating gamma. Moved lambda update outside of 'iterations' loop. Updated notebook. 
--- docs/notebooks/at_with_nips.ipynb | 174 +++++++++++++++++++++--------- gensim/models/atvb.py | 7 ++ gensim/models/onlineatvb.py | 58 ++++------ 3 files changed, 155 insertions(+), 84 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 783f53d98a..325fcc987a 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 262, + "execution_count": 1, "metadata": { "collapsed": false }, @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 231, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -75,7 +75,19 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 5, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from gensim.models import onlineatvb2\n", + "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" + ] + }, + { + "cell_type": "code", + "execution_count": 6, "metadata": { "collapsed": false }, @@ -83,8 +95,8 @@ "source": [ "# Configure logging.\n", "\n", - "#log_dir = '../../../log_files/log.log' # On my own machine.\n", - "log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "log_dir = '../../../log_files/log.log' # On my own machine.\n", + "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", "\n", "logger = logging.getLogger()\n", "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", @@ -103,7 +115,7 @@ }, { "cell_type": "code", - "execution_count": 334, + "execution_count": 7, "metadata": { "collapsed": false }, @@ -113,8 +125,8 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "#data_dir = '../../../../data/nipstxt/' # On my own machine.\n", - "data_dir = '../../../nipstxt/' # On Hetzner.\n", + "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "#data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', 
'07', '08', '09', '10', '11', '12']\n", @@ -140,7 +152,7 @@ }, { "cell_type": "code", - "execution_count": 335, + "execution_count": 8, "metadata": { "collapsed": false }, @@ -168,19 +180,19 @@ }, { "cell_type": "code", - "execution_count": 336, + "execution_count": 9, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ "# Make a mapping from author ID to author name.\n", - "id2author = dict(zip(authors_names.values(), authors_names.keys()))" + "id2author = dict(zip(author2id.values(), author2id.keys()))" ] }, { "cell_type": "code", - "execution_count": 337, + "execution_count": 10, "metadata": { "collapsed": false }, @@ -198,7 +210,7 @@ }, { "cell_type": "code", - "execution_count": 338, + "execution_count": 11, "metadata": { "collapsed": false }, @@ -224,7 +236,7 @@ }, { "cell_type": "code", - "execution_count": 339, + "execution_count": 12, "metadata": { "collapsed": false }, @@ -247,7 +259,7 @@ }, { "cell_type": "code", - "execution_count": 340, + "execution_count": 13, "metadata": { "collapsed": true }, @@ -262,7 +274,7 @@ }, { "cell_type": "code", - "execution_count": 341, + "execution_count": 14, "metadata": { "collapsed": true }, @@ -281,7 +293,7 @@ }, { "cell_type": "code", - "execution_count": 342, + "execution_count": 15, "metadata": { "collapsed": true }, @@ -293,7 +305,7 @@ }, { "cell_type": "code", - "execution_count": 343, + "execution_count": 16, "metadata": { "collapsed": false }, @@ -302,7 +314,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+
eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g
/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWmi0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sC
Ddbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmkPqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaS
RD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQ
L9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzOz\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELSh
qTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVI
OgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArtHxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs
5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMy
sST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWSTi+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp
5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij93IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVl
LchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -328,7 +340,7 @@ }, { "cell_type": "code", - "execution_count": 344, + "execution_count": 17, "metadata": { "collapsed": true }, @@ -346,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 345, + "execution_count": 18, "metadata": { "collapsed": true }, @@ -360,7 +372,7 @@ }, { "cell_type": "code", - "execution_count": 346, + "execution_count": 19, "metadata": { "collapsed": false }, @@ -390,7 +402,7 @@ }, { "cell_type": "code", - "execution_count": 314, + "execution_count": 20, "metadata": { "collapsed": false }, @@ -419,7 +431,7 @@ }, { "cell_type": "code", - "execution_count": 315, + "execution_count": 21, "metadata": { "collapsed": false, "scrolled": false @@ -429,9 +441,9 @@ "name": "stdout", "output_type": "stream", 
"text": [ - "[(0,), (1, 27), (2,), (3,), (4,), (5,), (8,), (9,), (10,), (11, 44, 77, 54), (12,), (14,), (15,), (16,), (17,), (18,), (20,), (21,), (22,), (23,), (24,), (25,), (26,), (28,), (29,), (30,), (32,), (33,), (34, 13), (35, 19), (37,), (38,), (39,), (40,), (41,), (42,), (45,), (46,), (48,), (49,), (50,), (51,), (52,), (53,), (55,), (56,), (57,), (60,), (61,), (62,), (63,), (64,), (65,), (67,), (68,), (69,), (70,), (71,), (72,), (73,), (74,), (75, 43), (76,), (78,), (79,), (81,), (82, 6), (83,), (84,), (86,), (87,), (89,), (90,), (92,), (94,), (95,), (96,), (97,), (98,), (100,), (101,), (102,), (103,), (104,), (106,), (107, 166), (108,), (109,), (110,), (111,), (112,), (113,), (114,), (115,), (116,), (117,), (118,), (119,), (120, 93), (121, 148), (122,), (123,), (124,), (125,), (126,), (127,), (128,), (131,), (133,), (134,), (135,), (136,), (137,), (139,), (140,), (141,), (143,), (145,), (146,), (149,), (151,), (152,), (153,), (154,), (156,), (157,), (160,), (161,), (162,), (163, 99, 150), (164,), (165,), (167,), (168,), (169,), (170, 91), (171,), (172,), (174,), (176,), (177,), (178,), (179,), (181,), (182,), (183,), (184,), (185, 231), (186, 207), (187,), (189,), (190,), (191,), (192, 268, 236), (193,), (194,), (196,), (197,), (200,), (201, 210), (202, 237), (203,), (204,), (205,), (206,), (208,), (209,), (211, 252), (213,), (214,), (215,), (216,), (217, 281), (218, 228), (219,), (220,), (221,), (224,), (225,), (226, 242), (227,), (229,), (230,), (232,), (233,), (234, 198), (235, 276, 212, 270), (238,), (240,), (241,), (243,), (245,), (246,), (247,), (248, 284, 222, 239), (249,), (250,), (251, 188, 244), (253,), (254,), (255,), (256,), (257, 223), (258, 195), (259,), (260,), (261,), (262,), (263,), (264,), (265,), (266,), (267,), (269,), (271,), (272,), (273,), (274,), (275, 199), (277,), (278,), (279,), (280,), (282,), (283,), (285,), (286,), (287,), (288,), (289,), (290,), (291,), (292,), (294,), (296,), (297,), (298, 325), (299,), (300,), (301,), (302,), 
(303,), (304,), (305,), (306, 359), (307,), (310,), (311,), (312,), (313,), (314,), (315,), (316,), (317,), (318,), (319,), (320,), (321,), (322,), (323,), (328,), (329, 383), (330,), (331,), (332,), (333,), (335,), (336, 295), (337,), (338,), (340,), (341,), (345,), (346,), (347,), (348,), (349,), (350, 326), (351,), (352, 358, 327, 393, 367, 342, 406, 344), (353,), (354, 339), (357,), (360, 293), (361,), (362,), (363, 374), (364,), (365,), (366,), (368,), (369,), (370,), (371,), (372,), (373,), (375,), (376,), (378,), (379,), (380,), (381,), (382, 334), (384,), (385,), (386,), (387, 356, 414), (388,), (389, 343), (390,), (391,), (392,), (394,), (395,), (396,), (398,), (399,), (400,), (401,), (402,), (403,), (404,), (405,), (407,), (408, 308), (409,), (410, 355, 324), (411,), (412,), (413,), (415,), (416,), (417, 309), (418,), (419,), (420,), (421,), (422,), (423,), (424,), (425, 377, 397), (426,), (427,), (428,), (430,), (431,), (433,), (434,), (435,), (436,), (440, 556), (441,), (442,), (446,), (448,), (449,), (450, 459), (452,), (453,), (454,), (455,), (457,), (458,), (460,), (461,), (462,), (463,), (465,), (466,), (467,), (468, 487), (471,), (472,), (473,), (474,), (475,), (477,), (480,), (484,), (485,), (486,), (489,), (490,), (491,), (492,), (494,), (495,), (497,), (498, 478), (499,), (500,), (501,), (502,), (503,), (505, 444), (506,), (507,), (508,), (510,), (511,), (512,), (513,), (515,), (516,), (518,), (519,), (520,), (521,), (522,), (523,), (524,), (526,), (527,), (528,), (529,), (531,), (532,), (534,), (538,), (539,), (541,), (544,), (546,), (547,), (548,), (549,), (550,), (553,), (555,), (557,), (558,), (560,), (561,), (562, 447), (563,), (567,), (569, 493), (570,), (572,), (574,), (576,), (577,), (578,), (579,), (580,), (581,), (582,), (583,), (584,), (585,), (586,), (588,), (589,), (590,), (593,), (594,), (595,), (597,), (598,), (600,), (601,), (604,), (605,), (606,), (607,), (608, 641), (609,), (610, 596), (614,), (616,), (617,), (618,), (619,), 
(620,), (622,), (623,), (624,), (626,), (627,), (628,), (629,), (632,), (636,), (637,), (638,), (639,), (640,), (643,), (644,), (645, 678), (646,), (649,), (652, 684), (653, 631), (656,), (657,), (658,), (659,), (660,), (661,), (662,), (663,), (664, 696), (666, 642), (667,), (669,), (671,), (673,), (674,), (675, 613), (679,), (680,), (682,), (683,), (687, 655), (688,), (689, 612), (691,), (693,), (694,), (695, 591), (698,), (700,), (702,), (704,), (705,), (708,), (709,), (710,), (711,), (712,), (713,), (714,), (715,), (717,), (718,), (720, 793, 701), (721,), (724,), (728,), (729,), (730,), (731,), (732,), (736,), (737,), (738,), (739,), (740,), (742,), (743,), (744,), (746,), (747,), (748,), (749,), (750,), (751,), (753,), (754,), (756,), (758,), (760,), (761,), (762,), (764,), (765,), (766,), (767,), (768,), (773,), (775,), (776,), (777,), (779,), (780,), (781,), (782,), (783,), (785,), (787, 734, 727), (788,), (789,), (792,), (794,), (795, 716), (799,), (801,), (805,), (807,), (808,), (809,), (810,), (811,), (812, 806), (813,), (815,), (816,), (817,), (818, 814), (819,), (820,), (821,), (822, 735), (823,), (825, 826), (829,), (830,), (833,), (834, 828), (835,), (836,), (837,), (838,), (839,), (841,), (842,), (843,), (846,), (847,), (848,), (850, 851), (853,), (855,), (856,), (857,), (859, 927), (862,), (863,), (864,), (865,), (866,), (867,), (868,), (871,), (872,), (873, 858), (874,), (875,), (876,), (879,), (880,), (881, 946), (882,), (883,), (884, 967), (885,), (886,), (887,), (888,), (889,), (891,), (892,), (893,), (894,), (895,), (896,), (897, 860), (900,), (904,), (905,), (906,), (907, 911), (908,), (909,), (910,), (912,), (913,), (915,), (917,), (918,), (919,), (920,), (921,), (923,), (924,), (925,), (926,), (929,), (930,), (931,), (932,), (934,), (935,), (936,), (937,), (938,), (939,), (940,), (941,), (942,), (943,), (944,), (945,), (947, 870), (948,), (949,), (950,), (952,), (953,), (954,), (956,), (957,), (958,), (959,), (960,), (961,), (962,), (963,), 
(964,), (965,), (966,), (968, 844, 933), (969,), (973,), (974,), (976, 977), (978,), (979,), (980, 854), (981,), (982,), (983,), (985, 1068), (986,), (987, 1111), (988,), (991,), (992, 1122), (993,), (994, 1007), (995,), (996,), (997, 1078), (999,), (1000,), (1001,), (1002,), (1003,), (1004,), (1006,), (1008, 1053), (1009,), (1010,), (1011,), (1012,), (1014,), (1015,), (1016,), (1017,), (1018,), (1019,), (1022,), (1023,), (1024,), (1025,), (1027,), (1028,), (1029,), (1030, 1102, 1103, 1074, 1079, 984, 1020, 1055), (1031,), (1032, 1107, 1115), (1033,), (1034,), (1035,), (1036,), (1037,), (1039,), (1040, 1089, 1130, 998), (1041,), (1042,), (1043,), (1044,), (1045,), (1046,), (1048,), (1049,), (1050, 1047), (1051,), (1054,), (1056,), (1057, 1021), (1058, 1099), (1059,), (1060,), (1061,), (1062,), (1064,), (1065, 1052), (1066, 1077, 1085), (1067,), (1069,), (1071,), (1072, 1026, 1063, 1005, 1070), (1073, 1125), (1075,), (1076,), (1080,), (1081, 1114), (1082,), (1083,), (1084,), (1086,), (1088,), (1090, 990), (1091,), (1092,), (1094,), (1095,), (1096,), (1097,), (1098, 1013, 1119), (1100,), (1101,), (1104,), (1105,), (1106,), (1108,), (1109,), (1110,), (1112,), (1113,), (1116, 1093, 989, 1038), (1117, 1087), (1118,), (1120,), (1121,), (1123,), (1124,), (1126,), (1127,), (1128,), (1129,), (1131,), (1132,), (1133,), (1134,), (1135,), (1136,), (1137,), (1139,), (1140,), (1141,), (1142,), (1145,), (1147,), (1148,), (1152,), (1153,), (1154, 1191), (1155,), (1156,), (1157,), (1158,), (1160,), (1162,), (1163,), (1164,), (1167,), (1168,), (1169,), (1170, 1159), (1172,), (1174,), (1175,), (1176,), (1178,), (1179,), (1180,), (1182,), (1184,), (1185,), (1187,), (1188,), (1190,), (1192,), (1193, 1230), (1195,), (1196,), (1200,), (1201,), (1202,), (1203,), (1204,), (1206,), (1208,), (1210,), (1211, 1279), (1212,), (1214,), (1216,), (1217,), (1220,), (1221,), (1222,), (1223,), (1225,), (1226,), (1228,), (1231,), (1232,), (1233,), (1234,), (1235,), (1236,), (1237,), (1238,), (1240,), 
(1241,), (1243,), (1244,), (1246,), (1248,), (1249, 1255), (1250,), (1251,), (1252,), (1253, 1278), (1254,), (1257,), (1258,), (1261,), (1262,), (1263,), (1264,), (1266,), (1267,), (1268, 1143), (1269,), (1270,), (1271,), (1272, 1149), (1273,), (1274,), (1275,), (1276,), (1277,), (1280, 1218), (1283,), (1285,), (1286,), (1287,), (1288,), (1290,), (1291,), (1292,), (1293,), (1294,), (1296,), (1297,), (1298,), (1299,), (1300,), (1301,), (1302,), (1303,), (1305,), (1307,), (1308,), (1311,), (1312,), (1313,), (1314,), (1316,), (1317,), (1318,), (1319,), (1321,), (1323,), (1324,), (1326,), (1327,), (1329, 1364), (1333,), (1336,), (1337,), (1339,), (1340,), (1341,), (1344,), (1347,), (1350,), (1351,), (1352, 1398), (1353,), (1354, 1365), (1355,), (1356,), (1357,), (1358,), (1359, 1362, 1407), (1361,), (1363, 1374), (1366,), (1367,), (1368,), (1369,), (1370,), (1371,), (1376,), (1377,), (1378,), (1379,), (1381,), (1382,), (1383,), (1384,), (1385,), (1386,), (1387, 1391), (1388,), (1389,), (1390,), (1392,), (1393, 1289), (1394,), (1395,), (1396,), (1397,), (1399,), (1401,), (1403,), (1406,), (1409, 1438), (1410,), (1411,), (1412,), (1413,), (1414,), (1415,), (1416,), (1417,), (1418,), (1419,), (1420,), (1421,), (1422,), (1423,), (1424,), (1426,), (1427, 1310), (1428, 1349), (1429,), (1431,), (1434,), (1435,), (1436,), (1437,), (1440,), (1441,), (1443,), (1444,), (1446,), (1448,), (1449, 1477, 1463), (1450,), (1451, 1439), (1452,), (1453,), (1454, 1519), (1455,), (1456,), (1457,), (1458,), (1461,), (1462,), (1465,), (1466,), (1468,), (1469,), (1470,), (1471,), (1475, 1580), (1476,), (1478,), (1483,), (1484,), (1485,), (1486,), (1487,), (1488,), (1489,), (1490,), (1491, 1532, 1589), (1493,), (1494,), (1496,), (1497,), (1498,), (1500,), (1501,), (1502,), (1503,), (1504, 1479), (1505,), (1506,), (1507, 1460), (1509,), (1510,), (1513,), (1514,), (1515,), (1516,), (1517,), (1520,), (1521,), (1523,), (1524,), (1525,), (1527,), (1528,), (1529,), (1530,), (1531,), (1533,), (1535,), 
(1536, 1508), (1537,), (1538, 1499), (1543,), (1544,), (1546,), (1547,), (1548,), (1549,), (1550,), (1551,), (1553,), (1554,), (1555,), (1557,), (1558,), (1559,), (1560,), (1561, 1495), (1562,), (1563,), (1564,), (1566, 1534), (1567,), (1569,), (1570,), (1571, 1459, 1540), (1572,), (1573,), (1574,), (1575,), (1576,), (1577, 1522, 1511), (1578,), (1581, 1518), (1582, 1447), (1583,), (1584, 1556), (1585,), (1586,), (1587,), (1588,), (1590, 1695), (1591, 1598, 1711), (1592,), (1593,), (1594, 1653), (1595,), (1596,), (1597,), (1599,), (1600, 1721), (1601,), (1602, 1702), (1603, 1691, 1710), (1605,), (1606,), (1607,), (1608,), (1610,), (1611,), (1612,), (1613,), (1616,), (1617, 1679), (1621,), (1622,), (1623,), (1624, 1642, 1650, 1635), (1625,), (1627,), (1628,), (1629,), (1630,), (1631,), (1632,), (1633,), (1634, 1643, 1714, 1618, 1683, 1654, 1688, 1722, 1725, 1663), (1637,), (1638, 1717, 1662, 1719), (1639,), (1640, 1708), (1641,), (1644,), (1646,), (1647,), (1648,), (1649,), (1651,), (1655,), (1656, 1614), (1657,), (1658,), (1659,), (1660,), (1664, 1718), (1665, 1693), (1666,), (1667,), (1668, 1604, 1685), (1669, 1661), (1670,), (1672,), (1673, 1732, 1615), (1674, 1715), (1675,), (1676, 1694, 1678), (1677,), (1680, 1645), (1681,), (1682,), (1684,), (1686,), (1687,), (1689, 1739, 1735), (1690,), (1692,), (1696,), (1697,), (1698,), (1699,), (1701,), (1703,), (1704, 1652), (1705,), (1707,), (1709,), (1712, 1734), (1713, 1619, 1700, 1733, 1620, 1609, 1706), (1716, 1636), (1720,), (1723,), (1724,), (1726,), (1727,), (1728,), (1729,), (1730, 1626), (1731,), (1736, 1671), (1737,), (1738,)]\n", - "1258\n", - "128.62311506271362\n" + "[(0,), (1,), (2,), (3,), (4,), (6,), (7,), (8,), (9,), (10,), (11,), (12,), (13,), (14,), (15,), (16, 63, 39), (18,), (19, 59), (20,), (21,), (22,), (23,), (24, 53), (25, 84), (26,), (27,), (28,), (29,), (30,), (32,), (33,), (34,), (35,), (36,), (37,), (38,), (40,), (41,), (42,), (43,), (44,), (45,), (46,), (47,), (48, 17, 58, 5), (49,), (50,), 
(51,), (52,), (54,), (55,), (56,), (57,), (60,), (61,), (62,), (64,), (65,), (66,), (67,), (68,), (69,), (70,), (71,), (72,), (73, 31), (74,), (75,), (76,), (77,), (78,), (79,), (80,), (81,), (82,), (83,), (85,), (86,), (87,), (88,), (89,)]\n", + "81\n", + "0.0870358943939209\n" ] } ], @@ -446,6 +458,70 @@ "print(time() - start)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Online AT VB 2" + ] + }, + { + "cell_type": "code", + "execution_count": 122, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(onlineatvb2)\n", + "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" + ] + }, + { + "cell_type": "code", + "execution_count": 123, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'time model_online2 = OnlineAtVb2(corpus=corpus, grouped_corpus=disjoint_authors, num_topics=10, id2word=dictionary.id2token, id2author=id2author, author2doc=author2doc, doc2author=doc2author, threshold=1e-19, iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, eval_every=1, random_state=1, var_lambda=None)'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m 
\u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' '\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m 
\u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb2.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, grouped_corpus, num_topics, id2word, id2author, author2doc, doc2author, threshold, minimum_probability, iterations, passes, alpha, eta, decay, offset, eval_every, random_state, var_lambda)\u001b[0m\n\u001b[1;32m 140\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 141\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 142\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvar_lambda\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 143\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 144\u001b[0m \u001b[0;32mdef\u001b[0m 
\u001b[0mrho\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb2.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, var_lambda)\u001b[0m\n\u001b[1;32m 303\u001b[0m \u001b[0mcts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnumpy\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mcnt\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcnt\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdoc\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# Word counts.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 304\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 305\u001b[0;31m \u001b[0mvar_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcts\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_mu\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 306\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 307\u001b[0m 
\u001b[0;31m# Update Elogtheta, since gamma has been updated.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "%time model_online2 = OnlineAtVb2(corpus=corpus, grouped_corpus=disjoint_authors, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-19, \\\n", + " iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "model_online.show_topics()" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -477,7 +553,7 @@ }, { "cell_type": "code", - "execution_count": 351, + "execution_count": 130, "metadata": { "collapsed": true }, @@ -489,7 +565,7 @@ }, { "cell_type": "code", - "execution_count": 352, + "execution_count": 131, "metadata": { "collapsed": false }, @@ -498,8 +574,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 58.1 s, sys: 4 ms, total: 58.1 s\n", - "Wall time: 58.1 s\n" + "CPU times: user 1min 7s, sys: 8 ms, total: 1min 7s\n", + "Wall time: 1min 7s\n" ] } ], @@ -512,7 +588,7 @@ }, { "cell_type": "code", - "execution_count": 347, + "execution_count": 132, "metadata": { "collapsed": false, "scrolled": false @@ -522,28 +598,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.027*node + 0.026*classifier + 0.025*speech + 0.024*hidden + 0.016*training + 0.015*hidden_layer + 0.012*hidden_unit + 0.011*propagation + 0.011*back_propagation + 0.010*table'),\n", + " '0.065*role + 0.054*firing + 0.041*stimulus + 0.035*potential + 0.030*connectivity + 0.030*temporal + 0.028*activity + 0.024*cycle + 0.023*action + 0.019*strength'),\n", " (1,\n", - " '0.020*capacity + 0.017*theorem + 0.013*associative + 0.012*optimization + 0.011*probability + 0.010*orientation + 
0.010*bound + 0.010*proof + 0.010*bit + 0.010*address'),\n", + " '0.078*image + 0.036*visual + 0.035*field + 0.028*location + 0.025*map + 0.021*position + 0.020*surface + 0.020*center + 0.020*human + 0.019*computed'),\n", " (2,\n", - " '0.041*chip + 0.038*pulse + 0.031*analog + 0.031*voltage + 0.028*delay + 0.020*activation + 0.018*vlsi + 0.018*annealing + 0.018*temperature + 0.017*digital'),\n", + " '0.047*loop + 0.031*energy + 0.024*device + 0.021*activation + 0.021*interconnection + 0.019*vi + 0.019*path + 0.018*hardware + 0.016*circuit + 0.014*analog'),\n", " (3,\n", - " '0.016*energy + 0.014*recognition + 0.012*field + 0.010*frequency + 0.009*animal + 0.008*surface + 0.008*transition + 0.008*class + 0.007*test + 0.007*classification'),\n", + " '0.043*capacity + 0.041*sequence + 0.036*bit + 0.028*associative_memory + 0.025*stage + 0.022*eq + 0.020*code + 0.015*bound + 0.013*delay + 0.012*xi'),\n", " (4,\n", - " '0.013*sensory + 0.013*dynamic + 0.013*motor + 0.012*phase + 0.011*movement + 0.011*device + 0.011*receptor + 0.011*control + 0.010*prediction + 0.009*resolution'),\n", + " '0.054*hopfield + 0.050*processor + 0.049*code + 0.047*matrix + 0.042*convergence + 0.039*stored + 0.025*product + 0.024*address + 0.023*storage + 0.021*column'),\n", " (5,\n", - " '0.047*image + 0.028*fixed_point + 0.026*eye + 0.023*winner + 0.022*attractor + 0.021*gain + 0.020*equilibrium + 0.019*optical + 0.016*light + 0.016*oscillation'),\n", + " '0.070*training + 0.069*hidden + 0.042*hidden_unit + 0.030*trained + 0.025*back + 0.024*decision + 0.023*back_propagation + 0.021*gradient + 0.019*propagation + 0.018*node'),\n", " (6,\n", - " '0.017*stability + 0.015*curve + 0.014*rule + 0.012*nonlinear + 0.012*mode + 0.011*speed + 0.011*gradient + 0.011*plane + 0.010*attention + 0.009*tank'),\n", + " '0.086*cell + 0.030*stimulus + 0.029*firing + 0.021*probability + 0.018*synaptic + 0.017*activity + 0.017*phase + 0.017*feedback + 0.016*via + 0.015*synapsis'),\n", " (7,\n", - " 
'0.038*cell + 0.031*velocity + 0.024*stimulus + 0.023*membrane + 0.023*synapsis + 0.022*synaptic + 0.022*cortical + 0.022*stimulation + 0.021*spike + 0.021*cortex'),\n", + " '0.045*representation + 0.021*connectionist + 0.020*move + 0.017*feature + 0.015*scheme + 0.012*represented + 0.010*mcclelland + 0.010*representing + 0.010*path + 0.009*represented_by'),\n", " (8,\n", - " '0.029*joint + 0.024*motion + 0.017*visual + 0.016*role + 0.014*receptive_field + 0.014*receptive + 0.014*position + 0.011*array + 0.010*angle + 0.009*noise'),\n", + " '0.073*node + 0.023*target + 0.017*neural_net + 0.016*standard + 0.015*mapping + 0.015*learned + 0.014*log + 0.014*learning_algorithm + 0.012*back_propagation + 0.012*activation'),\n", " (9,\n", - " '0.031*circuit + 0.027*firing + 0.019*match + 0.015*trajectory + 0.015*gaussian + 0.014*peak + 0.014*link + 0.013*path + 0.013*direction + 0.012*fire')]" + " '0.031*constraint + 0.027*noise + 0.026*minimum + 0.021*iv + 0.016*optimization + 0.013*search + 0.013*differential + 0.011*find + 0.011*recall + 0.010*distance')]" ] }, - "execution_count": 347, + "execution_count": 132, "metadata": {}, "output_type": "execute_result" } @@ -881,7 +957,7 @@ }, { "cell_type": "code", - "execution_count": 348, + "execution_count": 98, "metadata": { "collapsed": false }, @@ -893,7 +969,7 @@ }, { "cell_type": "code", - "execution_count": 349, + "execution_count": 99, "metadata": { "collapsed": false }, @@ -902,8 +978,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 25.7 s, sys: 20 ms, total: 25.7 s\n", - "Wall time: 25.7 s\n" + "CPU times: user 39.4 s, sys: 16 ms, total: 39.4 s\n", + "Wall time: 39.4 s\n" ] } ], @@ -1423,7 +1499,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.4.3+" + "version": "3.5.2" } }, "nbformat": 4, diff --git a/gensim/models/atvb.py b/gensim/models/atvb.py index 954f468c42..21490bde67 100644 --- a/gensim/models/atvb.py +++ b/gensim/models/atvb.py 
@@ -338,6 +338,8 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No logprob = self.eval_logprob() logger.info('Log prob: %.3e.', logprob) for iteration in xrange(self.iterations): + lastgamma = var_gamma.copy() + lastlambda = var_lambda.copy() #logger.info('Starting iteration %d.', iteration) # Update phi. for d, doc in enumerate(corpus): @@ -454,6 +456,11 @@ def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=No self.var_gamma = var_gamma + #meanchange = numpy.mean(abs(var_gamma - lastgamma)) + #logger.info('meanchange in gamma: %.3e', meanchange) + #meanchange = numpy.mean(abs(var_lambda - lastlambda)) + #logger.info('meanchange in lambda: %.3e', meanchange) + # Print topics: #pprint(self.show_topics()) diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 378242bfa2..3c85191b07 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -214,8 +214,6 @@ def inference(self, corpus=None, var_lambda=None): #logger.info('iteration %i', iteration) lastgamma = tilde_gamma.copy() - if self.optimize_lambda: - lastlambda = tilde_lambda.copy() # Update phi. for v in ids: @@ -262,19 +260,6 @@ def inference(self, corpus=None, var_lambda=None): tilde_gamma[a, k] *= len(self.author2doc[a]) tilde_gamma[a, k] += self.alpha[k] - # TODO: see what happens if we put the lambda update outside this loop (i.e. - # only one update per document). - if self.optimize_lambda: - # Update lambda. - for k in xrange(self.num_topics): - for vi, v in enumerate(ids): - # cnt = dict(doc).get(v, 0) - cnt = cts[vi] - tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * var_phi[v, k] - - # This is a little bit faster: - # tilde_lambda[:, ids] = self.eta[ids] + self.num_docs * cts * var_phi[ids, :].T - # Update gamma and lambda. # Interpolation between document d's "local" gamma (tilde_gamma), # and "global" gamma (var_gamma). Same goes for lambda. 
@@ -282,43 +267,46 @@ def inference(self, corpus=None, var_lambda=None): # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). # FIXME: if tilde_gamma is computed like this in every iteration, then I can't compare # lastgamma to it for convergence test. FIXME. - tilde_gamma = (1 - rhot) * var_gamma + rhot * tilde_gamma + var_gamma_temp = (1 - rhot) * var_gamma + rhot * tilde_gamma # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, # corresponding to the authors in the document. The same goes for Elogtheta. - Elogtheta = dirichlet_expectation(tilde_gamma) + Elogtheta = dirichlet_expectation(var_gamma_temp) - if self.optimize_lambda: - # Note that we only changed the elements in lambda corresponding to - # the words in document d, hence the [:, ids] indexing. - tilde_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] - Elogbeta = dirichlet_expectation(tilde_lambda) - expElogbeta = numpy.exp(Elogbeta) - # Check for convergence. # Criterion is mean change in "local" gamma and lambda. - # TODO: consider using separate thresholds for lambda and gamma. if iteration > 0: - meanchange_gamma = numpy.mean(abs(tilde_gamma - lastgamma)) + meanchange_gamma = numpy.mean(abs(var_gamma_temp - lastgamma)) gamma_condition = meanchange_gamma < self.threshold - if self.optimize_lambda: - meanchange_lambda = numpy.mean(abs(tilde_lambda - lastlambda)) - lambda_condition = meanchange_lambda < self.threshold - else: - lambda_condition = True # logger.info('Mean change in gamma: %.3e', meanchange_gamma) - # logger.info('Mean change in lambda: %.3e', meanchange_lambda) - if gamma_condition and lambda_condition: + if gamma_condition: # logger.info('Converged after %d iterations.', iteration) converged += 1 break # End of iterations loop. - var_gamma = tilde_gamma.copy() + # FIXME: there are too many different gamma variables! 
+ var_gamma = var_gamma_temp.copy() if self.optimize_lambda: - var_lambda = tilde_lambda.copy() + # Update lambda. + # only one update per document). + for k in xrange(self.num_topics): + for vi, v in enumerate(ids): + # cnt = dict(doc).get(v, 0) + cnt = cts[vi] + tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * var_phi[v, k] + + # This is a little bit faster: + # tilde_lambda[:, ids] = self.eta[ids] + self.num_docs * cts * var_phi[ids, :].T + + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. + var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + var_lambda = var_lambda.copy() # Print topics: # pprint(self.show_topics()) From 77832618ac226f89944988669e269124d71cce6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 8 Nov 2016 15:42:50 +0100 Subject: [PATCH 038/100] Working on an algorithm that tries to process each 'disjoint' set of authors independently in a mini-batch sort of way. --- gensim/models/onlineatvb2.py | 565 +++++++++++++++++++++++++++++++++++ 1 file changed, 565 insertions(+) create mode 100644 gensim/models/onlineatvb2.py diff --git a/gensim/models/onlineatvb2.py b/gensim/models/onlineatvb2.py new file mode 100644 index 0000000000..be9f31dbed --- /dev/null +++ b/gensim/models/onlineatvb2.py @@ -0,0 +1,565 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Author-topic model. +""" + +# NOTE: from what I understand, my name as well as Radim's should be attributed copyright above? 
+ +import pdb +from pdb import set_trace as st + +import logging +import numpy +import numbers + +from gensim import utils, matutils +from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models import LdaModel +from six.moves import xrange +from scipy.special import gammaln + +from pprint import pprint + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger(__name__) + + +class OnlineAtVb2(LdaModel): + """ + Train the author-topic model using online variational Bayes. + """ + # TODO: inherit interfaces.TransformationABC. + + def __init__(self, corpus=None, grouped_corpus=None, num_topics=100, id2word=None, id2author=None, + author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, + iterations=10, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, + eval_every=1, random_state=None, var_lambda=None): + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + if grouped_corpus is None: + # FIXME: shouldn't be necessary. It should be an option, and if it is not supplied, + # it should be constructed automatically. + raise ValueError('grouped_corpus must be supplied.') + + # NOTE: this stuff is confusing to me (from LDA code). Why would id2word not be none, but have length 0? 
+ if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + logger.info('Vocabulary consists of %d words.', self.num_terms) + + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). + # If either doc2author or author2doc is missing, construct them from the other. + if doc2author is None: + # Make a mapping from document IDs to author IDs. + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + elif author2doc is None: + # Make a mapping from author IDs to document IDs. + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. 
+ author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + + self.author2doc = author2doc + self.doc2author = doc2author + + self.num_authors = len(self.author2doc) + logger.info('Number of authors: %d.', self.num_authors) + + self.id2author = id2author + if self.id2author is None: + logger.warning("no author id mapping provided; initializing from corpus, assuming identity") + author_integer_ids = [str(i) for i in range(len(author2doc))] + self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) + + # Make the reverse mapping, from author names to author IDs. + self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + + self.corpus = corpus + self.grouped_corpus = grouped_corpus + self.iterations = iterations + self.passes = passes + self.num_topics = num_topics + self.threshold = threshold + self.minimum_probability = minimum_probability + self.decay = decay + self.offset = offset + self.num_docs = len(corpus) + self.num_authors = len(author2doc) + self.eval_every = eval_every + self.random_state = random_state + + # NOTE: I don't think this necessarily is a good way to initialize the topics. + self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + + self.random_state = get_random_state(random_state) + + if corpus is not None: + self.inference(corpus, var_lambda=var_lambda) + + def rho(self, t): + return pow(self.offset + t, -self.decay) + + def inference(self, corpus=None, var_lambda=None): + if corpus is None: + # TODO: I can't remember why I used "copy()" here. + corpus = self.corpus.copy() + + self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. + + logger.info('Starting inference. 
Training on %d documents.', self.num_docs) + + # Whether or not to evaluate bound and log probability, respectively. + bound_eval = True + logprob_eval = False + + if var_lambda is None: + self.optimize_lambda = True + else: + # We have topics from LDA, thus we do not train the topics. + self.optimize_lambda = False + + # Initial values of gamma and lambda. + # Parameters of gamma distribution same as in `ldamodel`. + # TODO: gamma shouldn't be num_authors in size, but this is + # needed to compute the bound at the moment. + var_gamma = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + self.var_gamma = var_gamma + + if var_lambda is None: + var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + tilde_lambda = var_lambda.copy() + else: + self.norm_lambda = var_lambda.copy() + for k in xrange(self.num_topics): + self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + + self.var_lambda = var_lambda + + # Initialize dirichlet expectations. + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + t = 0 + if self.eval_every > 0: + if bound_eval: + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + for _pass in xrange(self.passes): + converged = 0 # Number of documents converged for current pass over corpus. 
+ # Loop over "groups" + for chunk_no, chunk in enumerate(self.grouped_corpus): + #logger.info('Processing chunk %d.', chunk_no) + rhot = self.rho(chunk_no + _pass) + + chunksize = len(chunk) + + authors_chunk = set() + for d in chunk: + for a in self.doc2author[d]: + authors_chunk.add(a) + authors_chunk = list(authors_chunk) + + # Initialize phi and mu. + var_phi = dict() + var_mu = dict() + for d in chunk: + doc = corpus[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + authors_d = self.doc2author[d] # List of author IDs for document d. + for v in ids: + for k in xrange(self.num_topics): + var_phi[(d, v, k)] = 0.0 + for a in authors_d: + # Draw mu from gamma distribution. + var_mu[(d, v, a)] = 1 / len(authors_d) + + # Do batch inference of group until convergence + for iteration in xrange(self.iterations): + lastgamma = var_gamma.copy() # TODO: doesn't have to be entire gamma. + #logger.info('Starting iteration %d.', iteration) + # Update phi. + for d in chunk: + doc = corpus[d] + #logger.info('Updating phi, document %d.', d) + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + authors_d = self.doc2author[d] # List of author IDs for document d. + + # Update phi. + for v in ids: + phi_sum = 0.0 + for k in xrange(self.num_topics): + # Average Elogtheta over authors a in document d. + # NOTE: avgElogtheta may become numerically unsable. If + # it is a large positive number, exponentiating it may + # cause overflow, which probably results in the value + # "inf". If it is a large negative number, exponentiating + # it may result in 0.0. + avgElogtheta = 0.0 + for a in authors_d: + avgElogtheta += var_mu[(d, v, a)] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute phi. + # TODO: avoid computing phi if possible. + # NOTE: computation can be made more stable by adding the maximal value + # inside the exponential, which will disappear in the normalization. 
+ var_phi[(d, v, k)] = expavgElogtheta * expElogbeta[k, v] + phi_sum += var_phi[(d, v, k)] + + # Normalize phi. + phi_norm_const = 1.0 / (phi_sum + 1e-100) + for k in xrange(self.num_topics): + var_phi[(d, v, k)] *= phi_norm_const + + # Update mu. + for d in chunk: + doc = corpus[d] + #logger.info('Updating mu, document %d.', d) + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + authors_d = self.doc2author[d] # List of author IDs for document d. + for v in ids: + mu_sum = 0.0 + for a in authors_d: + # Average Elogtheta over topics k. + # NOTE: we may have same problems as with phi update, above. + avgElogtheta = 0.0 + for k in xrange(self.num_topics): + avgElogtheta += var_phi[(d, v, k)] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute mu. + # TODO: avoid computing mu if possible. + var_mu[(d, v, a)] = expavgElogtheta + mu_sum += var_mu[(d, v, a)] + + mu_norm_const = 1.0 / (mu_sum + 1e-100) + for a in authors_d: + var_mu[(d, v, a)] *= mu_norm_const + + # Update gamma. + #logger.info('Updating gamma.') + for a in authors_chunk: + for k in xrange(self.num_topics): + docs_a = self.author2doc[a] + var_gamma[a, k] = self.alpha[k] + for d in docs_a: + # TODO: if this document doesn't exist, we will have problems here. Could to an "if corpus.get(d)" type of thing. + doc = corpus[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + for vi, v in enumerate(ids): + var_gamma[a, k] += cts[vi] * var_mu[(d, v, a)] * var_phi[(d, v, k)] + + # Update Elogtheta, since gamma has been updated. + for a in authors_chunk: + Elogtheta[a, :] = dirichlet_expectation(var_gamma[a, :]) + + + # Check for convergence. + # Compute the bound for the current chunk only. 
+ if iteration > 0: + meanchange = numpy.mean(abs(var_gamma[authors_chunk, :] - lastgamma[authors_chunk, :])) + #logger.info('Mean change in gamma: %.3e', meanchange) + if meanchange < self.threshold: + #logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + # Update lambda. + if self.optimize_lambda: + #logger.info('Updating lambda.') + for k in xrange(self.num_topics): + #logger.info('k = %d.', k) + for v in xrange(self.num_terms): + #logger.info('v = %d.', v) + tilde_lambda[k, v] = 0.0 + + # The following commented-out code is used for "sampling" documents when + # updating lambda: + # sample_ratio = 1.0 # When sample_ratio is 1.0, the whole dataset is used. + # nsamples = int(numpy.ceil(self.num_docs * sample_ratio)) + # doc_idxs = sample(xrange(self.num_docs), nsamples) + + # TODO: this would be more efficient if there was a mapping from words + # to the documents that contain that word, although that mapping would be + # very large. + # NOTE: the below might cause overflow if number of documents is very large, + # although it seems somewhat unlikely. + for d in chunk: + doc = corpus[d] + # Get the count of v in doc. If v is not in doc, return 0. + cnt = dict(doc).get(v) + if cnt is not None: + # TODO: this can be computed as "sstats" inside chunk loop. + tilde_lambda[k, v] += cnt * var_phi[(d, v, k)] + + tilde_lambda[k, v] *= self.num_docs / chunksize + tilde_lambda[k, v] += self.eta[v] + + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. + var_lambda = (1 - rhot) * var_lambda + rhot * tilde_lambda + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + # Print topics: + # pprint(self.show_topics()) + + #logger.info('Mean change in gamma: %.3e', meanchange) + # End of chunk loop. 
+ + + if _pass % self.eval_every == 0: + self.var_gamma = var_gamma + self.var_lambda = var_lambda + if self.eval_every > 0: + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + + #logger.info('Converged documents: %d/%d', converged, self.num_docs) + + # TODO: consider whether to include somthing like this: + #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold: + # break + # End of pass over corpus loop. + + # Ensure that the bound (or log probabilities) is computed at the very last pass. + if self.eval_every != 0 and not _pass % self.eval_every == 0: + # If the bound should be computed, and it wasn't computed at the last pass, + # then compute the bound. + self.var_gamma = var_gamma + self.var_lambda = var_lambda + if self.eval_every > 0: + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + + + self.var_lambda = var_lambda + self.var_gamma = var_gamma + + return var_gamma, var_lambda + + def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): + """ + Compute the expectation of the log conditional likelihood of the data, + + E_q[log p(w_d | theta, beta, A_d)], + + where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. 
+ """ + + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + # NOTE: computing the bound this way is very numerically unstable, which is why + # "logsumexp" is used in the LDA code. + # NOTE: computing bound is very very computationally intensive. I could, for example, + # only use a portion of the data to do that (even a held-out set). + bound= 0.0 + for d, doc in enumerate(docs): + authors_d = self.doc2author[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + bound_d = 0.0 + for vi, v in enumerate(ids): + bound_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + bound_d += cts[vi] * numpy.log(bound_v) + bound += numpy.log(1.0 / len(authors_d)) + bound_d + + # For per-word likelihood, do: + # likelihood *= 1 /sum(len(doc) for doc in docs) + + # TODO: can I do something along the lines of (as in ldamodel): + # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) + # If I computed the LDA bound the way I compute the author-topic bound above: + # bound = 0.0 + # for d, doc in enumerate(docs): + # ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + # cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ # bound_d = 0.0 + # for vi, v in enumerate(ids): + # bound_v = 0.0 + # for k in xrange(self.num_topics): + # bound_v += numpy.exp(Elogtheta[d, k] + Elogbeta[k, v]) + # bound_d += cts[vi] * numpy.log(bound_v) + # bound += bound_d + + return bound + + def theta_bound(self, Elogtheta): + bound = 0.0 + for a in xrange(self.num_authors): + var_gamma_a = self.var_gamma[a, :] + Elogtheta_a = Elogtheta[a, :] + # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector + bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) + bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) + bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) + + return bound + + def beta_bound(self, Elogbeta): + bound = 0.0 + bound += numpy.sum((self.eta - self.var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(self.var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(self.var_lambda, 1))) + + return bound + + def eval_logprob(self, doc_ids=None): + """ + Compute the liklihood of the corpus under the model, by first + computing the conditional probabilities of the words in a + document d, + + p(w_d | theta, beta, A_d), + + summing over all documents, and dividing by the number of documents. + """ + + # TODO: if var_lambda is supplied from LDA, normalizing it every time + # is unnecessary. + norm_gamma = self.var_gamma.copy() + for a in xrange(self.num_authors): + norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] + + if self.optimize_lambda: + norm_lambda = self.var_lambda.copy() + for k in xrange(self.num_topics): + norm_lambda[k, :] = self.var_lambda[k, :] / self.var_lambda.sum(axis=1)[k] + else: + norm_lambda = self.norm_lambda + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + logprob = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. 
+ cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] + logprob_d = 0.0 + for vi, v in enumerate(ids): + logprob_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + logprob_v += norm_gamma[a, k] * norm_lambda[k, v] + logprob_d += cts[vi] * numpy.log(logprob_v) + logprob += numpy.log(1.0 / len(authors_d)) + logprob_d + + return logprob + + # Overriding LdaModel.get_topic_terms. + def get_topic_terms(self, topicid, topn=10): + """ + Return a list of `(word_id, probability)` 2-tuples for the most + probable words in topic `topicid`. + Only return 2-tuples for the topn most probable words (ignore the rest). + """ + topic = self.var_lambda[topicid, :] + topic = topic / topic.sum() # normalize to probability distribution + bestn = matutils.argsort(topic, topn, reverse=True) + return [(id, topic[id]) for id in bestn] + + + def get_author_topics(self, author_id, minimum_probability=None): + """ + Return topic distribution the given author, as a list of + (topic_id, topic_probability) 2-tuples. + Ignore topics with very low probability (below `minimum_probability`). + """ + if minimum_probability is None: + minimum_probability = self.minimum_probability + minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output + + topic_dist = self.var_gamma[author_id, :] / sum(self.var_gamma[author_id, :]) + + author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) + if topicvalue >= minimum_probability] + + # author_name = self.id2author[author_id] + + return author_topics + + + From 868b17427b001a4fd7b65e60ec3c68460450eaf2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 9 Nov 2016 11:22:55 +0100 Subject: [PATCH 039/100] Working on a minibatch algorithm. Updated notebook. 
--- docs/notebooks/at_with_nips.ipynb | 147 +++++---- gensim/models/__init__.py | 1 + gensim/models/minibatchatvb.py | 516 ++++++++++++++++++++++++++++++ 3 files changed, 610 insertions(+), 54 deletions(-) create mode 100644 gensim/models/minibatchatvb.py diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 325fcc987a..18fe5fb1d3 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -67,6 +67,8 @@ "from gensim.models import atvb\n", "from gensim.models import OnlineAtVb\n", "from gensim.models import onlineatvb\n", + "from gensim.models import MinibatchAtVb\n", + "from gensim.models import minibatchatvb\n", "\n", "from time import time\n", "\n", @@ -115,7 +117,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 221, "metadata": { "collapsed": false }, @@ -130,7 +132,7 @@ "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00']\n", + "yrs = ['00', '01', '02']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -152,7 +154,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 222, "metadata": { "collapsed": false }, @@ -172,15 +174,17 @@ " ids = [c.strip() for c in contents[2:]]\n", " if not author2id.get(author_name):\n", " author2id[author_name] = i\n", + " author2doc[i] = []\n", " i += 1\n", - "\n", + " \n", " author_id = author2id[author_name]\n", - " author2doc[author_id] = [yr + '_' + id for id in ids]" + " author2doc[author_id].extend([yr + '_' + id for id in ids])\n", + " " ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 223, "metadata": { "collapsed": false }, @@ -192,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 224, "metadata": { "collapsed": false }, @@ -210,7 +214,7 @@ }, { "cell_type": "code", - "execution_count": 
11, + "execution_count": 225, "metadata": { "collapsed": false }, @@ -236,7 +240,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 226, "metadata": { "collapsed": false }, @@ -259,7 +263,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 227, "metadata": { "collapsed": true }, @@ -274,7 +278,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 228, "metadata": { "collapsed": true }, @@ -293,7 +297,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 229, "metadata": { "collapsed": true }, @@ -305,16 +309,16 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 230, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFW9//H3h4RFlgSEG0BZBYJBZUlAQFkvm8iiVxAM\nLoigKCD8giDK1QuCG4iEXVFUFjUIKIiyBIMKAaJIgqxh38GEPSxJyPb9/XFOk5pKz2SmZunp6c/r\nefrpqVOnqs6p7un+9qlz6igiMDMzM2s1SzS6AGZmZmaN4CDIzMzMWpKDIDMzM2tJDoLMzMysJTkI\nMjMzs5bkIMjMzMxakoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMx6nKRnJP2ssLyTpAWSPtQHx/6u\npLmF5UH52Kf39rHz8Q7Jx3tXXxyvKknfkPSYpHmSbm90eTpL0nr5/B7Q6LJY83MQZAOGpAPzh2O9\nx/cbXb4WU28+ni7P0SPpfyXtVeHYC7p6rK7qoGxBhbr2JUkfBb4P/A34PPDthhbIrEEGN7oAZj0s\nSB/oT5TS7+37olhNRNwo6R0RMaeLm34LuAT4Uxe2OQE4qYvHqaK9sv0SuKRCXfvSjsBc4JDwBJLW\nwhwE2UB0fURM6WxmSQKWioi3erFMLa+3gwJJy0bEzIhYQB+0BLUnBxX9OQACWBV4sz8GQP5/tL7k\ny2HWUor9QyR9VtJ9wGxgp7xeko6WdJ+k2ZL+I+k8SUNK+5Gk/8t9X96QNEHSeyU9XeoL06Z/SiG9\nbr8RSXtImpj3OUPS1ZLeW8rza0mvSFojr39d0vOSfljnOJI0RtLdkmblfNdK2jSvv1XSHe2cq0cl\nddgC0955qJNvkT5BkoZL+oOkablsT0n6jaTlaq8TsBRQO1cLauc2n9cFeR+/k/QK6dJOu+c8r/us\npAfz8W4v91HK5/bhOtu9vc9OlK291/arhffVs5LOqvO+ukXSFEnvk/Q3STPzuT26o9ehsP1gSSfk\n1262Up+fkyQtWSr7p4GhuZzz1U7/mvzemStpuULacXm7HxbSBufX/6RC2vKSxub/idmSpkr6f6X9\nL+7/cSVJF0t6VdLLkn4BtDlnOd/qki7K52q2pOckXSlpjc6cN2tdbgmygWiopJWLCRHxUinPrsCn\ngHOBl4GncvovgdH5+QzgPcBXgU0kbZtbGSD1pzgOuBoYD4wCbgDeUTpOe/1
DFkmX9HngF8C1wNeB\n5YDDgImSNouIZwrbDs7Hmwh8LdfnWEkPR8QvCru9mPSF9yfgZ6Qv7u2ALYF/5/XnSRoeEQ8VyrI1\nsC7wzTplL+rseaiVu7b/pXO+JUjneTqwBrAXMCQi3pT0GeBXwC35vAA8UtrXH4AHgG8U0to75zsB\nBwBnkS4FHQ6Ml7R5RDy4mG3fTo+I+Z0oW/m1/S5wPHA96T03gvTajiq9rwJYBbgOuBy4FNgP+JGk\nuyLixjplK7ow1/FS0ntjK9Jluw2B/QtlPwzYBPgSIODWdvY3kfQafZj0egFsA8wHti3kG0V6zW/O\n9RVwTd7u58DdwO7A6ZJWj4jjSsdZ5P8x7+NPpPfqecCDwD6k815+ja4C1ie9tk+RWrp2Jb2nnsGs\nPRHhhx8D4gEcSLoMUn7ML+QZlNPmAOuXtt8hr9unlL57Tt83Lw/L2/++lO+HOd/PCmknA3PqlPVg\n0hfJu/LyCsCrwNmlfKvm9HMKaZfkbb9eyvtv4LbC8i65PKd2cM5WBGYBJ5XSz83HXaaDbbtyHnbK\nZf5QXh6V8+y1mNd0VnE/pfO6ALiwnXVzCsu113we8P5C+tqkVodLS+f2ocXtczFlK7+2q+bzdHUp\n35E536cLaRNz2n6FtKVIQeJvF3OuRuZ6nltKPz3v88Oler7cif+pQcDrwMmFtJdJQdbs2vsDODbX\ncfm8vE8uyzGl/f2eFICu1Yn/x9o+jiykLUEKPOcDB+S0d5bz+eFHZx++HGYDTQBfAXYuPHapk+/G\niHiklLYv6QP+75JWrj2AO0hfeDvmfLuRPrzPLm1/RjfK/RFSIHRp6djzgX8Vjl30s9LyLaSWq5p9\nSF/8J7d30Ih4FfgzqfUASJcogE+SgpvZHZR5V6qfh1fz8+6SlulE/noC+GkX8k+MiLc7yEfEk6SW\nho9UPH5n7UI6T+Xzcj4wE9ijlD4jIi6rLUTqS/Uv2r629XyUdE7KtwL4Mam1p3ycxYqI+cAkUush\nkjYGhgI/AJYktdJAah26KyLeyMu7kwKbc0u7PJ10LsrnvN7/4+7AWxTe55FazM7J9amZSQqsdpQ0\ntItVtBbnIMgGon9FxF+Ljzp5nqiTtgHpV+ULpcd0YBlSywfAWvm5zYd2REwj/WquYn3SB/vE0rGf\nB/67cOyaN3IAU/QKsFJh+T3AMxGxuDJdDKwraau8/BFgZVJrQUfWzs9dPg8R8ShwJnAo8JKk6yR9\nRdIKizlm2eNdyFv+kgV4CFhB0kp11vWU2nl6qJgYqePv44X1NU/X2Uf5tW3vOPPyuS0e51nS61E+\nTmfdAmyR+xVtCzwdEXeRRlzWLol9mPTeLZblmYiYVdrX1ML6oifqHHdt4Nk6gfiDxYW8/nhgT+B5\nSX+XdIyk8v+M2SLcJ8haVfnDGdKPgueAz9L2l2bN8/m5tq4zI2vayzOozrGD1B/pxTr5yx1957ez\nX7Xzd0euy8f8DPCP/PxsRPx9Mdt15TwsIiLG5I6uHyO1Kp0DHCdpqxxIdUa917Eryueos69Xd46x\nOJ15bbu6vqtlKJpIuu3AlqQWn4mF9G0lvY/04+Hmbhyv3uso6r8ei+w7In4s6Urg46SW2u8C35S0\nfbH1z6zMLUFmCz1K6pR6S7klKT9qH6ZP5OfhxY0lrUa6pFX0CjBI0rKl9HXqHBvg+XaOPZGuewRY\nozwCqSwi5pE74EpakdQ5+Ted2P8T+bkz56G9Y98bEd+LiO2B7UmtbF8qZunMfjppgzppw4HXI+KV\nvPwKqZ9U2Tp10jpbtify84bFRElL5f0+2cn9dOY4gyWtVzrOu4Dlu3Gcf5Auq25HavmpvRdvBj5E\nulQbpBajYlnWkFTuID8iP3emLLV9lC+XblgnLxHxWEScHhG7AR8gddTu1Kg6a10OgswWuozUCfVb\n5RV5CHAtmPgL6df6V0vZxtTZ56OkX67
bFfa1PKm1qeg64A3gf3OfnPLxV+lkHYp+T2rt7czdgC8h\nBYDnk748OhMEdeU8tCFpiKTy58+9pC/TpQtpb1I/KKlim9ynpVaGdUiXUK4v5HkUWFnSiEK+d5MC\nw7LOlq12no4qpR9KGgH4507sozOuJb3X/l8p/Wuk83pNlZ3mS1pTSO/Z1WnbErQccATwYEQUWzCv\nJf0vHVba3RjSubiuE4e+lvReOLSWkP83jqDtSMN35NGGRY+R/p+WLuRbTdKGdd531sJ8OcwGmsrN\n/hHx13x55luSRgITSL+Ah5M6TX+FNMJnuqSxwDGSriZ9oG9O6oT9cmm31wHPAhdKOi2nfQH4D/D2\nfWQiYoakI0hD86dIupR0iWptUofWv9HFX7URMUHSOOBopXv33EC6rLMtMD4iih1O75A0ldQh+u7O\nXELo4nmAtq/NLsBYSZcDD5M62R5Iuuz3h0K+ycCu+f4y/wEejYi69zXqhHuBGySdTXpdD8vP3ynk\n+S1p2P/VOd/ywJdJw/A3Ke2vU2XL5+kU4HhJ15KCnhF5v5NIrXDdFhFTJP0GOCx3qp8IbE26vHlZ\nRLQ3DL4zJgLHAC9FxNR8vP9IepT0//HzUv4rSS1Fp0han4VD5PcAfhQR9fo9lV1JaoU6Lbdu1YbI\nl1tVNwKul3QZcD8pyNqX1K9tXCHfaaQBAGuQLnubeYi8HwPnQfoSnQ+M7CDPoJznxx3k+SJpNM4b\npMsjdwLfA4aV8v0fKcB5g/Rrf0NSp9aflfKNJH3ZzSL9Qj2c0jDqQt4dSC0Tr+T9PghcAGxayHMJ\n6cuoXO6TgbdKaSJ9ed2fjz+NNCJq4zrbfyOX6egunvd65+Ep4PxCnvIQ+ffkej1MalF5Pm+7XWnf\n7wX+nvc9v3Zuc13nk+4p1OF5KL7mpIDgoXwubq+Vp7T9rsA9pCHg95Hu01NviHx7ZWvvtT087292\nPl9nAiuU8kwEJtcp0yWk1pbFvRaD8uvxaD7O46Qgb3Cd/S3yHupgv3vlOl1ZSv8lpWH+hXXLkUaD\nPZPL8gBwVFf+H0mdwS8mjSZ8iXRPps1oO0R+FdIIxfuB10gB+K3Ax+vUeV75dfGjtR+K6MlL7mat\nTdLTwHUR8aXFZu5nJH2NdI+ftSLiP40uj5lZb/O1UTOr+QLpfi0OgMysJbhPkFkLU5oTam9SP573\n4tE0ZtZCHASZ9az25p7qr1YjjQR7mTR1xvgGl8fMrM+4T5CZmZm1JPcJMjMzs5bkIMjMzMxakoMg\nM+tVkr4rqTz3WV+XYZCkBZLKM6x3Z5875X3u3VP77MKxfy3p4b4+rtlA4yDIrIEkHZi/SGuPWZIe\nlHT2AJoFu9k6i3dFo+oVwIIGHdtswPDoMLPGC9L8Xk8Ay5Bm6v4KsLuk90fE7AaWzTrWndnZu+Pz\nDTy22YDhIMisf7g+Iqbkv38p6WXSZJMfA37XuGItnqRlI2Jmo8vRSiJifiOO69faBhpfDjPrn/5K\n+qW/bi1B0rqSLpf0kqQ3JU2S9NHiRpJeKEzUipJXJc2VNKSQflxOW7aQtqGkK/L+Z0n6l6S9Svuv\nXb7bTtJ5kqaT5kvrEkkHS7pR0vR8rHslfbGU50xJ00ppP8nH/3Ih7V057QudPPZn8yXHWZJul/Sh\nOnneLelCSdMkzZZ0j6QD6+wugCUkfVvSM5JmSvqLpHVL+9s+v3ZP5f09Kem04uznkr4hab6kd5UP\nkvPOkrRCXl6kT5Ck5SWNlfR0PsbUPLlrMc96+VwdUEqv9Zk6vpD23Zw2XNLvJL1CmsjXbMBwEGTW\nP62fn18CyP2DJpFmXz8HOB5YGviTpI8VtrsV2K6wvDFQC34+XEjfBphS+1Uv6X2kGbs3BH5AunP0\nG8BVpf3XnEe6w/R3SPONddVXSJPJfg/4GmlC0fNLgdBE4L8kDS+Vez6wbSFtO1IwMrETx90J+BFw\nEWm
i0WHAeEkb1jJIWo00uer2wFnAUbmsv5J0WGl/Il3K3AM4JT8+RJr0s2g/0ut1DnAEabLYo0gT\nkNZcmvf3yTrl3he4NiJez8tt+llJEnAN8FXSLPVjSJPTnq40g30Vtf3/gTTR6TdIE5iaDRyNnsHV\nDz9a+cHCme93BFYG3g3sD7xACkJWz/nG5nxbF7ZdjjRb+KOFtK8Bc4Dl8vIRpC/wScD3C/leBk4r\nLE8A7mTR2cZvAR4olXcBafZ0dbKO9WZgX7pOvr8AUwvLq+ZjHZyXV8rn4FLgqUK+c4BpiynDoLyv\necD7C+lrk2Y4v7SQdiHwFDC0tI/LgBeBJfPyTnmfdwGDCvnG5HIOX0x9/zeXZ/VC2j+B20r5ts7H\n2a+QdgnwUGF5n5znmNK2vwfmkibFBVgv5zugnfNzfOl1WwBc2Oj/Ez/86K2HW4LMGk/AjaTA52ng\nt8BrwMdj4WSmuwO3R8Sk2kYR8SbwM2AdSRvl5Imkvn61Szzb5rSJ+W8kbQysmNOQtBIpCLscGCpp\n5doDuAHYQNLqhfIG8POIqDwyKiLeervy0pB8rJuA4ZLekfNMBx5hYcvWtsBbwI+BNSStXapjZ0yM\niHsL5XgS+BPwkVwWAf8D/BEYXOdcrARsWtrnL6JtH52JpNf0Pe3Ud9m8v9tyvuL+fgdsKWmtQtr+\nwExSC097dicFv+eW0k8nBTgf6WDbjgTw04rbmvV7DoLMGi9Il4d2BnYANoqI9SJiQiHP2sCDdbad\nWlgPMIX0hVm7XLQNC4OgzSUtldcFqZUH0qU3kX75v1B6nJjzlIfrP1FckLSkpFWLj44qLGlbSX+V\n9Abwaj7WSXn10ELWW0p1uR24A5gBbCtpKPB+Oh8EPVIn7SFghRwMrgasABzGoufiZzl/+VyU+0S9\nkp9XqiVIWlvSxZJeIrXwvUAKfKFtfS/Lz/sV0vYB/hwdd0heG3gmImaV0svvjyoe78a2Zv2aR4eZ\n9Q//ioWjwyqLiHmS/glsJ2k9YHXgZtKX7pLAlqRgYmpEvJQ3q/0YOg1obwLVcvBQ/rLdjnQ5K0gB\nVUhaMyKeK+9I0gY5772kS0dPk1ox9ib1aSn+OJsIHChpTVIwNCEiQtKtebkWcNzcTrk7ozjUvHbs\ni4Bft5P/rtJyeyO1BKnTMely4wrA90nB7ExgLVKfoLfrGxHPSJpECoJOk7Qt6RLppV2oQ0faa70b\n1ME25dfabMBwEGTWHJ4kdVouG1FYXzMR+DqpE/ULEfEQgKT7SMHKtqRLQDWP5ee5EfHXiuWbTGrJ\nKnqhnbx7kwKyPfIlL3L5dquTt9bCsxswEjghL98MHEQKgl5n0cCkPRvUSRsOvB4Rr0h6DXgTWKIb\n56JsU1JfnNER8fbtDiS1d4nqUuBMSe8hXQp7HbhuMcd4AthG0jtKrUHl90ctaFyxtH13WorMmpYv\nh5k1h2uBD0raspYgaTngS8DjEXF/Ie9E0k0Xj2LhJS/y358ltQ69ffkoIl4gdXQ+NI+MakPSKosr\nXES8GhF/LT3amyqj1nLy9udPvhT1uTr7fQSYTurwvQSpH02tjhuS+u/c1oX+SdvkPlG1464D7Alc\nn483H7gS2E/SiPLGdc5FZ45br74ivT71tr+c3HmZdCns6mKfonZcCyxFuoxXVOukfR1ARLxCuvy4\nXSnfEe2UpS5JQ5VuqbB8Z7cx64/cEmTWeJ25lPFDYDRwvaSzSKO7Pk/6Bf+JUt5JpFFHw4HzC+k3\nk/oe1RtOfnhOu0fSz0mtQ6uSRia9G9isi+XtyHjSUPJr87GGAF8E/sOi/W0gBW/7kob0v5HT/kW6\nTLM+aTRXZ90L3CDpbNI5Oiw/f6eQ5+ukIOH2XL6pwDuBzUmtaMVAsTPn4j5Sv5ozcmfuN3J9htTL\nHBHTJU0EjgWWp3M3y7yS9PqeIml94G5SZ+k9gB9FRLHf0gXAMZJmk
PqQ7UBqqerK6/op4Cf5+bLF\n5DXrt9wSZNZ4i/0FHhHPkwKSG0i/2r9PGtq9Z0RcXco7kzTcvdj5GVKQE6Th5U+XtplK+pL/M2kY\n/DnAoaRWhJNoq8qosLe3ycfal/T5cxpwCHA26d5D9dTKXWy9mkcaTt7Z+wPVynAjcAypjieSWpl2\nzWWq7XsasAWpX9AnctmOJAUtx7VXr/bSc4vYnqTA5HjgW6TA6KAOyvo7UgD0Ku330yoeI0gBz1nA\nXqRbKgwHjo6Ib5S2O4HUF2k/UjA6L5evq3O8DdT54KyFqBujXM3MzMyaVsNbgiR9U+nW9a8p3UL/\nytIdYpH0d7WdaXu+pPNKedaUdI3SdALTJJ0qaYlSnh0kTc63lH9IdW6DL+lwSY8r3aL+H5K26J2a\nm5mZWSM1PAgiXWM/mzR0d2fSqJEbajdMy4J0j45VSdfjVyddtwcgBzvXkvo4bUVq6v48hWb83AHy\nz6Tm8E2AM4ELJO1SyLM/6UZsJ5D6QNxFuqX+YjuGmpmZWXPpd5fDcsDxPLBdRNyS0/4G3BkRR7ez\nze7A1aTbz7+Y0w4ldSb9r3zvlFOA3SOiODJkHOnW+B/Ny/8A/hkRR+Vlke5hclZEnNo7NTYzM7NG\n6A8tQWUrklp+Xi6lf1pphux7JH2/1FK0FXBPLQDKxpPuxPq+Qp7iHXhrebaGdMdbYBQL7+Ja62w4\noZbHzMzMBo5+NUQ+t7ycAdxSuu/Jb0g3+3qONCv2qaSRD/vm9auRRnkUTS+su6uDPEMkLU0aAjuo\nnTz1blKHpGVJM2k/sJhb2puZmVlBf/gO7VdBEGmI7EbAh4uJEXFBYfE+SdOAGyWtGxGLm9emo+t9\n6mSe9tZvCtwKTMlzIBVdT/tDW83MzFrJbiw6ke/ypDvBf5iFN0LtU/0mCJJ0DvBRYNvCzNnt+Wd+\nXp90E7LafT2KahM4Tis8lyd1HAa8FhFzJL1IuidKvTzl1qGadfLzyDrrtiPdy8XMzMzatw6tHATl\nAOhjwPYR8VQnNtmM1DpTC5YmAcdLWqXQL2hX0kzTUwt5di/tZ9ecTkTMlTQZ2InUybp2eW4n0g3I\n6nkC4Ne//jUjRixyh/0BZ8yYMYwdO7bRxeh1rufA4noOLK7nwDF16lQ+85nPQP4ubYSGB0H5fj+j\nSZMqvimp1hIzIyJm50kEDyANgX+JNLz9dOCmiLg3570BuB+4RNJxpCH0JwPnFOYv+ilwRB4l9ktS\ncLMvqfWp5nTgohwM3U6ad2dZ2r8t/2yAESNGMHJkvcaggWXo0KGu5wDieg4srufA0ir1zGY36sAN\nD4KAL5Nadf5eSj8IuBiYQ7p/0FHAcqQh65cD36tljIgFkvYkzWVzG2kW6AtZOOM0EfGEpD1Igc6R\nwDPAwRExoZDnsjxE/yTSZbF/A7vlCSbNzMxsAGl4EBQRHQ7Tj4hnSBP8LW4/T5Pmv+koz02kYfAd\n5TmP9ucwMjMzswGiP94nyMzMzKzXOQiyThs9enSji9AnXM+BxfUcWFxP60n9btqMZiJpJDB58uTJ\nrdSBzczMrNumTJnCqFGjAEZFxJRGlMEtQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm\n1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0FmZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZm\nZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZmVlLchBkZmZmLclBkJmZmbUkB0Fm\nZmbWkhwEmZmZWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZk1sWuugYcfbnQpmpODIDMzsyb2\n6U/D1Vc3uhTNyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZm1sQiGl2C5uUgyMzMrMlJjS5Bc3IQ\nZGZmZi3JQZCZmZm1JAdBZmZm1
pIcBJmZmVlLchBkZmbWxDw6rDoHQWZmZk3Oo8OqcRBkZmZmLclB\nkJmZmbUkB0FmZmbWkhwEmZmZNTF3jK7OQZCZmVmTc8foahwEmZmZWUtyEGRmZmYtyUGQmZmZtSQH\nQWZmZk3MHaOrcxBkZmbW5NwxupqGB0GSvinpdkmvSZou6UpJw0t5lpZ0rqQXJb0u6QpJw0p51pR0\njaQ3JU2TdKqkJUp5dpA0WdJsSQ9JOrBOeQ6X9LikWZL+IWmL3qm5mZmZNVLDgyBgW+BsYEtgZ2BJ\n4AZJ7yjkOQPYA9gH2A54F/D72soc7FwLDAa2Ag4EPg+cVMizDvBn4EZgE+BM4AJJuxTy7A/8GDgB\n2Ay4CxgvaZWeq66ZmZn1B4MbXYCI+GhxWdLngeeBUcAtkoYAXwA+FRE35TwHAVMlfTAibgd2A94L\n7BgRLwL3SPo28ENJJ0bEPOArwGMR8fV8qAclbQOMAf6S08YA50fExfk4XyYFX18ATu2dM2BmZmaN\n0B9agspWBAJ4OS+PIgVrN9YyRMSDwFPA1jlpK+CeHADVjAeGAu8r5JlQOtb42j4kLZmPVTxO5G22\nxszMrB9yx+jq+lUQJEmkS1+3RMT9OXk1YE5EvFbKPj2vq+WZXmc9ncgzRNLSwCrAoHbyrIaZmVk/\n5Y7R1TT8cljJecBGwDadyCtSi9HidJRHnczT4XHGjBnD0KFD26SNHj2a0aNHd6J4ZmZmA9u4ceMY\nN25cm7QZM2Y0qDQL9ZsgSNI5wEeBbSPiucKqacBSkoaUWoOGsbDVZhpQHsW1amFd7XnVUp5hwGsR\nMUfSi8D8dvKUW4faGDt2LCNHjuwoi5mZWcuq1zAwZcoURo0a1aASJf3iclgOgD5G6tj8VGn1ZGAe\nsFMh/3BgLeC2nDQJ+EBpFNeuwAxgaiHPTrS1a04nIubmYxWPo7x8G2ZmZjagNLwlSNJ5wGhgb+BN\nSbWWmBkRMTsiXpP0C+B0Sa8ArwNnAbdGxL9y3huA+4FLJB0HrA6cDJyTgxuAnwJHSDoF+CUpuNmX\n1PpUczpwkaTJwO2k0WLLAhf2QtXNzMy6zR2jq2t4EAR8mdTn5u+l9IOAi/PfY0iXqq4AlgauBw6v\nZYyIBZL2BH5CarV5kxS4nFDI84SkPUiBzpHAM8DBETGhkOey3Jp0Eumy2L+B3SLihR6qq5mZWY9z\nx+hqGh4ERcRiL8lFxFvAV/OjvTxPA3suZj83kYbBd5TnPFIHbTMzMxvA+kWfIDMzM7O+5iDIzMzM\nWpKDIDMzM2tJDoLMzMyamEeHVecgyMzMrMl5dFg1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrIm5Y3R1DoLMzMyanDtG\nV+MgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2ti7hhdnYMgMzOzJueO0dU4CDIzM7OW5CDIzMzM\nWpKDIDMzM2tJDoLMzMyamDtGV+cgyMzMrMm5Y3Q1DoLMzMysJTkIMjMzs5bkIMjMzMxakoMgMzOz\nJuaO0dU5CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMzFqSgyAzM7Mm5tFh1TkI\nMjMza3IeHVaNgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMysibljdHUOgszMzJqcO0ZX4yDIzMzM\nWpKDIDMzM2tJDoLMzMysJfVIECRpkKRNJa3UE/szMzOzznHH6OoqBUGSzpB0cP57EHATMAV4WtIO\nPVc8MzMzWxx3jK6makvQvsBd+e+9gHWB9wJjge/1QLnMzMzMelXVIGgVYFr++6PA5RHxEPBL4AM9\nUTAzMzOz3lQ1CJoObJQvhX0EmJDTlwXm90TBzMzMzHrT4Irb/Qq4DPgPEMBfcvqWwAM9UC4zMzO
z\nXlUpCIqIEyXdC6xJuhT2Vl41H/hhTxXOzMzMFs8do6upPEQ+Iq6IiLHAi4W0iyLij13dl6RtJV0t\n6VlJCyTPSTkjAAAgAElEQVTtXVr/q5xefFxbyrOSpN9ImiHpFUkXSFqulGdjSTdLmiXpSUnH1inL\nJyVNzXnukrR7V+tjZmZm/V/VIfKDJH1b0rPAG5Lek9NPrg2d76LlgH8Dh5Mur9VzHbAqsFp+jC6t\n/y0wAtgJ2APYDji/UOYVgPHA48BI4FjgREmHFPJsnffzc2BT4CrgKkkbVaiTmZmZ9WNVW4L+F/g8\n8HVgTiH9XuCQeht0JCKuj4j/i4irgPYa9d6KiBci4vn8mFFbIem9wG7AwRFxR0TcBnwV+JSk1XK2\nzwBL5jxTI+Iy4Czg6MIxjgKui4jTI+LBiDiBdP+jI7paJzMzM+vfqgZBnwO+FBG/oe1osLtI9wvq\nDTtImi7pAUnnSXpnYd3WwCsRcWchbQKpVWnLvLwVcHNEzCvkGQ9sKGloYT8TaGt8TjczM+tXfLfo\n7qkaBL0beKSd/S1ZvTjtuo4UeP03qfVpe+Ba6e2uYKsBzxc3iIj5wMt5XS3P9NJ+pxfWdZRnNczM\nzPopd4yupuoQ+fuBbYEnS+n7Ancumr178qWrmvsk3QM8CuwA/K2DTUX7fYxq6zuTp8NYe8yYMQwd\nOrRN2ujRoxk9utxtyczMrPWMGzeOcePGtUmbMWNGO7n7TtUg6CTgIknvJrX+fELShqTWmj17qnDt\niYjHJb0IrE8KgqYBw4p58o0cV2Lhna2nkTpWFw0jBTjTF5On3DrUxtixYxk5cmQXa2FmZtYa6jUM\nTJkyhVGjRjWoREmly2F5GPyewM7Am6SgaASwV0T8paNte4KkNYCVSTdrBJgErChps0K2nUitOLcX\n8myXg6OaXYEHC52sJ+XtinbJ6WZmZjaAVG0JIiJuIQUI3Zbv57M+C0eGvUfSJqQ+PS8DJwC/J7XU\nrA+cAjxE6rRMRDwgaTzwc0lfAZYCzgbGRUStJei3wP8Bv5R0CmmOsyNJI8JqzgRuknQ0cA1pGP4o\n4Is9UU8zMzPrP6reJ2gLSVvWSd9S0uYVdrk5qS/RZNLlqR+ThqZ/hzT6bGPgj8CDpHv4/AvYLiLm\nFvZxAGnKjgnAn4GbgUNrKyPiNdIw+nWAO4AfASdGxC8KeSaRAp8vke5b9AngYxFxf4U6mZmZ9SqP\nDuueqi1B5wKnAv8spb8bOI6Fw9I7JSJuouOA7COd2MerpHsBdZTnHtLIso7y/J7U6mRmZtYUPDqs\nmqpD5DcitdSU3ZnXmZmZmfVrVYOgt1h0FBXA6sC8OulmZmZm/UrVIOgG4AeFOy0jaUXg+0Cvjw4z\nMzMz666qfYKOIXU8flJS7eaIm5Lup/PZniiYmZmZdcwdo7unUhAUEc9K2hj4NLAJMAv4FWlI+twO\nNzYzM7Me5Y7R1XTnPkFvAj/rwbKYmZmZ9ZnKQZCk4aS5u4ZR6lsUESd1r1hmZmZmvatSECTpi8BP\ngBdJd3EuXpUM0jQaZmZmZv1W1ZagbwH/GxGn9GRhzMzMrPPcMbp7qg6RXwm4vCcLYmZmZtW4Y3Q1\nVYOgy0kzsJuZmZk1paqXwx4BTpa0FXAP0GZYfESc1d2CmZmZmfWmqkHQl4A3SJORlickDcBBkJmZ\nmfVrVW+WuG5PF8TMzMy6xh2ju6dqnyAAJC0laUNJle83ZGZmZt3jjtHVVAqCJC0r6RfATOA+YK2c\nfrakb/Rg+czMzMx6RdWWoB+Q5gzbAZhdSJ8A7N/NMpmZmZn1uqqXsT4O7B8R/5BUvCJ5H7Be94tl\nZmZm1ruqtgT9F/B8nfTlaDuFhpmZmfUSd4zunqpB0B3AHoXl2stwCDCpWyUyMzOzLnHH6GqqXg47\nHrhO0kZ5H0dJeh+wNYveN8jMzMys36nUEhQRt5A6Rg8m3TF
6V2A6sHVETO654pmZmZn1ji63BOV7\nAh0AjI+IL/Z8kczMzMx6X5dbgiJiHvBTYJmeL46ZmZlZ36jaMfp2YLOeLIiZmZl1jUeHdU/VjtHn\nAT+WtAYwGXizuDIi7u5uwczMzKxzPDqsmqpB0KX5uThbfADKz4O6UygzMzOz3lY1CPIs8mZmZtbU\nKgVBEfFkTxfEzMzMrC9VCoIkfa6j9RFxcbXimJmZWWe5Y3T3VL0cdmZpeUlgWWAOMBNwEGRmZtZH\n3DG6mqqXw1Yqp0naAPgJ8KPuFsrMzMyst1W9T9AiIuJh4Bss2kpkZmZm1u/0WBCUzQPe1cP7NDMz\nM+txVTtG711OAlYHjgBu7W6hzMzMbPHcMbp7qnaMvqq0HMALwF+Br3WrRGZmZtYl7hhdTdWO0T19\nGc3MzMysTzmYMTMzs5ZUKQiSdIWkb9RJP1bS5d0vlpmZmVnvqtoStD1wTZ3064HtqhfHzMzMOssd\no7unahC0POnu0GVzgSHVi2NmZmZd5Y7R1VQNgu4B9q+T/ing/urFMTMzM+sbVYfInwz8QdJ6pGHx\nADsBo4FP9kTBzMzMzHpT1SHyf5L0ceB4YF9gFnA3sHNE3NSD5TMzMzPrFVVbgoiIa6jfOdrMzMz6\ngDtGd0/VIfJbSNqyTvqWkjbvfrHMzMyss9wxupqqHaPPBdask/7uvM7MzMysX6saBG0ETKmTfmde\nZ2ZmZtavVQ2C3gJWrZO+OjCvenHMzMzM+kbVIOgG4AeShtYSJK0IfB/4S08UzMzMzKw3VR0ddgxw\nM/CkpDtz2qbAdOCzPVEwMzMz65hHh3VPpZagiHgW2Bj4OukO0ZOBo4APRMTTXd2fpG0lXS3pWUkL\nJO1dJ89Jkp6TNFPSXyStX1q/kqTfSJoh6RVJF0harpRnY0k3S5ol6UlJx9Y5ziclTc157pK0e1fr\nY2Zm1pc8OqyaqpfDiIg3I+JnEXF4RBwTERdHxNyKu1sO+DdwOLBIXCvpOOAI4FDgg8CbwHhJSxWy\n/RYYQbpz9R6kiVzPL+xjBWA88DgwEjgWOFHSIYU8W+f9/JzUsnUVcJUkd/Y2MzMbYCpdDpP0SdIU\nGcNJQcvDwG8j4ooq+4uI60kz0CPVjWePAk6OiD/lPJ8jXXr7OHCZpBHAbsCoiLgz5/kqcI2kYyJi\nGvAZYEng4IiYB0yVtBlwNHBB4TjXRcTpefkESbuSArDDqtTNzMzM+qcutQRJWkLS74DfkYbCPwI8\nBryPFIxc2k4QU5mkdYHVgBtraRHxGvBPYOuctBXwSi0AyiaQArQtC3luzgFQzXhgw0IH763zdpTy\nbI2ZmZkNKF1tCToK2BnYOyL+XFyR+/H8Kuc5o2eKB6QAKEgtP0XT87panueLKyNivqSXS3keq7OP\n2roZ+bmj45iZmfUb7hjdPV3tE3QQcGw5AAKIiKtJHaW/0BMF6wRRp/9QF/Ook3n8NjMzs37LHaOr\n6WpL0AYsermoaAJwTvXi1DWNFIisSttWmmGkO1TX8gwrbiRpELBSXlfLU77B4zDatjK1l6fcOtTG\nmDFjGDp0aJu00aNHM3r06I42MzMzawnjxo1j3LhxbdJmzJjRoNIs1NUgaBawIvBUO+uHALO7VaKS\niHhc0jTSqK+7ASQNIfX1qc1TNglYUdJmhX5BO5GCp9sLeb4raVBEzM9puwIPRsSMQp6dgLMKRdgl\np7dr7NixjBw5smoVzczMBrR6DQNTpkxh1KhRDSpR0tXLYZOAr3Sw/nAWEzDUI2k5SZtI2jQnvScv\n1yZpPQP4lqS9JH0AuBh4BvgjQEQ8QOrA/PM8w/2HgbOBcXlkGKSh73OAX0raSNL+wJHAjwtFORPY\nXdLRkjaUdCIwip5v3TIzM7MG62pL0PeAv0taGTgNeIDU2jIC+BrwMWDHCuXYHPgb6dJUsDAwuQj4\nQkScKmlZ0n1/VgQmArt
HxJzCPg4gBSsTgAXAFaRO2kAaUSZpt5znDuBF4MSI+EUhzyRJo3M9v0ca\n+v+xiLi/Qp3MzMx6lTtGd0+XgqCIuC23oPwM2Ke0+hVgdETc2tVCRMRNLKZVKiJOBE7sYP2rpHsB\ndbSPe4DtF5Pn98DvO8pjZmbWn7hjdDVdvlliRFwpaTypP83wnPwQcENEzOzJwpmZmZn1lkp3jI6I\nmZJ2Bv4vIl7u4TKZmZmZ9bqu3jF6jcLiAcDyOf2eQidmMzMzs36vqy1BD0h6CbgVWAZYkzRcfh3S\nvFxmZmbWR9wxunu6OkR+KPBJYHLe9lpJDwFLA7tJ8vQSZmZmfcwdo6vpahC0ZETcHhE/Jt04cTPS\nVBrzSdNlPCrpwR4uo5mZmVmP6+rlsNck3Um6HLYUsGxE3CppHrA/6QaGH+zhMpqZmZn1uK62BL0L\n+C7wFimAukPSRFJANBKIiLilZ4toZmZm1vO6FARFxIsR8aeI+CYwE9iCND1FkO4g/Zqkm3q+mGZm\nZlbmjtHd09WWoLIZEXEZMBf4b2Bd4Lxul8rMzMw6zR2jq6l0s8RsY+DZ/PeTwNw8Wenvul0qMzMz\ns15WOQiKiKcLf7+/Z4pjZmZm1je6eznMzMzMrCk5CDIzM2tS7hjdPQ6CzMzMmpw7RlfjIMjMzMxa\nkoMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEl5dFj3OAgyMzNrch4dVo2DIDMzM2tJDoLMzMys\nJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzMrCU5CDIzM7OW5CDIzMys\nSbljdPc4CDIzM2ty7hhdjYMgMzMza0kOgszMzKwlOQgyMzOzluQgyMzMrEm5Y3T3OAgyMzNrcu4Y\nXY2DIDMzM2tJDoLMzMysJTkIMjMzs5bkIMjMzKxJuWN09zgIMjMza3LuGF2NgyAzMzNrSQ6CzMzM\nrCU5CDIzM7OW5CDIzMzMWpKDIDMzsybl0WHd4yDIzMysyXl0WDUOgszMzKwlOQgyMzOzluQgyMzM\nzFqSgyAzM7Mm5Y7R3dMUQZCkEyQtKD3uL6xfWtK5kl6U9LqkKyQNK+1jTUnXSHpT0jRJp0paopRn\nB0mTJc2W9JCkA/uqjmZmZlW5Y3Q1TREEZfcCqwKr5cc2hXVnAHsA+wDbAe8Cfl9bmYOda4HBwFbA\ngcDngZMKedYB/gzcCGwCnAlcIGmX3qmOmZmZNdLgRhegC+ZFxAvlRElDgC8An4qIm3LaQcBUSR+M\niNuB3YD3AjtGxIvAPZK+DfxQ0okRMQ/4CvBYRHw97/pBSdsAY4C/9HrtzMzMrE81U0vQBpKelfSo\npF9LWjOnjyIFczfWMkbEg8BTwNY5aSvgnhwA1YwHhgLvK+SZUDrm+MI+zMzMbABpliDoH6TLV7sB\nXwbWBW6WtBzp0ticiHittM30vI78PL3OejqRZ4ikpbtbATMzs57mjtHd0xSXwyJifGHxXkm3A08C\n+wGz29lMQGfeHh3lUSfymJmZNZQ7RlfTFEFQWUTMkPQQsD7pEtZSkoaUWoOGsbBlZxqwRWk3qxbW\n1Z5XLeUZBrwWEXM6Ks+YMWMYOnRom7TRo0czevTozlTHzMxsQBs3bhzjxo1rkzZjxowGlWahpgyC\nJC0PrAdcBEwG5gE7AVfm9cOBtYDb8iaTgOMlrVLoF7QrMAOYWsize+lQu+b0Do0dO5aRI0dWro+Z\nmdlAVq9hYMqUKYwaNapBJUqaok+QpB9J2k7S2pI+RAp25gGX5tafXwCn5/v8jAJ+BdwaEf/Ku7gB\nuB+4RNLGknYDTgbOiYi5Oc9PgfUknSJpQ0mHAfsCp/ddTc3MzKyvNEtL0BrAb4GVgReAW4CtIuKl\nvH4MMB+4AlgauB44vLZxRCyQtCfwE1Lr0JvAhcAJhTxPSNqDFPQcCTwDHBwR5RFjZmZm/
YI7RndP\nUwRBEdFh55qIeAv4an60l+dpYM/F7Ocm0pB7MzOzpuGO0dU0xeUwMzMzs57mIMjMzMxakoMgMzMz\na0kOgszMzJqUO0Z3j4MgMzOzJueO0dU4CDIzM7OW5CDIzMzMWpKDIDMzM2tJDoLMzMysJTkIMjMz\na1IeHdY9DoLMzMyanEeHVeMgyMzMzFqSgyAzMzNrSQ6CzMzMrCU5CDIzM2tS7hjdPQ6CzMzMmtSC\nBel50KDGlqNZOQgyMzNrUrUgaAl/m1fi02ZmZtakHAR1j0+bmZlZk6oFQb5PUDUOgszMzJqUW4K6\nx6fNzMysSTkI6h6fNjMzsyblIKh7fNrMzMyalIOg7vFpMzMza1IOgrrHp83MzKxJOQjqHp82MzOz\nJlWbNsNBUDU+bWZmZk3K9wnqHgdBZmZmTcqXw7rHp83MzKxJOQjqHp82MzOzJjV/fnp2EFSNT5uZ\nmVmTmjkzPS+7bGPL0awcBJmZmTWpOXPS89JLN7YczcpBkJmZWZNyENQ9DoLMzMya1Ftvpeellmps\nOZqVgyAzM7MmVWsJchBUjYMgMzOzJlVrCVpyycaWo1k5CDIzM2tSc+akViDfMboaB0FmZmZNas4c\nd4ruDgdBZmZmTeqtt9wfqDscBJmZmTWp2uUwq8ZBkJmZWZN66y1fDusOB0FmZmZNyi1B3eMgyMzM\nrEm5T1D3OAgyMzNrUi+8ACuv3OhSNC8HQWZmZk3qqadg7bUbXYrm5SDIzMysST35JKy1VqNL0bwc\nBJmZmTWh2bPhuedgnXUaXZLm5SDIzMysCT36KETA8OGNLknzchBkZmbWhO69Nz1vsEFjy9HMHASZ\nmZk1oauvhve/H1ZbrdElaV4OgszMzJrMSy/BlVfC6NGNLklzcxBUh6TDJT0uaZakf0jaotFl6g/G\njRvX6CL0CddzYHE9BxbXMznnnNQf6Etf6qMCDVAOgkok7Q/8GDgB2Ay4CxgvaZWGFqwf8IfPwOJ6\nDiyu58DSXj3nzYMLL4TvfhcOPhhWaflvpu5xELSoMcD5EXFxRDwAfBmYCXyhscUyM7NWFAGPPAJn\nnAEbbggHHQT77QenndbokjW/wY0uQH8iaUlgFPD9WlpEhKQJwNYNK5iZmQ04ETBzJrz+Orz6ano8\n/zxMnw4PPQSHHAL33Zcer78OgwfDJz4BV1wBm23W6NIPDA6C2loFGARML6VPBzZsb6N7701NlJ0V\n0fWCVdmmp4/18stw2219c6ye3qYr2730Etx8c98cqxHb1LZ78UW48ca+OVYjt3n+eRg/vm+O1Rvb\ndXab6dPhmmv65ljd3QZg/vz0iFi4j9rfHT2efjpdDlpcvvL+FixIx1uwYOFy+e/21s2fD3Pnps/5\nWrnLj7feSnlqjzlzFj6KabNnw6xZC5/rnT8JllwSll8eRoyAj38cNt4YPvQhGDq02vm2+hwEdY6A\nev/qywAceODUvi1Nw8zgwx+e0uhC9IEZbL99a9Rz551bo54f+Uhr1HPPPVujngcd1HE9pfQo/i2l\nlhQJllhiYVrt73ppxb8HD4ZBg9LyoEFt/15iiRS0DB688Pkd74AhQxYu1x5LL73wscwysOyy6bHC\nCinoeec7U6Bz7LEzGDu2bT0ffbS3zmljTJ369nfnMo0qg6JqGD8A5cthM4F9IuLqQvqFwNCI+J9S\n/gOA3/RpIc3MzAaWT0fEbxtxYLcEFUTEXEmTgZ2AqwEkKS+fVWeT8cCngSeA2X1UTDMzs4FgGWAd\n0ndpQ7glqETSfsBFwKHA7aTRYvsC742IFxpZNjMzM+s5bgkqiYjL8j2BTgJWBf4N7OYAyMzMbGBx\nS5CZmZm1JN8s0czMzFqSg6BuaKY5xiR9U9Ltkl6TNF3SlZKGl/IsLelcSS9Kel3SFZKGlfKsKeka\nSW9KmibpVElLlPLsIGmypNmSHpJ0YF/UsZ5c7wWST
i+kDYh6SnqXpEtyPWZKukvSyFKekyQ9l9f/\nRdL6pfUrSfqNpBmSXpF0gaTlSnk2lnRzfp8/KenYvqhfPvYSkk6W9FiuwyOSvlUnX9PVU9K2kq6W\n9Gx+j+7dqHpJ+qSkqTnPXZJ274t6Shos6RRJd0t6I+e5SNLqA6medfKen/McORDrKWmEpD9KejW/\nrv+UtEZhff/5DI4IPyo8gP1JI8I+B7wXOB94GVil0WVrp7zXAp8FRgAfAP5MGtX2jkKen+S07Unz\npt0GTCysXwK4h9ST/wPAbsDzwHcLedYB3gBOJd1g8nBgLrBLA+q8BfAYcCdw+kCqJ7Ai8DhwAeku\n52sDOwPrFvIcl9+TewHvB64CHgWWKuS5DpgCbA58CHgI+HVh/QrAf0iDBUYA+wFvAof0UT2Pz+f+\nI8BawCeA14Ajmr2euU4nAR8H5gN7l9b3Sb1Id8OfCxyd38vfAd4CNurtegJD8v/ZPsAGwAeBfwC3\nl/bR1PUs5fs46TPpaeDIgVZPYD3gReAHwMbAusCeFL4b6Uefwb36ATaQH/kf9czCsoBngK83umyd\nLP8qwAJgm7w8JP+j/E8hz4Y5zwfz8u75TVZ8Mx8KvAIMzsunAHeXjjUOuLaP67c88CDw38DfyEHQ\nQKkn8EPgpsXkeQ4YU1geAswC9svLI3K9Nyvk2Q2YB6yWl7+SP9AGF/L8ALi/j+r5J+DnpbQrgIsH\nWD0XsOiXSZ/UC7gUuLp07EnAeX1Rzzp5Nid9ua4x0OoJvBt4KtfpcQpBEOnHdNPXk/Q5eFEH2/Sr\nz2BfDqtAC+cYe3vSgUivQDPNMbYi6S7YL+flUaTRgsU6PUj6h63VaSvgnoh4sbCf8cBQ4H2FPBNK\nxxpP35+Xc4E/RcRfS+mbMzDquRdwh6TLlC5vTpF0SG2lpHWB1Whbz9eAf9K2nq9ExJ2F/U4gvS+2\nLOS5OSKKE8OMBzaU1Bc38L8N2EnSBgCSNgE+TGrZHEj1bKOP67U1/eN/tqb22fRqXh4Q9ZQk4GLg\n1IioN83A1jR5PXMd9wAelnR9/mz6h6SPFbL1q+8aB0HVdDTH2Gp9X5yuyW/UM4BbIuL+nLwaMCd/\n0BYV67Qa9etMJ/IMkbR0d8veGZI+BWwKfLPO6lUZGPV8D+lX4YPArsBPgbMkfaZQvminjMU6PF9c\nGRHzSYFxV85Fb/oh8DvgAUlzgMnAGRFxaaEMA6GeZX1Zr/by9Hm98//OD4HfRsQbOXmg1PMbpM+e\nc9pZPxDqOYzUCn8c6YfKLsCVwB8kbVsoX7/5DPZ9gnpWe3OM9TfnARsB23Qib2fr1FEedSJPj8id\n784gXRee25VNaaJ6kn7A3B4R387Ld0l6Hykw+nUH23WmnovL05f13B84APgUcD8puD1T0nMRcUkH\n2zVbPTurp+rVmTx9Wm9Jg4HL83EP68wmNEk9JY0CjiT1f+ny5jRJPVnYsHJVRNRmWbhb0oeALwMT\nO9i2IZ/Bbgmq5kXSNetVS+nDWDQy7VcknQN8FNghIp4rrJoGLCVpSGmTYp2msWidVy2say/PMOC1\niJjTnbJ30ijgv4DJkuZKmkvqfHdUbkmYDiw9AOr5H6DcpD6V1HkYUvlEx+/RaXn5bZIGASux+HpC\n37zXTwV+EBGXR8R9EfEbYCwLW/kGSj3LertexVam9vL0Wb0LAdCawK6FViAYGPXchvS59HThc2lt\n4HRJjxXK1+z1fJHUh2lxn0395rvGQVAFuYWhNscY0GaOsdsaVa7FyQHQx4AdI+Kp0urJpDdvsU7D\nSW/cWp0mAR9QuqN2za7ADBa+6ScV91HIM6kn6tAJE0ijCTYFNsmPO0itI7W/59L89byV1JmwaEPg\nSYCIeJz0IVGs5xBS34JiPVeUVPx1uhPpy/f2Qp7t8odxza7AgxExo2eq0qFlWfRX3QLyZ9cAqmcb\nfVyveu/lXeij9
3IhAHoPsFNEvFLKMhDqeTFppNQmhcdzpCB/t0L5mrqe+bvxXyz62TSc/NlEf/uu\n6ene4q3yIA1NnEXbIfIvAf/V6LK1U97zSD3rtyVFz7XHMqU8jwM7kFpUbmXRYYt3kYZxbkz6550O\nnFzIsw5p2OIppH+Ew4A5wM4NrPvbo8MGSj1JHbzfIrWIrEe6ZPQ68KlCnq/n9+RepMDwKuBh2g6x\nvpYUGG5B6nD8IHBJYf0Q0of1RaRLqPvneh/cR/X8FanD5EdJv5z/h9Rv4vvNXk9gOdKX4aakwO7/\n5eU1+7JepI6kc1g4pPpE0u0/empIdbv1JPWt/CPpC/IDtP1sWnKg1LOd/G1Ghw2UepKGzs8GDiF9\nNh2Ry7N1YR/95jO41z/EBvIjn/QnSMHQJGDzRpepg7IuIF3CKz8+V8izNHA2qUnzddKvs2Gl/axJ\nusfQG/lNeQqwRCnP9qRofxbpQ/uzDa77X2kbBA2IepICg7uBmcB9wBfq5Dkxf2jOJI2cWL+0fkVS\nK9kMUpD8c2DZUp4PADflfTwFHNOHdVwOOD1/YL6Zz/N3KAwRbtZ65vdPvf/LX/Z1vUj36Xkgv5fv\nJs2X2Ov1JAW25XW15e0GSj3byf8YiwZBA6KewOdJ9zh6k3Tfoz1L++g3n8GeO8zMzMxakvsEmZmZ\nWUtyEGRmZmYtyUGQmZmZtSQHQWZmZtaSHASZmZlZS3IQZGZmZi3JQZCZmZm1JAdBZmZm1pIcBJmZ\nmVlLchBkZpZJ+puk0xtdDjPrGw6CzKxfkHSopNckLVFIW07SXEk3lvLuKGmBpHX6upxmNnA4CDKz\n/uJvpAlTNy+kbQv8B9hK0lKF9O2BJyPiia4eRNLg7hTSzAYOB0Fm1i9ExEOkgGeHQvIOwFWkWeS3\nKqX/DUDSmpL+KOl1STMk/U7SsFpGSSdIulPSwZIeA2bn9GUlXZy3e1bS0eUySTpM0kOSZkmaJumy\nnq21mTWSgyAz60/+DuxYWN4xp91US5e0NLAl8Nec5/+3by+hOkVhHMafd0BELmORHAkpch0wOOSS\nThIjKSkTM2WgDJSUmUsJQ0NlSsmAnMtQSZLLwUlyLUJxZngN1v7Y4ZRcstnPb/at295r8vVvrXef\nBSZRTo1WA13Ama/WnQlsBjYBC6q2w9WcDcBaSrBa1JkQEYuBY8A+YBawDhj4xf1JahCPhSU1SR9w\ntKoLGkcJLAPAaGAncABYXv3ui4g1wDxgemY+BYiIbcDNiFiUmVerdUcB2zLzVTVmHLAD2JqZfVXb\ndseoTiMAAAHGSURBVOBx7V2mAu+A85k5DDwCrv+hfUv6CzwJktQknbqgJcAK4G5mvqScBC2r6oK6\ngaHMfAzMBh51AhBAZt4G3gBzaus+7ASgShclGF2pzXsNDNbGXAQeAg+qa7OtETH2t+1U0l9nCJLU\nGJk5BDyhXH2tpIQfMvMZ5SRmObV6ICCA/M5SX7cPf6efEeZ23uUdsBDYAjylnEJdj4gJP7whSY1m\nCJLUNL2UANRNuR7rGADWA0v5EoJuAdMiYkpnUETMBSZWfSO5D7ynVmwdEZMptT+fZebHzLycmXuB\n+cB0YNVP7ElSA1kTJKlpeoGTlP+n/lr7AHCCco3VB5CZlyLiBnA6InZXfSeB3sy8NtIDMnM4Ik4B\nhyLiFfACOAh86IyJiB5gRvXc10AP5QRp8NsVJf2LDEGSmqYXGAPczswXtfZ+YDxwJzOf19o3Aser\n/o/ABWDXDzxnD6X+6BzwFjgC1K+63lC+KNtfvc89YEtVcyTpPxCZI16JS5Ik/besCZIkSa1kCJIk\nSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1kCJIkSa1k\nCJIkSa30CdNytzqRWg5AAAAAAElFTkSuQmCC\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv5nU63Bp6dw+1N0+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0vH+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn
8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EAe01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9FxKxc3z8lPUz6+fhFKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AApyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+
w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwKb\nSnpz6Rh7YlpE3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSP
irREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnSNPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8C9oqI4qyQdtJt8ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpN
mZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX1FwBJw0hjfc7O8XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4
C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgAElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKRGt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBw
L356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNgDuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzMzOz1anWMUEASFpb0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx
6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5mS3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJzleAp4DDImJqIeayfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZfOYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qv
pnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpTC5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJS4nJCIeYxSfuQ\nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvAkfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+
TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0mQmZlZE1qyBNZeu9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9QfVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmb
WZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDLwGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3AXZXvfgiY3+q4jCBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAtzqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3Qt
MBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4Lx8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eapPOrdZJ0rqRnJfVJ6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacBtle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR
0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -340,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 231, "metadata": { "collapsed": true }, @@ -358,7 +362,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 232, "metadata": { "collapsed": true }, @@ -372,7 +376,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 233, "metadata": { "collapsed": false }, @@ -381,9 +385,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 166\n", - "Number of unique tokens: 681\n", - "Number of documents: 90\n" + "Number of authors: 536\n", + "Number of unique tokens: 2245\n", + "Number of documents: 286\n" ] } ], @@ -553,7 +557,7 @@ }, { "cell_type": "code", - "execution_count": 130, + "execution_count": 234, "metadata": { "collapsed": true }, @@ -565,7 +569,7 @@ }, { "cell_type": "code", - "execution_count": 131, + "execution_count": 237, "metadata": { "collapsed": false }, @@ -574,21 +578,56 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1min 7s, sys: 8 ms, total: 1min 7s\n", - "Wall time: 1min 7s\n" + "CPU times: user 26min 26s, sys: 340 ms, total: 26min 26s\n", + "Wall time: 26min 26s\n" ] 
} ], "source": [ "%time model_online = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", " eval_every=1, random_state=1, var_lambda=None)" ] }, { "cell_type": "code", - "execution_count": 132, + "execution_count": 30, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(minibatchatvb)\n", + "MinibatchAtVb = minibatchatvb.MinibatchAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 2min 1s, sys: 24 ms, total: 2min 1s\n", + "Wall time: 2min 1s\n" + ] + } + ], + "source": [ + "%time model_online = MinibatchAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None, chunksize=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 238, "metadata": { "collapsed": false, "scrolled": false @@ -598,28 +637,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.065*role + 0.054*firing + 0.041*stimulus + 0.035*potential + 0.030*connectivity + 0.030*temporal + 0.028*activity + 0.024*cycle + 0.023*action + 0.019*strength'),\n", + " '0.109*processor + 0.061*cm + 0.057*link + 0.047*update + 0.046*list + 0.038*temperature + 0.031*grid + 0.026*machine + 0.023*serial + 0.022*matched'),\n", " (1,\n", - " '0.078*image + 0.036*visual + 0.035*field + 0.028*location + 0.025*map + 0.021*position + 0.020*surface + 0.020*center + 0.020*human + 0.019*computed'),\n", + " '0.065*chain + 0.054*velocity + 
0.036*motion + 0.032*noise + 0.022*detection + 0.020*filter + 0.014*resolution + 0.014*gaussian + 0.013*real_time + 0.012*reconstruction'),\n", " (2,\n", - " '0.047*loop + 0.031*energy + 0.024*device + 0.021*activation + 0.021*interconnection + 0.019*vi + 0.019*path + 0.018*hardware + 0.016*circuit + 0.014*analog'),\n", + " '0.018*map + 0.012*region + 0.011*field + 0.011*cluster + 0.011*human + 0.010*receptive + 0.010*receptive_field + 0.008*orientation + 0.008*environment + 0.008*domain'),\n", " (3,\n", - " '0.043*capacity + 0.041*sequence + 0.036*bit + 0.028*associative_memory + 0.025*stage + 0.022*eq + 0.020*code + 0.015*bound + 0.013*delay + 0.012*xi'),\n", + " '0.046*memory + 0.031*string + 0.025*symbol + 0.021*capacity + 0.021*associative + 0.020*associative_memory + 0.020*sequence + 0.018*letter + 0.017*tolerance + 0.016*production'),\n", " (4,\n", - " '0.054*hopfield + 0.050*processor + 0.049*code + 0.047*matrix + 0.042*convergence + 0.039*stored + 0.025*product + 0.024*address + 0.023*storage + 0.021*column'),\n", + " '0.020*chip + 0.014*voltage + 0.014*circuit + 0.013*synapse + 0.013*transistor + 0.012*pulse + 0.012*analog + 0.010*action + 0.010*tree + 0.009*current'),\n", " (5,\n", - " '0.070*training + 0.069*hidden + 0.042*hidden_unit + 0.030*trained + 0.025*back + 0.024*decision + 0.023*back_propagation + 0.021*gradient + 0.019*propagation + 0.018*node'),\n", + " '0.026*classifier + 0.014*speech + 0.013*hidden + 0.011*recognition + 0.011*frame + 0.011*node + 0.010*hidden_unit + 0.009*propagation + 0.008*speaker + 0.008*back_propagation'),\n", " (6,\n", - " '0.086*cell + 0.030*stimulus + 0.029*firing + 0.021*probability + 0.018*synaptic + 0.017*activity + 0.017*phase + 0.017*feedback + 0.016*via + 0.015*synapsis'),\n", + " '0.025*cell + 0.023*fiber + 0.019*firing + 0.017*spike + 0.016*cortex + 0.016*axon + 0.016*eye + 0.016*cortical + 0.012*stimulus + 0.012*dendritic'),\n", " (7,\n", - " '0.045*representation + 0.021*connectionist + 0.020*move + 
0.017*feature + 0.015*scheme + 0.012*represented + 0.010*mcclelland + 0.010*representing + 0.010*path + 0.009*represented_by'),\n", + " '0.004*vector + 0.003*eigenvalue + 0.003*attractor + 0.003*matrix + 0.003*energy + 0.003*np + 0.003*graph + 0.003*optimization + 0.003*fixed_point + 0.003*polynomial'),\n", " (8,\n", - " '0.073*node + 0.023*target + 0.017*neural_net + 0.016*standard + 0.015*mapping + 0.015*learned + 0.014*log + 0.014*learning_algorithm + 0.012*back_propagation + 0.012*activation'),\n", + " '0.110*image + 0.089*object + 0.043*pixel + 0.034*visual + 0.029*contour + 0.029*vision + 0.022*segmentation + 0.018*poggio + 0.018*rotation + 0.017*spectral'),\n", " (9,\n", - " '0.031*constraint + 0.027*noise + 0.026*minimum + 0.021*iv + 0.016*optimization + 0.013*search + 0.013*differential + 0.011*find + 0.011*recall + 0.010*distance')]" + " '0.076*motor + 0.066*controller + 0.052*charge + 0.050*sensor + 0.043*gain + 0.042*control + 0.034*movement + 0.034*body + 0.028*transfer_function + 0.027*loop')]" ] }, - "execution_count": 132, + "execution_count": 238, "metadata": {}, "output_type": "execute_result" } @@ -1392,7 +1431,7 @@ }, { "cell_type": "code", - "execution_count": 158, + "execution_count": 239, "metadata": { "collapsed": true }, @@ -1404,19 +1443,19 @@ }, { "cell_type": "code", - "execution_count": 151, + "execution_count": 243, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ "lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10,\n", - " iterations=100, alpha='auto', eta='symmetric')" + " iterations=10, alpha='auto', eta='symmetric')" ] }, { "cell_type": "code", - "execution_count": 154, + "execution_count": 244, "metadata": { "collapsed": false }, @@ -1425,28 +1464,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.020*memory + 0.015*chip + 0.010*synapse + 0.009*hidden + 0.009*energy + 0.008*activation + 0.008*bit + 0.007*analog + 0.007*associative + 0.007*circuit'),\n", + " 
'0.025*hidden + 0.015*hidden_unit + 0.012*propagation + 0.009*back_propagation + 0.009*vector + 0.007*gradient + 0.006*constraint + 0.006*speech + 0.005*hidden_layer + 0.005*internal'),\n", " (1,\n", - " '0.013*node + 0.010*vector + 0.008*dynamic + 0.007*role + 0.006*matrix + 0.006*temporal + 0.006*sequence + 0.005*propagation + 0.005*action + 0.004*noise'),\n", + " '0.019*field + 0.009*visual + 0.009*receptive_field + 0.009*receptive + 0.006*image + 0.006*position + 0.005*center + 0.005*activation + 0.005*joint + 0.005*role'),\n", " (2,\n", - " '0.022*processor + 0.017*activation + 0.013*cycle + 0.011*path + 0.009*machine + 0.008*cm + 0.007*letter + 0.007*array + 0.006*update + 0.006*string'),\n", + " '0.022*classifier + 0.018*node + 0.012*recognition + 0.010*classification + 0.009*image + 0.009*class + 0.007*decision + 0.007*frame + 0.006*vector + 0.006*trained'),\n", " (3,\n", - " '0.017*node + 0.010*circuit + 0.008*threshold + 0.007*classifier + 0.007*probability + 0.006*distribution + 0.005*bit + 0.005*vector + 0.005*let + 0.004*polynomial'),\n", + " '0.009*hopfield + 0.009*energy + 0.008*vector + 0.006*matrix + 0.006*optimization + 0.006*probability + 0.006*let + 0.006*minimum + 0.005*equilibrium + 0.005*distribution'),\n", " (4,\n", - " '0.017*memory + 0.010*vector + 0.010*fig + 0.009*matrix + 0.009*delay + 0.008*cell + 0.008*cortex + 0.007*associative + 0.007*map + 0.006*dynamic'),\n", + " '0.031*memory + 0.013*object + 0.012*vector + 0.011*associative + 0.009*capacity + 0.009*matrix + 0.008*associative_memory + 0.007*delay + 0.007*image + 0.006*stored'),\n", " (5,\n", - " '0.028*cell + 0.010*response + 0.010*firing + 0.009*stimulus + 0.008*activity + 0.007*frequency + 0.007*potential + 0.007*current + 0.006*synaptic + 0.006*spike'),\n", + " '0.013*pulse + 0.007*noise + 0.007*response + 0.005*temporal + 0.005*potential + 0.005*fig + 0.005*cell + 0.004*current + 0.004*adaptive + 0.004*firing'),\n", " (6,\n", - " '0.016*hidden + 0.012*recognition + 
0.011*speech + 0.008*propagation + 0.007*classifier + 0.007*hidden_unit + 0.007*back_propagation + 0.006*trained + 0.006*hidden_layer + 0.005*training_set'),\n", + " '0.029*cell + 0.010*response + 0.009*stimulus + 0.009*activity + 0.009*firing + 0.008*cortex + 0.007*synaptic + 0.007*spike + 0.007*frequency + 0.006*map'),\n", " (7,\n", - " '0.013*vector + 0.008*code + 0.008*region + 0.007*chain + 0.007*class + 0.006*matrix + 0.005*probability + 0.005*hopfield + 0.005*let + 0.005*domain'),\n", + " '0.022*circuit + 0.016*chip + 0.013*analog + 0.011*voltage + 0.011*current + 0.009*synapse + 0.009*processor + 0.007*transistor + 0.007*synaptic + 0.006*vlsi'),\n", " (8,\n", - " '0.012*field + 0.007*constraint + 0.007*analog + 0.006*noise + 0.006*line + 0.006*image + 0.006*energy + 0.005*gradient + 0.005*velocity + 0.005*minimum'),\n", + " '0.009*action + 0.006*element + 0.006*environment + 0.006*sequence + 0.005*vector + 0.005*fig + 0.004*control + 0.004*forward + 0.004*language + 0.004*controller'),\n", " (9,\n", - " '0.032*image + 0.019*object + 0.008*visual + 0.008*vector + 0.008*joint + 0.007*fig + 0.006*pixel + 0.006*position + 0.006*region + 0.006*view')]" + " '0.012*hidden + 0.010*speech + 0.007*recognition + 0.007*generalization + 0.006*trained + 0.005*vector + 0.005*hidden_layer + 0.005*test + 0.005*node + 0.005*training_set')]" ] }, - "execution_count": 154, + "execution_count": 244, "metadata": {}, "output_type": "execute_result" } diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index d94986da61..01df446a10 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -17,6 +17,7 @@ from .phrases import Phrases from .normmodel import NormModel from .onlineatvb import OnlineAtVb +from .minibatchatvb import MinibatchAtVb from .atvb import AtVb from . 
import wrappers diff --git a/gensim/models/minibatchatvb.py b/gensim/models/minibatchatvb.py new file mode 100644 index 0000000000..cdde08eced --- /dev/null +++ b/gensim/models/minibatchatvb.py @@ -0,0 +1,516 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Author-topic model. +""" + +# NOTE: from what I understand, my name as well as Radim's should be attributed copyright above? + +import pdb +from pdb import set_trace as st + +import logging +import numpy +import numbers + +from gensim import utils, matutils +from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models import LdaModel +from six.moves import xrange +from scipy.special import gammaln + +from pprint import pprint + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger(__name__) + + +class MinibatchAtVb(LdaModel): + """ + Train the author-topic model using online variational Bayes. + """ + # TODO: inherit interfaces.TransformationABC. + + def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, + author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, + iterations=10, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, + eval_every=1, random_state=None, var_lambda=None, chunksize=2000): + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + # NOTE: this stuff is confusing to me (from LDA code). Why would id2word not be none, but have length 0? 
+ if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + logger.info('Vocabulary consists of %d words.', self.num_terms) + + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). + # If either doc2author or author2doc is missing, construct them from the other. + if doc2author is None: + # Make a mapping from document IDs to author IDs. + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + elif author2doc is None: + # Make a mapping from author IDs to document IDs. + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. 
+ author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + + self.author2doc = author2doc + self.doc2author = doc2author + + self.num_authors = len(self.author2doc) + logger.info('Number of authors: %d.', self.num_authors) + + self.id2author = id2author + if self.id2author is None: + logger.warning("no author id mapping provided; initializing from corpus, assuming identity") + author_integer_ids = [str(i) for i in range(len(author2doc))] + self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) + + # Make the reverse mapping, from author names to author IDs. + self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + + self.corpus = corpus + self.iterations = iterations + self.passes = passes + self.num_topics = num_topics + self.threshold = threshold + self.minimum_probability = minimum_probability + self.decay = decay + self.offset = offset + self.num_docs = len(corpus) + self.num_authors = len(author2doc) + self.eval_every = eval_every + self.chunksize = chunksize + self.random_state = random_state + + # NOTE: I don't think this necessarily is a good way to initialize the topics. + self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + + self.random_state = get_random_state(random_state) + + if corpus is not None: + self.inference(corpus, var_lambda=var_lambda) + + def rho(self, t): + return pow(self.offset + t, -self.decay) + + def inference(self, corpus=None, var_lambda=None): + if corpus is None: + # TODO: I can't remember why I used "copy()" here. + corpus = self.corpus.copy() + + self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. + + logger.info('Starting inference. 
Training on %d documents.', len(corpus)) + + # Whether or not to evaluate bound and log probability, respectively. + bound_eval = True + logprob_eval = False + + if var_lambda is None: + self.optimize_lambda = True + else: + # We have topics from LDA, thus we do not train the topics. + self.optimize_lambda = False + + # Initial values of gamma and lambda. + # Parameters of gamma distribution same as in `ldamodel`. + var_gamma = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + tilde_gamma = var_gamma.copy() + self.var_gamma = var_gamma + + if var_lambda is None: + var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + tilde_lambda = var_lambda.copy() + else: + self.norm_lambda = var_lambda.copy() + for k in xrange(self.num_topics): + self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + + self.var_lambda = var_lambda + + # TODO: consider making phi sparse. Each document does not contain all terms. + var_phi = numpy.zeros((self.num_terms, self.num_topics)) + + # Initialize dirichlet expectations. + Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + t = 0 + if self.eval_every > 0: + if bound_eval: + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + for _pass in xrange(self.passes): + for chunk_no, chunk in enumerate(utils.grouper(corpus, self.chunksize, as_numpy=False)): + converged = 0 # Number of documents converged for current pass over corpus. 
+ rhot = self.rho(chunk_no + _pass) + for d, doc in enumerate(chunk): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] # List of author IDs for document d. + + # Initialize mu. + # mu is 1/|A_d| if a is in A_d, zero otherwise. + # TODO: consider doing random initialization instead. + var_mu = dict() + for v in ids: + for a in authors_d: + var_mu[(v, a)] = 1 / len(authors_d) + + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) + + lastgamma = tilde_gamma.copy() + + # Update phi. + for v in ids: + for k in xrange(self.num_topics): + # Average Elogtheta over authors a in document d. + avgElogtheta = 0.0 + for a in authors_d: + avgElogtheta += var_mu[(v, a)] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute phi. + var_phi[v, k] = expavgElogtheta * expElogbeta[k, v] + + # Normalize phi over k. + var_phi[v, :] = var_phi[v, :] / (var_phi[v, :].sum() + 1e-100) + + # Update mu. + for v in ids: + # Prior probability of observing author a in document d is one + # over the number of authors in document d. + mu_sum = 0.0 + for a in authors_d: + # Average Elogtheta over topics k. + avgElogtheta = 0.0 + for k in xrange(self.num_topics): + avgElogtheta += var_phi[v, k] * Elogtheta[a, k] + expavgElogtheta = numpy.exp(avgElogtheta) + + # Compute mu over a. + var_mu[(v, a)] = expavgElogtheta + mu_sum += var_mu[(v, a)] + + # Normalize mu. + mu_norm_const = 1.0 / (mu_sum + 1e-100) + for a in authors_d: + var_mu[(v, a)] *= mu_norm_const + + # Update gamma. + for a in authors_d: + for k in xrange(self.num_topics): + tilde_gamma[a, k] = 0.0 + for vi, v in enumerate(ids): + tilde_gamma[a, k] += cts[vi] * var_mu[(v, a)] * var_phi[v, k] + tilde_gamma[a, k] *= len(self.author2doc[a]) + tilde_gamma[a, k] += self.alpha[k] + + # Update gamma and lambda. 
+ # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). Same goes for lambda. + # TODO: I may need to be smarter about computing rho. In ldamodel, + # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). + # FIXME: if tilde_gamma is computed like this in every iteration, then I can't compare + # lastgamma to it for convergence test. FIXME. + var_gamma_temp = (1 - rhot) * var_gamma + rhot * tilde_gamma + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, + # corresponding to the authors in the document. The same goes for Elogtheta. + Elogtheta = dirichlet_expectation(var_gamma_temp) + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. + if iteration > 0: + meanchange_gamma = numpy.mean(abs(var_gamma_temp - lastgamma)) + gamma_condition = meanchange_gamma < self.threshold + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + if gamma_condition: + # logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + # FIXME: there are too many different gamma variables! + var_gamma = var_gamma_temp.copy() + + if self.optimize_lambda: + # Update lambda. + # only one update per document). + for k in xrange(self.num_topics): + for vi, v in enumerate(ids): + # cnt = dict(doc).get(v, 0) + cnt = cts[vi] + tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * var_phi[v, k] + + # This is a little bit faster: + # tilde_lambda[:, ids] = self.eta[ids] + self.num_docs * cts * var_phi[ids, :].T + + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. 
+ var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + var_lambda = var_lambda.copy() + + # Print topics: + # pprint(self.show_topics()) + + # End of corpus loop. + + if _pass % self.eval_every == 0: + self.var_gamma = var_gamma + self.var_lambda = var_lambda + if self.eval_every > 0: + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + + #logger.info('Converged documents: %d/%d', converged, self.num_docs) + + # TODO: consider whether to include somthing like this: + #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold: + # break + # End of pass over corpus loop. + + # Ensure that the bound (or log probabilities) is computed at the very last pass. + if self.eval_every != 0 and not _pass % self.eval_every == 0: + # If the bound should be computed, and it wasn't computed at the last pass, + # then compute the bound. + self.var_gamma = var_gamma + self.var_lambda = var_lambda + if self.eval_every > 0: + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + + + self.var_lambda = var_lambda + self.var_gamma = var_gamma + + return var_gamma, var_lambda + + def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): + """ + Compute the expectation of the log conditional likelihood of the data, + + E_q[log p(w_d | theta, beta, A_d)], + + where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. + """ + + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + # NOTE: computing the bound this way is very numerically unstable, which is why + # "logsumexp" is used in the LDA code. + # NOTE: computing bound is very very computationally intensive. I could, for example, + # only use a portion of the data to do that (even a held-out set). + bound= 0.0 + for d, doc in enumerate(docs): + authors_d = self.doc2author[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + bound_d = 0.0 + for vi, v in enumerate(ids): + bound_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + bound_d += cts[vi] * numpy.log(bound_v) + bound += numpy.log(1.0 / len(authors_d)) + bound_d + + # For per-word likelihood, do: + # likelihood *= 1 /sum(len(doc) for doc in docs) + + # TODO: can I do something along the lines of (as in ldamodel): + # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) + # If I computed the LDA bound the way I compute the author-topic bound above: + # bound = 0.0 + # for d, doc in enumerate(docs): + # ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + # cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ # bound_d = 0.0 + # for vi, v in enumerate(ids): + # bound_v = 0.0 + # for k in xrange(self.num_topics): + # bound_v += numpy.exp(Elogtheta[d, k] + Elogbeta[k, v]) + # bound_d += cts[vi] * numpy.log(bound_v) + # bound += bound_d + + return bound + + def theta_bound(self, Elogtheta): + bound = 0.0 + for a in xrange(self.num_authors): + var_gamma_a = self.var_gamma[a, :] + Elogtheta_a = Elogtheta[a, :] + # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector + bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) + bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) + bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) + + return bound + + def beta_bound(self, Elogbeta): + bound = 0.0 + bound += numpy.sum((self.eta - self.var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(self.var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(self.var_lambda, 1))) + + return bound + + def eval_logprob(self, doc_ids=None): + """ + Compute the liklihood of the corpus under the model, by first + computing the conditional probabilities of the words in a + document d, + + p(w_d | theta, beta, A_d), + + summing over all documents, and dividing by the number of documents. + """ + + # TODO: if var_lambda is supplied from LDA, normalizing it every time + # is unnecessary. + norm_gamma = self.var_gamma.copy() + for a in xrange(self.num_authors): + norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] + + if self.optimize_lambda: + norm_lambda = self.var_lambda.copy() + for k in xrange(self.num_topics): + norm_lambda[k, :] = self.var_lambda[k, :] / self.var_lambda.sum(axis=1)[k] + else: + norm_lambda = self.norm_lambda + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + logprob = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. 
+ cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] + logprob_d = 0.0 + for vi, v in enumerate(ids): + logprob_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + logprob_v += norm_gamma[a, k] * norm_lambda[k, v] + logprob_d += cts[vi] * numpy.log(logprob_v) + logprob += numpy.log(1.0 / len(authors_d)) + logprob_d + + return logprob + + # Overriding LdaModel.get_topic_terms. + def get_topic_terms(self, topicid, topn=10): + """ + Return a list of `(word_id, probability)` 2-tuples for the most + probable words in topic `topicid`. + Only return 2-tuples for the topn most probable words (ignore the rest). + """ + topic = self.var_lambda[topicid, :] + topic = topic / topic.sum() # normalize to probability distribution + bestn = matutils.argsort(topic, topn, reverse=True) + return [(id, topic[id]) for id in bestn] + + + def get_author_topics(self, author_id, minimum_probability=None): + """ + Return topic distribution the given author, as a list of + (topic_id, topic_probability) 2-tuples. + Ignore topics with very low probability (below `minimum_probability`). + """ + if minimum_probability is None: + minimum_probability = self.minimum_probability + minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output + + topic_dist = self.var_gamma[author_id, :] / sum(self.var_gamma[author_id, :]) + + author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) + if topicvalue >= minimum_probability] + + # author_name = self.id2author[author_id] + + return author_topics + + + From 1cfd00f4169922ebe025aadddc4a86d4ee006940 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 11 Nov 2016 11:40:05 +0100 Subject: [PATCH 040/100] Only updating the necessary expected log theta. Changed the name of OnlineAtVb2 to DisjointAtVb. Updated notebook. Other minor changes. 
--- docs/notebooks/at_with_nips.ipynb | 540 +++++++++--------- .../{onlineatvb2.py => disjointatvb.py} | 2 +- gensim/models/onlineatvb.py | 52 +- 3 files changed, 283 insertions(+), 311 deletions(-) rename gensim/models/{onlineatvb2.py => disjointatvb.py} (99%) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 18fe5fb1d3..d0e583e3d3 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -77,7 +77,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 3, "metadata": { "collapsed": true }, @@ -89,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -97,8 +97,8 @@ "source": [ "# Configure logging.\n", "\n", - "log_dir = '../../../log_files/log.log' # On my own machine.\n", - "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "#log_dir = '../../../log_files/log.log' # On my own machine.\n", + "log_dir = '../../../../log_files/log.log' # On Hetzner\n", "\n", "logger = logging.getLogger()\n", "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", @@ -117,7 +117,7 @@ }, { "cell_type": "code", - "execution_count": 221, + "execution_count": 235, "metadata": { "collapsed": false }, @@ -127,8 +127,8 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", - "#data_dir = '../../../nipstxt/' # On Hetzner.\n", + "#data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", @@ -154,7 +154,7 @@ }, { "cell_type": "code", - "execution_count": 222, + "execution_count": 236, "metadata": { "collapsed": 
false }, @@ -184,7 +184,7 @@ }, { "cell_type": "code", - "execution_count": 223, + "execution_count": 237, "metadata": { "collapsed": false }, @@ -196,7 +196,7 @@ }, { "cell_type": "code", - "execution_count": 224, + "execution_count": 238, "metadata": { "collapsed": false }, @@ -214,7 +214,7 @@ }, { "cell_type": "code", - "execution_count": 225, + "execution_count": 239, "metadata": { "collapsed": false }, @@ -240,7 +240,7 @@ }, { "cell_type": "code", - "execution_count": 226, + "execution_count": 240, "metadata": { "collapsed": false }, @@ -263,7 +263,7 @@ }, { "cell_type": "code", - "execution_count": 227, + "execution_count": 241, "metadata": { "collapsed": true }, @@ -278,7 +278,7 @@ }, { "cell_type": "code", - "execution_count": 228, + "execution_count": 242, "metadata": { "collapsed": true }, @@ -297,7 +297,7 @@ }, { "cell_type": "code", - "execution_count": 229, + "execution_count": 243, "metadata": { "collapsed": true }, @@ -309,16 +309,34 @@ }, { "cell_type": "code", - "execution_count": 230, + "execution_count": 244, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Remove rare and common tokens.\n", + "\n", + "# Filter out words that occur too frequently or too rarely.\n", + "max_freq = 0.5\n", + "min_wordcount = 20\n", + "dictionary.filter_extremes(no_below=min_wordcount, no_above=max_freq)\n", + "\n", + "dict0 = dictionary[0] # This sort of \"initializes\" dictionary.id2token." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 245, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGcCAYAAADeTHTBAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmcHVWZ//HPlwBBlgTGmAQEBUUQcIFEhKgsDpLIqjOg\n2G4g4IKA/KIoijowoI6iJOyKArKojQiDwx4IjhC2CSaIICEoO2gCgdCBBMj2/P4455JK5fZ2b3du\n377f9+tVr9v31FN1TtXt5elT51QpIjAzMzNrNWs0ugFmZmZmjeAkyMzMzFqSkyAzMzNrSU6CzMzM\nrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMysJTkJMjMzs5bkJMjM+pykpyT9vPB+D0nLJb1v\nNdT9PUlLCu+H5Lon9Xfdub7Dc32brI76aiXpm5IekbRU0vRGt6enJL01n99PNrot1vycBNmgIeng\n/Mux2vKDRrevxVR7Hk+vn9Ej6duS9quh7uW9rau3umhbUMOxrk6S9gZ+APwvcAjw3YY2yKxB1mx0\nA8z6WJB+oT9WKr9/9TfFKiLiZkmvi4jFvdz0O8AlwNW92OYE4KRe1lOLztp2AXBJDce6On0QWAIc\nHn6ApLUwJ0E2GN0QETN7GixJwNoR8Wo/tqnl9XdSIGndiFgUEctZDT1BnclJxUBOgABGAQsHYgLk\nn0dbnXw5zFpKcXyIpM9I+ivwCrBHXi9JX5X0V0mvSPqnpHMkDSvtR5L+I499eUnSVElvl/RkaSzM\nSuNTCuVVx41I2kfStLzPDklXSXp7KeZXkuZL2jSvf1HSM5J+WKUeSZoo6S+SXs5x10naPq+/XdKf\nOjlXD0vqsgems/NQJW6VMUGStpL035Lm5LY9IenXktarfE7A2kDlXC2vnNt8XpfnffxW0nzSpZ1O\nz3le9xlJs3N908tjlPK5/VuV7V7bZw/a1tlne3Th++ppSWdU+b66TdJMSdtJ+l9Ji/K5/WpXn0Nh\n+zUlnZA/u1eUxvycJGmtUts/BQzP7VymTsbX5O+dJZLWK5Qdl7f7YaFszfz5n1QoW1/S5Pwz8Yqk\nWZL+X2n/3f08biTpYkkvSHpe0vnASucsx20s6aJ8rl6R9A9JV0ratCfnzVqXe4JsMBou6fXFgoh4\nrhQzHvgEcDbwPPBELr8AaMuvpwFvAY4G3i1pl9zLAGk8xXHAVcAUYCxwI/C6Uj2djQ9ZpVzSIcD5\nwHXAN4D1gC8D0yTtEBFPFbZdM9c3DfhaPp6vS/pbRJxf2O3FpD94VwM/J/3h3hXYCfhzXn+OpK0i\n4qFCW8YBWwDfqtL2op6eh0q7K/sfmuPWIJ3nucCmwH7AsIhYKOnTwC+B2/J5Afh7aV//DTwIfLNQ\n1tk53wP4JHAG6VLQkcAUSe+JiNndbPtaeUQs60Hbyp/t94DjgRtI33PbkD7bsaXvqwBGANcDvwMu\nBT4O/FjSvRFxc5W2FV2Yj/FS0vfGzqTLdlsDBxXa/mXg3cAXAAG3d7K/aaTP6P2kzwvgA8AyYJdC\n3FjSZ35rPl4B1+btfgH8BdgLmCRp44g4rlTPKj+PeR9Xk75XzwFmAweQznv5M/o9sCXps32C1NM1\nnvQ99RRmnYkIL14GxQIcTLoMUl6WFWKG5LLFwJal7XfP6w4ole+Vyw/M70fm7a8oxf0wx/28UHYy\nsLhKWw8j/SHZJL/fAHgBOLMUNyqXn1UouyRv+41S7J+BOwrv98ztOaWLc7Yh8DJwUqn87FzvOl1s\n25vzsEdu8/vy+7E5Zr9uPtOXi/spndflwIWdrFtceF/5zJcC7yiUv5nU63Bp6dw+1N0
+u2lb+bMd\nlc/TVaW4r+S4TxXKpuWyjxfK1iYlib/p5lyNycd5dql8Ut7n+0vH+XwPfqaGAC8CJxfKniclWa9U\nvj+Ar+djXD+/PyC35djS/q4gJaBv6sHPY2UfXymUrUFKPJcBn8xl/1KO8+Klp4svh9lgE8ARwIcK\ny55V4m6OiL+Xyg4k/YL/o6TXVxbgT6Q/eB/McRNIv7zPLG1/Wh3t/jApEbq0VPcy4O5C3UU/L72/\njdRzVXEA6Q//yZ1VGhEvANeQeg+AdIkC+BgpuXmlizaPp/bz8EJ+3UvSOj2IryaAn/UiflpEvDZA\nPiIeJ/U0fLjG+ntqT9J5Kp+Xc4FFwD6l8o6IuKzyJtJYqrtZ+bOtZm/SOSnfCuBUUm9PuZ5uRcQy\n4E5S7yGS3gUMB/4LWIvUSwOpd+jeiHgpv9+LlNicXdrlJNK5KJ/zaj+PewGvUvg+j9RjdlY+nopF\npMTqg5KG9/IQrcU5CbLB6O6I+ENxqRLzWJWyt5H+q3y2tMwF1iH1fAC8Kb+u9Es7IuaQ/muuxZak\nX+zTSnU/A/xroe6Kl3ICUzQf2Kjw/i3AUxHRXZsuBraQtHN+/2Hg9aTegq68Ob/2+jxExMPA6cAX\ngeckXS/pCEkbdFNn2aO9iC3/kQV4CNhA0kZV1vWVynl6qFgYaeDvo4X1FU9W2Uf5s+2snqX53Bbr\neZr0eZTr6anbgB3zuKJdgCcj4l7SjMvKJbH3k753i215KiJeLu1rVmF90WNV6n0z8HSVRHx28U1e\nfzywL/CMpD9KOlZS+WfGbBUeE2StqvzLGdI/Bf8APsPK/2lWPJNfK+t6MrOms5ghVeoO0nikeVXi\nywN9l3WyX3XydVeuz3V+Grgrvz4dEX/sZrvenIdVRMTEPND1I6RepbOA4yTtnBOpnqj2OfZG+Rz1\n9POqp47u9OSz7e363rahaBrptgM7kXp8phXKd5G0Hemfh1vrqK/a5yiqfx6r7DsiTpV0JfBRUk/t\n94BvSdqt2PtnVuaeILMVHiYNSr2t3JOUl8ov08fy61bFjSWNJl3SKpoPDJG0bql88yp1AzzTSd3T\n6L2/A5uWZyCVRcRS8gBcSRuSBif/ugf7fyy/9uQ8dFb3/RHx/YjYDdiN1Mv2hWJIT/bTQ2+rUrYV\n8GJEzM/v55PGSZVtXqWsp217LL9uXSyUtHbe7+M93E9P6llT0ltL9WwCrF9HPXeRLqvuSur5qXwv\n3gq8j3SpNkg9RsW2bCqpPEB+m/zak7ZU9lG+XLp1lVgi4pGImBQRE4B3kgZq92hWnbUuJ0FmK1xG\nGoT6nfKKPAW4kkzcRPpv/ehS2MQq+3yY9J/rroV9rU/qbSq6HngJ+HYek1Ouf0QPj6HoClJvb0/u\nBnwJKQE8l/THoydJUG/Ow0okDZNU/v1zP+mP6dBC2UKqJyW1+EAe01Jpw+akSyg3FGIeBl4vaZtC\n3BtJiWFZT9tWOU/HlMq/SJoBeE0P9tET15G+1/5fqfxrpPN6bS07zZe0ZpK+Zzdm5Z6g9YCjgNkR\nUezBvI70s/Tl0u4mks7F9T2o+jrS98IXKwX5Z+MoVp5p+Lo827DoEdLP09BC3GhJW1f5vrMW5sth\nNtjU3O0fEX/Il2e+I2kMMJX0H/BWpEHTR5Bm+MyVNBk4VtJVpF/o7yENwn6+tNvrgaeBCyX9JJcd\nCvwTeO0+MhHRIeko0tT8mZIuJV2iejNpQOv/0sv/aiNiqqR24KtK9+65kXRZZxdgSkQUB5z+SdIs\n0oDov/TkEkIvzwOs/NnsCUyW9Dvgb6RBtgeTLvv9dyFuBjA+31/mn8DDEVH1vkY9cD9wo6QzSZ/r\nl/PrfxZifkOa9n9Vjlsf+BJpGv67S/vrUdvyefoRcLyk60hJzzZ5v3eSeuHqFhEzJf0a+HIeVD8N\nGEe6vHlZRHQ2Db4npgHHAs9FxKxc3z8lPUz6+fh
FKf5KUk/RjyRtyYop8vsAP46IauOeyq4k9UL9\nJPduVabIl3tVtwVukHQZ8AApyTqQNK6tvRD3E9IEgE1Jl73NPEXey+BZSH9ElwFjuogZkmNO7SLm\n86TZOC+RLo/cA3wfGFmK+w9SgvMS6b/9rUmDWn9eihtD+mP3Muk/1CMpTaMuxO5O6pmYn/c7GzgP\n2L4Qcwnpj1G53ScDr5bKRPrj9UCufw5pRtS7qmz/zdymr/byvFc7D08A5xZiylPk35KP62+kHpVn\n8ra7lvb9duCPed/LKuc2H+sy0j2FujwPxc+clBA8lM/F9Ep7StuPB+4jTQH/K+k+PdWmyHfWts4+\n2yPz/l7J5+t0YINSzDRgRpU2XULqbenusxiSP4+Hcz2PkpK8Navsb5XvoS72u18+pitL5RdQmuZf\nWLceaTbYU7ktDwLH9ObnkTQY/GLSbMLnSPdk2oGVp8iPIM1QfABYQErAbwc+WuWYl5Y/Fy+tvSii\nLy+5m7U2SU8C10fEF7oNHmAkfY10j583RcQ/G90eM7P+5mujZlZxKOl+LU6AzKwleEyQWQtTeibU\n/qRxPG/Hs2nMrIU4CTLrW509e2qgGk2aCfY86dEZUxrcHjOz1cZjgszMzKwleUyQmZmZtSQnQWZm\nZtaSnASZWb+S9D1J5Wefre42DJG0XFL5Cev17HOPvM/9+2qfvaj7V5L+trrrNRtsnASZNZCkg/Mf\n0srysqTZks4cRE/BbrbB4r3RqOMKYHmD6jYbNDw7zKzxgvR8r8eAdUhP6j4C2EvSOyLilQa2zbpW\nz9PZ63FIA+s2GzScBJkNDDdExMz89QWSnic9bPIjwG8b16zuSVo3IhY1uh2tJCKWNaJef9Y22Phy\nmNnA9AfSf/pbVAokbSHpd5Kek7RQ0p2S9i5uJOnZwoNaUfKCpCWShhXKj8tl6xbKtpZ0ed7/y5Lu\nlrRfaf+Vy3e7SjpH0lzS89J6RdJhkm6WNDfXdb+kz5diTpc0p1T201z/lwplm+SyQ3tY92fyJceX\nJU2X9L4qMW+UdKGkOZJekXSfpIOr7C6ANSR9V9JTkhZJuknSFqX97ZY/uyfy/h6X9JPi088lfVPS\nMkmblCvJsS9L2iC/X2VMkKT1JU2W9GSuY1Z+uGsx5q35XH2yVF4ZM3V8oex7uWwrSb+VNJ/0IF+z\nQcNJkNnAtGV+fQ4gjw+6k/T09bOA44GhwNWSPlLY7nZg18L7dwGV5Of9hfIPADMr/9VL2o70xO6t\ngf8i3Tn6JeD3pf1XnEO6w/R/kp431ltHkB4m+33ga6QHip5bSoSmAW+QtFWp3cuAXQplu5KSkWk9\nqHcP4MfARaQHjY4EpkjauhIgaTTp4aq7AWcAx+S2/lLSl0v7E+lS5j7Aj/LyPtJDP4s+Tvq8zgKO\nIj0s9hjSA0grLs37+1iVdh8IXBcRL+b3K42zkiTgWuBo0lPqJ5IeTjtJ6Qn2tajs/79JDzr9JukB\npmaDR6Of4OrFSysvrHjy/QeB1wNvBA4CniUlIRvnuMk5blxh2/VITwt/uFD2NWAxsF5+fxTpD/id\nwA8Kcc8DPym8nwrcw6pPG78NeLDU3uWkp6erh8dY7QnsQ6vE3QTMKrwfles6LL/fKJ+DS4EnCnFn\nAXO6acOQvK+lwDsK5W8mPeH80kLZhcATwPDSPi4D5gFr5fd75H3eCwwpxE3M7dyqm+P9dm7PxoWy\n/wPuKMWNy/V8vFB2CfBQ4f0BOebY0rZXAEtID8UFeGuO+2Qn5+f40ue2HLiw0T8nXrz01+KeILPG\nE3AzKfF5EvgNsAD4aKx4mOlewPSIuLOyUUQsBH4ObC5p21w8jTTWr3KJZ5dcNi1/jaR3ARvmMiRt\nRErCfgcMl/T6ygLcCLxN0saF9gbwi4ioeWZURLz62sFLw3JdtwBbSXpdjpkL/J0VPVu7AK8CpwKb\nSnpz6Rh7Ylp
E3F9ox+PA1cCHc1sE/BvwP8CaVc7FRsD2pX2eHyuP0ZlG+kzf0snxrpv3d0eOK+7v\nt8BOkt5UKDsIWETq4enMXqTk9+xS+SRSgvPhLrbtSgA/q3FbswHPSZBZ4wXp8tCHgN2BbSPirREx\ntRDzZmB2lW1nFdYDzCT9waxcLvoAK5Kg90haO68LUi8PpEtvIv3n/2xpOTHHlKfrP1Z8I2ktSaOK\nS1cHLGkXSX+Q9BLwQq7rpLx6eCH0ttKxTAf+BHQAu0gaDryDnidBf69S9hCwQU4GRwMbAF9m1XPx\n8xxfPhflMVHz8+tGlQJJb5Z0saTnSD18z5ISX1j5eC/Lrx8vlB0AXBNdD0h+M/BURLxcKi9/f9Ti\n0Tq2NRvQPDvMbGC4O1bMDqtZRCyV9H/ArpLeCmwM3Er6o7sWsBMpmZgVEc/lzSr/DP0E6OwBquXk\nofzHdlfS5awgJVQhabOI+Ed5R5LelmPvJ106epLUi7E/aUxL8Z+zacDBkjYjJUNTIyIk3Z7fVxKO\nWztpd08Up5pX6r4I+FUn8feW3nc2U0uQBh2TLjduAPyAlMwuAt5EGhP02vFGxFOS7iQlQT+RtAvp\nEumlvTiGrnTWezeki23Kn7XZoOEkyKw5PE4atFy2TWF9xTTgG6RB1M9GxEMAkv5KSlZ2IV0Cqngk\nvy6JiD/U2L4ZpJ6somc7id2flJDtky95kds3oUpspYdnAjAGOCG/vxX4HCkJepFVE5POvK1K2VbA\nixExX9ICYCGwRh3nomx70lictoh47XYHkjq7RHUpcLqkt5Auhb0IXN9NHY8BH5D0ulJvUPn7o5I0\nbljavp6eIrOm5cthZs3hOuC9knaqFEhaD/gC8GhEPFCInUa66eIxrLjkRf76M6TeodcuH0XEs6SB\nzl/MM6NWImlEd42LiBci4g+lpbNHZVR6Tl77/ZMvRX22yn7/DswlDfhegzSOpnKMW5PG79zRi/FJ\nH8hjoir1bg7sC9yQ61sGXAl8XNI25Y2rnIue1FvteEX6fKpt/zvy4GXSpbCrimOKOnEdsDbpMl5R\nZZD29QARMZ90+XHXUtxRnbSlKknDlW6psH5PtzEbiNwTZNZ4PbmU8UOgDbhB0hmk2V2HkP6D//dS\n7J2kWUdbAecWym8ljT2qNp38yFx2n6RfkHqHRpFmJr0R2KGX7e3KFNJU8utyXcOAzwP/ZNXxNpCS\ntwNJU/pfymV3ky7TbEmazdVT9wM3SjqTdI6+nF//sxDzDVKSMD23bxbwL8B7SL1oxUSxJ+fir6Rx\nNaflwdwv5eMZVi04IuZKmgZ8HVifnt0s80rS5/sjSVsCfyENlt4H+HFEFMctnQccK6mDNIZsd1JP\nVW8+108AP82vl3UTazZguSfIrPG6/Q88Ip4hJSQ3kv5r/wFpave+EXFVKXYRabp7cfAzpCQnSNPL\nnyxtM4v0R/4a0jT4s4AvknoRTmJltcwKe22bXNeBpN8/PwEOB84k3Xuomkq7i71XS0nTyXt6f6BK\nG24GjiUd44mkXqbxuU2Vfc8BdiSNC/r33LavkJKW4zo7rs7Kc4/YvqTE5HjgO6TE6HNdtPW3pATo\nBTofp1WsI0gJzxnAfqRbKmwFfDUivlna7gTSWKSPk5LRpbl9vX3G22B9Hpy1ENUxy9XMzMysaTW8\nJ0jSo1r5KdqV5cy8fqiksyXNk/Si0m39R5b2sZmka5UeJTBH0imS1ijF7C5pRr6d/EOqcgt8SUfm\n9rws6S5JO/bv0ZuZmVmjNDwJInXBjy4se5K6WSvXmU8jdfMeQLpOvwnpLqgA5GTnOtL4pp1J3dyH\nUOjCz4MfryF1hb8bOB04T9KehZiDSDdhO4E0/uFe0u30ux0UamZmZs1nwF0Ok3QasHdEbKX0wMdn\ngU9ExJV5/dakgYo7R8R0SXsBV5FuPT8vx3yRNJD0Dfm+KT8C9oqI4qyQdtJt8
ffO7+8C/i8ijsnv\nRbp/yRkRccrqOXozMzNbXQZCT9BrJK0FfIoVD+l7D6mHp3JnVSJiNum5PuNy0c7AfZUEKJtCugvr\ndoWY4t13KzHjCvWOLdUTeZtxmJmZ2aAzoJIg0j0/hpNmZUCaors4IhaU4uayYprq6Py+vJ4exAyT\nNBQYQbpjarWYVe6bYmZmZs1voN0n6FDg+jxFtSuiZ9Mzu4pRD2M6XZ8fgjiBdLfWV3rQHjMzM0vW\nATYHphQe47NaDZgkKD81+UPARwvFc4C1JQ0r9QaNZEWvTeWeHkWjCusqr+UHOo4EFkTEYknzSPdD\nqRZT7h0qmgD8uov1ZmZm1rVPAb9pRMUDJgki9QLNJc30qphBupHXHqQ7oiJpK9KDByu3z78TOF7S\niMK4oPGkp0zPKsTsVapvfC4nIpZImpHruSrXo/z+jC7a/BjAr371K7bZZpU77FsXJk6cyOTJkxvd\njKbic1Ybn7fe8zmrjc9b78yaNYtPf/rTkP+WNsKASIJywnEIcGFELK+UR8QCSecDkyRVHpR4BnB7\nRNydw24EHgAukXQc6blIJwNnFZ5d9DPgqDxL7AJScnMgsHehGZOAi3IyNJ30zJ116fqW/K8AbLPN\nNowZM6bGo29Nw4cP9znrJZ+z2vi89Z7PWW183mrWsOEkAyIJIl0G2wz4ZZV1lQcAXg4MJT3o8MjK\nyohYLmlf0nNs7iA9AfpCVjxtmoh4TNI+pETnK8BTwGERMbUQc1m+J9BJpMtifwYm5IdLmpmZ2SAz\nIJKgiLiJNDur2rpXgaPz0tn2T5KefdNVHbeQpsF3FXMOnT+/yMzMzAaRgTZF3szMzGy1cBJkDdHW\n1tboJjQdn7Pa+Lz1ns9ZbXzems+Ae2xGM5E0BpgxY8YMD4YzMzPrhZkzZzJ27FiAsRExsxFtcE+Q\nmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJ\nkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYtyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3J\nSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwEmZmZWUtyEmRmZmYt\nyUmQmZmZtSQnQWZmZtaSnASZmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pIGRBIkaRNJl0iaJ2mR\npHsljSnFnCTpH3n9TZK2LK3fSNKvJXVImi/pPEnrlWLeJelWSS9LelzS16u05WOSZuWYeyXt1T9H\nbWZmZo3U8CRI0obA7cCrwARgG+BrwPxCzHHAUcAXgfcCC4EpktYu7Oo3eds9gH2AXYFzC/vYAJgC\nPAqMAb4OnCjp8ELMuLyfXwDbA78Hfi9p2z49aDMzszr9/vfwkY80uhXNbc1GNwD4JvBERBxeKHu8\nFHMMcHJEXA0g6bPAXOCjwGWStiElUGMj4p4cczRwraRjI2IO8GlgLeCwiFgKzJK0A/BV4LxCPddH\nxKT8/gRJ40kJ2Jf79KjNzMzq8PjjcPPNjW5Fc2t4TxCwH/AnSZdJmitpZql3ZgtgNPDaRx0RC4D/\nA8blop2B+ZUEKJsKBLBTIebWnABVTAG2ljQ8vx+Xt6MUMw4zMzMbVAZCEvQW4AhgNjAe+BlwhqRP\n5/WjScnM3NJ2c/O6SswzxZURsQx4vhRTbR/0IGY0ZmZmNqgMhMthawDTI+K7+f29krYjJUa/6mI7\nkZKjrnQXox7GdFePmZmZNZmBkAT9E5hVKpsF/Hv+eg4pERnFyr00I4F7CjEjizuQNATYKK+rxIwq\n1TOSlXuZOosp9w6tZOLEiQwfPnylsra2Ntra2rrazMzMrCW0t7fT3t6+UllHR0eDWrPCQEiCbge2\nLpVtTR4cHRGPSppDmvX1FwBJw0hjfc7O8
XcCG0raoTAuaA9S8jS9EPM9SUPypTJIl99mR0RHIWYP\n4IxCW/bM5Z2aPHkyY8aM6SrEzMysZVXrGJg5cyZjx45tUIuSgTAmaDKws6RvSXqrpE8ChwNnFWJO\nA74jaT9J7wQuBp4C/gcgIh4kDWD+haQdJb0fOBNozzPDIE19XwxcIGlbSQcBXwFOLdRzOrCXpK9K\n2lrSicDYUlvMzMxsEGh4EhQRfwL+DWgD7gO+DRwTEZcWYk4hJTXnkmaFvQ7YKyIWF3b1SeBB0uyu\na4BbSfcVquxjAWka/ebAn4AfAydGxPmFmDtzO74A/Jl0Se4jEfFAnx60mZmZNdxAuBxGRFwHXNdN\nzInAiV2sf4F0L6Cu9nEfsFs3MVcAV3QVY2ZmZs2v4T1BZmZmZo3gJMjMzMxakpMgMzMza0lOgszM\nzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLM\nzMysJTkJMjMzs5bkJMjMzKwJRTS6Bc3PSZCZmVmTkhrdgubmJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJMjMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzOzluQkyMzMzFqSkyAzMzNrSQ1PgiSd\nIGl5aXmgsH6opLMlzZP0oqTLJY0s7WMzSddKWihpjqRTJK1Ritld0gxJr0h6SNLBVdpypKRHJb0s\n6S5JO/bfkZuZmVkjNTwJyu4HRgGj8/KBwrrTgH2AA4BdgU2AKyorc7JzHbAmsDNwMHAIcFIhZnPg\nGuBm4N3A6cB5kvYsxBwEnAqcAOwA3AtMkTSiD4/TzMzMBoiBkgQtjYhnI+KZvDwPIGkYcCgwMSJu\niYh7gM8B75f03rztBODtwKci4r6ImAJ8FzhS0po55gjgkYj4RkTMjoizgcuBiYU2TATOjYiLI+JB\n4EvAoly/mZmZDTIDJQl6m6SnJT0s6VeSNsvlY0k9PDdXAiNiNvAEMC4X7QzcFxHzCvubAgwHtivE\nTC3VOaWyD0lr5bqK9UTeZhxmZmYDTESjW9D8BkISdBfp8tUEUu/LFsCtktYjXRpbHBELStvMzevI\nr3OrrKcHMcMkDQVGAEM6iRmNmZnZACQ1ugXNbc3uQ/pXvnxVcb+k6cDjwMeBVzrZTEBPcuCuYtTD\nGOfaZmZmg1DDk6CyiOiQ9BCwJely1NqShpV6g0ayotdmDlCexTWqsK7yOqoUMxJYEBGLJc0DlnUS\nU+4dWsXEiRMZPnz4SmVtbW20tbV1t6mZmdmg197eTnt7+0plHR0dDWrNCgMuCZK0PvBW4CJgBrAU\n2AO4Mq+xdAPHAAAgAElEQVTfCngTcEfe5E7geEkjCuOCxgMdwKxCzF6lqsbnciJiiaQZuZ6rcj3K\n78/ors2TJ09mzJgxvT5WMzOzVlCtY2DmzJmMHTu2QS1KGp4ESfoxcDXpEtgbgf8kJT6XRsQCSecD\nkyTNB14kJSW3R8TdeRc3Ag8Al0g6DtgYOBk4KyKW5JifAUdJ+hFwASm5ORDYu9CUScBFORmaTpot\nti5wYb8cuJmZmTVUw5MgYFPgN8DrgWeB24CdI+K5vH4i6VLV5cBQ4AbgyMrGEbFc0r7AT0m9QwtJ\nicsJhZjHJO1DSnS+AjwFHBYRUwsxl+V7Ap1Euiz2Z2BCRDzbD8dsZmZmDdbwJCgiuhw4ExGvAkfn\npbOYJ4F9u9nPLaRp8F3FnAOc01WMmZmZDQ4DYYq8mZmZ2WrnJMjMzMxakpMgMzMza0lOgszMzKwl\nOQkyMzOzluQkyMzMzFqSkyAzMzNrSU6CzMzMrCU5CTIzM7OW5CTIzMzMWpKTIDMzM2tJToLMzMys\nJTkJM
jMzs5bkJMjMzMxakpMgMzMza0lOgszMzKwlOQkyMzNrQhGNbkHzcxJkZmbWpKRGt6C59UkS\nJGmIpO0lbdQX+zMzMzPrbzUlQZJOk3RY/noIcAswE3hS0u591zwzMzOz/lFrT9CBwL356/2ALYC3\nA5OB7/dBu8zMzMz6Va1J0AhgTv56b+B3EfEQcAHwzr5omJmZmVl/qjUJmgtsmy+FfRiYmsvXBZb1\nRcPMzMzM+tOaNW73S+Ay4J9AADfl8p2AB/ugXWZmZmb9qqYkKCJOlHQ/sBnpUtiredUy4Id91Tgz\nMzOz/lJrTxARcTmApHUKZRf1RaPMzMzM+lutU+SHSPqupKeBlyS9JZefXJk6b2ZmZjaQ1Tow+tvA\nIcA3gMWF8vuBw+tsk5mZmVm/qzUJ+izwhYj4NSvPBruXdL8gMzMzswGt1iTojcDfO9nfWrU3x8zM\nzGz1qDUJegDYpUr5gcA9tTfHzMzMbPWodXbYScBFkt5ISqT+XdLWpMtk+/ZV48zMzMz6S009QRHx\nP6Rk50PAQlJStA2wX0Tc1NW2ZmZmZgNBPfcJug3Ysw/bYmZmZrba1HqfoB0l7VSlfCdJ76mnQZK+\nJWm5pEmFsqGSzpY0T9KLki6XNLK03WaSrpW0UNIcSadIWqMUs7ukGZJekfSQpIOr1H+kpEclvSzp\nLkk71nM8ZmZmNjDVOjD6bNIjM8remNfVJCccnydNtS86DdgHOADYFdgEuKKw3RrAdaSerZ2Bg0n3\nMTqpELM5cA1wM/Bu4HTgPEl7FmIOAk4FTgB2yO2YImlErcdkZmZmA1OtSdC2wMwq5ffkdb0maX3g\nV6SbLb5QKB8GHApMjIhbIuIe4HPA+yW9N4dNIN2f6FMRcV9ETAG+CxwpqXLJ7wjgkYj4RkTMjoiz\ngcuBiYVmTATOjYiLI+JB4EvAoly/mZmZDSK1JkGvAqOqlG8MLK1xn2cDV0fEH0rl7yH18NxcKYiI\n2cATwLhctDNwX0TMK2w3BRgObFeImVra95TKPiStBYwt1RN5m3GYmZnZoFJrEnQj8F+ShlcKJG0I\n/ADo9ewwSZ8Atge+VWX1KGBxRCwolc8FRuevR+f35fX0IGaYpKHACGBIJzGjMTMzs0Gl1tlhxwK3\nAo9LqtwccXtSwvCZ3uxI0qakMT97RsSS3mwKRA/iuopRD2O6rGfixIkMHz58pbK2tjba2tp60Dwz\nM7Pei578BRwg2tvbaW9vX6mso6OjQa1ZoaYkKCKelvQu4FOkQcYvA78E2nuZyEC6BPUGYIakSlIy\nBNhV0lHAh4GhkoaVeoNGsqLXZg5QnsU1qrCu8lq+hDcSWBARiyXNIz0HrVpMuXdoJZMnT2bMmDFd\nhZiZmfW51/5qDnDVOgZmzpzJ2LFjG9SipJ77BC0Eft4HbZgKvLNUdiEwC/gh8DSwBNgDuBJA0lbA\nm4A7cvydwPGSRhTGBY0HOvJ+KjF7leoZn8uJiCWSZuR6rsr1KL8/o96DNDMzs4Gl5iQoJyK7k3pK\nVhpbFBEnVdummpxMPVDa90LguYiYld+fD0ySNB94kZSU3B4Rd+dNbsz7uETScaQB2icDZxV6pn4G\nHCXpR8AFpOTmQGDvQtWTSI8DmQFMJ80WW5eUlJmZmdkgUlMSJOnzwE+BeaTLTMUrk0Hh/jw1Kl/p\nnEi6VHU5MBS4ATjyteCI5ZL2zW26g/QojwtJ9/upxDwmaR9SovMV4CngsIiYWoi5LN8T6CTSZbE/\nAxMi4tk6j8fMzMwGmFp7gr4DfDsiftSXjamIiH8tvX8VODovnW3zJN08vDUibiGNQeoq5hzgnB43\n1szMzJpSrVPkNwJ+15cNMTMzM1udak2CfkcaVGxmZmbWlGq9HPZ34GRJOwP3kWZvvSYiPJvKzMzM\nBrRak6AvAC8Bu+WlKPCUcjMzMxvgar1Z4hZ93RAzMzOz1anWMUEASFp
b0taFJ7WbmZmZNYWakiBJ\n6+YbGC4C/kq6ezOSzpT0zT5sn5mZmVm/qLUn6L9IzwzbHXilUD4VOKjONpmZmZn1u1ovY30UOCgi\n7pJUvLvzX4G31t8sMzMzs/5Va0/QG4BnqpSvx6qPvDAzMzMbcGpNgv4E7FN4X0l8Dic/ld3MzMxs\nIKv1ctjxwPWSts37OEbSdsA4Vr1vkJmZmdmAU1NPUETcRhoYvSbpjtHjgbnAuIiY0XfNMzMzM+sf\nve4JyvcE+iQwJSI+3/dNMjMzM+t/ve4JioilwM+Adfq+OWZmZmarR60Do6cDO/RlQ8zMzMxWp1oH\nRp8DnCppU2AGsLC4MiL+Um/DzMzMzPpTrUnQpfm1+LT4AJRfh9TTKDMzM+ta+K58das1CfJT5M3M\nzBpManQLmltNSVBEPN7XDTEzMzNbnWpKgiR9tqv1EXFxbc0xMzMzWz1qvRx2eun9WsC6wGJgEeAk\nyMzMzAa0Wi+HbVQuk/Q24KfAj+ttlJmZmVl/q/U+QauIiL8B32TVXiIzMzOzAafPkqBsKbBJH+/T\nzMzMrM/VOjB6/3IRsDFwFHB7vY0yMzMz62+1Doz+fel9AM8CfwC+VleLzMzMzFaDWgdG9/VlNDMz\nM7PVysmMmZmZtaSakiBJl0v6ZpXyr0v6Xf3NMjMzM+tftfYE7QZcW6X8BmDX2ptjZmZmtnrUmgSt\nT7o7dNkSYFjtzTEzMzNbPWpNgu4DDqpS/gnggdqbY2ZmZrZ61JoEnQx8V9JFkg7Oy8XAt/O6HpP0\nJUn3SurIyx2SPlxYP1TS2ZLmSXoxj0caWdrHZpKulbRQ0hxJp0haoxSzu6QZkl6R9JCkg6u05UhJ\nj0p6WdJdknbs1VkxMzOzplFTEhQRVwMfBbYEzgFOBTYFPhQR5XsIdedJ4DhgbF7+APyPpG3y+tOA\nfYADSOONNgGuqGyck53rSNP9dwYOBg4BTirEbA5cA9wMvJv0aI/zJO1ZiDkoH8cJwA7AvcAUSSN6\neTxmZmbWBGq9WSIRcS3VB0fXsp+i70g6AthZ0tPAocAnIuIWAEmfA2ZJem9ETAcmAG8HPhgR84D7\nJH0X+KGkEyNiKXAE8EhEfCPXMVvSB4CJwE25bCJwbkRcnOv5Ein5OhQ4pd7jNDMzs4Gl1inyO0ra\nqUr5TpLeU2tjJK0h6RPAusCdpJ6hNUk9OABExGzgCWBcLtoZuC8nQBVTgOHAdoWYqaXqplT2IWmt\nXFexnsjbjMPMzMwGnVrHBJ0NbFal/I15Xa9IeoekF4FXSZfX/i0iHgRGA4sjYkFpk7l5Hfl1bpX1\n9CBmmKShwAhgSCcxozEzM7NBp9bLYdsCM6uU35PX9daDpLE6G5LG/lwsqav7DYn0vLLudBWjHsb0\npB4zMzNrMrUmQa8Co4BHSuUbA0t7u7M8bqeyr5mS3gscA1wGrC1pWKk3aCQrem3mAOVZXKMK6yqv\no0oxI4EFEbFY0jxgWScx5d6hVUycOJHhw4evVNbW1kZbW1t3m5qZmQ167e3ttLe3r1TW0dHRoNas\nUGsSdCPwX5I+EhEdAJI2BH7AioHG9VgDGArMICVVewBX5nq2At4E3JFj7wSOlzSiMC5oPNABzCrE\n7FWqY3wuJyKWSJqR67kq16P8/ozuGjt58mTGjBnT+6M0MzNrAdU6BmbOnMnYsWMb1KKk1iToWOBW\n4HFJ9+Sy7Um9Jp/pzY4kfR+4njRVfgPgU6THcoyPiAWSzgcmSZoPvEhKSm6PiLvzLm4k3aDxEknH\nkXqjTgbOioglOeZnwFGSfgRcQEpuDgT2LjRlEnBRToamk2aLrQtc2JvjMTMzWx3CgzXqVlMSFBFP\nS3oXKWF5N/Ay8EugvZB49NQo4GJS8tIB/IWUAP0hr59IulR1Oal36AbgyEJblkvaF/gpqXdoISlx\nOaEQ85ikfUiJzleAp4DDImJqIea
yfE+gk3Kb/gxMiIhne3k8ZmZmq4XUfYx1rp77BC0Efl5vAyLi\n8G7WvwocnZfOYp4E9u1mP7eQpsF3FXMOaXaamZmZDXI1JUGSPga0AVuRZk/9DfhNRFzeh20zMzMz\n6ze9uk9Qvpnhb4HfkqbC/500q2s74DJJl+YBxWZmZmYDWm97go4BPgTsHxHXFFdI2p80LugY0vO+\nzMzMzAas3t4x+nPA18sJEEBEXAV8g/SsLTMzM7MBrbdJ0NtY9RlcRVNzjJmZmdmA1tsk6GXSoy06\nMwx4pfbmmJmZma0evU2C7gSO6GL9kTnGzMzMbEDr7cDo7wN/lPR64CekB58K2Ab4GvAR4IN92kIz\nMzOzftCrJCgi7pB0EOkmiQeUVs8H2iLi9r5qnJmZmVl/6fXNEiPiSklTSA8g3SoXPwTcGBGL+rJx\nZmZmZv2l1meHLZL0IeA/IuL5Pm6TmZmZWb/r7R2jNy28/SSwfi6/T9JmfdkwMzMzs/7U256gByU9\nB9wOrANsBjwBbA6s1bdNMzMzM+s/vZ0iPxz4GDAjb3udpIeAocAESaP7uH1mZmZm/aK3SdBaETE9\nIk4l3ThxB9KjNJaRHpfxsKTZfdxGMzMzsz7X28thCyTdQ7octjawbkTcLmkpcBDwFPDePm6jmZmZ\nWZ/rbU/QJsD3gFdJCdSfJE0jJURjgIiI2/q2iWZmZmZ9r1dJUETMi4irI+JbwCJgR+BMIEh3kF4g\n6Za+b6aZmZlZ3+ptT1BZR0RcBiwB/hXYAjin7laZmZmZ9bOabpaYvQt4On/9OLAkIuYAv627VWZm\nZmb9rOYkKCKeLHz9jr5pjpmZmfVERKNb0PzqvRxmZmZmDSI1ugXNzUmQmZmZtSQnQWZmZtaSnASZ\nmZlZS3ISZGZmZi3JSZCZmZm1JCdBZmZm1pKcBJmZmVlLchJkZmZmLclJkJmZmbUkJ0FmZmbWkpwE\nmZmZWUtqeBIk6VuSpktaIGmupCslbVWKGSrpbEnzJL0o6XJJI0sxm0m6VtJCSXMknSJpjVLM7pJm\nSHpF0kOSDq7SniMlPSrpZUl3Sdqxf47czMzMGqnhSRCwC3AmsBPwIWAt4EZJryvEnAbsAxwA7Aps\nAlxRWZmTneuANYGdgYOBQ4CTCjGbA9cANwPvBk4HzpO0ZyHmIOBU4ARgB+BeYIqkEX13uGZmZjYQ\nrNnoBkTE3sX3kg4BngHGArdJGgYcCnwiIm7JMZ8DZkl6b0RMByYAbwc+GBHzgPskfRf4oaQTI2Ip\ncATwSER8I1c1W9IHgInATblsInBuRFyc6/kSKfk6FDilf86AmZmZNcJA6Akq2xAI4Pn8fiwpWbu5\nEhARs4EngHG5aGfgvpwAVUwBhgPbFWKmluqaUtmHpLVyXcV6Im8zDjMzMxtUBlQSJEmkS1+3RcQD\nuXg0sDgiFpTC5+Z1lZi5VdbTg5hhkoYCI4AhncSMxszMzAaVhl8OKzkH2Bb4QA9iReox6k5XMeph\nTE/qMTMzsyYyYJIgSWcBewO7RMQ/CqvmAGtLGlbqDRrJil6bOUB5FteowrrK66hSzEhgQUQsljQP\nWNZJTLl3aCUTJ05k+PDhK5W1tbXR1tbW1WZmZmYtob29nfb29pXKOjo6GtSaFQZEEpQToI8Au0XE\nE6XVM4ClwB7AlTl+K+BNwB055k7geEkjCuOCxgMdwKxCzF6lfY/P5UTEEkkzcj1X5XqU35/RVfsn\nT57MmDFjeny8ZmZmraRax8DMmTMZO3Zsg1qUNDwJknQO0AbsDyyUVOmJ6YiIVyJigaTzgUmS5gMv\nkpKS2yPi7hx7I/AAcImk44CNgZOBsyJiSY75GXCUpB8BF5CSmwNJvU8Vk4CLcjI0nTRbbF3gwn44\ndDMzM2ughidBwJdIY27+WCr/HHBx/noi6VLV5cBQ4AbgyEpgRCyXtC/wU1Lv0EJS4nJCIeYxSfuQ\
nEp2vAE8Bh0XE1ELMZfmeQCeRLov9GZgQEc/20bGamZnZANHwJCgiup2hFhGvAkfnpbOYJ4F9u9nP\nLaRp8F3FnEMaoG1mZjZghafs1G1ATZE3MzOznpO6j7HOOQkyMzNrQu4Jqp+TIDMzsyYU4Z6gejkJ\nMjMza0JOgurnJMjMzKxJOQmqj5MgMzOzJuQxQfVzEmRmZtaEfDmsfk6CzMzMmpCToPo5CTIzM2tC\nToLq5yTIzMysCTkJqp+TIDMzM2tJToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzM\nmpCToPo5CTIzM2tCToLq5yTIzMzMWpKTIDMzsybknqD6OQkyMzNrQk6C6uckyMzMrAk5CaqfkyAz\nM7Mm5CSofk6CzMzMmlBEo1vQ/JwEmZmZNSn3BNXHSZCZmVkT8uWw+jkJMjMza0JOgurnJMjMzKwJ\nOQmqn5MgMzOzJuQkqH5OgszMzJqQZ4fVz0mQmZlZk3JPUH2cBJmZmTUhXw6rn5MgMzOzJuQkqH5O\ngszMzJqQk6D6OQkyMzNrQh4YXb8BkQRJ2kXSVZKelrRc0v5VYk6S9A9JiyTdJGnL0vqNJP1aUoek\n+ZLOk7ReKeZdkm6V9LKkxyV9vUo9H5M0K8fcK2mvvj9iMzOz+rgnqH4DIgkC1gP+DBwJrJLbSjoO\nOAr4IvBeYCEwRdLahbDfANsAewD7ALsC5xb2sQEwBXgUGAN8HThR0uGFmHF5P78Atgd+D/xe0rZ9\ndaBmZmZ9xUlQfdZsdAMAIuIG4AYAqepHegxwckRcnWM+C8wFPgpcJmkbYAIwNiLuyTFHA9dKOjYi\n5gCfBtYCDouIpcAsSTsAXwXOK9RzfURMyu9PkDSelIB9ua+P28zMrFbuCarfQOkJ6pSkLYDRwM2V\nsohYAPwfMC4X7QzMryRA2VRSr9JOhZhbcwJUMQXYWtLw/H5c3o5SzDjMzMwGECdB9RvwSRApAQpS\nz0/R3LyuEvNMcWVELAOeL8VU2wc9iBmNmZnZAOKB0fVrhiSoM6LK+KFexqiHMf5WMzOzAcU9QfUb\nEGOCujGHlIiMYuVempHAPYWYkcWNJA0BNsrrKjGjSvseycq9TJ3FlHuHVjJx4kSGDx++UllbWxtt\nbW1dbWZmZlazZkqC2tvbaW9vX6mso6OjQa1ZYcAnQRHxqKQ5pFlffwGQNIw01ufsHHYnsKGkHQrj\ngvYgJU/TCzHfkzQkXyoDGA/MjoiOQswewBmFJuyZyzs1efJkxowZU+shmpmZ1aRZkqBqHQMzZ85k\n7NixDWpRMiAuh0laT9K7JW2fi96S32+W358GfEfSfpLeCVwMPAX8D0BEPEgawPwLSTtKej9wJtCe\nZ4ZBmvq+GLhA0raSDgK+ApxaaMrpwF6Svippa0knAmOBs/rr2M3MzGrRTD1BA9VA6Ql6D/C/pEtT\nwYrE5CLg0Ig4RdK6pPv+bAhMA/aKiMWFfXySlKxMBZYDl5OmvANpRpmkCTnmT8A84MSIOL8Qc6ek\nNuD7efkb8JGIeKDvD9nMzKx2ToLqNyCSoIi4hW56pSLiRODELta/QLoXUFf7uA/YrZuYK4Aruoox\nMzNrNM8Oq9+AuBxmZmZmveOeoPo5CTIzM2tCToLq5yTIzMysSTkJqo+TIDMzsybknqD6OQkyMzNr\nQh4YXT8nQWZmZk3IPUH1cxJkZmbWhJwE1c9JkJmZWZNyElQfJ0FmZmZNyD1B9XMSZGZm1oQ8MLp+\nToLMzMyakHuC6uckyMzMrAk5CaqfkyAzM7Mm5CSofk6CzMzMmpSToPo4CTIzM2tC7gmqn5MgMzOz\nJuTZYfVzEmRmZtaE3BNUPydBZmZmTchJUP2cBJmZmTUhJ0H1cxJkZmbWhJYtgzXXbHQrmpuTIDMz\nsya0eDGstVajW9HcnASZmZk1oSVLnATVy0mQmZlZE1qyBNZeu
9GtaG5OgszMzJqQL4fVz0mQmZlZ\nE/LlsPo5CTIzM2tCvhxWPydBZmZmTcg9QfVzEmRmZtaEXn3VSVC9nASZmZk1oWefhTe8odGtaG5O\ngszMzJrMwoXwwguw6aaNbklzcxJkZmbWZJ5+Or06CaqPkyAzM7Mm89RT6dVJUH2cBJmZmTWZShK0\nySaNbUezcxJkZmbWZJ5+Gl7/enjd6xrdkubmJKgKSUdKelTSy5LukrRjo9s02LS3tze6CU3H56w2\nPm+953NWm9V53m6/HbbbbrVVN2g5CSqRdBBwKnACsANwLzBF0oiGNmyQ8S/Z3vM5q43PW+/5nNVm\ndZ23Rx+Fm26Cf/u31VLdoOYkaFUTgXMj4uKIeBD4ErAIOLSxzTIzs1Z3113wwQ+msUAHH9zo1jS/\nNRvdgIFE0lrAWOAHlbKICElTgXENa5iZmbWUCFiwAB5/HGbPhhkz4MYb4Z570mWw66+HjTZqdCub\nn5OglY0AhgBzS+Vzga072+iBB2D58p5XEtH7hg22bZ5/Hu64Y/XUNVi2ee45uOWW1de2WrcbaNs8\n+2y6dNDf9dS7zeqsq7tt5s6Fa6/t/3r6apt66lq+PC2Vr6u9drWu+PrIIzBpEixdCsuWpdfy14sX\np+XVV1e8LlwIixal5cUXU/Lz/PNpXcUmm8Auu8AJJ8C++8KQIbWdJ1uZk6CeEVDtR2wdgM98Ztbq\nbc2g0MH73z+z0Y1oMh3svrvPWe91MH68z1vvdLDvvj5nXZFWXZYt6+C7353JkCF0uqy11srL2mvD\nOuvAeuul2V7rrpu+HjYsvR89GjbbbOVen3vvbdxx96VZs17727lOo9rgJGhl84BlwKhS+UhW7R0C\n2Dy9fLo/2zSIjW10A5qQz1ltfN56z+esK5UeorJFi3zearA5UMO1gfo5CSqIiCWSZgB7AFcBSFJ+\nf0aVTaYAnwIeA15ZTc00MzMbDNYhJUBTGtUARa0XYAcpSR8HLgK+CEwnzRY7EHh7RDzbyLaZmZlZ\n33FPUElEXJbvCXQS6bLYn4EJToDMzMwGF/cEmZmZWUvyzRLNzMysJTkJqkOrPmNM0gmSlpeWBwrr\nh0o6W9I8SS9KulzSyNI+NpN0raSFkuZIOkXSGqWY3SXNkPSKpIckNdX9USXtIukqSU/nc7R/lZiT\nJP1D0iJJN0nasrR+I0m/ltQhab6k8yStV4p5l6Rb8/fh45K+XqWej0malWPulbRX3x9x/bo7Z5J+\nWeV777pSTEudMwBJ35I0XdICSXMlXSlpq1LMavu5bIbfjT08Z38sfa8tk3ROKaZlzhmApC/ln4eO\nvNwh6cOF9c31fRYRXmpYgINIM8I+C7wdOBd4HhjR6LathmM/AfgL8AbS7QNGAv9SWP9T0oy53UjP\nX7sDmFZYvwZwH2lGwDuBCcAzwPcKMZsDLwGnkG5UeSSwBNiz0cffi/P0YdLYso+Sbr2wf2n9cfl7\nZj/gHcDvgYeBtQsx1wMzgfcA7wMeAn5VWL8B8E/SYP5tgI8DC4HDCzH/v707j9WjKuM4/v1VoECb\ntsjiTaTQ0goUQZayNWy3LCVIkCCJNpi68QfKHypEiYkkQEQJokQCNCaCRBZBTAwQKSLLbRsNS1hS\nK9CylbKUawoUKHgV2j7+cc5Lz532Lr2Ud7nz+yRvmpk5M++Zp2fmfe6ZMzOzcuzOz7G8BPgfsF+r\nYzSCmN0A3F1pexMrZWoVs1zfBcC8vD8HAH/Jx+AORZmmHJd0yLlxmDHrAX5TaW/j6xqzXNdT83E6\nPX8uzcfGjE5sZy0PaKd+gIeBq4ppAa8CF7S6bk3Y94uAJwZYNiEfEGcU8/YBNgCH5+lTcoPepShz\nDrAG2CZPXw78s7LtW4EFrd7/EcZsA5v+oK8CzqvErg/4Sp6ekdc7uChzMrAO6MrT3yU932qbosxl\nwNPF9G3AXZXvfgiY3+q4j
CBmNwB/HmSdfescs6Kuu+Q4HF20raYcl516bqzGLM/rAa4cZJ1ax6yo\n75vAtzqxnfly2Aho4zvGHmjMi/S/UKd3jH0uX7J4QdLNkibn+TNJdx2WsVkOvMzG2BwJLI2IN4rt\n3QtMBD5flLm/8p33MkriK2kq0EX/OL0LPEL/OK2JiCeLVe8nPb38iKLM4ohYV5S5F9hH0sQ8PYvR\nFcvufPlimaT5kj5dLJuFYwYwibTPb+XpphyXHX5urMas4WuSVktaKunnknYoltU6ZpLGSJoL7Ej6\nI6Hj2pmToJEZ7B1jXc2vTtM9DHyT9Bf2d4CpwOI87qIL+CD/oJfK2HSx+dgxjDITJI39uDvQBrpI\nJ9zB2lAXqZv4IxGxnnSS3hqx7MS2eg+p+/t44AJSl/sCScrLax+zHItfA3+PiMZYvWYdlx15bhwg\nZgC3kF4J0E16sfY84KZieS1jJml/SWtJvT7zST0/y+jAdubnBG1dA71jbFSJiPLpnv+S9CiwkjS2\nYqAnZw83NoOV0TDKdLrhxGmoMhpmmY6LY0TcXkw+JWkpaRxVN+nSxUDqFLP5wH7A0cMo26zjst1j\n14jZUeXMiLiumHxKUi/wgKSpEbFiiG2O5pgtAw4k9Z6dCdwo6dhByrdtO3NP0Mhs6TvGRrWIeIc0\n+HhpytQAAAY1SURBVHQ60AtsJ2lCpVgZm142jd1nimUDldkNeDciPtga9W6xXtIBO1gb6s3TH5H0\nKWAnho5T2cs0UJmOb6v5h+gNUtuDmsdM0jXAF4HuiFhVLGrWcdlx58ZKzF4fovgj+d+yvdUuZhGx\nLiJejIgnIuInwBLg+3RgO3MSNAIR8SHQeMcY0O8dYy15CVwrSRoPTCMN9H2cNAi1jM3ewB5sjM1D\nwAFKT+ZumAO8AzxTlDmB/ubk+R0v/3j30j9OE0jjVso4TZJ0cLHqCaTk6dGizLH5h75hDrA8J6eN\nMtVYnsQoiKWk3YGdSXd7QY1jln/MTwdmR8TLlcVNOS477dw4RMw252BSsly2t1rFbABjgLF0Yjtr\n9ajyTv2QLv300f/2vDeBXVtdtybs+xXAscCepFuQ7yNl3zvn5fOBFaRLFDOBf7DpLZJLSOM7vkAa\nW/Rv4KdFmSmkWyQvJ91dcC7wAXBiq/d/C+I0jtRlfBDp7ogf5OnJefkFuc2cRrpV9A7gOfrfIr8A\neAw4jNRVvxy4qVg+gZR8/p7Unf/VHLezizKzcuwat3tfTLps2Xa3ew8Ws7zsF6REcU/SCe8x0olz\n27rGLNd3PunummNIfx03PttXynzixyUdcm4cKmbAXsCFwCG5vX0JeB54sK4xy3X9GelS656kR3tc\nRkp8ju/EdtbygHbyJ//HvJT/Ix4CDm11nZq037eSbkXsI436/wMwtVg+Fria1GW5FvgTsFtlG5NJ\nz+V4Lx8AlwNjKmWOI2X7faTkYF6r930L43Qc6Yd8feXzu6LMxaQf5P+Q7n6YXtnGJOBm0l9Ja4Df\nAjtWyhwALMrbeBn44WbqcibpOn4f6RlPJ7c6PlsaM9Ibp/9K6kH7L/Ai6Zkku1a2UauY5bpuLmbr\nga8XZZp2XNIB58ahYgbsDiwEVud2spz0gz++sp3axCzX87p87PXlY/Fv5ASoE9uZ3x1mZmZmteQx\nQWZmZlZLToLMzMyslpwEmZmZWS05CTIzM7NachJkZmZmteQkyMzMzGrJSZCZmZnVkpMgMzMzqyUn\nQWZmZlZLToLMzDJJPZKubHU9zKw5nASZWVuQdI6kdyWNKeaNk/ShpAcqZWdL2iBpSrPraWajh5Mg\nM2sXPaQ3xR9azDsGeB04UtJ2xfzjgJUR8dKWfomkbT5OJc1s9HASZGZtISKeJSU83cXsbuAOYAVw\nZGV+D4CkyZLulLRW0juS/ihpt0ZBSRdJelLS2ZJeJL2BHkk7Sroxr/eapPOrdZJ0rqRnJfV
J6pV0\n+9bdazNrJSdBZtZOFgKzi+nZed6ixnxJY4EjgAdzmTuBSaReoxOBacBtle1OB74MnAEclOf9Mq9z\nGjCHlFjNbKwg6VDgKuBCYG/gZGDxx9w/M2sj7hY2s3ayELgyjwsaR0pYFgPbAecAlwBH5emFkk4C\n9gemRMQqAEnzgKckzYyIx/N2twXmRcRbucw44NvAWRGxMM/7BvBqUZfJwHvA3RHxPvAKsOQT2m8z\nawH3BJlZO2mMCzoMOBp4NiLeIPUEHZHHBXUDL0TEq8C+wCuNBAggIp4B3gZmFNtd2UiAsmmkxOjR\nYr01wPKizH3ASmBFvmx2lqQdttqemlnLOQkys7YRES8Ar5Eufc0mJT9ExOuknpijKMYDAQJiM5uq\nzn9/M8sZYN1GXd4DDgHmAqtIvVBLJE0Y9g6ZWVtzEmRm7aaHlAB1ky6PNSwGTgEOZ2MS9DSwh6TP\nNgpJ2g+YmJcN5HlgHcVga0k7kcb+fCQiNkTEgxHxY+BAYApw/Aj2yczakMcEmVm76QGuJZ2fFhXz\nFwPXkC5jLQSIiPslLQVukXReXnYt0BMRTw70BRHxvqTrgSskvQWsBi4F1jfKSDoV2Ct/7xrgVFIP\n0vJNt2hmnchJkJm1mx5ge+CZiFhdzF8EjAeWRURvMf904Oq8fANwD/C9YXzPj0jjj+4C1gK/AspL\nXW+T7ii7KNfnOWBuHnNkZqOAIga8JG5mZmY2anlMkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\nnASZmZlZLTkJMjMzs1pyEmRmZma15CTIzMzMaslJkJmZmdWSkyAzMzOrJSdBZmZmVktOgszMzKyW\n/g8dz3zImRgHIwAAAABJRU5ErkJggg==\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROz
v4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkchlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhp
zWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukSxB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3ST
VLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGooTGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImz
cC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHq
n8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKwBOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9
B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZgtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQ
vAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLStsDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHA
IcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\n
zfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kj
Yh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIknwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8
WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSIlKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3T
k+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrzzGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1
A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/GDir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWR
JJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -344,25 +362,7 @@ }, { "cell_type": "code", - "execution_count": 231, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Remove rare and common tokens.\n", - "\n", - "# Filter out words that occur too frequently or too rarely.\n", - "max_freq = 0.5\n", - "min_wordcount = 20\n", - "dictionary.filter_extremes(no_below=min_wordcount, no_above=max_freq)\n", - "\n", - "dict0 = dictionary[0] # This sort of \"initializes\" dictionary.id2token." 
- ] - }, - { - "cell_type": "code", - "execution_count": 232, + "execution_count": 246, "metadata": { "collapsed": true }, @@ -376,7 +376,7 @@ }, { "cell_type": "code", - "execution_count": 233, + "execution_count": 247, "metadata": { "collapsed": false }, @@ -462,70 +462,6 @@ "print(time() - start)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Online AT VB 2" - ] - }, - { - "cell_type": "code", - "execution_count": 122, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(onlineatvb2)\n", - "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" - ] - }, - { - "cell_type": "code", - "execution_count": 123, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'time model_online2 = OnlineAtVb2(corpus=corpus, grouped_corpus=disjoint_authors, num_topics=10, id2word=dictionary.id2token, id2author=id2author, author2doc=author2doc, doc2author=doc2author, threshold=1e-19, iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, eval_every=1, random_state=1, var_lambda=None)'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' '\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m 
\u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb2.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, grouped_corpus, num_topics, id2word, id2author, author2doc, doc2author, threshold, minimum_probability, iterations, passes, alpha, eta, decay, offset, eval_every, random_state, var_lambda)\u001b[0m\n\u001b[1;32m 140\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 141\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 142\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvar_lambda\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvar_lambda\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 143\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 144\u001b[0m \u001b[0;32mdef\u001b[0m 
\u001b[0mrho\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/onlineatvb2.py\u001b[0m in \u001b[0;36minference\u001b[0;34m(self, corpus, var_lambda)\u001b[0m\n\u001b[1;32m 303\u001b[0m \u001b[0mcts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnumpy\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mcnt\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcnt\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdoc\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# Word counts.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 304\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 305\u001b[0;31m \u001b[0mvar_gamma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcts\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_mu\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mvar_phi\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 306\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 307\u001b[0m 
\u001b[0;31m# Update Elogtheta, since gamma has been updated.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], - "source": [ - "%time model_online2 = OnlineAtVb2(corpus=corpus, grouped_corpus=disjoint_authors, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-19, \\\n", - " iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "model_online.show_topics()" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -557,7 +493,7 @@ }, { "cell_type": "code", - "execution_count": 234, + "execution_count": 256, "metadata": { "collapsed": true }, @@ -569,7 +505,7 @@ }, { "cell_type": "code", - "execution_count": 237, + "execution_count": 271, "metadata": { "collapsed": false }, @@ -578,56 +514,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 26min 26s, sys: 340 ms, total: 26min 26s\n", - "Wall time: 26min 26s\n" + "CPU times: user 1min 24s, sys: 4 ms, total: 1min 24s\n", + "Wall time: 1min 24s\n" ] } ], "source": [ - "%time model_online = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + "%time model_online = OnlineAtVb(corpus=corpus, num_topics=4, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None)" + " iterations=1, passes=20, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=200, random_state=2, var_lambda=None)" ] }, { "cell_type": "code", - "execution_count": 30, - "metadata": { - "collapsed": true - }, - 
"outputs": [], - "source": [ - "reload(minibatchatvb)\n", - "MinibatchAtVb = minibatchatvb.MinibatchAtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 2min 1s, sys: 24 ms, total: 2min 1s\n", - "Wall time: 2min 1s\n" - ] - } - ], - "source": [ - "%time model_online = MinibatchAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None, chunksize=1)" - ] - }, - { - "cell_type": "code", - "execution_count": 238, + "execution_count": 272, "metadata": { "collapsed": false, "scrolled": false @@ -637,28 +538,16 @@ "data": { "text/plain": [ "[(0,\n", - " '0.109*processor + 0.061*cm + 0.057*link + 0.047*update + 0.046*list + 0.038*temperature + 0.031*grid + 0.026*machine + 0.023*serial + 0.022*matched'),\n", + " '0.011*cell + 0.011*object + 0.009*fiber + 0.008*pulse + 0.008*firing + 0.007*cortex + 0.007*spike + 0.007*cortical + 0.007*receptor + 0.007*axon'),\n", " (1,\n", - " '0.065*chain + 0.054*velocity + 0.036*motion + 0.032*noise + 0.022*detection + 0.020*filter + 0.014*resolution + 0.014*gaussian + 0.013*real_time + 0.012*reconstruction'),\n", + " '0.006*vector + 0.005*memory + 0.005*probability + 0.005*processor + 0.004*energy + 0.004*np + 0.004*boltzmann + 0.004*matrix + 0.004*graph + 0.004*polynomial'),\n", " (2,\n", - " '0.018*map + 0.012*region + 0.011*field + 0.011*cluster + 0.011*human + 0.010*receptive + 0.010*receptive_field + 0.008*orientation + 0.008*environment + 0.008*domain'),\n", + " '0.012*classifier + 0.007*speech + 0.007*node + 0.006*frame + 0.006*chain + 0.006*hidden + 0.005*recognition + 0.005*cluster + 0.004*hidden_unit + 0.004*digit'),\n", " (3,\n", - " 
'0.046*memory + 0.031*string + 0.025*symbol + 0.021*capacity + 0.021*associative + 0.020*associative_memory + 0.020*sequence + 0.018*letter + 0.017*tolerance + 0.016*production'),\n", - " (4,\n", - " '0.020*chip + 0.014*voltage + 0.014*circuit + 0.013*synapse + 0.013*transistor + 0.012*pulse + 0.012*analog + 0.010*action + 0.010*tree + 0.009*current'),\n", - " (5,\n", - " '0.026*classifier + 0.014*speech + 0.013*hidden + 0.011*recognition + 0.011*frame + 0.011*node + 0.010*hidden_unit + 0.009*propagation + 0.008*speaker + 0.008*back_propagation'),\n", - " (6,\n", - " '0.025*cell + 0.023*fiber + 0.019*firing + 0.017*spike + 0.016*cortex + 0.016*axon + 0.016*eye + 0.016*cortical + 0.012*stimulus + 0.012*dendritic'),\n", - " (7,\n", - " '0.004*vector + 0.003*eigenvalue + 0.003*attractor + 0.003*matrix + 0.003*energy + 0.003*np + 0.003*graph + 0.003*optimization + 0.003*fixed_point + 0.003*polynomial'),\n", - " (8,\n", - " '0.110*image + 0.089*object + 0.043*pixel + 0.034*visual + 0.029*contour + 0.029*vision + 0.022*segmentation + 0.018*poggio + 0.018*rotation + 0.017*spectral'),\n", - " (9,\n", - " '0.076*motor + 0.066*controller + 0.052*charge + 0.050*sensor + 0.043*gain + 0.042*control + 0.034*movement + 0.034*body + 0.028*transfer_function + 0.027*loop')]" + " '0.016*chip + 0.014*image + 0.010*velocity + 0.010*voltage + 0.009*circuit + 0.009*transistor + 0.009*charge + 0.009*eye + 0.008*motion + 0.007*analog')]" ] }, - "execution_count": 238, + "execution_count": 272, "metadata": {}, "output_type": "execute_result" } @@ -669,7 +558,7 @@ }, { "cell_type": "code", - "execution_count": 122, + "execution_count": 273, "metadata": { "collapsed": false }, @@ -681,49 +570,28 @@ "\n", "Yaser S.Abu-Mostafa\n", "Docs: [21]\n", - "[(0, 0.43981727821822292),\n", - " (1, 0.028347213089721844),\n", - " (3, 0.096034791617892343),\n", - " (5, 0.11974213992896583),\n", - " (6, 0.04818530676877044),\n", - " (7, 0.052015356949023761),\n", - " (8, 0.19358105735922765),\n", - " (9, 
0.012210592598702002)]\n", + "[(0, 0.16188318876615412), (1, 0.80823920909246583), (3, 0.021312448059559796)]\n", "\n", "Geoffrey E. Hinton\n", - "Docs: [276, 235, 270]\n", - "[(0, 0.23709584775467316),\n", - " (1, 0.036278840277891584),\n", - " (2, 0.060881928460912567),\n", - " (3, 0.040860020890840953),\n", - " (4, 0.22120874865101597),\n", - " (5, 0.17881352536707981),\n", - " (6, 0.011552682298532534),\n", - " (7, 0.17862704317305195),\n", - " (8, 0.034587306660400441)]\n", + "Docs: [146, 276, 235, 270]\n", + "[(0, 0.14004630013032807),\n", + " (1, 0.23772038268835666),\n", + " (2, 0.5640333145036398),\n", + " (3, 0.058200002677675597)]\n", "\n", "Michael I. Jordan\n", "Docs: [205]\n", - "[(0, 0.2657244791505019),\n", - " (1, 0.042233864299392278),\n", - " (2, 0.056208047768936259),\n", - " (3, 0.020408371858599395),\n", - " (4, 0.075285256015344873),\n", - " (5, 0.44939042793717449),\n", - " (7, 0.064270462578477641),\n", - " (8, 0.014524432524677481),\n", - " (9, 0.011664709291004252)]\n", + "[(0, 0.26951795605324808),\n", + " (1, 0.1612862641672847),\n", + " (2, 0.4872153771544665),\n", + " (3, 0.081980402625000656)]\n", "\n", "James M. 
Bower\n", - "Docs: [188, 251, 244]\n", - "[(0, 0.35369888348382267),\n", - " (1, 0.097782509316364244),\n", - " (2, 0.11873783156273017),\n", - " (3, 0.02244484445927224),\n", - " (5, 0.11510615687752906),\n", - " (6, 0.01642214092725941),\n", - " (7, 0.056016631498195003),\n", - " (9, 0.21370723667506888)]\n" + "Docs: [150, 128, 162, 101, 188, 251, 244]\n", + "[(0, 0.67413384788621999),\n", + " (1, 0.071583305581578827),\n", + " (2, 0.06345028631865203),\n", + " (3, 0.19083256021354914)]\n" ] } ], @@ -758,7 +626,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 202, "metadata": { "collapsed": false }, @@ -776,7 +644,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 203, "metadata": { "collapsed": false }, @@ -793,7 +661,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 204, "metadata": { "collapsed": false }, @@ -806,7 +674,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 205, "metadata": { "collapsed": false }, @@ -823,7 +691,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 206, "metadata": { "collapsed": false }, @@ -839,7 +707,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 207, "metadata": { "collapsed": false }, @@ -849,7 +717,7 @@ "output_type": "stream", "text": [ "phi is 10 x 681 x 10 (68100 elements)\n", - "mu is 10 x 681 x 22 (149820 elements)\n" + "mu is 10 x 681 x 21 (143010 elements)\n" ] } ], @@ -874,7 +742,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 212, "metadata": { "collapsed": false }, @@ -883,16 +751,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 16.5 s, sys: 4 ms, total: 16.5 s\n", - "Wall time: 16.5 s\n" + "CPU times: user 34.6 s, sys: 4 ms, total: 34.6 s\n", + "Wall time: 34.6 s\n" ] } ], "source": [ "%time model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, 
\\\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-3, \\\n", - " iterations=10, passes=5, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None)" + " iterations=1, passes=200, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=10, random_state=1, var_lambda=None)" ] }, { @@ -942,6 +810,92 @@ "pprint(model.get_author_topics(model.author2id[name]))\n" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Mini-batch" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(minibatchatvb)\n", + "MinibatchAtVb = minibatchatvb.MinibatchAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 2min 1s, sys: 24 ms, total: 2min 1s\n", + "Wall time: 2min 1s\n" + ] + } + ], + "source": [ + "%time model_online = MinibatchAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None, chunksize=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Online AT VB 2" + ] + }, + { + "cell_type": "code", + "execution_count": 122, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(onlineatvb2)\n", + "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "%time model_online2 = OnlineAtVb2(corpus=corpus, grouped_corpus=disjoint_authors, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, 
doc2author=doc2author, threshold=1e-19, \\\n", + " iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "model_online.show_topics()" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -996,7 +950,7 @@ }, { "cell_type": "code", - "execution_count": 98, + "execution_count": 185, "metadata": { "collapsed": false }, @@ -1008,7 +962,7 @@ }, { "cell_type": "code", - "execution_count": 99, + "execution_count": 186, "metadata": { "collapsed": false }, @@ -1017,15 +971,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 39.4 s, sys: 16 ms, total: 39.4 s\n", - "Wall time: 39.4 s\n" + "CPU times: user 2min 30s, sys: 44 ms, total: 2min 30s\n", + "Wall time: 2min 30s\n" ] } ], "source": [ "%time model_offline = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=3, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", + " iterations=1, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", " eval_every=1, random_state=1)" ] }, @@ -1113,7 +1067,7 @@ }, { "cell_type": "code", - "execution_count": 119, + "execution_count": 149, "metadata": { "collapsed": false }, @@ -1124,41 +1078,18 @@ "text": [ "\n", "Yaser S.Abu-Mostafa\n", - "Docs: [21]\n", - "[(0, 0.13509546636836239),\n", - " (1, 0.44987514305413251),\n", - " (3, 0.015628876899866424),\n", - " (4, 0.17133899219205551),\n", - " (5, 0.12622125049769653),\n", - " (7, 0.038299020391926251),\n", - " (8, 0.060545623938452663)]\n", - "\n", - "Geoffrey E. 
Hinton\n", - "Docs: [276, 235, 270]\n", - "[(0, 0.051128741737399293),\n", - " (2, 0.031947903095827117),\n", - " (3, 0.12717064775550488),\n", - " (4, 0.14970319940657217),\n", - " (5, 0.22650616321963829),\n", - " (6, 0.27680468289365207),\n", - " (7, 0.13302071076542252)]\n", - "\n", - "Michael I. Jordan\n", - "Docs: [205]\n", - "[(0, 0.62535055104859583),\n", - " (4, 0.087850011456332838),\n", - " (5, 0.18252069201813775),\n", - " (7, 0.094925833198552231)]\n", - "\n", - "James M. Bower\n", - "Docs: [188, 251, 244]\n", - "[(0, 0.066667129031022732),\n", - " (1, 0.01033213561317742),\n", - " (2, 0.60401298021861427),\n", - " (3, 0.073436683842966574),\n", - " (4, 0.024716090603344801),\n", - " (8, 0.023197011324340159),\n", - " (9, 0.19741318615356793)]\n" + "Docs: [21]\n" + ] + }, + { + "ename": "NameError", + "evalue": "name 'model' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m 
\u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Geoffrey E. Hinton'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'model' is not defined" ] } ], @@ -1181,7 +1112,7 @@ "name = 'James M. Bower'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))" + "pprint(model.get_author_topics(author2id[name]))\n" ] }, { @@ -1333,7 +1264,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 210, "metadata": { "collapsed": false }, @@ -1342,16 +1273,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 8.61 s, sys: 4 ms, total: 8.61 s\n", - "Wall time: 8.62 s\n" + "CPU times: user 1min 25s, sys: 0 ns, total: 1min 25s\n", + "Wall time: 1min 25s\n" ] } ], "source": [ "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", - " iterations=5, alpha='symmetric', eta='symmetric', \\\n", - " eval_every=1, random_state=1, var_lambda=None)" + " iterations=100, alpha='symmetric', eta='symmetric', \\\n", + " eval_every=10, random_state=1, var_lambda=None)" ] }, { @@ -1431,7 +1362,7 @@ }, { "cell_type": "code", - "execution_count": 239, + "execution_count": 131, "metadata": { "collapsed": true }, @@ -1443,7 +1374,7 @@ }, { "cell_type": "code", - "execution_count": 243, + "execution_count": 132, "metadata": { "collapsed": false }, @@ -1455,7 +1386,7 @@ }, { "cell_type": "code", - 
"execution_count": 244, + "execution_count": 133, "metadata": { "collapsed": false }, @@ -1464,28 +1395,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.025*hidden + 0.015*hidden_unit + 0.012*propagation + 0.009*back_propagation + 0.009*vector + 0.007*gradient + 0.006*constraint + 0.006*speech + 0.005*hidden_layer + 0.005*internal'),\n", + " '0.015*hidden + 0.013*hidden_unit + 0.010*human + 0.009*chain + 0.008*node + 0.008*propagation + 0.007*region + 0.005*back_propagation + 0.005*gradient + 0.005*domain'),\n", " (1,\n", - " '0.019*field + 0.009*visual + 0.009*receptive_field + 0.009*receptive + 0.006*image + 0.006*position + 0.005*center + 0.005*activation + 0.005*joint + 0.005*role'),\n", + " '0.009*activation + 0.008*action + 0.006*machine + 0.006*node + 0.005*energy + 0.005*controller + 0.005*matrix + 0.005*sequence + 0.005*role + 0.005*forward'),\n", " (2,\n", - " '0.022*classifier + 0.018*node + 0.012*recognition + 0.010*classification + 0.009*image + 0.009*class + 0.007*decision + 0.007*frame + 0.006*vector + 0.006*trained'),\n", + " '0.015*hidden + 0.013*speech + 0.012*recognition + 0.012*classifier + 0.008*propagation + 0.008*trained + 0.006*back_propagation + 0.006*classification + 0.006*hidden_layer + 0.005*test'),\n", " (3,\n", - " '0.009*hopfield + 0.009*energy + 0.008*vector + 0.006*matrix + 0.006*optimization + 0.006*probability + 0.006*let + 0.006*minimum + 0.005*equilibrium + 0.005*distribution'),\n", + " '0.018*memory + 0.009*constraint + 0.009*optimization + 0.008*location + 0.007*hopfield + 0.006*address + 0.006*vector + 0.006*map + 0.006*field + 0.005*associative_memory'),\n", " (4,\n", - " '0.031*memory + 0.013*object + 0.012*vector + 0.011*associative + 0.009*capacity + 0.009*matrix + 0.008*associative_memory + 0.007*delay + 0.007*image + 0.006*stored'),\n", + " '0.016*chip + 0.015*circuit + 0.011*analog + 0.010*memory + 0.009*voltage + 0.009*synapse + 0.009*bit + 0.008*current + 0.007*synaptic + 0.007*processor'),\n", " (5,\n", - " 
'0.013*pulse + 0.007*noise + 0.007*response + 0.005*temporal + 0.005*potential + 0.005*fig + 0.005*cell + 0.004*current + 0.004*adaptive + 0.004*firing'),\n", + " '0.029*cell + 0.010*activity + 0.010*firing + 0.010*cortex + 0.007*brain + 0.007*potential + 0.006*inhibitory + 0.006*map + 0.006*cortical + 0.006*fig'),\n", " (6,\n", - " '0.029*cell + 0.010*response + 0.009*stimulus + 0.009*activity + 0.009*firing + 0.008*cortex + 0.007*synaptic + 0.007*spike + 0.007*frequency + 0.006*map'),\n", + " '0.012*vector + 0.007*probability + 0.007*matrix + 0.007*node + 0.006*let + 0.006*class + 0.005*distribution + 0.005*convergence + 0.005*theorem + 0.005*threshold'),\n", " (7,\n", - " '0.022*circuit + 0.016*chip + 0.013*analog + 0.011*voltage + 0.011*current + 0.009*synapse + 0.009*processor + 0.007*transistor + 0.007*synaptic + 0.006*vlsi'),\n", + " '0.018*image + 0.014*field + 0.008*receptive_field + 0.007*receptive + 0.007*orientation + 0.006*center + 0.006*map + 0.005*vector + 0.005*cell + 0.004*noise'),\n", " (8,\n", - " '0.009*action + 0.006*element + 0.006*environment + 0.006*sequence + 0.005*vector + 0.005*fig + 0.004*control + 0.004*forward + 0.004*language + 0.004*controller'),\n", + " '0.014*response + 0.014*cell + 0.011*stimulus + 0.009*current + 0.009*circuit + 0.008*spike + 0.007*frequency + 0.007*velocity + 0.006*synaptic + 0.006*fig'),\n", " (9,\n", - " '0.012*hidden + 0.010*speech + 0.007*recognition + 0.007*generalization + 0.006*trained + 0.005*vector + 0.005*hidden_layer + 0.005*test + 0.005*node + 0.005*training_set')]" + " '0.021*object + 0.012*image + 0.011*vector + 0.010*memory + 0.010*node + 0.008*view + 0.008*motion + 0.006*aspect + 0.005*optical + 0.005*recognition')]" ] }, - "execution_count": 244, + "execution_count": 133, "metadata": {}, "output_type": "execute_result" } @@ -1496,7 +1427,36 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": 144, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + 
"text/plain": [ + "[(0, 0.047033260650131463),\n", + " (1, 0.27397832290210905),\n", + " (2, 0.45395489648769094),\n", + " (3, 0.031717136420799215),\n", + " (4, 0.057763485248973352),\n", + " (6, 0.090069707199356377),\n", + " (7, 0.011674522457529131),\n", + " (9, 0.022742673539037268)]" + ] + }, + "execution_count": 144, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lda[corpus[5]]" + ] + }, + { + "cell_type": "code", + "execution_count": 145, "metadata": { "collapsed": false }, @@ -1504,21 +1464,35 @@ { "data": { "text/plain": [ - "[(0, 0.14679045510589872),\n", - " (2, 0.024722847345847499),\n", - " (3, 0.036692535207794273),\n", - " (4, 0.10874558108160597),\n", - " (6, 0.29675634369596471),\n", - " (9, 0.38555538612902118)]" + "['scaling',\n", + " 'property',\n", + " 'of',\n", + " 'coarse',\n", + " 'coded',\n", + " 'symbol',\n", + " 'memory',\n", + " 'ronald',\n", + " 'rosenfeld',\n", + " 'david',\n", + " 'touretzky',\n", + " 'computer',\n", + " 'science',\n", + " 'department',\n", + " 'carnegie',\n", + " 'mellon',\n", + " 'university',\n", + " 'pittsburgh',\n", + " 'pennsylvania',\n", + " 'abstract']" ] }, - "execution_count": 59, + "execution_count": 145, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "lda[corpus[0]]" + "docs[0][:20]" ] } ], @@ -1538,7 +1512,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.5.2" + "version": "3.4.3+" } }, "nbformat": 4, diff --git a/gensim/models/onlineatvb2.py b/gensim/models/disjointatvb.py similarity index 99% rename from gensim/models/onlineatvb2.py rename to gensim/models/disjointatvb.py index be9f31dbed..57cd93c438 100644 --- a/gensim/models/onlineatvb2.py +++ b/gensim/models/disjointatvb.py @@ -37,7 +37,7 @@ logger = logging.getLogger(__name__) -class OnlineAtVb2(LdaModel): +class DisjointAtVb(LdaModel): """ Train the author-topic model using online variational Bayes. 
""" diff --git a/gensim/models/onlineatvb.py b/gensim/models/onlineatvb.py index 3c85191b07..96475a8f1e 100644 --- a/gensim/models/onlineatvb.py +++ b/gensim/models/onlineatvb.py @@ -11,6 +11,7 @@ # NOTE: from what I understand, my name as well as Radim's should be attributed copyright above? +from time import time import pdb from pdb import set_trace as st @@ -196,6 +197,7 @@ def inference(self, corpus=None, var_lambda=None): logger.info('Log prob: %.3e.', logprob) for _pass in xrange(self.passes): converged = 0 # Number of documents converged for current pass over corpus. + start = time() for d, doc in enumerate(corpus): rhot = self.rho(d + _pass) ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. @@ -265,14 +267,12 @@ def inference(self, corpus=None, var_lambda=None): # and "global" gamma (var_gamma). Same goes for lambda. # TODO: I may need to be smarter about computing rho. In ldamodel, # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). - # FIXME: if tilde_gamma is computed like this in every iteration, then I can't compare - # lastgamma to it for convergence test. FIXME. var_gamma_temp = (1 - rhot) * var_gamma + rhot * tilde_gamma # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, # corresponding to the authors in the document. The same goes for Elogtheta. - Elogtheta = dirichlet_expectation(var_gamma_temp) + Elogtheta[authors_d, :] = dirichlet_expectation(var_gamma_temp[authors_d, :]) # Check for convergence. # Criterion is mean change in "local" gamma and lambda. @@ -313,20 +313,19 @@ def inference(self, corpus=None, var_lambda=None): # End of corpus loop. 
- if _pass % self.eval_every == 0: + if self.eval_every > 0 and (_pass + 1) % self.eval_every == 0: self.var_gamma = var_gamma self.var_lambda = var_lambda - if self.eval_every > 0: - if bound_eval: - prev_bound = bound - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - if logprob_eval: - logprob = self.eval_logprob() - logger.info('Log prob: %.3e.', logprob) + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) #logger.info('Converged documents: %d/%d', converged, self.num_docs) @@ -336,22 +335,21 @@ def inference(self, corpus=None, var_lambda=None): # End of pass over corpus loop. # Ensure that the bound (or log probabilities) is computed at the very last pass. - if self.eval_every != 0 and not _pass % self.eval_every == 0: + if self.eval_every > 0 and not (_pass + 1) % self.eval_every == 0: # If the bound should be computed, and it wasn't computed at the last pass, # then compute the bound. self.var_gamma = var_gamma self.var_lambda = var_lambda - if self.eval_every > 0: - if bound_eval: - prev_bound = bound - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - if logprob_eval: - logprob = self.eval_logprob() - logger.info('Log prob: %.3e.', logprob) + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) self.var_lambda = var_lambda From edd5025b6f44d0ba0280353ad3cd27bde559cebe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Sun, 13 Nov 2016 15:14:09 +0100 Subject: [PATCH 041/100] Implemented a new algorithm. It is 5 times faster, more memory efficient, and even gives better results. --- docs/notebooks/at_with_nips.ipynb | 293 ++++++++++---- gensim/models/__init__.py | 2 + gensim/models/atvb2.py | 610 ++++++++++++++++++++++++++++++ gensim/models/onlineatvb2.py | 510 +++++++++++++++++++++++++ 4 files changed, 1341 insertions(+), 74 deletions(-) create mode 100644 gensim/models/atvb2.py create mode 100644 gensim/models/onlineatvb2.py diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index d0e583e3d3..7d5308eaee 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -69,6 +69,10 @@ "from gensim.models import onlineatvb\n", "from gensim.models import MinibatchAtVb\n", "from gensim.models import minibatchatvb\n", + "from gensim.models import AtVb2\n", + "from gensim.models import atvb2\n", + "from gensim.models import OnlineAtVb2\n", + "from gensim.models import onlineatvb2\n", "\n", "from time import time\n", "\n", @@ -78,18 +82,6 @@ { "cell_type": "code", "execution_count": 3, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "from gensim.models import 
onlineatvb2\n", - "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" - ] - }, - { - "cell_type": "code", - "execution_count": 4, "metadata": { "collapsed": false }, @@ -97,8 +89,8 @@ "source": [ "# Configure logging.\n", "\n", - "#log_dir = '../../../log_files/log.log' # On my own machine.\n", - "log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "log_dir = '../../../log_files/log.log' # On my own machine.\n", + "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", "\n", "logger = logging.getLogger()\n", "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", @@ -117,7 +109,7 @@ }, { "cell_type": "code", - "execution_count": 235, + "execution_count": 163, "metadata": { "collapsed": false }, @@ -127,8 +119,8 @@ "import re\n", "\n", "# Folder containing all NIPS papers.\n", - "#data_dir = '../../../../data/nipstxt/' # On my own machine.\n", - "data_dir = '../../../nipstxt/' # On Hetzner.\n", + "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "#data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", @@ -154,7 +146,7 @@ }, { "cell_type": "code", - "execution_count": 236, + "execution_count": 164, "metadata": { "collapsed": false }, @@ -184,7 +176,7 @@ }, { "cell_type": "code", - "execution_count": 237, + "execution_count": 165, "metadata": { "collapsed": false }, @@ -196,7 +188,7 @@ }, { "cell_type": "code", - "execution_count": 238, + "execution_count": 166, "metadata": { "collapsed": false }, @@ -214,7 +206,7 @@ }, { "cell_type": "code", - "execution_count": 239, + "execution_count": 167, "metadata": { "collapsed": false }, @@ -240,7 +232,7 @@ }, { "cell_type": "code", - "execution_count": 240, + "execution_count": 168, "metadata": { "collapsed": false }, @@ -263,7 +255,7 @@ }, { "cell_type": "code", - "execution_count": 241, + "execution_count": 169, "metadata": { "collapsed": true }, @@ -278,7 
+270,7 @@ }, { "cell_type": "code", - "execution_count": 242, + "execution_count": 170, "metadata": { "collapsed": true }, @@ -297,7 +289,7 @@ }, { "cell_type": "code", - "execution_count": 243, + "execution_count": 171, "metadata": { "collapsed": true }, @@ -309,7 +301,7 @@ }, { "cell_type": "code", - "execution_count": 244, + "execution_count": 172, "metadata": { "collapsed": false }, @@ -327,7 +319,7 @@ }, { "cell_type": "code", - "execution_count": 245, + "execution_count": 173, "metadata": { "collapsed": false }, @@ -336,7 +328,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6
r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkchlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\n
GDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukSxB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUk
KxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGooTGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDN
cBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo
0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKwBOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmh
KachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZgtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBH
gIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLStsDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s
4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4St
JRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSf
OAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIknwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcd
BIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSIlKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3
cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrzzGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJ
RcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/GDir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY
2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -362,7 +354,7 @@ }, { "cell_type": "code", - "execution_count": 246, + "execution_count": 174, "metadata": { "collapsed": true }, @@ -376,7 +368,7 @@ }, { "cell_type": "code", - "execution_count": 247, + "execution_count": 175, "metadata": { "collapsed": false }, @@ -462,6 +454,108 @@ "print(time() - start)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Online AT VB 2" + ] + }, + { + "cell_type": "code", + "execution_count": 153, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(onlineatvb2)\n", + "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" + ] + }, + { + "cell_type": "code", + "execution_count": 178, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 1min 15s, sys: 0 ns, total: 1min 15s\n", + "Wall time: 1min 15s\n" + ] + } + ], + "source": [ + "%time model_online2 = OnlineAtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=1, passes=40, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=100, random_state=2, var_lambda=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 179, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.056*image + 0.047*object + 0.037*map + 0.034*velocity + 0.032*eye + 
0.027*visual + 0.022*field + 0.022*motion + 0.021*receptive + 0.021*receptive_field'),\n", + " (1,\n", + " '0.017*memory + 0.011*vector + 0.010*processor + 0.009*matrix + 0.009*energy + 0.009*graph + 0.008*associative + 0.008*boltzmann + 0.008*np + 0.008*optimization'),\n", + " (2,\n", + " '0.017*probability + 0.015*theorem + 0.014*polynomial + 0.011*markov + 0.011*theory + 0.010*distribution + 0.010*proof + 0.009*criterion + 0.009*separable + 0.008*let'),\n", + " (3,\n", + " '0.039*cell + 0.035*fiber + 0.029*firing + 0.025*cortex + 0.025*axon + 0.024*cortical + 0.023*receptor + 0.021*synaptic + 0.020*activity + 0.020*stimulus'),\n", + " (4,\n", + " '0.008*spike + 0.006*controller + 0.006*frequency + 0.006*correlation + 0.006*transfer + 0.005*fig + 0.005*link + 0.005*phase + 0.005*delay + 0.004*loop'),\n", + " (5,\n", + " '0.010*speech + 0.009*frame + 0.008*recognition + 0.007*region + 0.006*noise + 0.006*speaker + 0.006*acoustic + 0.006*character + 0.006*human + 0.005*domain'),\n", + " (6,\n", + " '0.045*chain + 0.039*eigenvalue + 0.033*fixed_point + 0.031*oscillation + 0.031*basin + 0.030*attractor + 0.027*hebb + 0.025*oscillatory + 0.024*stability + 0.021*dt'),\n", + " (7,\n", + " '0.062*classifier + 0.032*hidden + 0.025*hidden_unit + 0.021*propagation + 0.020*back_propagation + 0.018*hidden_layer + 0.017*bp + 0.016*training_set + 0.014*xor + 0.013*backpropagation'),\n", + " (8,\n", + " '0.013*cluster + 0.012*node + 0.009*string + 0.008*tree + 0.007*failure + 0.007*robot + 0.007*letter + 0.007*symbol + 0.006*recurrent + 0.006*competitive'),\n", + " (9,\n", + " '0.061*chip + 0.052*pulse + 0.042*circuit + 0.041*voltage + 0.040*transistor + 0.034*charge + 0.034*synapse + 0.030*analog + 0.027*impulse + 0.025*vlsi')]" + ] + }, + "execution_count": 179, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_online2.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 162, + "metadata": { + "collapsed": false + }, + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Speed improvement from new algorithm: 5.503876!\n" + ] + } + ], + "source": [ + "print(\"Speed improvement from new algorithm: %f!\" %(28.4 / 5.16))" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -493,7 +587,7 @@ }, { "cell_type": "code", - "execution_count": 256, + "execution_count": 118, "metadata": { "collapsed": true }, @@ -505,7 +599,7 @@ }, { "cell_type": "code", - "execution_count": 271, + "execution_count": 157, "metadata": { "collapsed": false }, @@ -514,21 +608,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1min 24s, sys: 4 ms, total: 1min 24s\n", - "Wall time: 1min 24s\n" + "CPU times: user 28.3 s, sys: 12 ms, total: 28.4 s\n", + "Wall time: 28.4 s\n" ] } ], "source": [ - "%time model_online = OnlineAtVb(corpus=corpus, num_topics=4, id2word=dictionary.id2token, id2author=id2author, \\\n", + "%time model_online = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=20, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", " eval_every=200, random_state=2, var_lambda=None)" ] }, { "cell_type": "code", - "execution_count": 272, + "execution_count": 40, "metadata": { "collapsed": false, "scrolled": false @@ -538,16 +632,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.011*cell + 0.011*object + 0.009*fiber + 0.008*pulse + 0.008*firing + 0.007*cortex + 0.007*spike + 0.007*cortical + 0.007*receptor + 0.007*axon'),\n", + " '0.075*image + 0.037*field + 0.034*visual + 0.031*position + 0.029*move + 0.025*map + 0.025*location + 0.021*center + 0.021*search + 0.019*human'),\n", " (1,\n", - " '0.006*vector + 0.005*memory + 0.005*probability + 0.005*processor + 0.004*energy + 0.004*np + 0.004*boltzmann + 0.004*matrix + 0.004*graph + 
0.004*polynomial'),\n", + " '0.044*bit + 0.038*code + 0.030*hopfield + 0.029*matrix + 0.024*eq + 0.019*stored + 0.017*minimum + 0.016*stage + 0.014*optimization + 0.013*column'),\n", " (2,\n", - " '0.012*classifier + 0.007*speech + 0.007*node + 0.006*frame + 0.006*chain + 0.006*hidden + 0.005*recognition + 0.005*cluster + 0.004*hidden_unit + 0.004*digit'),\n", + " '0.031*iv + 0.025*differential + 0.023*code + 0.023*scheme + 0.020*adaptive + 0.017*find + 0.016*criterion + 0.015*he + 0.014*bound + 0.014*half'),\n", " (3,\n", - " '0.016*chip + 0.014*image + 0.010*velocity + 0.010*voltage + 0.009*circuit + 0.009*transistor + 0.009*charge + 0.009*eye + 0.008*motion + 0.007*analog')]" + " '0.035*activity + 0.033*array + 0.027*cell + 0.023*synaptic + 0.020*low + 0.018*rate + 0.017*synapsis + 0.016*region + 0.016*storage + 0.016*distribution'),\n", + " (4,\n", + " '0.052*role + 0.049*loop + 0.046*processor + 0.037*sequence + 0.029*gain + 0.021*product + 0.018*activation + 0.018*multiple + 0.018*edge + 0.017*address'),\n", + " (5,\n", + " '0.028*stimulus + 0.024*classification + 0.024*shape + 0.020*circuit + 0.018*fully + 0.018*design + 0.015*power + 0.015*pp + 0.014*sample + 0.014*experiment'),\n", + " (6,\n", + " '0.042*capacity + 0.034*associative_memory + 0.019*feedback + 0.018*cell + 0.017*phase + 0.016*interaction + 0.015*delay + 0.014*recall + 0.014*sequence + 0.014*matrix'),\n", + " (7,\n", + " '0.061*node + 0.049*hidden + 0.036*convergence + 0.033*energy + 0.030*gradient + 0.030*dynamic + 0.019*back_propagation + 0.016*back + 0.016*propagation + 0.016*learning_algorithm'),\n", + " (8,\n", + " '0.060*training + 0.039*representation + 0.029*connectionist + 0.028*trained + 0.020*context + 0.017*learned + 0.017*target + 0.015*mcclelland + 0.015*hidden_unit + 0.015*rumelhart'),\n", + " (9,\n", + " '0.074*firing + 0.056*stimulus + 0.056*cell + 0.037*connectivity + 0.033*path + 0.030*potential + 0.027*temporal + 0.027*control + 0.021*synaptic + 0.019*inhibition')]" ] }, - 
"execution_count": 272, + "execution_count": 40, "metadata": {}, "output_type": "execute_result" } @@ -856,44 +962,83 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Online AT VB 2" + "## Offline AT VB 2" ] }, { "cell_type": "code", - "execution_count": 122, + "execution_count": 22, "metadata": { "collapsed": true }, "outputs": [], "source": [ - "reload(onlineatvb2)\n", - "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" + "reload(atvb2)\n", + "AtVb2 = atvb2.AtVb2" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": { - "collapsed": true + "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 2min 10s, sys: 72 ms, total: 2min 11s\n", + "Wall time: 2min 11s\n" + ] + } + ], "source": [ - "%time model_online2 = OnlineAtVb2(corpus=corpus, grouped_corpus=disjoint_authors, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-19, \\\n", - " iterations=10, passes=3, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None)\n" + "%time model_offline2 = AtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", + " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", + " eval_every=1, random_state=1)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": { - "collapsed": true + "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.018*path + 0.014*center + 0.013*constraint + 0.011*map + 0.011*activity + 0.010*array + 0.010*rate + 0.010*cycle + 0.010*visual + 0.010*iv'),\n", + " (1,\n", + " '0.019*matrix + 0.016*delay + 0.013*associative_memory + 0.013*capacity + 0.012*potential + 0.010*storage + 
0.010*classification + 0.010*dynamic + 0.010*synaptic + 0.009*rate'),\n", + " (2,\n", + " '0.044*cell + 0.020*stimulus + 0.014*probability + 0.010*region + 0.009*training + 0.008*noise + 0.007*field + 0.007*node + 0.007*actual + 0.007*area'),\n", + " (3,\n", + " '0.026*code + 0.025*hopfield + 0.015*sequence + 0.015*image + 0.013*energy + 0.013*length + 0.013*machine + 0.012*field + 0.012*matrix + 0.011*minimum'),\n", + " (4,\n", + " '0.032*processor + 0.023*activation + 0.012*dynamic + 0.012*operation + 0.012*hidden + 0.011*energy + 0.011*edge + 0.010*machine + 0.010*update + 0.009*training'),\n", + " (5,\n", + " '0.024*hidden + 0.016*hidden_unit + 0.013*matrix + 0.012*sequence + 0.012*adaptive + 0.012*action + 0.010*multiple + 0.009*training + 0.009*back + 0.008*learn'),\n", + " (6,\n", + " '0.026*training + 0.015*stage + 0.014*bit + 0.013*optimization + 0.012*convergence + 0.011*eq + 0.011*surface + 0.011*node + 0.011*neural_net + 0.010*code'),\n", + " (7,\n", + " '0.049*cell + 0.015*node + 0.014*feature + 0.013*region + 0.011*map + 0.011*control + 0.011*back + 0.010*temporal + 0.008*cycle + 0.008*decision'),\n", + " (8,\n", + " '0.023*cell + 0.014*probability + 0.012*current + 0.012*position + 0.012*image + 0.011*principle + 0.010*noise + 0.008*dimensional + 0.008*shape + 0.007*firing'),\n", + " (9,\n", + " '0.042*representation + 0.033*activity + 0.029*role + 0.026*firing + 0.023*cell + 0.014*stimulus + 0.014*variable + 0.013*product + 0.012*potential + 0.010*synaptic')]" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "model_online.show_topics()" + "model_offline2.show_topics()" ] }, { @@ -962,7 +1107,7 @@ }, { "cell_type": "code", - "execution_count": 186, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -971,21 +1116,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 2min 30s, sys: 44 ms, total: 2min 30s\n", - "Wall time: 2min 30s\n" + "CPU times: user 2min 
16s, sys: 120 ms, total: 2min 16s\n", + "Wall time: 2min 16s\n" ] } ], "source": [ "%time model_offline = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=1, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", + " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", " eval_every=1, random_state=1)" ] }, { "cell_type": "code", - "execution_count": 143, + "execution_count": 29, "metadata": { "collapsed": false }, @@ -994,28 +1139,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.017*cell + 0.008*activity + 0.008*training + 0.007*matrix + 0.007*field + 0.007*representation + 0.006*hopfield + 0.006*probability + 0.006*stimulus + 0.006*current'),\n", + " '0.019*path + 0.015*center + 0.014*constraint + 0.011*rate + 0.011*map + 0.011*cycle + 0.010*array + 0.010*visual + 0.009*activity + 0.009*iv'),\n", " (1,\n", - " '0.015*cell + 0.009*matrix + 0.007*probability + 0.007*hopfield + 0.007*training + 0.006*activity + 0.006*feature + 0.006*stimulus + 0.006*hidden + 0.005*representation'),\n", + " '0.018*matrix + 0.016*delay + 0.013*associative_memory + 0.013*potential + 0.012*capacity + 0.011*synaptic + 0.010*classification + 0.010*dynamic + 0.010*storage + 0.008*circuit'),\n", " (2,\n", - " '0.015*cell + 0.008*matrix + 0.008*training + 0.007*representation + 0.006*image + 0.006*field + 0.006*dynamic + 0.006*probability + 0.006*activity + 0.006*sequence'),\n", + " '0.040*cell + 0.015*stimulus + 0.014*probability + 0.010*region + 0.010*training + 0.009*noise + 0.008*convergence + 0.007*field + 0.007*node + 0.007*positive'),\n", " (3,\n", - " '0.013*cell + 0.009*training + 0.007*activity + 0.006*hidden + 0.006*node + 0.005*matrix + 0.005*probability + 0.005*hopfield + 0.005*representation + 0.005*sequence'),\n", + " '0.026*code + 0.024*hopfield + 0.015*sequence + 0.015*image + 0.013*length + 0.012*matrix + 0.012*energy + 
0.012*field + 0.012*machine + 0.011*current'),\n", " (4,\n", - " '0.016*cell + 0.007*activity + 0.007*training + 0.007*matrix + 0.007*representation + 0.006*hidden + 0.006*feature + 0.006*synaptic + 0.006*rate + 0.005*field'),\n", + " '0.032*processor + 0.023*activation + 0.013*dynamic + 0.013*energy + 0.012*operation + 0.011*edge + 0.010*hidden + 0.010*machine + 0.010*update + 0.009*matrix'),\n", " (5,\n", - " '0.015*cell + 0.008*training + 0.006*matrix + 0.006*sequence + 0.006*field + 0.006*bit + 0.006*stimulus + 0.006*hopfield + 0.006*noise + 0.005*firing'),\n", + " '0.022*hidden + 0.016*hidden_unit + 0.014*matrix + 0.013*sequence + 0.012*adaptive + 0.012*action + 0.010*multiple + 0.009*training + 0.008*back + 0.008*stored'),\n", " (6,\n", - " '0.018*cell + 0.008*representation + 0.008*matrix + 0.006*hopfield + 0.006*hidden + 0.005*rate + 0.005*firing + 0.005*training + 0.005*capacity + 0.005*node'),\n", + " '0.025*training + 0.015*stage + 0.014*bit + 0.013*optimization + 0.012*convergence + 0.011*eq + 0.011*surface + 0.010*neural_net + 0.010*code + 0.010*hidden'),\n", " (7,\n", - " '0.014*cell + 0.007*activity + 0.007*matrix + 0.007*field + 0.007*training + 0.007*node + 0.006*hopfield + 0.006*representation + 0.006*rate + 0.005*synaptic'),\n", + " '0.056*cell + 0.017*node + 0.015*region + 0.013*feature + 0.013*map + 0.012*back + 0.011*control + 0.010*temporal + 0.009*decision + 0.008*activity'),\n", " (8,\n", - " '0.013*cell + 0.008*activity + 0.007*hidden + 0.007*training + 0.007*matrix + 0.006*feature + 0.006*capacity + 0.006*hopfield + 0.006*synaptic + 0.005*rate'),\n", + " '0.023*cell + 0.013*probability + 0.013*image + 0.012*position + 0.012*current + 0.011*principle + 0.010*noise + 0.008*dimensional + 0.007*shape + 0.007*firing'),\n", " (9,\n", - " '0.014*cell + 0.009*matrix + 0.008*representation + 0.007*image + 0.007*activity + 0.007*hidden + 0.006*stimulus + 0.006*training + 0.006*hopfield + 0.006*firing')]" + " '0.042*representation + 0.034*activity + 
0.029*role + 0.025*firing + 0.021*cell + 0.017*stimulus + 0.014*variable + 0.014*product + 0.012*potential + 0.010*synaptic')]" ] }, - "execution_count": 143, + "execution_count": 29, "metadata": {}, "output_type": "execute_result" } @@ -1512,7 +1657,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.4.3+" + "version": "3.5.2" } }, "nbformat": 4, diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index 01df446a10..3063de7956 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -19,6 +19,8 @@ from .onlineatvb import OnlineAtVb from .minibatchatvb import MinibatchAtVb from .atvb import AtVb +from .atvb2 import AtVb2 +from .onlineatvb2 import OnlineAtVb2 from . import wrappers diff --git a/gensim/models/atvb2.py b/gensim/models/atvb2.py new file mode 100644 index 0000000000..58e761f3dc --- /dev/null +++ b/gensim/models/atvb2.py @@ -0,0 +1,610 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Author-topic model. 
+""" + +import pdb +from pdb import set_trace as st + +import logging +import numpy +import numbers + +from gensim import utils, matutils +from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models import LdaModel +from scipy.special import gammaln, psi # gamma function utils +from scipy.special import polygamma +from scipy.optimize import line_search + +from six.moves import xrange + +from pprint import pprint +from random import sample +from copy import deepcopy + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger('gensim.models.atmodel') + +def update_dir_prior(prior, N, logphat, rho): + """ + Updates a given prior using Newton's method, described in + **Huang: Maximum Likelihood Estimation of Dirichlet Distribution Parameters.** + http://jonathan-huang.org/research/dirichlet/dirichlet.pdf + """ + dprior = numpy.copy(prior) + gradf = N * (psi(numpy.sum(prior)) - psi(prior) + logphat) + + c = N * polygamma(1, numpy.sum(prior)) + q = -N * polygamma(1, prior) + + b = numpy.sum(gradf / q) / (1 / c + numpy.sum(1 / q)) + + dprior = -(gradf - b) / q + + # NOTE: in the LDA code, the criterion below is: + # if all(rho * dprior + prior > 0) + # But this causes an error for me, but the below criterion works. 
+ if (rho * dprior + prior > 0).all(): + prior += rho * dprior + else: + logger.warning("updated prior not positive") + + return prior + +def dir_mle_search_direction(prior, N, logphat): + """ + Updates a given prior using Newton's method, described in + **Huang: Maximum Likelihood Estimation of Dirichlet Distribution Parameters.** + http://jonathan-huang.org/research/dirichlet/dirichlet.pdf + """ + dprior = numpy.copy(prior) + gradf = N * (psi(numpy.sum(prior)) - psi(prior) + logphat) + + c = N * polygamma(1, numpy.sum(prior)) + q = -N * polygamma(1, prior) + + b = numpy.sum(gradf / q) / (1 / c + numpy.sum(1 / q)) + + dprior = -(gradf - b) / q + + return dprior + +class AtVb2(LdaModel): + """ + Train the author-topic model using variational Bayes. + """ + + def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, + author2doc=None, doc2author=None, threshold=0.001, + iterations=10, alpha='symmetric', eta='symmetric', minimum_probability=0.01, + eval_every=1, random_state=None, var_lambda=None): + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + # NOTE: this stuff is confusing to me (from LDA code). Why would id2word not be none, but have length 0? 
+ if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + logger.info('Vocabulary consists of %d words.', self.num_terms) + + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). + # If either doc2author or author2doc is missing, construct them from the other. + if doc2author is None: + # Make a mapping from document IDs to author IDs. + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + elif author2doc is None: + # Make a mapping from author IDs to document IDs. + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. 
+ author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + + self.author2doc = author2doc + self.doc2author = doc2author + + self.num_authors = len(self.author2doc) + logger.info('Number of authors: %d.', self.num_authors) + + self.id2author = id2author + if self.id2author is None: + logger.warning("no author id mapping provided; initializing from corpus, assuming identity") + author_integer_ids = [str(i) for i in range(len(author2doc))] + self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) + + self.corpus = corpus + self.iterations = iterations + self.num_topics = num_topics + self.threshold = threshold + self.minimum_probability = minimum_probability + self.num_docs = len(corpus) + self.num_authors = len(author2doc) + self.eval_every = eval_every + self.random_state = random_state + + self.random_state = get_random_state(random_state) + + # NOTE: I don't think this necessarily is a good way to initialize the topics. + self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + + if alpha == 'auto': + self.optimize_alpha = True + else: + self.optimize_alpha = False + + if eta == 'auto': + self.optimize_eta = True + else: + self.optimize_eta = False + + if corpus is not None: + self.inference(corpus, author2doc, doc2author, var_lambda) + + def update_alpha(self, var_gamma): + """ + Update parameters for the Dirichlet prior on the per-document + topic weights `alpha` given the last `var_gamma`. + """ + N = float(var_gamma.shape[0]) + + # NOTE: there might be possibility for overflow if number + # of authors is very high. 
+ logphat = 0.0 + for a in xrange(self.num_authors): + logphat += dirichlet_expectation(var_gamma[a, :]) + logphat *= 1 / N + + self.alpha = update_dir_prior(self.alpha, N, logphat, 1) + + def update_alpha_ls(self, var_gamma): + """ + Work in progress. + MLE of alpha with line search. + """ + N = float(var_gamma.shape[0]) + + # NOTE: there might be possibility for overflow if number + # of authors is very high. + logphat = 0.0 + for a in xrange(self.num_authors): + logphat += dirichlet_expectation(var_gamma[a, :]) + logphat *= 1 / N + + def f(alpha): + '''Compute the Dirichlet likelihood.''' + return -N * (gammaln(numpy.sum(alpha)) - numpy.sum(gammaln(alpha)) + numpy.sum((alpha - 1) * logphat)) + + def g(alpha): + '''Compute the first derivative of the Dirichlet likelihood.''' + return -N * (psi(numpy.sum(alpha)) - psi(alpha) + logphat) + + + # TODO: consider what stopping criterion to use here, and + # how many maximum iterations to use. + # TODO: consider using line search. + f1 = f(self.alpha) + #print(f1) + #print(0) + for i in xrange(10): + # Obtain search direction for Newton step. + pk = dir_mle_search_direction(self.alpha, N, logphat) + # Obtain stepsize using Wolfe condition. + stepsize = line_search(f, g, self.alpha, pk)[0] + # Update alpha. + # NOTE: need to check that update is positive. + self.alpha += stepsize * pk + f2 = f(self.alpha) + if (f2 - f1) / f1 < 0.01: + break + else: + f1 = f2 + #print(f2) + + # logger.info("optimized eta %s", list(self.alpha)) + + return self.alpha + + def update_eta(self, var_lambda): + """ + Update parameters for the Dirichlet prior on the per-document + topic weights `eta` given the last `var_lambda`. 
+ """ + N = float(len(var_lambda)) + + logphat = 0.0 + for k in xrange(self.num_topics): + logphat += dirichlet_expectation(var_lambda[k, :]) + logphat *= 1 / N + + self.eta = update_dir_prior(self.eta, N, logphat, 1) + # logger.info("optimized eta %s", list(self.eta)) + + return self.eta + + def inference(self, corpus=None, author2doc=None, doc2author=None, var_lambda=None): + if corpus is None: + corpus = self.corpus + + logger.info('Starting inference. Training on %d documents.', len(corpus)) + + # Whether or not to evaluate bound and log probability, respectively. + bound_eval = True + logprob_eval = False + + if var_lambda is None: + self.optimize_lambda = True + else: + # We have topics from LDA, thus we do not train the topics. + self.optimize_lambda = False + + # Initial value of gamma and lambda. + # NOTE: parameters of gamma distribution same as in `ldamodel`. + var_gamma = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + + if var_lambda is None: + var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + else: + self.norm_lambda = var_lambda.copy() + for k in xrange(self.num_topics): + self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + + self.var_lambda = var_lambda + self.var_gamma = var_gamma + + var_phi = numpy.zeros((self.num_docs, self.num_terms, self.num_authors, self.num_topics)) + + # TODO: consider how to vectorize opterations as much as + # possible. + # TODO: check vector and matrix dimensions, and ensure that + # things are multiplied along the correct dimensions. + + Elogtheta = dirichlet_expectation(var_gamma) + expElogtheta = numpy.exp(dirichlet_expectation(var_gamma)) + # NOTE: computing the Dirichlet expectation of lambda may + # cause overflow when the vocabulary is very large, as it + # requires a sum over vocab words. 
+ Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + if self.eval_every > 0: + if bound_eval: + # TODO: compute per-word bound. + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + for iteration in xrange(self.iterations): + lastgamma = var_gamma.copy() + lastlambda = var_lambda.copy() + #logger.info('Starting iteration %d.', iteration) + # Update phi. + for d, doc in enumerate(corpus): + #logger.info('Updating phi, document %d.', d) + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + authors_d = doc2author[d] # List of author IDs for document d. + + # Update phi. + for v in ids: + for a in authors_d: + for k in xrange(self.num_topics): + # Compute phi. + # TODO: avoid computing phi if possible. + # NOTE: won't var_phi[d, v, a, k] be the same for two different d if + # a is the same? + # NOTE: computation can be made more stable by adding the maximal value + # inside the exponential, which will disappear in the normalization. + var_phi[d, v, a, k] = expElogtheta[a, k] * expElogbeta[k, v] + # Normalize phi. + var_phi[d, v, :, :] = var_phi[d, v, :, :] / (var_phi[d, v, :, :].sum() + 1e-100) + + # Update gamma. + #logger.info('Updating gamma.') + for a in xrange(self.num_authors): + for k in xrange(self.num_topics): + docs_a = self.author2doc[a] + var_gamma[a, k] = 0.0 + var_gamma[a, k] += self.alpha[k] + for d in docs_a: + # TODO: if this document doesn't exist, we will have problems here. Could to an "if corpus.get(d)" type of thing. + doc = corpus[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. 
+ cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + for vi, v in enumerate(ids): + var_gamma[a, k] += cts[vi] * var_phi[d, v, a, k] + + if self.optimize_alpha: + # NOTE: taking a full Newton step seems to yield good results. + # In the LDA code, they use rho() as step size. This seems + # very arbitrary; if a carefully chosen stepsize is needed, + # linesearch would probably be better. + stepsize = 1 + self.update_alpha(var_gamma) + + # Update Elogtheta, since gamma has been updated. + Elogtheta = dirichlet_expectation(var_gamma) + expElogtheta = numpy.exp(dirichlet_expectation(var_gamma)) + + # Update lambda. + if self.optimize_lambda: + #logger.info('Updating lambda.') + for k in xrange(self.num_topics): + #logger.info('k = %d.', k) + for v in xrange(self.num_terms): + #logger.info('v = %d.', v) + var_lambda[k, v] = self.eta[v] + + # The following commented-out code is used for "sampling" documents when + # updating lambda: + # sample_ratio = 1.0 # When sample_ratio is 1.0, the whole dataset is used. + # nsamples = int(numpy.ceil(self.num_docs * sample_ratio)) + # doc_idxs = sample(xrange(self.num_docs), nsamples) + + # TODO: this would be more efficient if there was a mapping from words + # to the documents that contain that word, although that mapping would be + # very large. + # NOTE: the below might cause overflow if number of documents is very large, + # although it seems somewhat unlikely. + for d, doc in enumerate(corpus): + # Get the count of v in doc. If v is not in doc, return 0. + cnt = dict(doc).get(v, 0) + phi_sum = 0.0 + for a in self.doc2author[d]: + phi_sum += var_phi[d, v, a, k] + var_lambda[k, v] += cnt * phi_sum + + if self.optimize_eta: + stepsize = 1 + self.update_eta(var_lambda) + + # Update Elogbeta, since lambda has been updated. 
+ Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + self.var_lambda = var_lambda + + self.var_gamma = var_gamma + + #meanchange = numpy.mean(abs(var_gamma - lastgamma)) + #logger.info('meanchange in gamma: %.3e', meanchange) + #meanchange = numpy.mean(abs(var_lambda - lastlambda)) + #logger.info('meanchange in lambda: %.3e', meanchange) + + # Print topics: + #pprint(self.show_topics()) + + # Evaluate bound. + if (iteration + 1) % self.eval_every == 0: + if bound_eval: + prev_bound = deepcopy(bound) + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + if bound_eval: + if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.threshold: + break + # End of update loop (iterations). + + # Ensure that the bound (or log probabilities) is computed after the last iteration. + if self.eval_every != 0 and not (iteration + 1) % self.eval_every == 0: + if bound_eval: + prev_bound = deepcopy(bound) + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + + return var_gamma, var_lambda + + def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): + """ + Compute the expectation of the log conditional likelihood of the data, + + E_q[log p(w_d | theta, beta, A_d)], + + where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. + """ + + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + # NOTE: computing the bound this way is very numerically unstable, which is why + # "logsumexp" is used in the LDA code. + bound= 0.0 + for d, doc in enumerate(docs): + authors_d = self.doc2author[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + bound_d = 0.0 + for vi, v in enumerate(ids): + bound_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + bound_d += cts[vi] * numpy.log(bound_v) + bound += numpy.log(1.0 / len(authors_d)) + bound_d + + # For per-word likelihood, do: + # likelihood *= 1 /sum(len(doc) for doc in docs) + + # TODO: can I do something along the lines of (as in ldamodel): + # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) + # If I computed the LDA bound the way I compute the author-topic bound above: + # bound = 0.0 + # for d, doc in enumerate(docs): + # ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + # cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ # bound_d = 0.0 + # for vi, v in enumerate(ids): + # bound_v = 0.0 + # for k in xrange(self.num_topics): + # bound_v += numpy.exp(Elogtheta[d, k] + Elogbeta[k, v]) + # bound_d += cts[vi] * numpy.log(bound_v) + # bound += bound_d + + return bound + + def theta_bound(self, Elogtheta): + bound = 0.0 + for a in xrange(self.num_authors): + var_gamma_a = self.var_gamma[a, :] + Elogtheta_a = Elogtheta[a, :] + # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector + bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) + bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) + bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) + + return bound + + def beta_bound(self, Elogbeta): + bound = 0.0 + bound += numpy.sum((self.eta - self.var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(self.var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(self.var_lambda, 1))) + + return bound + + def eval_logprob(self, doc_ids=None): + """ + Compute the liklihood of the corpus under the model, by first + computing the conditional probabilities of the words in a + document d, + + p(w_d | theta, beta, A_d), + + summing over all documents, and dividing by the number of documents. + """ + + # TODO: if var_lambda is supplied from LDA, normalizing it every time + # is unnecessary. + norm_gamma = self.var_gamma.copy() + for a in xrange(self.num_authors): + norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] + + if self.optimize_lambda: + norm_lambda = self.var_lambda.copy() + for k in xrange(self.num_topics): + norm_lambda[k, :] = self.var_lambda[k, :] / self.var_lambda.sum(axis=1)[k] + else: + norm_lambda = self.norm_lambda + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + logprob = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. 
+ cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] + logprob_d = 0.0 + for vi, v in enumerate(ids): + logprob_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + logprob_v += norm_gamma[a, k] * norm_lambda[k, v] + logprob_d += cts[vi] * numpy.log(logprob_v) + logprob += numpy.log(1.0 / len(authors_d)) + logprob_d + + return logprob + + # Overriding LdaModel.get_topic_terms. + def get_topic_terms(self, topicid, topn=10): + """ + Return a list of `(word_id, probability)` 2-tuples for the most + probable words in topic `topicid`. + Only return 2-tuples for the topn most probable words (ignore the rest). + """ + topic = self.var_lambda[topicid, :] + topic = topic / topic.sum() # normalize to probability distribution + bestn = matutils.argsort(topic, topn, reverse=True) + return [(id, topic[id]) for id in bestn] + + + def get_author_topics(self, author_id, minimum_probability=None): + """ + Return topic distribution the given author, as a list of + (topic_id, topic_probability) 2-tuples. + Ignore topics with very low probability (below `minimum_probability`). 
+ """ + if minimum_probability is None: + minimum_probability = self.minimum_probability + minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output + + topic_dist = self.var_gamma[author_id, :] / sum(self.var_gamma[author_id, :]) + + author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) + if topicvalue >= minimum_probability] + + # author_name = self.id2author[author_id] + + return author_topics + + + + + + diff --git a/gensim/models/onlineatvb2.py b/gensim/models/onlineatvb2.py new file mode 100644 index 0000000000..a5a916c8db --- /dev/null +++ b/gensim/models/onlineatvb2.py @@ -0,0 +1,510 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Author-topic model. +""" + +# NOTE: from what I understand, my name as well as Radim's should be attributed copyright above? + +from time import time +import pdb +from pdb import set_trace as st + +import logging +import numpy +import numbers + +from gensim import utils, matutils +from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models import LdaModel +from six.moves import xrange +from scipy.special import gammaln + +from pprint import pprint + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger(__name__) + + +class OnlineAtVb2(LdaModel): + """ + Train the author-topic model using online variational Bayes. + """ + # TODO: inherit interfaces.TransformationABC. 
+ + def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, + author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, + iterations=10, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, + eval_every=1, random_state=None, var_lambda=None): + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + # NOTE: this stuff is confusing to me (from LDA code). Why would id2word not be none, but have length 0? + if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + logger.info('Vocabulary consists of %d words.', self.num_terms) + + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). + # If either doc2author or author2doc is missing, construct them from the other. + if doc2author is None: + # Make a mapping from document IDs to author IDs. + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + elif author2doc is None: + # Make a mapping from author IDs to document IDs. + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. 
+ author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + + self.author2doc = author2doc + self.doc2author = doc2author + + self.num_authors = len(self.author2doc) + logger.info('Number of authors: %d.', self.num_authors) + + self.id2author = id2author + if self.id2author is None: + logger.warning("no author id mapping provided; initializing from corpus, assuming identity") + author_integer_ids = [str(i) for i in range(len(author2doc))] + self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) + + # Make the reverse mapping, from author names to author IDs. + self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + + self.corpus = corpus + self.iterations = iterations + self.passes = passes + self.num_topics = num_topics + self.threshold = threshold + self.minimum_probability = minimum_probability + self.decay = decay + self.offset = offset + self.num_docs = len(corpus) + self.num_authors = len(author2doc) + self.eval_every = eval_every + self.random_state = random_state + + # NOTE: I don't think this necessarily is a good way to initialize the topics. + self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + + self.random_state = get_random_state(random_state) + + if corpus is not None: + self.inference(corpus, var_lambda=var_lambda) + + def rho(self, t): + return pow(self.offset + t, -self.decay) + + def inference(self, corpus=None, var_lambda=None): + if corpus is None: + # TODO: I can't remember why I used "copy()" here. + corpus = self.corpus.copy() + + self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. + + logger.info('Starting inference. Training on %d documents.', len(corpus)) + + # Whether or not to evaluate bound and log probability, respectively. 
+ bound_eval = True + logprob_eval = False + + if var_lambda is None: + self.optimize_lambda = True + else: + # We have topics from LDA, thus we do not train the topics. + self.optimize_lambda = False + + # Initial values of gamma and lambda. + # Parameters of gamma distribution same as in `ldamodel`. + var_gamma = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + tilde_gamma = var_gamma.copy() + self.var_gamma = var_gamma + + if var_lambda is None: + var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + tilde_lambda = var_lambda.copy() + else: + self.norm_lambda = var_lambda.copy() + for k in xrange(self.num_topics): + self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + + self.var_lambda = var_lambda + + var_phi = dict() + + # Initialize dirichlet expectations. + Elogtheta = dirichlet_expectation(var_gamma) + expElogtheta = numpy.exp(Elogtheta) + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + + t = 0 + if self.eval_every > 0: + if bound_eval: + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + for _pass in xrange(self.passes): + converged = 0 # Number of documents converged for current pass over corpus. + start = time() + for d, doc in enumerate(corpus): + rhot = self.rho(d + _pass) + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] # List of author IDs for document d. 
+ + expElogthetad = expElogtheta[authors_d, :] + expElogbetad = expElogbeta[:, ids] + + #var_phi = dict() + + phinorm = numpy.zeros(len(ids)) + for a in authors_d: + phinorm += numpy.dot(expElogtheta[a, :], expElogbetad) + + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) + + lastgamma = tilde_gamma.copy() + + ## Update phi. + #for v in ids: + # phi_sum = 0.0 + # for a in authors_d: + # for k in xrange(self.num_topics): + # var_phi[(v, a, k)] = expElogtheta[a, k] * expElogbeta[k, v] + # phi_sum += var_phi[(v, a, k)] + + # # Normalize phi over k. + # phi_norm_const = 1.0 / (phi_sum + 1e-100) + # for a in authors_d: + # for k in xrange(self.num_topics): + # var_phi[(v, a, k)] *= phi_norm_const + + # Update gamma. + for a in authors_d: + tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) + #for k in xrange(self.num_topics): + # tilde_gamma[a, k] = 0.0 + # for vi, v in enumerate(ids): + # tilde_gamma[a, k] += cts[vi] * var_phi[v, a, k] + # tilde_gamma[a, k] *= len(self.author2doc[a]) + # tilde_gamma[a, k] += self.alpha[k] + + # Update gamma and lambda. + # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). Same goes for lambda. + # TODO: I may need to be smarter about computing rho. In ldamodel, + # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). + var_gamma_temp = (1 - rhot) * var_gamma + rhot * tilde_gamma + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, + # corresponding to the authors in the document. The same goes for Elogtheta. 
+ Elogtheta[authors_d, :] = dirichlet_expectation(var_gamma_temp[authors_d, :]) + expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :]) + + phinorm = numpy.zeros(len(ids)) + for a in authors_d: + phinorm += numpy.dot(expElogtheta[a, :], expElogbetad) + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. + if iteration > 0: + meanchange_gamma = numpy.mean(abs(var_gamma_temp - lastgamma)) + gamma_condition = meanchange_gamma < self.threshold + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + if gamma_condition: + # logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + # FIXME: there are too many different gamma variables! + var_gamma = var_gamma_temp.copy() + + if self.optimize_lambda: + # Update lambda. + # only one update per document). + + phi_sum = numpy.zeros((self.num_topics, len(ids))) + phinorm_rep = numpy.tile(phinorm, [self.num_topics, 1]) + for a in authors_d: + expElogtheta_a_rep = numpy.tile(expElogtheta[a, :], [len(ids), 1]) + phi_sum += expElogtheta_a_rep.T * expElogbetad / phinorm_rep + eta_rep = numpy.tile(self.eta[ids], [self.num_topics, 1]) + cts_rep = numpy.tile(cts, [self.num_topics, 1]) + tilde_lambda[:, ids] = eta_rep + self.num_docs * cts_rep * phi_sum + + #for k in xrange(self.num_topics): + # for vi, v in enumerate(ids): + # # cnt = dict(doc).get(v, 0) + # cnt = cts[vi] + # phi_sum = 0.0 + # for a in authors_d: + # phi_sum += expElogtheta[a, k] * expElogbeta[k, v] / phinorm[vi] + # tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * phi_sum + + # This is a little bit faster (from old algorithm): + # tilde_lambda[:, ids] = self.eta[ids] + self.num_docs * cts * var_phi[ids, :].T + + # Note that we only changed the elements in lambda corresponding to + # the words in document d, hence the [:, ids] indexing. 
+ var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] + Elogbeta = dirichlet_expectation(var_lambda) + expElogbeta = numpy.exp(Elogbeta) + var_lambda = var_lambda.copy() + + # Print topics: + # pprint(self.show_topics()) + + # End of corpus loop. + + if self.eval_every > 0 and (_pass + 1) % self.eval_every == 0: + self.var_gamma = var_gamma + self.var_lambda = var_lambda + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + + #logger.info('Converged documents: %d/%d', converged, self.num_docs) + + # TODO: consider whether to include somthing like this: + #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold: + # break + # End of pass over corpus loop. + + # Ensure that the bound (or log probabilities) is computed at the very last pass. + if self.eval_every > 0 and not (_pass + 1) % self.eval_every == 0: + # If the bound should be computed, and it wasn't computed at the last pass, + # then compute the bound. + self.var_gamma = var_gamma + self.var_lambda = var_lambda + if bound_eval: + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + if logprob_eval: + logprob = self.eval_logprob() + logger.info('Log prob: %.3e.', logprob) + + + self.var_lambda = var_lambda + self.var_gamma = var_gamma + + return var_gamma, var_lambda + + def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): + """ + Compute the expectation of the log conditional likelihood of the data, + + E_q[log p(w_d | theta, beta, A_d)], + + where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. + """ + + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + # NOTE: computing the bound this way is very numerically unstable, which is why + # "logsumexp" is used in the LDA code. + # NOTE: computing bound is very very computationally intensive. I could, for example, + # only use a portion of the data to do that (even a held-out set). + bound= 0.0 + for d, doc in enumerate(docs): + authors_d = self.doc2author[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + bound_d = 0.0 + for vi, v in enumerate(ids): + bound_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + bound_d += cts[vi] * numpy.log(bound_v) + bound += numpy.log(1.0 / len(authors_d)) + bound_d + + # For per-word likelihood, do: + # likelihood *= 1 /sum(len(doc) for doc in docs) + + # TODO: can I do something along the lines of (as in ldamodel): + # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) + # If I computed the LDA bound the way I compute the author-topic bound above: + # bound = 0.0 + # for d, doc in enumerate(docs): + # ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + # cts = numpy.array([cnt for _, cnt in doc]) # Word counts. 
+ # bound_d = 0.0 + # for vi, v in enumerate(ids): + # bound_v = 0.0 + # for k in xrange(self.num_topics): + # bound_v += numpy.exp(Elogtheta[d, k] + Elogbeta[k, v]) + # bound_d += cts[vi] * numpy.log(bound_v) + # bound += bound_d + + return bound + + def theta_bound(self, Elogtheta): + bound = 0.0 + for a in xrange(self.num_authors): + var_gamma_a = self.var_gamma[a, :] + Elogtheta_a = Elogtheta[a, :] + # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector + bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) + bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) + bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) + + return bound + + def beta_bound(self, Elogbeta): + bound = 0.0 + bound += numpy.sum((self.eta - self.var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(self.var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(self.var_lambda, 1))) + + return bound + + def eval_logprob(self, doc_ids=None): + """ + Compute the liklihood of the corpus under the model, by first + computing the conditional probabilities of the words in a + document d, + + p(w_d | theta, beta, A_d), + + summing over all documents, and dividing by the number of documents. + """ + + # TODO: if var_lambda is supplied from LDA, normalizing it every time + # is unnecessary. + norm_gamma = self.var_gamma.copy() + for a in xrange(self.num_authors): + norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] + + if self.optimize_lambda: + norm_lambda = self.var_lambda.copy() + for k in xrange(self.num_topics): + norm_lambda[k, :] = self.var_lambda[k, :] / self.var_lambda.sum(axis=1)[k] + else: + norm_lambda = self.norm_lambda + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + logprob = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. 
+ cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] + logprob_d = 0.0 + for vi, v in enumerate(ids): + logprob_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + logprob_v += norm_gamma[a, k] * norm_lambda[k, v] + logprob_d += cts[vi] * numpy.log(logprob_v) + logprob += numpy.log(1.0 / len(authors_d)) + logprob_d + + return logprob + + # Overriding LdaModel.get_topic_terms. + def get_topic_terms(self, topicid, topn=10): + """ + Return a list of `(word_id, probability)` 2-tuples for the most + probable words in topic `topicid`. + Only return 2-tuples for the topn most probable words (ignore the rest). + """ + topic = self.var_lambda[topicid, :] + topic = topic / topic.sum() # normalize to probability distribution + bestn = matutils.argsort(topic, topn, reverse=True) + return [(id, topic[id]) for id in bestn] + + + def get_author_topics(self, author_id, minimum_probability=None): + """ + Return topic distribution the given author, as a list of + (topic_id, topic_probability) 2-tuples. + Ignore topics with very low probability (below `minimum_probability`). + """ + if minimum_probability is None: + minimum_probability = self.minimum_probability + minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output + + topic_dist = self.var_gamma[author_id, :] / sum(self.var_gamma[author_id, :]) + + author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) + if topicvalue >= minimum_probability] + + # author_name = self.id2author[author_id] + + return author_topics + + + From fafc20ae83e1b9a4ef52129315dc2d6091195c18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 15 Nov 2016 15:34:51 +0100 Subject: [PATCH 042/100] Moved all algorithms except the new online one to a 'temp' folder. Vectorization is an option, so I can test it (speed up etc.). Updated notebook. 
--- docs/notebooks/at_with_nips.ipynb | 1326 +++++++++++++++++++-- gensim/models/onlineatvb2.py | 106 +- gensim/models/{ => temp}/atvb.py | 0 gensim/models/{ => temp}/atvb2.py | 0 gensim/models/{ => temp}/disjointatvb.py | 0 gensim/models/{ => temp}/minibatchatvb.py | 0 gensim/models/{ => temp}/onlineatvb.py | 0 7 files changed, 1301 insertions(+), 131 deletions(-) rename gensim/models/{ => temp}/atvb.py (100%) rename gensim/models/{ => temp}/atvb2.py (100%) rename gensim/models/{ => temp}/disjointatvb.py (100%) rename gensim/models/{ => temp}/minibatchatvb.py (100%) rename gensim/models/{ => temp}/onlineatvb.py (100%) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 7d5308eaee..339082dcca 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -60,6 +60,8 @@ "from gensim.models import LdaModel\n", "from imp import reload\n", "from pprint import pprint\n", + "from random import sample\n", + "import bokeh\n", "\n", "import logging\n", "\n", @@ -109,7 +111,7 @@ }, { "cell_type": "code", - "execution_count": 163, + "execution_count": 85, "metadata": { "collapsed": false }, @@ -124,7 +126,7 @@ "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00', '01', '02']\n", + "yrs = ['00']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -146,7 +148,7 @@ }, { "cell_type": "code", - "execution_count": 164, + "execution_count": 86, "metadata": { "collapsed": false }, @@ -176,7 +178,7 @@ }, { "cell_type": "code", - "execution_count": 165, + "execution_count": 87, "metadata": { "collapsed": false }, @@ -188,7 +190,7 @@ }, { "cell_type": "code", - "execution_count": 166, + "execution_count": 88, "metadata": { "collapsed": false }, @@ -206,7 +208,7 @@ }, { "cell_type": "code", - "execution_count": 167, + "execution_count": 89, "metadata": { 
"collapsed": false }, @@ -232,7 +234,7 @@ }, { "cell_type": "code", - "execution_count": 168, + "execution_count": 90, "metadata": { "collapsed": false }, @@ -255,9 +257,9 @@ }, { "cell_type": "code", - "execution_count": 169, + "execution_count": 91, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ @@ -270,9 +272,9 @@ }, { "cell_type": "code", - "execution_count": 170, + "execution_count": 92, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ @@ -289,7 +291,7 @@ }, { "cell_type": "code", - "execution_count": 171, + "execution_count": 93, "metadata": { "collapsed": true }, @@ -301,7 +303,7 @@ }, { "cell_type": "code", - "execution_count": 172, + "execution_count": 94, "metadata": { "collapsed": false }, @@ -319,16 +321,16 @@ }, { "cell_type": "code", - "execution_count": 173, + "execution_count": 95, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYms
W5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkchlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43
\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukSxB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuY
kd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGooTGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3U
nUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx
+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKwBOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEp
qIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZgtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MV
tmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLStsDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+
OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwL
zcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOM
yH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIknwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukSc
CHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSIlKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1
fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrzzGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7s
pXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/GDir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8
EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAicAAAGcCAYAAAACtQD2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XecXVW5//HPl0DAABmaSeggEKqUhBK8EOSCAaSIYmGw\nACoWhMuNckURfnDBgogEaSqKoqCDCNIDoQmEJphwgUDovSShpJAQ0ub5/bH2YfbsnOnl7Jl836/X\neZ05a6+997P3mZnznLXXWlsRgZmZmVlZLFfrAMzMzMzynJyYmZlZqTg5MTMzs1JxcmJmZmal4uTE\nzMzMSsXJiZmZmZWKkxMzMzMrFScnZmZmVipOTszMzKxUnJyYWTOSXpV0Ue71XpIaJX2sF/b9Y0mL\ncq8HZPs+u6f3ne3v69n+1umN/XWWpB9Iel7SYkkP1jqe9pK0SXZ+D6t1LFZuTk6sFCQdnv3Tqvb4\naa3jW8ZUu6dFh+9zIelHkg7sxL4bO7qvjmoltqATx9qbJH0S+CnwT+AI4OSaBmTWA5avdQBmOUH6\nR/tioXxK74diFRFxu6QPRcTCDq56EnApcH0H1jkFOK2D++mMlmL7A3BpJ461N+0JLAK+Hr45mvVT\nTk6sbG6OiMntrSxJwMCIWNCDMS3zevrDWtKgiHgvIhrphZaTlmQf9mVOTACGAvPKmJj479G6iy/r\nWJ+R738g6cuSHgfeB/bKlkvSdyU9Lul9SW9IulDS4MJ2JOn/ZX0r5kq6TdIWkl4p9LVo1v8hV161\nX4Kk/SVNzLY5W9J1krYo1LlM0kxJ62XL35U0Q9IZVfYjSWMlPSppflZvvKTts+X3Svp3C+fqOUmt\ntli0dB6q1Fuqz4mk4ZL+IWlaFtvLkv4iaeXK+wQMBCrnqrFybrPz2pht42+SZpIuUbR4zrNlX5b0\nVLa/B4t9YLJz+0yV9T7YZjtia+m9PTb3e/WapHOr/F7dI2mypK0l/VPSe9m5/W5r70Nu/eUlnZK9\nd+8r9Sk5TdIKhdi/CNRlcS5RC/03st+dRZJWzpWdkK13Rq5s+ez9Py1XtoqkcdnfxPuSpkr678L2\n2/p7XF3SnyXNkvSOpIuBZucsq7e2p
D9l5+p9Sa9LulrSeu05b9Y/ueXEyqZO0pr5goh4u1BnDHAo\ncAHwDvByVv4HoD57Pgf4CHAssJ2k3bNv5ZCu158AXAdMAEYCtwAfKuynpf4HS5VLOgK4GBgPfB9Y\nGTgamChph4h4Nbfu8tn+JgLfy47nfyQ9ExEX5zb7Z9IH0fXARaQP1NHALsD/ZcsvlDQ8Ip7OxbIr\nsDHwwyqx57X3PFTirmx/xazecqTzPB1YDzgQGBwR8yR9CfgjcE92XgCeLWzrH8CTwA9yZS2d872A\nw4BzSZc0vgNMkLRjRDzVxroflEfEknbEVnxvfwycCNxM+p3bkvTejiz8XgWwFnAT8HfgcuDzwC8k\nPRIRt1eJLe+S7BgvJ/1ujCJdftoc+EIu9qOB7YBvAALubWF7E0nv0X+Q3i+A3YAlwO65eiNJ7/nd\n2fEKuDFb73fAo8B+wNmS1o6IEwr7WervMdvG9aTf1QuBp4BDSOe9+B5dA2xKem9fJrUMjSH9Tr2K\nLZsiwg8/av4ADic15xcfS3J1BmRlC4FNC+t/PFt2SKF8v6z8s9nrIdn6VxXqnZHVuyhXdjqwsEqs\nXyP9g18ne70qMAs4r1BvaFZ+fq7s0mzd7xfq/h9wX+71J7J4zmzlnK0GzAdOK5RfkO13pVbW7ch5\n2CuL+WPZ65FZnQPbeE/n57dTOK+NwCUtLFuYe115zxcD2+TKNyR9S7+8cG6fbmubbcRWfG+HZufp\nukK9/8rqfTFXNjEr+3yubCApeftrG+dqRHacFxTKz862+R+F43ynHX9TA4B3gdNzZe+Qkp/3K78f\nwP9kx7hK9vqQLJbjC9u7ipQYbtCOv8fKNv4rV7YcKSFcAhyWla1RrOeHHxHhyzpWKgF8G9g79/hE\nlXq3R8SzhbLPkv7x3ilpzcoD+Dfpg2jPrN4+pH+q5xXWP6cLce9LSlAuL+x7CfBQbt95FxVe30Nq\n6ak4hPSBfHpLO42IWcANpG/bQGpqBz5HSjrebyXmMXT+PMzKnveTtFI76lcTwG86UH9iRHzQMToi\nXiJ9M9+3k/tvr0+QzlPxvPwWeA/Yv1A+OyKuqLyI1FfnIZq/t9V8knROikOmf0lqHSnup00RsQS4\nn9TahqRtgTrgZ8AKpFYNSK0pj0TE3Oz1fqSE44LCJs8mnYviOa/297gfsIDc73mkFqbzs+OpeI+U\n8Owpqa6Dh2j9mJMTK5uHIuKO/KNKnRerlG1G+hb2ZuExHViJ1FIAsEH23OyfaURMI33L7IxNSf9w\nJxb2PQP4z9y+K+ZmiUXeTGD13OuPAK9GRFsx/RnYWNKo7PW+wJqkb9et2TB77vB5iIjngF8B3wTe\nlnSTpG9LWrWNfRa90IG6xQ8/gKeBVSWtXmVZd6mcp6fzhZE6fL6QW17xSpVtFN/blvazODu3+f28\nRno/ivtpr3uAnbJ+K7sDr0TEI6QRcJVLO/9B+t3Nx/JqRMwvbGtqbnnei1X2uyHwWpUE+an8i2z5\nicABwAxJd0o6XlLxb8aWMe5zYn1R8Z8mpET7deDLNP9mVjEje64sa89Ih5bqDKiy7yD1d3mrSv1i\nB88lLWxXLfzcmpuyfX4JeCB7fi0i7mxjvY6ch6VExNisg+OnSK0w5wMnSBqVJTjtUe197IjiOWrv\n+9WVfbSlPe9tR5d3NIa8iaTh2buQWkgm5sp3l7Q1Kam/uwv7q/Y+iurvx1LbjohfSroaOJjUsvlj\n4IeS9si3ltmyxS0n1l88R+qMeE+x5SV7VP7JvZg9D8+vLGkY6dJM3kxggKRBhfKNquwbYEYL+55I\nxz0LrFccEVIUEYvJOl5KWo3UKfUv7dj+i9lze85DS/ueEhE/iYg9gD1IrVLfyFdpz3baabMqZcOB\ndyNiZvZ6JqkfTtFGVcraG9uL2fPm+UJJA7PtvtTO7bRnP8tL2qSwn3WAVbqwnwdIlwdHk1pKKr+L\nd
wMfI11yDFILSz6W9SQVO0ZvmT23J5bKNoqX/TavUpeIeD4izo6IfYCPkjrotmuUk/VPTk6sv7iC\n1PnwpOKCbKhk5UP+VtK322ML1cZW2eZzpG96o3PbWoXUOpN3EzAX+FHW56O4/7XaeQx5V5FaNtsz\n++elpMTst6R/6u1JTjpyHpqRNFhS8X/HFNKH3Iq5snlUTxY6Y7esz0Qlho1IlwJuztV5DlhT0pa5\neuuSErai9sZWOU/HFcq/SRqRdUM7ttEe40m/a/9dKP8e6bze2JmNZpdmJpN+Z9emecvJysAxwFMR\nkW/xG0/6Wzq6sLmxpHNxUzt2PZ70u/DNSkH2t3EMzUd+fSgb/ZX3POnvacVcvWGSNq/ye2f9lC/r\nWJl0uvk6Iu7ILjOcJGkEcBvpG+NwUmfZb5NGXEyXNA44XtJ1pH+0O5I6375T2OxNwGvAJZLOysq+\nCrwBfDAPRkTMlnQMaQjzZEmXky61bEjqyPhPOvgtMCJuk9QAfFdp7pFbSJcndgcmRES+o+G/JU0l\ndYR9tD1N4R08D9D8vfkEME7S34FnSJ0rDyddvvpHrt4kYEw2P8YbwHMRUXVelnaYAtwi6TzS+3p0\n9vy/uTp/JQ2Pvi6rtwrwLdJw5e0K22tXbNl5+jlwoqTxpGRky2y795NarbosIiZL+gtwdNaZeiKw\nK+ky3RUR0dJw4faYCBwPvB0RU7P9vSHpOdLfx+8K9a8mtaz8XNKmNA0l3h/4RURU61dTdDWp1eas\nrDWoMpS42Aq5FXCzpCuAJ0jJz2dJ/aYacvXOInX8Xo90+db6u1oPF/LDj4gPhhIvAUa0UmdAVueX\nrdQ5ijQ6Yi6pmf9h4CfAkEK9/0dKPOaSvh1vTurMeFGh3gjSh9B80je671AYbpqr+3HSN/mZ2Xaf\nAn4PbJ+rcynpQ6IY9+nAgkKZSB8qT2T7n0YaobJtlfV/kMX03Q6e92rn4WXgt7k6xaHEH8mO6xlS\nC8SMbN3RhW1vAdyZbXtJ5dxmx7qENCdKq+ch/56TPqifzs7Fg5V4CuuPAR4jDZV9nDTPSLWhxC3F\n1tJ7+51se+9n5+tXwKqFOhOBSVViupTUOtHWezEgez+ey/bzAin5Wr7K9pb6HWpluwdmx3R1ofwP\nFIZD55atTBqd82oWy5PAcR35eyR1Av4zaXTX26Q5ZXag+VDitUgjxp4A5pAS43uBg6sc8+Li++JH\n/30oe+PNlnmSXgFuiohvtFm5ZCR9jzRHyQYR8Uat4zEz6wpfvzPrH75Kmm/CiYmZ9Xnuc2LWRynd\nM+UgUj+RLfDoBjPrJ5ycmDVp6d4sZTWMNDLnHdIU9hNqHI+ZWbdwnxMzMzMrFfc5MTMzs1JxcmJm\nZmal4uTEzFok6ceSivcG6u0YBkhqlFS8Y29XtrlXts2DumubHdj3ZZKe6e39mvUlTk7MOknS4dkH\nXOUxX9JTks7rR3dV7WudhDuiVscVQGON9m3WJ3i0jlnXBOn+Ny8CK5Hu/PptYD9J28TSt4y38ujK\n3X674oga7tusT3ByYtZ1N0fE5OznP0h6h3STtE8Bf6tdWG2TNCgi3qt1HMuSiFhSi/36vba+xJd1\nzLrfHaRvxhtXCiRtLOnvkt6WNE/S/ZI+mV9J0pu5GwyiZJakRbm7KiPphKxsUK5sc0lXZtufL+kh\nSQcWtl+5DDVa0oWSppPuJ9Qhkr4m6XZJ07N9TZF0VKHOryRNK5T9Otv/t3Jl62RlX23nvr+cXTqb\nL+lBSR+rUmddSZdImibpfUmPSTq8yuYCWE7SyZJelfSepFslbVzY3h7Ze/dytr2XJJ2Vv5uupB9I\nWiJpneJOsrrzJa2avV6qz4mkVSSNk/RKto+p2U0J83U2yc7VYYXySp+cE3NlP87Khkv6m6SZpBtQ\nmvUJTk7Mut+m2fPbAFn/k/tJd/M9HziRdDv46yV9KrfevcDo3Ot
tgUpS8h+58t2AyZVvwZK2Jt0B\ndnPgZ6SZYucC1xS2X3EhaUbZ/yXdj6ejvk26CeJPgO+RboT320KCMhH4sKThhbiXkO6sXDGalCRM\nbMd+9wJ+AfyJdIO8IcAESZtXKkgaRrop4B7AucBxWax/lHR0YXsiXZLbH/h59vgY6WZ1eZ8nvV/n\nA8eQbnJ4HOnGeRWXZ9v7XJW4PwuMj4h3s9fN+vFIEnAjcCzprsdjSTdVPFvpjsidUdn+P0g36PsB\n6cZ7Zn1Dre886IcfffVB052U9yTd4n1d4AvAm6TkYO2s3ris3q65dVcm3X32uVzZ94CFwMrZ62NI\nH6z3Az/N1XsHOCv3+jbS3ZeLd6+9B3iyEG8j6W68aucxVruj74pV6t0KTM29Hprt62vZ69Wzc3A5\n8HKu3vnAtDZiGJBtazGwTa58Q9Idcy/PlV1CuqtyXWEbVwBvAStkr/fKtvkIMCBXb2wW5/A2jvdH\nWTxr58r+BdxXqLdrtp/P58ouBZ7OvT4kq3N8Yd2rgEWkmzkCbJLVO6yF83Ni4X1rBC6p9d+JH350\n5uGWE7OuEXA7KSF5Bfgr6dbvB0fTTfj2Ax6MiPsrK0XEPOAiYCNJW2XFE0n9wCqXKnbPyiZmPyNp\nW2C1rAxJq5OSo78DdZLWrDyAW4DNJK2dizeA30VEp0eqRMSCDw5eGpzt6y5guKQPZXWmA8/S1BK0\nO7AA+CWwnqQNC8fYHhMjYkoujpeA64F9s1gEfBq4Fli+yrlYHdi+sM2Lo3kfkImk9/QjLRzvoGx7\n92X18tv7G7CLpA1yZV8A3iO1iLRkP1JSekGh/GxS4rFvK+u2JoDfdHJds5pycmLWNUG6zLE38HFg\nq4jYJCJuy9XZEHiqyrpTc8sBJpM+yCqXPXajKTnZUdLAbFmQWkUgXUIS6Zvym4XHqVmd4rDmF/Mv\nJK0gaWj+0doBS9pd0h2S5gKzsn2dli2uy1W9p3AsDwL/BmYDu0uqA7ah/cnJs1XKngZWzZK0YcCq\nwNEsfS4uyuoXz0Wxz83M7Hn1SoGkDSX9WdLbpBaxN0kJKTQ/3iuy58/nyg4BbojWO6JuCLwaEfML\n5cXfj854oQvrmtWMR+uYdd1D0TRap9MiYrGkfwGjJW0CrA3cTfowXAHYhfQhPzUi3s5Wq3zBOAto\n6cZ/xQ/14ofgaNJlmSAlOiFp/Yh4vbghSZtldaeQLoG8QvrWfxCpz0T+C89E4HBJ65OSlNsiIiTd\nm72uJAJ3txB3e+SH5Fb2/SfgshbqP1J43dLIGUHqbEq6bLYq8FNSkvkesAGpz8kHxxsRr0q6n5Sc\nnCVpd9Klvss7cAytaam1a0Ar6xTfa7M+wcmJWc97idRZtWjL3PKKicD3SZ1n34yIpwEkPU5KInYn\nXcqoeD57XhQRd3Qyvkmklp+8N1uoexApUdo/u3RDFt8+VepWWkT2AUYAp2Sv7waOJCUn77J0wtCS\nzaqUDQfejYiZkuYA84DlunAuirYn9fWoj4gPhoVLaulSy+XAryR9hHRJ513gpjb28SKwm6QPFVpP\nir8flWRutcL6XWlZMSslX9Yx63njgZ0l7VIpkLQy8A3ghYh4Ild3Imkyt+NounRD9vOXSa0pH1wG\niYg3SR1cv5mNVGlG0lptBRcRsyLijsKjpSnrKy0NH/zvyC6pfKXKdp8FppM6+i5H6qdROcbNSf1D\n7utA/5fdsj43lf1uBBwA3JztbwlwNfB5SVsWV65yLtqz32rHK9L7U239v5N1WiVd0rku32elBeOB\ngaTLUXmVzrk3AUTETNJltNGFese0EEtVkuqUhp6v0t51zHqbW07MuqY9TfJnAPXAzZLOJY22OYL0\njfczhbr3k0aBDAd+myu/m9S3pdqw2+9kZY9J+h2pNWUoaaTIusAOHYy3NRNIQ27HZ/saDBwFvMHS\n/TkgJVWfJQ19npuVPUS63LA
paXRNe00BbpF0HukcHZ09/2+uzvdJH94PZvFNBdYAdiS1OuUTuPac\ni8dJ/TbOyTrxzs2OZ3C1yhExXdJE4H+AVWjfJHxXk97fn0vaFHiU1El2f+AXEZHvF/N74HhJs0l9\nlD5OatnpyPt6KPDr7PmKNuqa1YRbTsy6ps1vrBExg5Qo3EL6lvtT0hDYAyLiukLd90jDgvOdXiEl\nH0EahvtKYZ2ppA/fG0jDhc8Hvkn61n0azXVmlM4H62T7+izpf8dZwNeB80hzp1RTiTvf2rOYNOy2\nvfObVGK4HTiedIynklplxmQxVbY9DdiJ1O/kM1ls/0VKJk5o6bhaKs9akA4gJQwnAieREpYjW4n1\nb6TEZBYt9wPK7yNIici5wIGkoefDge9GxA8K651C6uvyeVKSuDiLr6P3QOqv90uyfkJdGFFoZmZm\n1u1q3nIiaTlJp0t6Pps++llJJ1Wpd5qk13NTTG9aWL66pL9Imi1ppqTfZ9f1zczMrA+peXJCmlb5\nm6Trx1uQrhl/X9IxlQqSTiA1h38T2JnUI39CNu9DxV9Jvdv3IjWRjqb5NXszMzPrA2p+WUfS9aTp\nq4/KlV0JvBcRX8lev07qGDYuez2YdL358Ii4IuuZ/zgwMiIezursQ7pfxXrZdWgzMzPrA8rQcnIf\nsFc2uROStiPd5Gx89npjUg/7yoyMRMQcUoe6XbOiUcDMSmKSuY3U6WsXzMzMrM8ow1DiM0g96Z+U\ntISUMP0oIiqzKg4jJRnTC+tNp2lY4DBgRn5hRCyR9A7Nhw6amZlZyZUhOfkCacKiQ4EnSDMy/krS\n6xFxaSvribaHw7VYJ7t51z6k2Rnf72DMZmZmy7KVgI2ACbnbaXSbMiQnZ5JuB//37PXj2cyPPyTd\nWnwaKckYSvPWkyGk+SDI6jSbACq7J8bqLN3iUrEP8Jeuh29mZrbM+iJpQEq3KkNyMoilWzcayfrD\nRMQLkqaRRuE8Ch90iN2FpluM3w+sJmmHXL+TvUhJzb9a2O+LAJdddhlbbrnUTNfLlLFjxzJu3Lha\nh1EKPheJz0MTn4vE5yHxeUimTp3Kl770JSjc5by7lCE5uR74kaRXSCNuRpDuKfH7XJ1zgJMkPUs6\nEacDrwLXAkTEk5ImAL+T9G3SfSrOAxpaGanzPsCWW27JiBEjuv2g+pK6urpl/hxU+FwkPg9NfC4S\nn4fE52EpPdItogzJyTGkZOMC0qWZ10n3fTi9UiEizpQ0iDRvyWqkKa/3i4iFue0cRpq2+zZSy8uV\npJtzmZmZWR9S8+QkIuYB380erdU7lXQ/jZaWzwK+1J2xmZmZWe8rwzwnZmZmZh9wcmLU19fXOoTS\n8LlIfB6a+FwkPg+Jz0PvqPn09bUiaQQwadKkSe7cZGZm1gGTJ09m5MiRkG4bM7m7t++WEzMzMysV\nJydmZmZWKk5OzMzMrFScnJiZmVmpODkxMzOzUnFyYmZmtoyaPh023RTuuqvWkTTn5MTMzGwZtWgR\nPPccvN8jd8jpPCcnZmZmy6jGxvQs1TaOIicnZmZmy6jKPKxOTszMzKwUnJyYmZlZqTg5MTMzs1Jx\ncmJmZmal4uTEzMzMSsXJiZmZmZVKJTlZrmTZQMnCMTMzs97ieU7MzMysVHxZx8zMzErFyYmZmZmV\nipMTMzMzKxUnJ2ZmZlYqTk7MzMysVJycmJmZWal4KLGZmZmViidha4GkFyQ1Vnmcly1fUdIFkt6S\n9K6kKyUNKWxjfUk3SponaZqkMyXV/NjMzMzKzJd1WrYjMCz3+AQQwBXZ8nOA/YFDgNHAOsBVlZWz\nJGQ8sDwwCjgcOAI4rVeiNzMz66PKmpwsX+sAIuLt/GtJBwLPRcRESYOBrwKHRsRd2fIjgamSdo6I\nB4F9gC2APSPiLeAxSScDZ0g6NSIW9+oBmZmZ9RFlTU7K0HLyAUkrAF8ELs6KdiQlULdX6kTEU
8DL\nwK5Z0SjgsSwxqZgA1AFb93TMZmZmfZWTk/b5NCmp+FP2eiiwMCLmFOpNJ10CInueXmU5uTpmZmZW\n4OSkfb4K3BQR09qoJ1K/lLa0p46ZmdkyqazJSc37nFRI2gDYGzg4VzwNGChpcKH1ZAhNrSPTgJ0K\nmxuaPRdbVJYyduxY6urqmpXV19dTX1/fgejNzMz6nvbMc9LQ0EBDQ0OzstmzZ/dgVCVKTkitJtNJ\nI28qJgGLgb2AqwEkDQc2AO7L6twPnChprVy/kzHAbOCJtnY6btw4RowY0S0HYGZm1pe0Z56Tal/Y\nJ0+ezMiRI3ssrlIkJ5JEGv57SUQ0VsojYo6ki4GzJc0E3gXOBe6NiIeyareQkpBLJZ0ArA2cDpwf\nEYt68TDMzMz6FF/Wad3ewPrAH6ssGwssAa4EVgRuBr5TWRgRjZIOAH5Nak2ZB1wCnNKzIZuZmfVt\nTk5aERG3AgNaWLYAODZ7tLT+K8ABPROdmZlZ/1TW5KRso3XMzMyslzg5MTMzs1JxcmJmZmal4uTE\nzMzMSqU985zUgpMTMzOzZVR75jmphZKFY2ZmZr3Fl3XMzMysVJycmJmZWak4OTEzM7NScXJiZmZm\npeLkxMzMzErFyYmZmZmViuc5MTMzs1Jxy4mZmZmViidhMzMzs1Jxy4mZmZmVipMTMzMzKxUnJ2Zm\nZlYqTk7MzMysVJycmJmZWal4nhMzMzMrFbecmJmZWal4nhMzMzMrFbecmJmZWak4OTEzM7NScXJi\nZmZmpeLkxMzMzErFyYmZmZmViuc5aYWkdSRdKuktSe9JekTSiEKd0yS9ni2/VdKmheWrS/qLpNmS\nZkr6vaSVe/dIzMzM+g63nLRA0mrAvcACYB9gS+B7wMxcnROAY4BvAjsD84AJkgbmNvXXbN29gP2B\n0cBve+EQzMzM+qSyznOyfK0DAH4AvBwRX8+VvVSocxxwekRcDyDpK8B04GDgCklbkhKbkRHxcFbn\nWOBGScdHxLSePggzM7O+xi0nLTsQ+LekKyRNlzRZ0geJiqSNgWHA7ZWyiJgD/AvYNSsaBcysJCaZ\n24AAdunpAzAzM+uLnJy07CPAt4GngDHAb4BzJX0pWz6MlGRML6w3PVtWqTMjvzAilgDv5OqYmZlZ\nTlmTkzJc1lkOeDAiTs5ePyJpa1LCclkr64mUtLSmPXXMzMyWSU5OWvYGMLVQNhX4TPbzNFKSMZTm\nrSdDgIdzdYbkNyBpALA6S7e4NDN27Fjq6uqaldXX11NfX9/+IzAzM+uD2pOcNDQ00NDQ0Kxs9uzZ\nPRhVOZKTe4HNC2Wbk3WKjYgXJE0jjcJ5FEDSYFJfkguy+vcDq0naIdfvZC9SUvOv1nY+btw4RowY\n0VoVMzOzfqk985xU+8I+efJkRo4c2WNxlSE5GQfcK+mHwBWkpOPrwFG5OucAJ0l6FngROB14FbgW\nICKelDQB+J2kbwMDgfOABo/UMTMzqy5K2vGh5slJRPxb0qeBM4CTgReA4yLi8lydMyUNIs1bshow\nEdgvIhbmNnUYcD5plE4jcCVpCLKZmZlVEVG+/iZQguQEICLGA+PbqHMqcGory2cBX2ppuZmZmTUX\nUb4J2KAcQ4nNzMysBsracuLkxMzMbBnl5MTMzMxKxcmJmZmZlUpjo5MTMzMzKxG3nJiZmVmpODkx\nMzOzUnFyYmZmZqXieU7MzMysVNxyYmZmZqXi5MTMzMxKxcmJmZmZlYrnOTEzM7NSccuJmZmZlYqT\nEzMzMysVJydmZmZWKp7nxMzMzErFLSdmZmZWKk5OzMzMrFScnJiZmVmpeJ4TMzMzKxW3nJiZmVmp\nODkxMzOzUnFyYmZmZqXi5MTMzMxKxZOwmZmZWam45cTMzMxKxcmJmZmZlYrnOWmBpFMkNRYeT+SW\nryjpAklvSXpX0pWShhS2sb6kGyXNkzRN0pmSan5sZmZmZ
davW04kDZC0vaTVO7mJKcBQYFj22C23\n7Bxgf+AQYDSwDnBVbt/LAeOB5YFRwOHAEcBpnYzFzMxsmdCvkhNJ50j6WvbzAOAuYDLwiqSPd2KT\niyPizYiYkT3eybY9GPgqMDYi7oqIh4Ejgf+QtHO27j7AFsAXI+KxiJgAnAx8R9LynTk+MzOzZUG/\nSk6AzwJImAt3AAAgAElEQVSPZD8fCGxMShDGAT/pxPY2k/SapOckXSZp/ax8JKlF5PZKxYh4CngZ\n2DUrGgU8FhFv5bY3AagDtu5ELGZmZsuE/pacrAVMy37+JPD3iHga+APw0Q5u6wHSZZh9gG+REp27\nJa1MusSzMCLmFNaZni0je55eZTm5OmZmZlZQ1nlOOnvZYzqwlaQ3gH2Bo7PyQcCSjmwouwxTMUXS\ng8BLwOeB91tYTUC0Z/MdicXMzGxZUtaWk84mJ38ErgDeICUAt2bluwBPdiWgiJgt6WlgU+A2YKCk\nwYXWkyE0tY5MA3YqbGZo9lxsUVnK2LFjqaura1ZWX19PfX19Z8I3MzPrM9qTnDQ0NNDQ0NCsbPbs\n2T0YVSeTk4g4VdIUYH3SJZ0F2aIlwBldCUjSKsAmwJ+AScBiYC/g6mz5cGAD4L5slfuBEyWtlet3\nMgaYDTxBG8aNG8eIESO6ErKZmVmf1J55Tqp9YZ88eTIjR47ssbg6PZolIq4EkLRSruxPHd2OpF8A\n15Mu5awL/C8pIbk8IuZIuhg4W9JM4F3gXODeiHgo28QtpCTkUkknAGsDpwPnR8Sizh6fmZlZf1fW\nyzqdHUo8QNLJkl4D5kr6SFZ+emWIcQesB/yVdDnocuBNYFREvJ0tHwvcAFwJ3Am8TprzBICIaAQO\nILXa3Af8GbgEOKUzx2ZmZrasKGty0tmWkx+RJjv7PvC7XPkU4L+Bi9u7oYhotXNHdsno2OzRUp1X\nSAmKmZmZtVNZk5PODiD6CvCNiPgLzUfnPEKa78TMzMxKrr8lJ+sCz7awvRU6H46ZmZn1lrLOc9LZ\nkJ4Adq9S/lng4c6HY2ZmZr2lrC0nne1zchrwJ0nrkhKcz0janHS5x30/zMzM+oCyJiedajmJiGtJ\nScjewDxSsrIlcGBE3NraumZmZlYO7ZnnpBa6Ms/JPcAnujEWMzMz60X9quVE0k6SdqlSvoukHbse\nlpmZmfW0fpWcABeQpq4vWjdbZmZmZiXX35KTrYDJVcofzpaZmZlZyfW35GQBTXf+zVubdF8cMzMz\nK7n+lpzcAvxMUl2lQNJqwE8Bj9YxMzPrA8o6CVtnR+scD9wNvCSpMuna9sB04MvdEZiZmZn1rLK2\nnHQqOYmI1yRtC3wR2A6YD/wRaIiIRd0Yn5mZmfWQ/jjPyTzgom6MxczMzHpRv2o5AZA0HPg4MIRC\n35WIOK1rYZmZmVlP61fJiaSjgF8DbwHTgMgtDtJ09mZmZlZi/So5AU4CfhQRP+/OYMzMzKz3lDU5\n6ewAotWBv3dnIGZmZta7+lty8ndgTHcGYmZmZr2rv81z8ixwuqRRwGNAs+HDEXFuVwMzMzOzntXf\nhhJ/A5gL7JE98gJwcmJmZlZyZb2s09lJ2Dbu7kDMzMysd5U1OenSlSZJAyVtLqnT86WYmZlZbfSr\n5ETSIEkXA+8BjwMbZOXnSfpBN8ZnZmZmPaRfJSfAz0j31Pk48H6u/DbgC12MyczMzHpBWZOTzl6O\nORj4QkQ8ICk/O+zjwCZdD8vMzMx6WlmTk862nHwYmFGlfGWaT2VvZmZmJVXWeU46G9K/gf1zrysJ\nydeB+7sUkZmZmfWKss5z0tnk5ETgp5J+Tbo0dJykW4EjgR91JSBJP5TUKOnsXNmKki6Q9JakdyVd\nKWlIYb31Jd0oaZ6kaZLOlFTCfNDMzKwc+tVlnYi4h9QhdnnSDLFjgOnArhExqbPBSNoJOAp4pLDo\nHFJLzSHAaGAd4Krce
ssB47N4RgGHA0fguyObmZm1qKzJSYc7xGZzmhwGTIiIo7orEEmrAJeRLg2d\nnCsfDHwVODQi7srKjgSmSto5Ih4E9gG2APaMiLeAxySdDJwh6dSIWNxdcZqZmfUXZU1OOtxykn3Q\n/wZYqZtjuQC4PiLuKJTvSEqibs/F8BTwMrBrVjQKeCxLTComAHXA1t0cp5mZWb/Qb5KTzIPADt0V\nhKRDge2BH1ZZPBRYGBFzCuXTgWHZz8Oy18Xl5OqYmZlZTlmTk87Oc3Ih8EtJ6wGTgHn5hRHxaHs3\nlG3jHOATEbGorfr5VWnfsGUPbTYzM6uivyUnl2fP+bsPB00Jw4AObGskad6USdIHp2gAMFrSMcC+\nwIqSBhdaT4bQ1DoyDdipsN2h2XOxRaWZsWPHUldX16ysvr6e+vr6DhyCmZlZ39Oe5KShoYGGhoZm\nZbNnz+7BqDqfnHTnXYlvAz5aKLsEmAqcAbwGLAL2Aq4GkDScdD+f+7L69wMnSlor1+9kDDAbeKK1\nnY8bN44RI0Z0/SjMzMz6mMbGtidhq/aFffLkyYwcObLH4upUchIRL3VXABExj0ICIWke8HZETM1e\nXwycLWkm8C6pxebeiHgoW+WWbBuXSjoBWBs4HTi/g5eKzMzMlhn96rKOpK+0tjwi/ty5cJo2UXg9\nFlgCXAmsCNwMfCe3v0ZJBwC/JrWmzCO1vpzSxTjMzMz6rX6VnAC/KrxeARgELATeA7qUnETEfxZe\nLwCOzR4trfMKcEBX9mtmZrYs6VfJSUSsXiyTtBmp5eIXXQ3KzMzMel5Zk5Nuu/dMRDwD/IClW1XM\nzMyshPp9cpJZTLrvjZmZmZVcWZOTznaIPahYRBohcwxwb1eDMjMzs57Xr5IT4JrC6wDeBO4Avtel\niMzMzKxXLFnSj5KTiOjuy0FmZmbWi6ZNg0mT4HOfq3UkS3OSYWZmtgy6+up0WefII2sdydI6lZxI\nulLSD6qU/4+kv3c9LDMzM+tJb78Na6wBqy81OUjtdbblZA/gxirlNwOjOx+OmZmZ9YZ582DllWsd\nRXWdTU5WIc0GW7QIGNz5cMzMzKw3zJ0Lq6xS6yiq62xy8hjwhSrlh9LGXYDNzMys9ubNK29y0tmh\nxKcD/5C0CWn4MMBeQD1Qwn6/ZmZmljd3bnkv63R2KPH1kg4GTgQ+C8wHHgX2joi7ujE+MzMz6wH9\nseWEiLiR6p1izczMrOTmzoX11691FNV1dijxTpJ2qVK+i6Qdux6WmZmZ9aQyt5x0tkPsBUC1fGvd\nbJmZmZmVWJn7nHQ2OdkKmFyl/OFsmZmZmZVYfxxKvAAYWqV8bWBx58MxMzOz3tAfL+vcAvxMUl2l\nQNJqwE+BW7sjMDMzM+s5Zb6s09nROscDdwMvSXo4K9semA58uTsCMzMzs56xcCEsXlzelpPOznPy\nmqRtgS8C25HmOfkj0BARi7oxPjMzM+tmc+em5/7WckJEzAMu6sZYzMzMrBfMnp2eB5f0bnidSk4k\nfY40Vf1wIIBngL9GxJXdGJuZmZn1gDffTM9DhtQ2jpZ0qEOspOUk/Q34G2nI8LPA88DWwBWSLpek\n7g/TzMzMusuMGem5rMlJR1tOjgP2Bg6KiBvyCyQdROp3chxwTveEZ2ZmZt2tkpystVZt42hJR4cS\nHwn8TzExAYiI64DvA1/tjsDMzMysZ8yYAWusASusUOtIqutocrIZcFsry2/L6piZmVlJzZhR3ks6\n0PHkZD6wWivLBwPvd2SDkr4l6RFJs7PHfZL2zS1fUdIFkt6S9K6kKyUNKWxjfUk3SponaZqkMyV1\ndoI5MzOzfq2/JSf3A99uZfl3sjod8QpwAjAye9wBXCtpy2z5OcD+wCHAaGAd4KrKylkSMp7Uf2YU\ncDhwBHBaB+MwMzNbJpQ9Oeloh9ifAHdKWhM4C3gSELAl8D3gU8CeHdlgRNxYKDpJ0re
BUZJeI/Vh\nOTQi7gKQdCQwVdLOEfEgsA+wBbBnRLwFPCbpZOAMSadGhO/1Y2ZmlvPyy7DffrWOomUdajmJiPuA\nL5ASkPuBmcA7wL1ZWX1E3NvZYLKhyocCg7LtjyQlULfnYngKeBnYNSsaBTyWJSYVE4A60hBnMzMz\nyyxeDM8/D5uVuIdohydhi4irJU0AxpAmYQN4GrglIt7rTBCStiElIysB7wKfjognJe0ALIyIOYVV\npgPDsp+HZa+LyyvLHulMTGZmZv3Ryy/DokWw6aa1jqRlnb23znuS9gb+X0S80w1xPEm6R89qpL4l\nf5Y0upX6Is1M25b21DEzM1tmPPNMeu43yYmk9SLi1ezlYcCZwDuSHgM+GRGvdCaIrF/I89nLyZJ2\nJk3mdgUwUNLgQuvJEJpaR6YBOxU2OTR7LraoLGXs2LHU1dU1K6uvr6e+vr5jB2FmZtYHPP10mt9k\ngw3aV7+hoYGGhoZmZbMrN+fpIR1tOXlS0tukPiYrAeuT+n9sBHTnVC7LASsCk4DFwF7A1QCShgMb\nAPdlde8HTpS0Vq7fyRhgNvBEWzsaN24cI0aM6MbQzczMyuuhh2C77WD5dmYA1b6wT548mZEjR/ZA\ndElHhxLXAZ8jJQ3LAeMlPU1KJPaRNKy1lauR9BNJu0naUNI2kn4G7AFclrWWXAycLenjkkaSpsi/\nNyIeyjZxCykJuVTStpL2AU4Hzo+IRR2Nx8zMrD978EHYZZdaR9G6jiYnK0TEgxHxS9KEbDuQprRf\nQhry+5ykpzq4zaHAn0n9Tm4jjdAZExF3ZMvHAjcAVwJ3Aq+T+qUAEBGNwAFZDPdl27oEOKWDcZiZ\nmfVrs2bBU0+VPznp6GWdOZIeJl3WGQgMioh7JS0mDTF+Fdi5IxuMiK+3sXwBcGz2aKnOK6QExczM\nzFowZUp63n772sbRlo62nKwD/BhYQEps/i1pIilRGQFERNzTvSGamZlZd5gyJfU12XzzWkfSuo5O\nwvZWRFwfET8E3iONkjmPNGT3LFLLyl3dH6aZmZl11ZQpMHw4DBxY60ha19Wb482OiCuARcB/AhsD\nF3Y5KjMzM+t2jz4KW/eBudO7kpxsS+pjAvASsCgipkXE37oelpmZmXWn999PI3U+9rFaR9K2Ts0Q\nCx90Qq38vE33hGNmZmY94aGHYMEC2GOPWkfStq5e1jEzM7OSW7IETjwR1lkHtt221tG0rdMtJ2Zm\nZtY3XHcd3HMP3HknDBhQ62ja5pYTMzOzfu4Pf4Bdd+0bl3TAyYmZmVm/98wzKTnpK5ycmJmZ9WMR\n8OqrsO66tY6k/ZycmJmZ9WNz5sC8eU5OzMzMrCRezWYkW2+92sbREU5OzMzM+rHXXkvPbjkxMzOz\nUnjxxfS8zjo1DaNDnJyYmZn1UxFw2WWw227lv9lfnidhMzMz64cWL4YvfAEmToRrrql1NB3j5MTM\nzKyfmTkTDjwQHngArrwSPvWpWkfUMU5OzMzM+pmjj4apU+GOO2D06FpH03Huc2JmZtaPvP8+XHst\nfP/7fTMxAScnZmZm/crdd8P8+fDJT9Y6ks5zcmJmZtaP/OMfsOGGsM02tY6k85ycmJmZ9RMXXwy/\n/S0ceihItY6m85ycmJmZ9RPjxqXnb32rtnF0lZMTMzOzfmDhQnjqKbjwQthoo1pH0zVOTszMzPqB\nZ55JE69tvXWtI+k6JydmZmb9wKOPpmcnJ2ZmZlYK110HH/0orLlmrSPpOicnZmZmfdz48WnitUMP\nrXUk3aPmyYmkH0p6UNIcSdMlXS1peKHOipIukPSWpHclXSlpSKHO+pJulDRP0jRJZ0qq+fGZmZn1\npMmT4dOfhj32SNPW9wdl+PDeHTgP2AXYG1gBuEXSh3J1zgH2Bw4BRgPrAFdVFmZJyHjSvYJGAYcD\nRwCn9Xz4ZmZmtXPhhbDuuqnlZLXVah1N96j5jf8
iotkEu5KOAGYAI4F7JA0GvgocGhF3ZXWOBKZK\n2jkiHgT2AbYA9oyIt4DHJJ0MnCHp1IhY3HtHZGZm1jveey/NCHv00TBwYK2j6T5laDkpWg0I4J3s\n9UhSEnV7pUJEPAW8DOyaFY0CHssSk4oJQB3QD/otm5mZLe2ss2DePPja12odSfcqVXIiSaRLOPdE\nxBNZ8TBgYUTMKVSfni2r1JleZTm5OmZmZv1GBPzxj3DkkbDxxrWOpnvV/LJOwYXAVsBu7agrUgtL\nW9pTx8zMrE956il48UU48MBaR9L9SpOcSDof+CSwe0S8nls0DRgoaXCh9WQITa0j04CdCpscmj0X\nW1SaGTt2LHV1dc3K6uvrqa+v7+ARmJmZ9Y4XX4QxY2CNNWDPPXt2Xw0NDTQ0NDQrmz17do/uUxG1\nb1jIEpNPAXtExPOFZYOBN0kdYq/OyoYDTwK7RMRDkvYFrgfWrvQ7kfQN4OfAkIhYVGWfI4BJkyZN\nYsSIET14dGZmZt3nscdgr71glVXg9ttrc0ln8uTJjBw5EmBkREzu7u3XvOVE0oVAPXAQME9SpcVj\ndkS8HxFzJF0MnC1pJvAucC5wb0Q8lNW9BXgCuFTSCcDawOnA+dUSEzMzs77q4oth+eXh3nth7bVr\nHU3PqHlyAnyL1C/kzkL5kcCfs5/HAkuAK4EVgZuB71QqRkSjpAOAXwP3AfOAS4BTejBuMzOzXrVw\nYZoN9oAD+m9iAiVITiKizRFDEbEAODZ7tFTnFeCAbgzNzMysVI49Nt19+KKLah1JzyrVUGIzMzNr\n2YMPwuGHw8c/XutIepaTEzMzsz7ixRdhiy1qHUXPc3JiZmbWB8yeDbNmwYYb1jqSnufkxMzMrA94\n6aX0vNFGNQ2jVzg5MTMz6wMeeCA9LwstJzUfrWNmZmbVLVgA48bBH/6QRunsvTcMWwbuGOeWEzMz\nsxKKgEMOgZNOglGjoKEBJkyA5ZaBT263nJiZmZXQ3/4GN94IV10Fn/lMraPpXU5OzMzMSmLqVJg8\nGR56CC64AOrr4dOfrnVUvc/JiZmZWY0tWQLf+x6cdx40Nqab+p12WiqTah1d73NyYmZmVkMzZsBx\nx8EVV8DPfw7f+hastFK6ud+yahk+dDMzs9qaOxe22y6Nyrn0UjjssFpHVA5OTszMzGrkwgvhzTfT\nMOGNN651NOWxDAxIMjMzK5+rr4YTToCvfc2JSZGTEzMzs1727LPws5/BHnvAb35T62jKx8mJmZlZ\nL2lsTJOqbbYZTJkCp5yybI7GaYuTEzMzs15y3HHwk5/Ad78Lzz8Pe+5Z64jKyR1izczMesHLL8Pv\nfpfmLzn55FpHU25OTszMzHrAwoUwcWIajfOPf8DNN8Nqq8HRR9c6svJzcmJmZtYN3n0XfvWrlIxc\ney3Mng2zZqVl22wD3/gGnHgirLFGbePsC5ycmJmZdYPzz4dTT4X11oMxY2D99WHffdPrYcPc8bUj\nnJyYmZl1wRNPpHvg3HxzmrPk97+vdUR9n0frmJmZddK4cbDttmmG1zPPTCNxrOvccmJmZtYBS5bA\nAw/AZZelCdTGjk0Tqq24Yq0j6z+cnJiZmbVh9mx4+2044wy47jqYPh3q6uCcc+C//sv9SbqbkxMz\nM7MWvPZauv/NX/6SXq+xBnz5y7D//rDLLjB4cG3j66+cnJiZmeVcdhk89hjceis8+mhKSH71K9ho\nI9htNw8F7g1OTszMbJkXATfckDq0/utf6S7Bw4fD2WfDV76SJk+z3lOK0TqSdpd0naTXJDVKOqhK\nndMkvS7pPUm3Stq0sHx1SX+RNFvSTEm/l7Ry7x2FmZn1Fe+9B//8Z7o8s8IK6XHQQTBgAJx7brrv\nzc03p/4kTkx6X1laTlYG/g/4A3BVcaGkE4BjgMOBF4AfAxMkbRkRC7NqfwWGAnsBA4FLgN8CX+rp\n4M3MrJwWLEg
zt159dUpInn8+JSVvvAFvvZVaSM48Ez70IdhkE9h7b3duLYNSJCcRcTNwM4BU9dfi\nOOD0iLg+q/MVYDpwMHCFpC2BfYCREfFwVudY4EZJx0fEtF44DDMzq7FZs+AXv4DnnoP582H8eFi8\nGJZbDlZaKSUhBx8Ma64Jn/oUjBzpIcBlVIrkpDWSNgaGAbdXyiJijqR/AbsCVwCjgJmVxCRzGxDA\nLsC1vRexmZn1ln//O42meeaZ9HrmzNR/ZNSo1AJyyimw6aapI+t669U2Vmu/0icnpMQkSC0ledOz\nZZU6M/ILI2KJpHdydczMrA9bsgQmTICbbkrJyPvvw913w9Zbw+GHp2RkpZXgiCNgnXVqHa11RV9I\nTloiUtLS1TpmZlYis2al+UUWLkx9Rd56C665JpXPnw+rrw7/+Z+w6qpwwQVw1FGwfF/+NLOl9IW3\ncxopyRhK89aTIcDDuTpD8itJGgCsztItLs2MHTuWurq6ZmX19fXU19d3LWozM2uXCHj11TTz6jXX\nwP33w7x5aVldHay7LhxwAGyxBey6K+y4YxpdY72joaGBhoaGZmWzZ8/u0X0qolwNC5IagYMj4rpc\n2evALyJiXPZ6MCnp+EpE/F3SFsDjwI65DrFjgPHAetU6xEoaAUyaNGkSI0aM6PHjMjMzmDIlTQUP\n6f40U6ak5yefTGW77AJjxsB++6XLNFtt5VlYy2jy5MmMHDkS0kCUyd29/VK0nGTzkWxKaiEB+Iik\n7YB3IuIV4BzgJEnPAi8CpwOvknV0jYgnJU0Afifp26ShxOcBDR6pY2ZWO889l1pDXn8drr02va5Y\nfvnUCrLVVqlT66c/nVpKzEqRnAA7Av8k9Q8J4JdZ+Z+Ar0bEmZIGkeYtWQ2YCOyXm+ME4DDgfNIo\nnUbgStIQZDMz6yXz5sHEiSkZueaa1IF1ueVS/5D990937/3oR1PdNdaAIUNa354tm0qRnETEXbQx\nW21EnAqc2sryWXjCNTOzXhEBL7+cRtBMmpRmU7355nTJptJfZKed4Ec/guOPh0GDahuv9S2lSE7M\nzKxcZs9OycbixU1lS5bA9den0TNvvZX6i1RsuCF8/vMwbFjqvDp0KKy1Vu/Hbf2DkxMzs2VQYyM8\n/jgsWpR+vvHGdAlmcta1cfHilIwUbbEFjBgB668PJ50EH/5wmm11u+16N37r35ycmJn1I0uWpIQj\nLyJN4/766+l52rTUMvLCC011Bg2CnXeGn/88dVQdMCCNmBk6tPm2VlzR956xnufkxMysD3nqqTQn\nyC23pOe8iFT+9tvV111xxTSb6sc+ljqp7rsvrL12WrbRRqmDqlkZODkxM6uh+fOb5v2A1Jn0mmvS\n1OwAc+c2vY6Al15K5XV1sMMOS2/vc59L95Ep2nLLdDnGrC9wcmJm1o0eeaT5XB4V8+enJOO995rK\nItJsqLNmNa+70kpN831Iaar2jTZKrzfbDEaPTn09Vl21Rw7BrOacnJiZVTFnTuqjAek+LxMmpGSi\nYsGCdN+XuXObr1dMNPK22SbdITfvyCNT8lHpxyGlWVLXXLPrx2DWVzk5MbNl1rRpabhsY2N6/frr\nKQlpbEzDZOfMaaq77rqwyirN1x8zJo1eyVt/ffjEJ6p3Gl19dXcmNWsPJydm1mctXgwPP5xGqCxZ\nkm4c9847S9ebPj2NUqk2NDZvwAA48EBYbTXYYw/45CdTMjFwIIwcmTqRmlnPc3JiZjW3cGHqk1H0\nzjtp0q/KnBs33JAm/6qYNavp0guk0Sabbbb0dpZfHn7yk6VHo6ywQkpGVl+9qcwJiFntOTkxs241\nY0bTRF7VPP54ugttRWMj3HorvPtu9forrJA6iEKa6GvMmKZlAwak2Ug//OH0esMNl770YmZ9j5MT\ns2XUjBnVWysqIuD225uGrj7zDNx9d9vbfeedpmGw1Sy3XLpkMnBgU9lRR8GoU
UvXHTAgdRZdbbW2\n92tm/YeTE7M+4s034Z//bOq8Wc3EifD0021va/58uPfetutJsN566edBg+ArX2lqxWjJqqvCwQen\nCb+qGTSo+WUUM7MiJydmPWj2bHjxxZaXP/ggPPbY0uWLFsG11zafnGvBgrY7dK65Juy1V/tGhFx4\n4dLDWos23BCGD297W2Zm3cnJiVmmsbHlD/9HH4WHHmp53Ycfrt7P4plnmicYRRJstVX1Tpj775/m\nxagYOBA+9ammybmqWXHF1PnTzKwv878x69dmzkyJRVEE3HQTvPFGet3YmOa7aOmeJJASiJZGcqyx\nBhx0UOojkTdmTBoNUiyvWGst2Hjjto/DzGxZ4uTESmXOnHT5oui55+DOO1ter3LDs+KN0KZNW3oG\nz4q6Oth226bXn/tc9U6ZkDpkHnBAy0mGmZl1Hycn1mOeeCJd1ii66y549tmly+fPT6ND8lOE5w0e\n3HyER9Fmm8EhhzQvW2WVljtnDhvme5OYmZWRkxOratq0pnknHnggzU1RTbV7jlS8/Xb18ro62H33\npTttfuhDcNZZsMkmS68zaFAaUuqWCzOz/s/JyTKgsTFN3V2tP8X06anvRb4j6KJF8K9/NU8sPvKR\n6v0tBg6Er32teifN9dZLfS6KSciqq7Y9HNXMzJZdTk5KorExddxctKjtuq2NHHnrrTTFdz7ZiGh5\nFIqU7h9SnNb7i1+Ej340/Tx06NI3NzMzM+spTk46acmSpWfBfOaZ1J+iJU8+2fLEV3PmNM3E2R7b\nb199yOiAAXDiiTBkSPPyESNghx2Wri956KmZmZWLP5Za8PTTTclCRJqZ84UXml7feWea/rto4MCW\nP+xb65y53HLpEsi667YdW10dbLRRe47CzMys71nmk5PzzksdPufNayqLSB09830uVl0Vdt65qf/E\nwQfDnns239bKK8O++6YblZmZmVnnLPPJyQ03wN57L32b9Q03bD4N+Jpr+m6nZmZmvWGZT04mTEj9\nMczMzKwcWpiM28zMzKw2nJyYmZlZqfSr5ETSdyS9IGm+pAck7VTrmPqChoaGWodQGj4Xic9DE5+L\nxOch8XnoHf0mOZH0BeCXwCnADsAjwARJa9U0sD7Af2xNfC4Sn4cmPheJz0Pi89A7+k1yAowFfhsR\n/7+9ew+2sirjOP79oQKKgzh5YSwUlbybF1AoRUFFUkcdsnEcTC2qwazJrElzqrEspzvjDadGbfJa\nUU1pqaMiBylvjGLmhYsp4gUPieIRARXh6Y+1trxsz0Hw7LP3Pvv9fWb2DO9a691nrYd3r/3s9117\nv9dHxDzgbGAlMKmx3TIzM7NN0RLJiaQtgOHAPZWyiAhgOvDJRvXLzMzMNl1LJCfAdsBmwJKq8iXA\n4EHfvXcAAAoVSURBVPp3x8zMzD6sVv+dEwHRRV1/gLlz59avN02qo6ODOXPmNLobTcGxSByHdRyL\nxHFIHIek8N7ZI/eYV0RX7929R76ssxI4JSJuLZT/DtgmIiZ0ss9E4Ka6ddLMzKz1nB4RN9f6SVvi\nzElErJb0CHA0cCuAJOXty7vY7U7gdOA54K0u2piZmdn79QeGkt5La64lzpwASDoVuA6YDMwmfXvn\ns8BeEfFKI/tmZmZmG68lzpwARMS0/JsmFwM7Av8GxjsxMTMz611a5syJmZmZtYZW+SqxmZmZtYhS\nJidluAePpNGSbpX0kqS1kk7qpM3FkhZLWinpbknDquq3lXSTpA5JyyRdI2lA/UbRfZIulDRb0huS\nlkj6q6Q9qtr0kzRV0lJJyyX9WdIOVW2GSLpN0gpJ7ZJ+LqnXvH4knS3psfx/2SHpfkmfLtS3fAw6\nk4+PtZKmFMpKEQtJF+WxFx9PFepLEQcASTtJuiGPdWV+rRxc1aYM8+XCTo6JtZKuyPV1OyZ63UHU\nXSrPPXgGkNbdfJVOfutF0gXA10gLiA8FV
pDi0LfQ7GZgb9K3nk4AjgB+07PdrrnRwBXASOAYYAvg\nLklbFtpcShrfKaQx7gT8pVKZX1i3k9ZojQLOAj5PWt/UW7wAXED6JeXhwAzgFkl75/oyxGA9Sh9K\nvkyaA4rKFIsnSGv0BufH4YW6UsRB0iDgPuBtYDxpzvsWsKzQpizz5QjWHQuDgXGk949pub5+x0RE\nlOoBPAhcVtgW8CJwfqP71oNjXgucVFW2GDivsD0QWAWcmrf3zvsdVGgzHngXGNzoMXUjFtvlcR1e\nGPfbwIRCmz1zm0Pz9nHAamC7QpvJpMlr80aPqRuxeBX4QhljAGwNzAeOAtqAKWU7Hkgf0OZ0UVem\nOPwUuPcD2pR1vrwUWNCIY6JUZ07ke/AAIGlXUlZcjMMbwEOsi8MoYFlEPFrYdTopix5Zp672hEGk\nMbyWt4eTsvxiLOYDz7N+LB6PiKWF57kT2AbYt6c7XGuS+kg6DdgKeIASxgCYCvw9ImZUlY+gXLH4\nuNKl32ck3ShpSC4v0zFxIvCwpGn50u8cSV+qVJZ1vszvl6cD1+aiur42SpWc4HvwVAwmvWg2FIfB\nwP+KlRGxhvSm3itjJUmkTwL/iojKtfXBwDt5simqjkVnsYJeFAtJ+0laTvr0cxXpE9A8ShQDgJyY\nHQhc2En1jpQnFg+STrmPJ93FfVdgVl4nUaZjYjfgK6QzaccCvwYul/S5XF/K+RKYQEoqrsvbdX1t\ntMzvnHTThu7BUyYbE4feHKurgH1Y/7p6VzZ2nL0pFvOAA0hnj04Brpd0xAbat1wMJH2MlKCOi4jV\nm7IrLRaLiCj+sucTkmYDi4BT6fpXs1suDqQP6bMj4vt5+zFJ+5ISlhs3sF+rz5eTgDsiov0D2vXI\nMVG2MydLgTWkDLBoB96f7bWydtIBtaE4tOft90jaDNiWXhgrSVcCxwNjImJxoaod6CtpYNUu1bGo\njlVlu9fEIiLejYhnI2JORHyXtBD0XEoUA9Lliu2BRyStlrQaOBI4V9I7pLH0K0ks1hMRHcACYBjl\nOiZeBqrvADsX2Dn/u4zz5c6kLxBcXSiu6zFRquQkf1Kq3IMHWO8ePPc3ql/1FhELSQdRMQ4DSddG\nK3F4ABgk6aDCrkeTXqQP1amrNZETk5OBsRHxfFX1I6RFa8VY7EGamIqx2L/qG13HAh3AU/RefYB+\nlCsG04H9SZd1DsiPh0mfkCv/Xk05YrEeSVsDu5MWf5bpmLiPtLCzaE/SWaTSzZfZJFIycXuhrL7H\nRKNXAzdg9fGppFXWZwJ7kb7q9SqwfaP7VuNxDiBNtgeSVlN/I28PyfXn53GfSJqs/wY8DfQtPMft\npMn6EOAw0jXZGxo9tk2Mw1WkleKjSRl85dG/qs1CYAzpk/V9wD8L9X1IZxnuAD5Buka/BPhRo8e3\nCXG4hHQ5axdgP+AnpInmqLLEYAOxee/bOmWKBfAL0tdBdwE+Bdydx/GRksVhBGkd1oWk5GwisBw4\nrdCmFPNlHodIN8S9pJO6uh0TDQ9Eg4J/Tg7+KlKmN6LRfeqBMR5JSkrWVD1+W2jzA9KnpJWkFdXD\nqp5jEOkTZQfpDf5qYKtGj20T49BZDNYAZxba9CP9FsrSPCn9Cdih6nmGAP8A3swvtp8BfRo9vk2I\nwzXAs/mYbwfuIicmZYnBBmIzg/WTk1LEAvg96WcUVpG+cXEzsGvZ4pDHcTzwnzwXPglM6qRNy8+X\neRzj8hw5rJO6uh0TvreOmZmZNZVSrTkxMzOz5ufkxMzMzJqKkxMzMzNrKk5OzMzMrKk4OTEzM7Om\n4uTEzMzMmoqTEzMzM2sqTk7MzMysqTg5MTMzs6bi5MTMWoKkNklTGt0PM+s+Jydm1m2SJkt6Q1Kf\nQtkASasl3VPVdqyktZKG1rufZtY7ODkxs1poI90Je0ShbDTwMjBKUt9C+ZHAooh4blP/iKTNu9NJ\nM+sdn
JyYWbdFxAJSIjKmUDyGdGv5hcCoqvI2AElDJN0iabmkDkl/lLRDpaGkiyQ9KumLkp4F3srl\nW0m6Pu/3kqRvVvdJ0jmSFkhaJald0rTajtrMeoqTEzOrlZnA2ML22Fx2b6VcUj9gJDAjt7mFdKv5\n0cAxwO7AH6qedxjwGWACcGAu+2Xe50TgWFLCM7yyg6QRwGXA94A9gPHArG6Oz8zqxKdIzaxWZgJT\n8rqTAaREYhbQF5gM/BA4LG/PlDQO2A8YGhGLASSdATwpaXhEPJKfdwvgjIh4LbcZAEwCJkbEzFx2\nFvBioS9DgDeB2yJiBfAC8FgPjdvMasxnTsysVirrTg4BDgcWRMRS0pmTkXndyRjgmYh4EdgLeKGS\nmABExFzgdWDvwvMuqiQm2e6khGV2Yb9lwPxCm7uBRcDCfPlnoqQtazZSM+tRTk7MrCYi4hngJdIl\nnLGkpISIeJl05uIwCutNAAHRyVNVl6/opJ4u9q305U3gYOA0YDHprM1jkgZu9IDMrGGcnJhZLbWR\nEpMxpMs8FbOA44BDWZecPAXsLOmjlUaS9gG2yXVd+S/wLoVFtpK2Ja0teU9ErI2IGRHxHeAAYChw\n1IcYk5nVmdecmFkttQFTSXPLvYXyWcCVpMsxMwEiYrqkx4GbJJ2X66YCbRHxaFd/ICJWSLoW+IWk\n14BXgB8DayptJJ0A7Jb/7jLgBNIZl/nvf0YzazZOTsysltqA/sDciHilUH4vsDUwLyLaC+UnA1fk\n+rXAHcDXN+LvfJu0vuVWYDnwK6B4yeZ10jd8Lsr9eRo4La9pMbMmp4guL9uamZmZ1Z3XnJiZmVlT\ncXJiZmZmTcXJiZmZmTUVJydmZmbWVJycmJmZWVNxcmJmZmZNxcmJmZmZNRUnJ2ZmZtZUnJyYmZlZ\nU3FyYmZmZk3FyYmZmZk1FScnZmZm1lT+DwrH78/1pfXIAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -354,7 +356,7 @@ }, { "cell_type": "code", - "execution_count": 174, + "execution_count": 96, "metadata": { "collapsed": true }, @@ -368,7 +370,7 @@ }, { "cell_type": "code", - "execution_count": 175, + "execution_count": 97, "metadata": { "collapsed": false }, @@ -377,9 +379,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 536\n", - "Number of unique tokens: 2245\n", - "Number of documents: 286\n" + "Number of authors: 166\n", + "Number of unique tokens: 681\n", + "Number of documents: 90\n" ] } ], @@ -463,7 +465,7 @@ }, { "cell_type": "code", - "execution_count": 153, + "execution_count": 101, "metadata": { "collapsed": false }, @@ -475,7 +477,7 @@ }, { "cell_type": "code", - "execution_count": 178, + "execution_count": 102, "metadata": { "collapsed": false }, @@ -484,21 +486,40 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1min 15s, sys: 0 ns, total: 1min 15s\n", - "Wall time: 
1min 15s\n" + "CPU times: user 10.9 s, sys: 12 ms, total: 10.9 s\n", + "Wall time: 10.9 s\n" ] } ], "source": [ "%time model_online2 = OnlineAtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=40, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=100, random_state=2, var_lambda=None)" + " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None)" ] }, { "cell_type": "code", - "execution_count": 179, + "execution_count": 100, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Speed improvement from new algorithm: 4.709677!\n" + ] + } + ], + "source": [ + "print(\"Speed improvement from new algorithm: %f!\" %((2 * 60 + 26) / 31))" + ] + }, + { + "cell_type": "code", + "execution_count": 218, "metadata": { "collapsed": false }, @@ -507,34 +528,130 @@ "data": { "text/plain": [ "[(0,\n", - " '0.056*image + 0.047*object + 0.037*map + 0.034*velocity + 0.032*eye + 0.027*visual + 0.022*field + 0.022*motion + 0.021*receptive + 0.021*receptive_field'),\n", + " '0.007*rule + 0.005*class + 0.005*classifier + 0.004*probability + 0.004*cue + 0.004*distribution + 0.004*sample + 0.003*sequence + 0.003*tree + 0.003*evidence'),\n", " (1,\n", - " '0.017*memory + 0.011*vector + 0.010*processor + 0.009*matrix + 0.009*energy + 0.009*graph + 0.008*associative + 0.008*boltzmann + 0.008*np + 0.008*optimization'),\n", + " '0.056*motion + 0.052*velocity + 0.051*muscle + 0.044*robot + 0.040*reinforcement + 0.035*controller + 0.029*obstacle + 0.028*command + 0.028*reinforcement_learning + 0.027*movement'),\n", " (2,\n", - " '0.017*probability + 0.015*theorem + 0.014*polynomial + 0.011*markov + 0.011*theory + 0.010*distribution + 0.010*proof + 0.009*criterion + 0.009*separable + 0.008*let'),\n", + " 
'0.049*cell + 0.027*spike + 0.024*stimulus + 0.022*eye + 0.020*firing + 0.019*response + 0.017*burst + 0.016*inhibition + 0.016*fiber + 0.016*wave'),\n", " (3,\n", - " '0.039*cell + 0.035*fiber + 0.029*firing + 0.025*cortex + 0.025*axon + 0.024*cortical + 0.023*receptor + 0.021*synaptic + 0.020*activity + 0.020*stimulus'),\n", + " '0.029*attractor + 0.026*vc + 0.024*theorem + 0.019*bound + 0.019*xt + 0.017*fixed_point + 0.016*eigenvalue + 0.016*threshold + 0.015*let + 0.014*capacity'),\n", " (4,\n", - " '0.008*spike + 0.006*controller + 0.006*frequency + 0.006*correlation + 0.006*transfer + 0.005*fig + 0.005*link + 0.005*phase + 0.005*delay + 0.004*loop'),\n", + " '0.039*hmm + 0.032*tdnn + 0.030*speech + 0.030*mlp + 0.028*phonetic + 0.026*speaker + 0.024*segmentation + 0.021*recognition + 0.021*hybrid + 0.021*phoneme'),\n", " (5,\n", - " '0.010*speech + 0.009*frame + 0.008*recognition + 0.007*region + 0.006*noise + 0.006*speaker + 0.006*acoustic + 0.006*character + 0.006*human + 0.005*domain'),\n", + " '0.055*chip + 0.055*word + 0.043*circuit + 0.033*analog + 0.031*vlsi + 0.030*pulse + 0.028*voltage + 0.027*board + 0.027*perturbation + 0.024*processor'),\n", " (6,\n", - " '0.045*chain + 0.039*eigenvalue + 0.033*fixed_point + 0.031*oscillation + 0.031*basin + 0.030*attractor + 0.027*hebb + 0.025*oscillatory + 0.024*stability + 0.021*dt'),\n", + " '0.027*rbf + 0.023*spline + 0.015*schedule + 0.015*basis_function + 0.012*weight_decay + 0.012*approximation + 0.010*regression + 0.010*validation + 0.009*stochastic + 0.009*prediction'),\n", " (7,\n", - " '0.062*classifier + 0.032*hidden + 0.025*hidden_unit + 0.021*propagation + 0.020*back_propagation + 0.018*hidden_layer + 0.017*bp + 0.016*training_set + 0.014*xor + 0.013*backpropagation'),\n", + " '0.071*depth + 0.068*node + 0.056*contour + 0.050*projection + 0.042*polynomial + 0.039*proof + 0.032*gate + 0.028*hidden_node + 0.027*boolean + 0.027*boolean_function'),\n", " (8,\n", - " '0.013*cluster + 0.012*node + 
0.009*string + 0.008*tree + 0.007*failure + 0.007*robot + 0.007*letter + 0.007*symbol + 0.006*recurrent + 0.006*competitive'),\n", + " '0.005*image + 0.005*object + 0.004*neuron + 0.004*eq + 0.004*character + 0.003*filter + 0.003*field + 0.003*dynamic + 0.003*receptive + 0.003*receptive_field'),\n", " (9,\n", - " '0.061*chip + 0.052*pulse + 0.042*circuit + 0.041*voltage + 0.040*transistor + 0.034*charge + 0.034*synapse + 0.030*analog + 0.027*impulse + 0.025*vlsi')]" + " '0.031*grammar + 0.027*module + 0.023*expert + 0.021*string + 0.020*symbol + 0.019*recurrent + 0.017*language + 0.014*automaton + 0.014*giles + 0.014*mozer')]" ] }, - "execution_count": 179, + "execution_count": 218, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "model_online2.show_topics()" + "model_online2.show_topics(num_topics=10)" + ] + }, + { + "cell_type": "code", + "execution_count": 214, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Sheila \t Kannappan\n", + "Docs: [100]\n", + "[(0, 0.29470045213299129),\n", + " (1, 0.018773780023831975),\n", + " (2, 0.071451542822641448),\n", + " (3, 0.026741158302140633),\n", + " (4, 0.018099032024313566),\n", + " (5, 0.015363132745463916),\n", + " (6, 0.089347751415205109),\n", + " (7, 0.020278388465418653),\n", + " (8, 0.31198092387189108),\n", + " (9, 0.1332638381961023)]\n" + ] + } + ], + "source": [ + "name = id2author[114]\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))" + ] + }, + { + "cell_type": "code", + "execution_count": 200, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [357]\n", + "[(0, 0.16874400828774647),\n", + " (1, 0.05776392793070604),\n", + " (2, 0.018385851898290052),\n", + " (3, 0.090073600218074618),\n", + " (4, 
0.12243813551115512),\n", + " (5, 0.048550522852509548),\n", + " (6, 0.1728010777698884),\n", + " (7, 0.19524400649884482),\n", + " (8, 0.056488897891914927),\n", + " (9, 0.069509971140870139)]\n", + "\n", + "Geoffrey E. Hinton\n" + ] + }, + { + "ename": "KeyError", + "evalue": "'Geoffrey E. Hinton'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Geoffrey E. Hinton'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 9\u001b[0m \u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_online2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 10\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyError\u001b[0m: 'Geoffrey E. Hinton'" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. 
Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))" ] }, { @@ -979,7 +1096,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 29, "metadata": { "collapsed": false }, @@ -988,16 +1105,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 2min 10s, sys: 72 ms, total: 2min 11s\n", - "Wall time: 2min 11s\n" + "CPU times: user 21min 58s, sys: 376 ms, total: 21min 58s\n", + "Wall time: 21min 58s\n" ] } ], "source": [ "%time model_offline2 = AtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", - " eval_every=1, random_state=1)" + " iterations=100, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", + " eval_every=10, random_state=1)" ] }, { @@ -1107,7 +1224,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 245, "metadata": { "collapsed": false }, @@ -1116,8 +1233,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 2min 16s, sys: 120 ms, total: 2min 16s\n", - "Wall time: 2min 16s\n" + "CPU times: user 2min 34s, sys: 104 ms, total: 2min 34s\n", + "Wall time: 2min 34s\n" ] } ], @@ -1519,19 +1636,50 @@ }, { "cell_type": "code", - "execution_count": 132, + "execution_count": 151, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU 
times: user 2.48 s, sys: 524 ms, total: 3 s\n", + "Wall time: 2.43 s\n" + ] + } + ], "source": [ - "lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10,\n", - " iterations=10, alpha='auto', eta='symmetric')" + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=1, \\\n", + " iterations=1, alpha='symmetric', eta='symmetric', eval_every=0)" ] }, { "cell_type": "code", - "execution_count": 133, + "execution_count": 154, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 288 ms, sys: 0 ns, total: 288 ms\n", + "Wall time: 290 ms\n", + "Bound: -3.588e+05\n" + ] + } + ], + "source": [ + "%time lda_bound = lda.bound(sample(corpus, 10))\n", + "print('Bound: %.3e' % lda_bound)" + ] + }, + { + "cell_type": "code", + "execution_count": 155, "metadata": { "collapsed": false }, @@ -1540,28 +1688,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.015*hidden + 0.013*hidden_unit + 0.010*human + 0.009*chain + 0.008*node + 0.008*propagation + 0.007*region + 0.005*back_propagation + 0.005*gradient + 0.005*domain'),\n", + " '0.004*neuron + 0.003*image + 0.003*layer + 0.003*field + 0.003*class + 0.003*cell + 0.003*signal + 0.003*noise + 0.003*hidden + 0.002*node'),\n", " (1,\n", - " '0.009*activation + 0.008*action + 0.006*machine + 0.006*node + 0.005*energy + 0.005*controller + 0.005*matrix + 0.005*sequence + 0.005*role + 0.005*forward'),\n", + " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*class + 0.003*signal + 0.003*matrix + 0.003*layer + 0.003*noise + 0.002*hidden + 0.002*recognition'),\n", " (2,\n", - " '0.015*hidden + 0.013*speech + 0.012*recognition + 0.012*classifier + 0.008*propagation + 0.008*trained + 0.006*back_propagation + 0.006*classification + 0.006*hidden_layer + 0.005*test'),\n", + " '0.004*cell + 0.003*neuron + 0.003*matrix + 0.003*signal + 0.003*image + 0.003*hidden + 0.002*rule + 0.002*response + 0.002*field + 
0.002*dynamic'),\n", " (3,\n", - " '0.018*memory + 0.009*constraint + 0.009*optimization + 0.008*location + 0.007*hopfield + 0.006*address + 0.006*vector + 0.006*map + 0.006*field + 0.005*associative_memory'),\n", + " '0.005*neuron + 0.003*layer + 0.003*image + 0.003*cell + 0.002*class + 0.002*net + 0.002*hidden + 0.002*control + 0.002*sequence + 0.002*response'),\n", " (4,\n", - " '0.016*chip + 0.015*circuit + 0.011*analog + 0.010*memory + 0.009*voltage + 0.009*synapse + 0.009*bit + 0.008*current + 0.007*synaptic + 0.007*processor'),\n", + " '0.004*layer + 0.003*image + 0.003*neuron + 0.003*cell + 0.003*hidden + 0.003*signal + 0.003*component + 0.002*recognition + 0.002*net + 0.002*node'),\n", " (5,\n", - " '0.029*cell + 0.010*activity + 0.010*firing + 0.010*cortex + 0.007*brain + 0.007*potential + 0.006*inhibitory + 0.006*map + 0.006*cortical + 0.006*fig'),\n", + " '0.005*image + 0.004*neuron + 0.004*layer + 0.003*hidden + 0.003*cell + 0.002*control + 0.002*class + 0.002*net + 0.002*noise + 0.002*signal'),\n", " (6,\n", - " '0.012*vector + 0.007*probability + 0.007*matrix + 0.007*node + 0.006*let + 0.006*class + 0.005*distribution + 0.005*convergence + 0.005*theorem + 0.005*threshold'),\n", + " '0.005*neuron + 0.005*layer + 0.004*hidden + 0.003*image + 0.003*cell + 0.003*class + 0.003*rule + 0.002*noise + 0.002*net + 0.002*matrix'),\n", " (7,\n", - " '0.018*image + 0.014*field + 0.008*receptive_field + 0.007*receptive + 0.007*orientation + 0.006*center + 0.006*map + 0.005*vector + 0.005*cell + 0.004*noise'),\n", + " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*hidden + 0.003*recognition + 0.003*field + 0.003*layer + 0.002*noise + 0.002*node + 0.002*component'),\n", " (8,\n", - " '0.014*response + 0.014*cell + 0.011*stimulus + 0.009*current + 0.009*circuit + 0.008*spike + 0.007*frequency + 0.007*velocity + 0.006*synaptic + 0.006*fig'),\n", + " '0.004*neuron + 0.003*image + 0.003*signal + 0.003*recognition + 0.003*cell + 0.003*layer + 0.003*noise + 0.003*rule + 
0.002*class + 0.002*hidden'),\n", " (9,\n", - " '0.021*object + 0.012*image + 0.011*vector + 0.010*memory + 0.010*node + 0.008*view + 0.008*motion + 0.006*aspect + 0.005*optical + 0.005*recognition')]" + " '0.005*neuron + 0.004*class + 0.003*layer + 0.003*image + 0.003*cell + 0.002*hidden + 0.002*signal + 0.002*control + 0.002*field + 0.002*net')]" ] }, - "execution_count": 133, + "execution_count": 155, "metadata": {}, "output_type": "execute_result" } @@ -1572,31 +1720,49 @@ }, { "cell_type": "code", - "execution_count": 144, + "execution_count": 150, "metadata": { "collapsed": false }, "outputs": [ { - "data": { - "text/plain": [ - "[(0, 0.047033260650131463),\n", - " (1, 0.27397832290210905),\n", - " (2, 0.45395489648769094),\n", - " (3, 0.031717136420799215),\n", - " (4, 0.057763485248973352),\n", - " (6, 0.090069707199356377),\n", - " (7, 0.011674522457529131),\n", - " (9, 0.022742673539037268)]" - ] - }, - "execution_count": 144, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "Document 5\n", + "[(0, 0.11806384798431847),\n", + " (1, 0.099612053680607937),\n", + " (2, 0.076668193975964943),\n", + " (3, 0.075072909998916373),\n", + " (4, 0.067243477696594139),\n", + " (5, 0.1004083782314163),\n", + " (6, 0.1049567779188061),\n", + " (7, 0.10291505408912022),\n", + " (8, 0.12682229186467239),\n", + " (9, 0.12823701455958317)]\n", + "\n", + "Document 50\n", + "[(0, 0.12019310780479558),\n", + " (1, 0.11241507965934601),\n", + " (2, 0.084261861610351887),\n", + " (3, 0.074722708722277847),\n", + " (4, 0.089536455599529025),\n", + " (5, 0.11951468917677081),\n", + " (6, 0.077140801257090358),\n", + " (7, 0.086592729473957755),\n", + " (8, 0.12048290979429044),\n", + " (9, 0.11513965690159025)]\n" + ] } ], "source": [ - "lda[corpus[5]]" + "d = 5\n", + "print('Document %d' %d)\n", + "pprint(lda[corpus[d]])\n", + "\n", + "d = 50\n", + "print('\\nDocument %d' %d)\n", + "pprint(lda[corpus[d]])" ] }, { @@ 
-1639,6 +1805,1004 @@ "source": [ "docs[0][:20]" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Convergence and speed plots" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from bokeh.io import output_notebook\n", + "from bokeh.models.layouts import Row, Column\n", + "from bokeh.models import Title, Legend\n", + "from bokeh.plotting import figure, output_file, show" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " Loading BokehJS ...\n", + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/javascript": [ + "\n", + "(function(global) {\n", + " function now() {\n", + " return new Date();\n", + " }\n", + "\n", + " var force = \"1\";\n", + "\n", + " if (typeof (window._bokeh_onload_callbacks) === \"undefined\" || force !== \"\") {\n", + " window._bokeh_onload_callbacks = [];\n", + " window._bokeh_is_loading = undefined;\n", + " }\n", + "\n", + "\n", + " \n", + " if (typeof (window._bokeh_timeout) === \"undefined\" || force !== \"\") {\n", + " window._bokeh_timeout = Date.now() + 5000;\n", + " window._bokeh_failed_load = false;\n", + " }\n", + "\n", + " var NB_LOAD_WARNING = {'data': {'text/html':\n", + " \"
\\n\"+\n", + " \"

\\n\"+\n", + " \"BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this \\n\"+\n", + " \"may be due to a slow or bad network connection. Possible fixes:\\n\"+\n", + " \"

\\n\"+\n", + " \"
    \\n\"+\n", + " \"
  • re-rerun `output_notebook()` to attempt to load from CDN again, or
  • \\n\"+\n", + " \"
  • use INLINE resources instead, as so:
  • \\n\"+\n", + " \"
\\n\"+\n", + " \"\\n\"+\n", + " \"from bokeh.resources import INLINE\\n\"+\n", + " \"output_notebook(resources=INLINE)\\n\"+\n", + " \"\\n\"+\n", + " \"
\"}};\n", + "\n", + " function display_loaded() {\n", + " if (window.Bokeh !== undefined) {\n", + " Bokeh.$(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").text(\"BokehJS successfully loaded.\");\n", + " } else if (Date.now() < window._bokeh_timeout) {\n", + " setTimeout(display_loaded, 100)\n", + " }\n", + " }\n", + "\n", + " function run_callbacks() {\n", + " window._bokeh_onload_callbacks.forEach(function(callback) { callback() });\n", + " delete window._bokeh_onload_callbacks\n", + " console.info(\"Bokeh: all callbacks have finished\");\n", + " }\n", + "\n", + " function load_libs(js_urls, callback) {\n", + " window._bokeh_onload_callbacks.push(callback);\n", + " if (window._bokeh_is_loading > 0) {\n", + " console.log(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n", + " return null;\n", + " }\n", + " if (js_urls == null || js_urls.length === 0) {\n", + " run_callbacks();\n", + " return null;\n", + " }\n", + " console.log(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n", + " window._bokeh_is_loading = js_urls.length;\n", + " for (var i = 0; i < js_urls.length; i++) {\n", + " var url = js_urls[i];\n", + " var s = document.createElement('script');\n", + " s.src = url;\n", + " s.async = false;\n", + " s.onreadystatechange = s.onload = function() {\n", + " window._bokeh_is_loading--;\n", + " if (window._bokeh_is_loading === 0) {\n", + " console.log(\"Bokeh: all BokehJS libraries loaded\");\n", + " run_callbacks()\n", + " }\n", + " };\n", + " s.onerror = function() {\n", + " console.warn(\"failed to load library \" + url);\n", + " };\n", + " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", + " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", + " }\n", + " };var element = document.getElementById(\"d982e20b-e5a9-4239-8121-81cecd38c4d7\");\n", + " if (element == null) {\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid 'd982e20b-e5a9-4239-8121-81cecd38c4d7' but 
no matching script tag was found. \")\n", + " return false;\n", + " }\n", + "\n", + " var js_urls = ['https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.js', 'https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.js'];\n", + "\n", + " var inline_js = [\n", + " function(Bokeh) {\n", + " Bokeh.set_log_level(\"info\");\n", + " },\n", + " \n", + " function(Bokeh) {\n", + " \n", + " Bokeh.$(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").text(\"BokehJS is loading...\");\n", + " },\n", + " function(Bokeh) {\n", + " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", + " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", + " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", + " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", + " }\n", + " ];\n", + "\n", + " function run_inline_js() {\n", + " \n", + " if ((window.Bokeh !== undefined) || (force === \"1\")) {\n", + " for (var i = 0; i < inline_js.length; i++) {\n", + " inline_js[i](window.Bokeh);\n", + " }if (force === \"1\") {\n", + " display_loaded();\n", + " }} else if (Date.now() < window._bokeh_timeout) {\n", + " setTimeout(run_inline_js, 100);\n", + " } else if (!window._bokeh_failed_load) {\n", + " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", + " window._bokeh_failed_load = true;\n", + " } else if (!force) {\n", + " var cell = $(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").parents('.cell').data().cell;\n", + " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", + " }\n", + "\n", + " }\n", + "\n", + " if (window._bokeh_is_loading === 0) {\n", + " console.log(\"Bokeh: BokehJS loaded, going straight to plotting\");\n", + " run_inline_js();\n", + " } else {\n", + " load_libs(js_urls, function() {\n", + " console.log(\"Bokeh: BokehJS plotting callback run at\", now());\n", + " 
run_inline_js();\n", + " });\n", + " }\n", + "}(this));" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "output_notebook()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 10 iterations (passes)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# NOTE: the times of both offline and online are *without* vectorization!\n", + "\n", + "offline = [-3.958e+05, -3.430e+05, -3.428e+05, -3.426e+05, -3.423e+05, -3.417e+05, -3.406e+05, -3.388e+05, -3.361e+05, -3.326e+05, -3.285e+05]\n", + "\n", + "online_1iter = [-3.958e+05, -3.471e+05, -3.456e+05, -3.417e+05, -3.338e+05, -3.244e+05, -3.165e+05, -3.111e+05, -3.075e+05, -3.051e+05, -3.036e+05]\n", + "\n", + "online_10iter = [-3.958e+05, -3.343e+05, -3.223e+05, -3.128e+05, -3.072e+05, -3.041e+05, -3.023e+05, -3.011e+05, -3.003e+05, -2.997e+05, -2.993e+05]" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "iterations = range(10)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "offline_time = [20 * 60 + 49, 21 * 60 + 8, 21 * 60 + 25, 21 * 60 + 41, 21 * 60 + 56, 22 * 60 + 11, 22 * 60 + 25, 22 * 60 + 41, 22 * 60 + 56, 23 * 60 + 11, 23 * 60 + 26]\n", + "offline_time = np.array(offline_time) - offline_time[0]\n", + "\n", + "online_1iter_time = [23 * 60 + 54, 23 * 60 + 55, 23 * 60 + 55, 23 * 60 + 56, 23 * 60 + 58, 23 * 60 + 59, 24 * 60 + 0, 24 * 60 + 1, 24 * 60 + 2, 24 * 60 + 3, 24 * 60 + 4]\n", + "online_1iter_time = np.array(online_1iter_time) - online_1iter_time[0]\n", + " \n", + "online_10iter_time = [24 * 60 + 59, 25 * 60 + 0, 25 * 60 + 2, 25 * 60 + 3, 25 * 60 + 4, 25 * 60 + 5, 25 * 60 + 6, 25 * 60 + 7, 25 * 60 + 8, 25 * 60 + 8, 25 * 60 + 9]\n", + "online_10iter_time = np.array(online_10iter_time) - 
online_10iter_time[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p = figure(title=(\"Variational lower bound (initial bound at %.3e)\" % offline[0]), x_axis_label='Iterations', y_axis_label='Bound')\n", + "p.circle(iterations[1:], offline[1:], legend=\"offline\", size=5, color='red')\n", + "p.circle(iterations[1:], online_1iter[1:], legend=\"online 1 iter\", size=5, color='green')\n", + "p.circle(iterations[1:], online_10iter[1:], legend=\"online 10 iter.\", size=5, color='blue')\n", + "p.plot_height=400\n", + "p.plot_width=600\n", + "p.toolbar_location = None\n", + "show(p)" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title=(\"Offline (initial bound at %.3e)\" % offline[0]), x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "p1.circle(offline_time[1:], offline[1:], size=5, color='red')\n", + "p1.plot_height=400\n", + "p1.plot_width=400\n", + "p1.toolbar_location = None\n", + "\n", + "p2 = figure(title=\"Online\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "s1 = p2.circle(online_1iter_time[1:], online_1iter[1:], size=5, line_color='green')\n", + "s2 = p2.circle(online_10iter_time[1:], online_10iter[1:], size=5, line_color='blue')\n", + "p2.plot_height=400\n", + "p2.plot_width=400\n", + "p2.toolbar_location = None\n", + "\n", + "legend = Legend(items=[('1 iter', [s1]), ('10 iter', [s2])], location=(-100, -200))\n", + "p2.add_layout(legend, 'right')\n", + "\n", + "p3 = Row(p1, p2)\n", + "\n", + "show(p3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 100 iterations (passes)" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# NOTE: the times of both offline and online are *without* vectorization!\n", + "\n", + "offline100 = [-3.957e+05, -3.304e+05, -3.049e+05, -3.005e+05, -2.989e+05, -2.981e+05, -2.976e+05, -2.973e+05, -2.970e+05, -2.968e+05, -2.966e+05]\n", + "\n", + "online_1iter = [-3.957e+05, -3.072e+05, -3.008e+05, -2.997e+05, -2.991e+05, -2.986e+05, -2.983e+05, -2.981e+05, -2.979e+05, -2.977e+05, -2.976e+05]\n", + "\n", + "online_10iter = [-3.957e+05, -3.001e+05, -2.975e+05, -2.965e+05, -2.961e+05, -2.958e+05, -2.955e+05, -2.954e+05, -2.953e+05, -2.952e+05, -2.951e+05]" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "offline_time = [38 * 60 + 8, 40 * 60 + 18, 42 * 60 + 36, 44 * 60 + 44, 46 * 60 + 57, 49 * 60 + 12, 51 * 60 + 19, 53 * 60 + 29, 55 * 60 + 
40, 57 * 60 + 56, 60 * 60 + 6]\n", + "offline_time = np.array(offline_time) - offline_time[0]\n", + "\n", + "online_1iter_time = [3 * 60 + 36, 3 * 60 + 59, 4 * 60 + 20, 4 * 60 + 43, 5 * 60 + 6, 5 * 60 + 28, 5 * 60 + 51, 6 * 60 + 14, 6 * 60 + 36, 6 * 60 + 56, 7 * 60 + 16]\n", + "online_1iter_time = np.array(online_1iter_time) - online_1iter_time[0]\n", + "\n", + "online_10iter_time = [8 * 60 + 1, 10 * 60 + 28, 12 * 60 + 50, 15 * 60 + 15, 17 * 60 + 40, 20 * 60 + 10, 22 * 60 + 35, 25 * 60 + 7, 27 * 60 + 31, 29 * 60 + 54, 32 * 60 + 13]\n", + "online_10iter_time = np.array(online_10iter_time) - online_10iter_time[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "iterations = range(0, 100, 10)" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p = figure(title=(\"Variational lower bound (initial bound at %.3e)\" % offline[0]), x_axis_label='Iterations', y_axis_label='Bound')\n", + "p.circle(iterations[1:], offline[1:], legend=\"offline\", size=5, color='red')\n", + "p.circle(iterations[1:], online_1iter[1:], legend=\"online 1 iter\", size=5, color='green')\n", + "p.circle(iterations[1:], online_10iter[1:], legend=\"online 10 iter.\", size=5, color='blue')\n", + "p.plot_height=400\n", + "p.plot_width=600\n", + "p.toolbar_location = None\n", + "show(p)" + ] + }, + { + "cell_type": "code", + "execution_count": 105, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title=\"Offline\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "p1.circle(offline_time[1:], offline[1:], size=5, color='red')\n", + "p1.plot_height=400\n", + "p1.plot_width=300\n", + "p1.toolbar_location = None\n", + "\n", + "p2 = figure(title=\"Online 1 iter\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "s1 = p2.circle(online_1iter_time[1:], online_1iter[1:], size=5, line_color='green')\n", + "p2.plot_height=400\n", + "p2.plot_width=300\n", + "p2.toolbar_location = None\n", + "\n", + "p3 = figure(title=\"Online 10 iter\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "s3 = p3.circle(online_10iter_time[1:], online_10iter[1:], size=5, line_color='blue')\n", + "p3.plot_height=400\n", + "p3.plot_width=300\n", + "p3.toolbar_location = None\n", + "\n", + "\n", + "p4 = Row(p1, p2, p3)\n", + "\n", + "show(p4)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 6.39130435, 6.56818182, 6.47761194, 6.43333333, 6.50892857,\n", + " 6.47407407, 6.49367089, 6.5 , 6.565 , 6.6 ])" + ] + }, + "execution_count": 108, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "online_10iter_time[1:] / online_1iter_time[1:]" + ] } ], "metadata": { diff --git a/gensim/models/onlineatvb2.py b/gensim/models/onlineatvb2.py index a5a916c8db..ae0d8efd56 100644 --- a/gensim/models/onlineatvb2.py +++ b/gensim/models/onlineatvb2.py @@ -150,7 +150,9 @@ def inference(self, corpus=None, var_lambda=None): # Whether or not to evaluate bound and log probability, respectively. bound_eval = True - logprob_eval = False + logprob_eval = False # TODO: remove log prob evaluation, but keep the method. + + vectorized = True # FIXME: set to True. 
if var_lambda is None: self.optimize_lambda = True @@ -207,11 +209,13 @@ def inference(self, corpus=None, var_lambda=None): expElogthetad = expElogtheta[authors_d, :] expElogbetad = expElogbeta[:, ids] - #var_phi = dict() - phinorm = numpy.zeros(len(ids)) - for a in authors_d: - phinorm += numpy.dot(expElogtheta[a, :], expElogbetad) + if vectorized: + phinorm = numpy.zeros(len(ids)) + for a in authors_d: + phinorm += numpy.dot(expElogtheta[a, :], expElogbetad) + else: + var_phi = dict() for iteration in xrange(self.iterations): #logger.info('iteration %i', iteration) @@ -219,28 +223,31 @@ def inference(self, corpus=None, var_lambda=None): lastgamma = tilde_gamma.copy() ## Update phi. - #for v in ids: - # phi_sum = 0.0 - # for a in authors_d: - # for k in xrange(self.num_topics): - # var_phi[(v, a, k)] = expElogtheta[a, k] * expElogbeta[k, v] - # phi_sum += var_phi[(v, a, k)] - - # # Normalize phi over k. - # phi_norm_const = 1.0 / (phi_sum + 1e-100) - # for a in authors_d: - # for k in xrange(self.num_topics): - # var_phi[(v, a, k)] *= phi_norm_const - - # Update gamma. - for a in authors_d: - tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) - #for k in xrange(self.num_topics): - # tilde_gamma[a, k] = 0.0 - # for vi, v in enumerate(ids): - # tilde_gamma[a, k] += cts[vi] * var_phi[v, a, k] - # tilde_gamma[a, k] *= len(self.author2doc[a]) - # tilde_gamma[a, k] += self.alpha[k] + if not vectorized: + for v in ids: + phi_sum = 0.0 + for a in authors_d: + for k in xrange(self.num_topics): + var_phi[(v, a, k)] = expElogtheta[a, k] * expElogbeta[k, v] + phi_sum += var_phi[(v, a, k)] + + # Normalize phi over k. 
+ phi_norm_const = 1.0 / (phi_sum + 1e-100) + for a in authors_d: + for k in xrange(self.num_topics): + var_phi[(v, a, k)] *= phi_norm_const + + for a in authors_d: + for k in xrange(self.num_topics): + tilde_gamma[a, k] = 0.0 + for vi, v in enumerate(ids): + tilde_gamma[a, k] += cts[vi] * var_phi[(v, a, k)] + tilde_gamma[a, k] *= len(self.author2doc[a]) + tilde_gamma[a, k] += self.alpha[k] + else: + # Update gamma. + for a in authors_d: + tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) # Update gamma and lambda. # Interpolation between document d's "local" gamma (tilde_gamma), @@ -255,9 +262,10 @@ def inference(self, corpus=None, var_lambda=None): Elogtheta[authors_d, :] = dirichlet_expectation(var_gamma_temp[authors_d, :]) expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :]) - phinorm = numpy.zeros(len(ids)) - for a in authors_d: - phinorm += numpy.dot(expElogtheta[a, :], expElogbetad) + if vectorized: + phinorm = numpy.zeros(len(ids)) + for a in authors_d: + phinorm += numpy.dot(expElogtheta[a, :], expElogbetad) # Check for convergence. # Criterion is mean change in "local" gamma and lambda. @@ -278,26 +286,24 @@ def inference(self, corpus=None, var_lambda=None): # Update lambda. # only one update per document). 
- phi_sum = numpy.zeros((self.num_topics, len(ids))) - phinorm_rep = numpy.tile(phinorm, [self.num_topics, 1]) - for a in authors_d: - expElogtheta_a_rep = numpy.tile(expElogtheta[a, :], [len(ids), 1]) - phi_sum += expElogtheta_a_rep.T * expElogbetad / phinorm_rep - eta_rep = numpy.tile(self.eta[ids], [self.num_topics, 1]) - cts_rep = numpy.tile(cts, [self.num_topics, 1]) - tilde_lambda[:, ids] = eta_rep + self.num_docs * cts_rep * phi_sum - - #for k in xrange(self.num_topics): - # for vi, v in enumerate(ids): - # # cnt = dict(doc).get(v, 0) - # cnt = cts[vi] - # phi_sum = 0.0 - # for a in authors_d: - # phi_sum += expElogtheta[a, k] * expElogbeta[k, v] / phinorm[vi] - # tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * phi_sum - - # This is a little bit faster (from old algorithm): - # tilde_lambda[:, ids] = self.eta[ids] + self.num_docs * cts * var_phi[ids, :].T + if vectorized: + phi_sum = numpy.zeros((self.num_topics, len(ids))) + phinorm_rep = numpy.tile(phinorm, [self.num_topics, 1]) + for a in authors_d: + expElogtheta_a_rep = numpy.tile(expElogtheta[a, :], [len(ids), 1]) + phi_sum += expElogtheta_a_rep.T * expElogbetad / phinorm_rep + eta_rep = numpy.tile(self.eta[ids], [self.num_topics, 1]) + cts_rep = numpy.tile(cts, [self.num_topics, 1]) + tilde_lambda[:, ids] = eta_rep + self.num_docs * cts_rep * phi_sum + else: + for k in xrange(self.num_topics): + for vi, v in enumerate(ids): + # cnt = dict(doc).get(v, 0) + cnt = cts[vi] + phi_sum = 0.0 + for a in authors_d: + phi_sum += var_phi[(v, a, k)] + tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * phi_sum # Note that we only changed the elements in lambda corresponding to # the words in document d, hence the [:, ids] indexing. 
diff --git a/gensim/models/atvb.py b/gensim/models/temp/atvb.py similarity index 100% rename from gensim/models/atvb.py rename to gensim/models/temp/atvb.py diff --git a/gensim/models/atvb2.py b/gensim/models/temp/atvb2.py similarity index 100% rename from gensim/models/atvb2.py rename to gensim/models/temp/atvb2.py diff --git a/gensim/models/disjointatvb.py b/gensim/models/temp/disjointatvb.py similarity index 100% rename from gensim/models/disjointatvb.py rename to gensim/models/temp/disjointatvb.py diff --git a/gensim/models/minibatchatvb.py b/gensim/models/temp/minibatchatvb.py similarity index 100% rename from gensim/models/minibatchatvb.py rename to gensim/models/temp/minibatchatvb.py diff --git a/gensim/models/onlineatvb.py b/gensim/models/temp/onlineatvb.py similarity index 100% rename from gensim/models/onlineatvb.py rename to gensim/models/temp/onlineatvb.py From 32e750de5cad4b1fc1d386569e5405dd14ae72f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 15 Nov 2016 16:31:54 +0100 Subject: [PATCH 043/100] Changed the name of the main algorithm (and file). Made a new notebook for old tests, removed all old tests from main notebook. Removed references to old code in __init__.py. 
--- docs/notebooks/at_with_nips.ipynb | 2167 ++------------ docs/notebooks/at_with_nips_old.ipynb | 2825 ++++++++++++++++++ docs/notebooks/plots.html | 43 + gensim/models/__init__.py | 6 +- gensim/models/{onlineatvb2.py => atmodel.py} | 2 +- 5 files changed, 3111 insertions(+), 1932 deletions(-) create mode 100644 docs/notebooks/at_with_nips_old.ipynb create mode 100644 docs/notebooks/plots.html rename gensim/models/{onlineatvb2.py => atmodel.py} (99%) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 339082dcca..1459e154cc 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": 18, "metadata": { "collapsed": false }, @@ -65,16 +65,8 @@ "\n", "import logging\n", "\n", - "from gensim.models import AtVb\n", - "from gensim.models import atvb\n", - "from gensim.models import OnlineAtVb\n", - "from gensim.models import onlineatvb\n", - "from gensim.models import MinibatchAtVb\n", - "from gensim.models import minibatchatvb\n", - "from gensim.models import AtVb2\n", - "from gensim.models import atvb2\n", - "from gensim.models import OnlineAtVb2\n", - "from gensim.models import onlineatvb2\n", + "from gensim.models import AuthorTopicModel\n", + "from gensim.models import atmodel\n", "\n", "from time import time\n", "\n", @@ -111,7 +103,7 @@ }, { "cell_type": "code", - "execution_count": 85, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -148,7 +140,7 @@ }, { "cell_type": "code", - "execution_count": 86, + "execution_count": 5, "metadata": { "collapsed": false }, @@ -178,7 +170,7 @@ }, { "cell_type": "code", - "execution_count": 87, + "execution_count": 6, "metadata": { "collapsed": false }, @@ -190,7 +182,7 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": 7, "metadata": { "collapsed": false }, @@ -208,7 +200,7 @@ }, { "cell_type": "code", - "execution_count": 89, + 
"execution_count": 8, "metadata": { "collapsed": false }, @@ -234,7 +226,7 @@ }, { "cell_type": "code", - "execution_count": 90, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -257,7 +249,7 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": 10, "metadata": { "collapsed": false }, @@ -272,7 +264,7 @@ }, { "cell_type": "code", - "execution_count": 92, + "execution_count": 11, "metadata": { "collapsed": false }, @@ -291,7 +283,7 @@ }, { "cell_type": "code", - "execution_count": 93, + "execution_count": 12, "metadata": { "collapsed": true }, @@ -303,7 +295,7 @@ }, { "cell_type": "code", - "execution_count": 94, + "execution_count": 13, "metadata": { "collapsed": false }, @@ -321,7 +313,7 @@ }, { "cell_type": "code", - "execution_count": 95, + "execution_count": 14, "metadata": { "collapsed": false }, @@ -330,7 +322,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAicAAAGcCAYAAAACtQD2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XecXVW5//HPl0DAABmaSeggEKqUhBK8EOSCAaSIYmGw\nACoWhMuNckURfnDBgogEaSqKoqCDCNIDoQmEJphwgUDovSShpJAQ0ub5/bH2YfbsnOnl7Jl836/X\neZ05a6+997P3mZnznLXXWlsRgZmZmVlZLFfrAMzMzMzynJyYmZlZqTg5MTMzs1JxcmJmZmal4uTE\nzMzMSsXJiZmZmZWKkxMzMzMrFScnZmZmVipOTszMzKxUnJyYWTOSXpV0Ue71XpIaJX2sF/b9Y0mL\ncq8HZPs+u6f3ne3v69n+1umN/XWWpB9Iel7SYkkP1jqe9pK0SXZ+D6t1LFZuTk6sFCQdnv3Tqvb4\naa3jW8ZUu6dFh+9zIelHkg7sxL4bO7qvjmoltqATx9qbJH0S+CnwT+AI4OSaBmTWA5avdQBmOUH6\nR/tioXxK74diFRFxu6QPRcTCDq56EnApcH0H1jkFOK2D++mMlmL7A3BpJ461N+0JLAK+Hr45mvVT\nTk6sbG6OiMntrSxJwMCIWNCDMS3zevrDWtKgiHgvIhrphZaTlmQf9mVOTACGAvPKmJj479G6iy/r\nWJ+R738g6cuSHgfeB/bKlkvSdyU9Lul9SW9IulDS4MJ2JOn/ZX0r5kq6TdIWkl4p9LVo1v8hV161\nX4Kk/SVNzLY5W9J1krYo1LlM0kxJ62XL35U0Q9IZVfYjSWMlPSppflZvvKTts+X3Svp3C+fqOUmt\ntli0dB6q1Fuqz4mk4ZL+IWlaFtvLkv4iaeXK+wQMBCrnqrFybrPz2pht42+SZpIuUbR4zrNlX5b0\nVLa/B4t9YLJz+0yV9T7YZjtia+m9PTb3e/WapHOr/F7dI2mypK0l/VPSe9m5/W5r70Nu/eUlnZK9\nd+8r9Sk5TdIKhdi/CNRlcS5RC/03st+dRZJWzpWdkK13Rq5s+ez9Py1XtoqkcdnfxPuSpkr678L2\n2/p7XF3SnyXNkvSO
pIuBZucsq7e2pD9l5+p9Sa9LulrSeu05b9Y/ueXEyqZO0pr5goh4u1BnDHAo\ncAHwDvByVv4HoD57Pgf4CHAssJ2k3bNv5ZCu158AXAdMAEYCtwAfKuynpf4HS5VLOgK4GBgPfB9Y\nGTgamChph4h4Nbfu8tn+JgLfy47nfyQ9ExEX5zb7Z9IH0fXARaQP1NHALsD/ZcsvlDQ8Ip7OxbIr\nsDHwwyqx57X3PFTirmx/xazecqTzPB1YDzgQGBwR8yR9CfgjcE92XgCeLWzrH8CTwA9yZS2d872A\nw4BzSZc0vgNMkLRjRDzVxroflEfEknbEVnxvfwycCNxM+p3bkvTejiz8XgWwFnAT8HfgcuDzwC8k\nPRIRt1eJLe+S7BgvJ/1ujCJdftoc+EIu9qOB7YBvAALubWF7E0nv0X+Q3i+A3YAlwO65eiNJ7/nd\n2fEKuDFb73fAo8B+wNmS1o6IEwr7WervMdvG9aTf1QuBp4BDSOe9+B5dA2xKem9fJrUMjSH9Tr2K\nLZsiwg8/av4ADic15xcfS3J1BmRlC4FNC+t/PFt2SKF8v6z8s9nrIdn6VxXqnZHVuyhXdjqwsEqs\nXyP9g18ne70qMAs4r1BvaFZ+fq7s0mzd7xfq/h9wX+71J7J4zmzlnK0GzAdOK5RfkO13pVbW7ch5\n2CuL+WPZ65FZnQPbeE/n57dTOK+NwCUtLFuYe115zxcD2+TKNyR9S7+8cG6fbmubbcRWfG+HZufp\nukK9/8rqfTFXNjEr+3yubCApeftrG+dqRHacFxTKz862+R+F43ynHX9TA4B3gdNzZe+Qkp/3K78f\nwP9kx7hK9vqQLJbjC9u7ipQYbtCOv8fKNv4rV7YcKSFcAhyWla1RrOeHHxHhyzpWKgF8G9g79/hE\nlXq3R8SzhbLPkv7x3ilpzcoD+Dfpg2jPrN4+pH+q5xXWP6cLce9LSlAuL+x7CfBQbt95FxVe30Nq\n6ak4hPSBfHpLO42IWcANpG/bQGpqBz5HSjrebyXmMXT+PMzKnveTtFI76lcTwG86UH9iRHzQMToi\nXiJ9M9+3k/tvr0+QzlPxvPwWeA/Yv1A+OyKuqLyI1FfnIZq/t9V8knROikOmf0lqHSnup00RsQS4\nn9TahqRtgTrgZ8AKpFYNSK0pj0TE3Oz1fqSE44LCJs8mnYviOa/297gfsIDc73mkFqbzs+OpeI+U\n8Owpqa6Dh2j9mJMTK5uHIuKO/KNKnRerlG1G+hb2ZuExHViJ1FIAsEH23OyfaURMI33L7IxNSf9w\nJxb2PQP4z9y+K+ZmiUXeTGD13OuPAK9GRFsx/RnYWNKo7PW+wJqkb9et2TB77vB5iIjngF8B3wTe\nlnSTpG9LWrWNfRa90IG6xQ8/gKeBVSWtXmVZd6mcp6fzhZE6fL6QW17xSpVtFN/blvazODu3+f28\nRno/ivtpr3uAnbJ+K7sDr0TEI6QRcJVLO/9B+t3Nx/JqRMwvbGtqbnnei1X2uyHwWpUE+an8i2z5\nicABwAxJd0o6XlLxb8aWMe5zYn1R8Z8mpET7deDLNP9mVjEje64sa89Ih5bqDKiy7yD1d3mrSv1i\nB88lLWxXLfzcmpuyfX4JeCB7fi0i7mxjvY6ch6VExNisg+OnSK0w5wMnSBqVJTjtUe197IjiOWrv\n+9WVfbSlPe9tR5d3NIa8iaTh2buQWkgm5sp3l7Q1Kam/uwv7q/Y+iurvx1LbjohfSroaOJjUsvlj\n4IeS9si3ltmyxS0n1l88R+qMeE+x5SV7VP7JvZg9D8+vLGkY6dJM3kxggKRBhfKNquwbYEYL+55I\nxz0LrFccEVIUEYvJOl5KWo3UKfUv7dj+i9lze85DS/ueEhE/iYg9gD1IrVLfyFdpz3baabMqZcOB\ndyNiZvZ6JqkfTtFGVcraG9uL2fPm+UJJA7PtvtTO7bRnP8tL2qSwn3WAVbqwnwdIlw
dHk1pKKr+L\ndwMfI11yDFILSz6W9SQVO0ZvmT23J5bKNoqX/TavUpeIeD4izo6IfYCPkjrotmuUk/VPTk6sv7iC\n1PnwpOKCbKhk5UP+VtK322ML1cZW2eZzpG96o3PbWoXUOpN3EzAX+FHW56O4/7XaeQx5V5FaNtsz\n++elpMTst6R/6u1JTjpyHpqRNFhS8X/HFNKH3Iq5snlUTxY6Y7esz0Qlho1IlwJuztV5DlhT0pa5\neuuSErai9sZWOU/HFcq/SRqRdUM7ttEe40m/a/9dKP8e6bze2JmNZpdmJpN+Z9emecvJysAxwFMR\nkW/xG0/6Wzq6sLmxpHNxUzt2PZ70u/DNSkH2t3EMzUd+fSgb/ZX3POnvacVcvWGSNq/ye2f9lC/r\nWJl0uvk6Iu7ILjOcJGkEcBvpG+NwUmfZb5NGXEyXNA44XtJ1pH+0O5I6375T2OxNwGvAJZLOysq+\nCrwBfDAPRkTMlnQMaQjzZEmXky61bEjqyPhPOvgtMCJuk9QAfFdp7pFbSJcndgcmRES+o+G/JU0l\ndYR9tD1N4R08D9D8vfkEME7S34FnSJ0rDyddvvpHrt4kYEw2P8YbwHMRUXVelnaYAtwi6TzS+3p0\n9vy/uTp/JQ2Pvi6rtwrwLdJw5e0K22tXbNl5+jlwoqTxpGRky2y795NarbosIiZL+gtwdNaZeiKw\nK+ky3RUR0dJw4faYCBwPvB0RU7P9vSHpOdLfx+8K9a8mtaz8XNKmNA0l3h/4RURU61dTdDWp1eas\nrDWoMpS42Aq5FXCzpCuAJ0jJz2dJ/aYacvXOInX8Xo90+db6u1oPF/LDj4gPhhIvAUa0UmdAVueX\nrdQ5ijQ6Yi6pmf9h4CfAkEK9/0dKPOaSvh1vTurMeFGh3gjSh9B80je671AYbpqr+3HSN/mZ2Xaf\nAn4PbJ+rcynpQ6IY9+nAgkKZSB8qT2T7n0YaobJtlfV/kMX03Q6e92rn4WXgt7k6xaHEH8mO6xlS\nC8SMbN3RhW1vAdyZbXtJ5dxmx7qENCdKq+ch/56TPqifzs7Fg5V4CuuPAR4jDZV9nDTPSLWhxC3F\n1tJ7+51se+9n5+tXwKqFOhOBSVViupTUOtHWezEgez+ey/bzAin5Wr7K9pb6HWpluwdmx3R1ofwP\nFIZD55atTBqd82oWy5PAcR35eyR1Av4zaXTX26Q5ZXag+VDitUgjxp4A5pAS43uBg6sc8+Li++JH\n/30oe+PNlnmSXgFuiohvtFm5ZCR9jzRHyQYR8Uat4zEz6wpfvzPrH75Kmm/CiYmZ9Xnuc2LWRynd\nM+UgUj+RLfDoBjPrJ5ycmDVp6d4sZTWMNDLnHdIU9hNqHI+ZWbdwnxMzMzMrFfc5MTMzs1JxcmJm\nZmal4uTEzFok6ceSivcG6u0YBkhqlFS8Y29XtrlXts2DumubHdj3ZZKe6e39mvUlTk7MOknS4dkH\nXOUxX9JTks7rR3dV7WudhDuiVscVQGON9m3WJ3i0jlnXBOn+Ny8CK5Hu/PptYD9J28TSt4y38ujK\n3X674oga7tusT3ByYtZ1N0fE5OznP0h6h3STtE8Bf6tdWG2TNCgi3qt1HMuSiFhSi/36vba+xJd1\nzLrfHaRvxhtXCiRtLOnvkt6WNE/S/ZI+mV9J0pu5GwyiZJakRbm7KiPphKxsUK5sc0lXZtufL+kh\nSQcWtl+5DDVa0oWSppPuJ9Qhkr4m6XZJ07N9TZF0VKHOryRNK5T9Otv/t3Jl62RlX23nvr+cXTqb\nL+lBSR+rUmddSZdImibpfUmPSTq8yuYCWE7SyZJelfSepFslbVzY3h7Ze/dytr2XJJ2Vv5uupB9I\nWiJpneJOsrrzJa2avV6qz4mkVSSNk/RKto+p2U0J83U2yc7VYYXySp+cE3NlP87Khkv6m6SZpBtQ\nmvUJTk7Mut+m2fPbAFn/k/tJd/M9HziRdDv46y
V9KrfevcDo3OttgUpS8h+58t2AyZVvwZK2Jt0B\ndnPgZ6SZYucC1xS2X3EhaUbZ/yXdj6ejvk26CeJPgO+RboT320KCMhH4sKThhbiXkO6sXDGalCRM\nbMd+9wJ+AfyJdIO8IcAESZtXKkgaRrop4B7AucBxWax/lHR0YXsiXZLbH/h59vgY6WZ1eZ8nvV/n\nA8eQbnJ4HOnGeRWXZ9v7XJW4PwuMj4h3s9fN+vFIEnAjcCzprsdjSTdVPFvpjsidUdn+P0g36PsB\n6cZ7Zn1Dre886IcfffVB052U9yTd4n1d4AvAm6TkYO2s3ris3q65dVcm3X32uVzZ94CFwMrZ62NI\nH6z3Az/N1XsHOCv3+jbS3ZeLd6+9B3iyEG8j6W68aucxVruj74pV6t0KTM29Hprt62vZ69Wzc3A5\n8HKu3vnAtDZiGJBtazGwTa58Q9Idcy/PlV1CuqtyXWEbVwBvAStkr/fKtvkIMCBXb2wW5/A2jvdH\nWTxr58r+BdxXqLdrtp/P58ouBZ7OvT4kq3N8Yd2rgEWkmzkCbJLVO6yF83Ni4X1rBC6p9d+JH350\n5uGWE7OuEXA7KSF5Bfgr6dbvB0fTTfj2Ax6MiPsrK0XEPOAiYCNJW2XFE0n9wCqXKnbPyiZmPyNp\nW2C1rAxJq5OSo78DdZLWrDyAW4DNJK2dizeA30VEp0eqRMSCDw5eGpzt6y5guKQPZXWmA8/S1BK0\nO7AA+CWwnqQNC8fYHhMjYkoujpeA64F9s1gEfBq4Fli+yrlYHdi+sM2Lo3kfkImk9/QjLRzvoGx7\n92X18tv7G7CLpA1yZV8A3iO1iLRkP1JSekGh/GxS4rFvK+u2JoDfdHJds5pycmLWNUG6zLE38HFg\nq4jYJCJuy9XZEHiqyrpTc8sBJpM+yCqXPXajKTnZUdLAbFmQWkUgXUIS6Zvym4XHqVmd4rDmF/Mv\nJK0gaWj+0doBS9pd0h2S5gKzsn2dli2uy1W9p3AsDwL/BmYDu0uqA7ah/cnJs1XKngZWzZK0YcCq\nwNEsfS4uyuoXz0Wxz83M7Hn1SoGkDSX9WdLbpBaxN0kJKTQ/3iuy58/nyg4BbojWO6JuCLwaEfML\n5cXfj854oQvrmtWMR+uYdd1D0TRap9MiYrGkfwGjJW0CrA3cTfowXAHYhfQhPzUi3s5Wq3zBOAto\n6cZ/xQ/14ofgaNJlmSAlOiFp/Yh4vbghSZtldaeQLoG8QvrWfxCpz0T+C89E4HBJ65OSlNsiIiTd\nm72uJAJ3txB3e+SH5Fb2/SfgshbqP1J43dLIGUHqbEq6bLYq8FNSkvkesAGpz8kHxxsRr0q6n5Sc\nnCVpd9Klvss7cAytaam1a0Ar6xTfa7M+wcmJWc97idRZtWjL3PKKicD3SZ1n34yIpwEkPU5KInYn\nXcqoeD57XhQRd3Qyvkmklp+8N1uoexApUdo/u3RDFt8+VepWWkT2AUYAp2Sv7waOJCUn77J0wtCS\nzaqUDQfejYiZkuYA84DlunAuirYn9fWoj4gPhoVLaulSy+XAryR9hHRJ513gpjb28SKwm6QPFVpP\nir8flWRutcL6XWlZMSslX9Yx63njgZ0l7VIpkLQy8A3ghYh4Ild3Imkyt+NounRD9vOXSa0pH1wG\niYg3SR1cv5mNVGlG0lptBRcRsyLijsKjpSnrKy0NH/zvyC6pfKXKdp8FppM6+i5H6qdROcbNSf1D\n7utA/5fdsj43lf1uBBwA3JztbwlwNfB5SVsWV65yLtqz32rHK9L7U239v5N1WiVd0rku32elBeOB\ngaTLUXmVzrk3AUTETNJltNGFese0EEtVkuqUhp6v0t51zHqbW07MuqY9TfJnAPXAzZLOJY22OYL0\njfczhbr3k0aBDAd+myu/m9S3pdqw2+9kZY9J+h2pNWUoaaTIusAOHYy3NRNIQ27HZ/saDBwFvMHS\n/TkgJVWfJQ
19npuVPUS63LApaXRNe00BbpF0HukcHZ09/2+uzvdJH94PZvFNBdYAdiS1OuUTuPac\ni8dJ/TbOyTrxzs2OZ3C1yhExXdJE4H+AVWjfJHxXk97fn0vaFHiU1El2f+AXEZHvF/N74HhJs0l9\nlD5OatnpyPt6KPDr7PmKNuqa1YRbTsy6ps1vrBExg5Qo3EL6lvtT0hDYAyLiukLd90jDgvOdXiEl\nH0EahvtKYZ2ppA/fG0jDhc8Hvkn61n0azXVmlM4H62T7+izpf8dZwNeB80hzp1RTiTvf2rOYNOy2\nvfObVGK4HTiedIynklplxmQxVbY9DdiJ1O/kM1ls/0VKJk5o6bhaKs9akA4gJQwnAieREpYjW4n1\nb6TEZBYt9wPK7yNIici5wIGkoefDge9GxA8K651C6uvyeVKSuDiLr6P3QOqv90uyfkJdGFFoZmZm\n1u1q3nIiaTlJp0t6Pps++llJJ1Wpd5qk13NTTG9aWL66pL9Imi1ppqTfZ9f1zczMrA+peXJCmlb5\nm6Trx1uQrhl/X9IxlQqSTiA1h38T2JnUI39CNu9DxV9Jvdv3IjWRjqb5NXszMzPrA2p+WUfS9aTp\nq4/KlV0JvBcRX8lev07qGDYuez2YdL358Ii4IuuZ/zgwMiIezursQ7pfxXrZdWgzMzPrA8rQcnIf\nsFc2uROStiPd5Gx89npjUg/7yoyMRMQcUoe6XbOiUcDMSmKSuY3U6WsXzMzMrM8ow1DiM0g96Z+U\ntISUMP0oIiqzKg4jJRnTC+tNp2lY4DBgRn5hRCyR9A7Nhw6amZlZyZUhOfkCacKiQ4EnSDMy/krS\n6xFxaSvribaHw7VYJ7t51z6k2Rnf72DMZmZmy7KVgI2ACbnbaXSbMiQnZ5JuB//37PXj2cyPPyTd\nWnwaKckYSvPWkyGk+SDI6jSbACq7J8bqLN3iUrEP8Jeuh29mZrbM+iJpQEq3KkNyMoilWzcayfrD\nRMQLkqaRRuE8Ch90iN2FpluM3w+sJmmHXL+TvUhJzb9a2O+LAJdddhlbbrnUTNfLlLFjxzJu3Lha\nh1EKPheJz0MTn4vE5yHxeUimTp3Kl770JSjc5by7lCE5uR74kaRXSCNuRpDuKfH7XJ1zgJMkPUs6\nEacDrwLXAkTEk5ImAL+T9G3SfSrOAxpaGanzPsCWW27JiBEjuv2g+pK6urpl/hxU+FwkPg9NfC4S\nn4fE52EpPdItogzJyTGkZOMC0qWZ10n3fTi9UiEizpQ0iDRvyWqkKa/3i4iFue0cRpq2+zZSy8uV\npJtzmZmZWR9S8+QkIuYB380erdU7lXQ/jZaWzwK+1J2xmZmZWe8rwzwnZmZmZh9wcmLU19fXOoTS\n8LlIfB6a+FwkPg+Jz0PvqPn09bUiaQQwadKkSe7cZGZm1gGTJ09m5MiRkG4bM7m7t++WEzMzMysV\nJydmZmZWKk5OzMzMrFScnJiZmVmpODkxMzOzUnFyYmZmtoyaPh023RTuuqvWkTTn5MTMzGwZtWgR\nPPccvN8jd8jpPCcnZmZmy6jGxvQs1TaOIicnZmZmy6jKPKxOTszMzKwUnJyYmZlZqTg5MTMzs1Jx\ncmJmZmal4uTEzMzMSsXJiZmZmZVKJTlZrmTZQMnCMTMzs97ieU7MzMysVHxZx8zMzErFyYmZmZmV\nipMTMzMzKxUnJ2ZmZlYqTk7MzMysVJycmJmZWal4KLGZmZmViidha4GkFyQ1Vnmcly1fUdIFkt6S\n9K6kKyUNKWxjfUk3SponaZqkMyXV/NjMzMzKzJd1WrYjMCz3+AQQwBXZ8nOA/YFDgNHAOsBVlZWz\nJGQ8sDwwCjgcOAI4rVeiNzMz66PKmpwsX+sAIuLt/GtJBwLPRcRESYOBrwKHRsRd2fIjgamSdo6I\nB4F9gC2APSPiLeAxSScDZ0g6NSIW9+oBmZmZ9RFlTU7K0HLyAUkrAF8ELs6K
diQlULdX6kTEU8DL\nwK5Z0SjgsSwxqZgA1AFb93TMZmZmfZWTk/b5NCmp+FP2eiiwMCLmFOpNJ10CInueXmU5uTpmZmZW\n4OSkfb4K3BQR09qoJ1K/lLa0p46ZmdkyqazJSc37nFRI2gDYGzg4VzwNGChpcKH1ZAhNrSPTgJ0K\nmxuaPRdbVJYyduxY6urqmpXV19dTX1/fgejNzMz6nvbMc9LQ0EBDQ0OzstmzZ/dgVCVKTkitJtNJ\nI28qJgGLgb2AqwEkDQc2AO7L6twPnChprVy/kzHAbOCJtnY6btw4RowY0S0HYGZm1pe0Z56Tal/Y\nJ0+ezMiRI3ssrlIkJ5JEGv57SUQ0VsojYo6ki4GzJc0E3gXOBe6NiIeyareQkpBLJZ0ArA2cDpwf\nEYt68TDMzMz6FF/Wad3ewPrAH6ssGwssAa4EVgRuBr5TWRgRjZIOAH5Nak2ZB1wCnNKzIZuZmfVt\nTk5aERG3AgNaWLYAODZ7tLT+K8ABPROdmZlZ/1TW5KRso3XMzMyslzg5MTMzs1JxcmJmZmal4uTE\nzMzMSqU985zUgpMTMzOzZVR75jmphZKFY2ZmZr3Fl3XMzMysVJycmJmZWak4OTEzM7NScXJiZmZm\npeLkxMzMzErFyYmZmZmViuc5MTMzs1Jxy4mZmZmViidhMzMzs1Jxy4mZmZmVipMTMzMzKxUnJ2Zm\nZlYqTk7MzMysVJycmJmZWal4nhMzMzMrFbecmJmZWal4nhMzMzMrFbecmJmZWak4OTEzM7NScXJi\nZmZmpeLkxMzMzErFyYmZmZmViuc5aYWkdSRdKuktSe9JekTSiEKd0yS9ni2/VdKmheWrS/qLpNmS\nZkr6vaSVe/dIzMzM+g63nLRA0mrAvcACYB9gS+B7wMxcnROAY4BvAjsD84AJkgbmNvXXbN29gP2B\n0cBve+EQzMzM+qSyznOyfK0DAH4AvBwRX8+VvVSocxxwekRcDyDpK8B04GDgCklbkhKbkRHxcFbn\nWOBGScdHxLSePggzM7O+xi0nLTsQ+LekKyRNlzRZ0geJiqSNgWHA7ZWyiJgD/AvYNSsaBcysJCaZ\n24AAdunpAzAzM+uLnJy07CPAt4GngDHAb4BzJX0pWz6MlGRML6w3PVtWqTMjvzAilgDv5OqYmZlZ\nTlmTkzJc1lkOeDAiTs5ePyJpa1LCclkr64mUtLSmPXXMzMyWSU5OWvYGMLVQNhX4TPbzNFKSMZTm\nrSdDgIdzdYbkNyBpALA6S7e4NDN27Fjq6uqaldXX11NfX9/+IzAzM+uD2pOcNDQ00NDQ0Kxs9uzZ\nPRhVOZKTe4HNC2Wbk3WKjYgXJE0jjcJ5FEDSYFJfkguy+vcDq0naIdfvZC9SUvOv1nY+btw4RowY\n0VoVMzOzfqk985xU+8I+efJkRo4c2WNxlSE5GQfcK+mHwBWkpOPrwFG5OucAJ0l6FngROB14FbgW\nICKelDQB+J2kbwMDgfOABo/UMTMzqy5K2vGh5slJRPxb0qeBM4CTgReA4yLi8lydMyUNIs1bshow\nEdgvIhbmNnUYcD5plE4jcCVpCLKZmZlVEVG+/iZQguQEICLGA+PbqHMqcGory2cBX2ppuZmZmTUX\nUb4J2KAcQ4nNzMysBsracuLkxMzMbBnl5MTMzMxKxcmJmZmZlUpjo5MTMzMzKxG3nJiZmVmpODkx\nMzOzUnFyYmZmZqXieU7MzMysVNxyYmZmZqXi5MTMzMxKxcmJmZmZlYrnOTEzM7NSccuJmZmZlYqT\nEzMzMysVJydmZmZWKp7nxMzMzErFLSdmZmZWKk5OzMzMrFScnJiZmVmpeJ4TMzMzKxW3nJiZmVmp\nODkxMzOzUnFyYmZmZqXi5MTMzMxKxZOwmZmZWam45cTMzMxKxcmJmZmZlYrnOWmBpFMkNRYeT+SW\nryjpAklvSXpX0pWShhS2sb6kGyXNkzRN
0pmSan5sZmZmZdavW04kDZC0vaTVO7mJKcBQYFj22C23\n7Bxgf+AQYDSwDnBVbt/LAeOB5YFRwOHAEcBpnYzFzMxsmdCvkhNJ50j6WvbzAOAuYDLwiqSPd2KT\niyPizYiYkT3eybY9GPgqMDYi7oqIh4Ejgf+QtHO27j7AFsAXI+KxiJgAnAx8R9LynTk+MzOzZUG/\nSk6AzwJImAt3AAAgAElEQVSPZD8fCGxMShDGAT/pxPY2k/SapOckXSZp/ax8JKlF5PZKxYh4CngZ\n2DUrGgU8FhFv5bY3AagDtu5ELGZmZsuE/pacrAVMy37+JPD3iHga+APw0Q5u6wHSZZh9gG+REp27\nJa1MusSzMCLmFNaZni0je55eZTm5OmZmZlZQ1nlOOnvZYzqwlaQ3gH2Bo7PyQcCSjmwouwxTMUXS\ng8BLwOeB91tYTUC0Z/MdicXMzGxZUtaWk84mJ38ErgDeICUAt2bluwBPdiWgiJgt6WlgU+A2YKCk\nwYXWkyE0tY5MA3YqbGZo9lxsUVnK2LFjqaura1ZWX19PfX19Z8I3MzPrM9qTnDQ0NNDQ0NCsbPbs\n2T0YVSeTk4g4VdIUYH3SJZ0F2aIlwBldCUjSKsAmwJ+AScBiYC/g6mz5cGAD4L5slfuBEyWtlet3\nMgaYDTxBG8aNG8eIESO6ErKZmVmf1J55Tqp9YZ88eTIjR47ssbg6PZolIq4EkLRSruxPHd2OpF8A\n15Mu5awL/C8pIbk8IuZIuhg4W9JM4F3gXODeiHgo28QtpCTkUkknAGsDpwPnR8Sizh6fmZlZf1fW\nyzqdHUo8QNLJkl4D5kr6SFZ+emWIcQesB/yVdDnocuBNYFREvJ0tHwvcAFwJ3Am8TprzBICIaAQO\nILXa3Af8GbgEOKUzx2ZmZrasKGty0tmWkx+RJjv7PvC7XPkU4L+Bi9u7oYhotXNHdsno2OzRUp1X\nSAmKmZmZtVNZk5PODiD6CvCNiPgLzUfnPEKa78TMzMxKrr8lJ+sCz7awvRU6H46ZmZn1lrLOc9LZ\nkJ4Adq9S/lng4c6HY2ZmZr2lrC0nne1zchrwJ0nrkhKcz0janHS5x30/zMzM+oCyJiedajmJiGtJ\nScjewDxSsrIlcGBE3NraumZmZlYO7ZnnpBa6Ms/JPcAnujEWMzMz60X9quVE0k6SdqlSvoukHbse\nlpmZmfW0fpWcABeQpq4vWjdbZmZmZiXX35KTrYDJVcofzpaZmZlZyfW35GQBTXf+zVubdF8cMzMz\nK7n+lpzcAvxMUl2lQNJqwE8Bj9YxMzPrA8o6CVtnR+scD9wNvCSpMuna9sB04MvdEZiZmZn1rLK2\nnHQqOYmI1yRtC3wR2A6YD/wRaIiIRd0Yn5mZmfWQ/jjPyTzgom6MxczMzHpRv2o5AZA0HPg4MIRC\n35WIOK1rYZmZmVlP61fJiaSjgF8DbwHTgMgtDtJ09mZmZlZi/So5AU4CfhQRP+/OYMzMzKz3lDU5\n6ewAotWBv3dnIGZmZta7+lty8ndgTHcGYmZmZr2rv81z8ixwuqRRwGNAs+HDEXFuVwMzMzOzntXf\nhhJ/A5gL7JE98gJwcmJmZlZyZb2s09lJ2Dbu7kDMzMysd5U1OenSlSZJAyVtLqnT86WYmZlZbfSr\n5ETSIEkXA+8BjwMbZOXnSfpBN8ZnZmZmPaRfJSfAz0j31Pk48H6u/DbgC12MyczMzHpBWZOTzl6O\nORj4QkQ8ICk/O+zjwCZdD8vMzMx6WlmTk862nHwYmFGlfGWaT2VvZmZmJVXWeU46G9K/gf1zrysJ\nydeB+7sUkZmZmfWKss5z0tnk5ETgp5J+Tbo0dJykW4EjgR91JSBJP5TUKOnsXNmKki6Q9JakdyVd\nKWlIYb31Jd0oaZ6kaZLOlFTCfNDMzKwc+tVlnYi4h9QhdnnSDLFjgOnArhExqbPBSNoJOAp4pLDo\nHFJL
zSHAaGAd4KrcessB47N4RgGHA0fguyObmZm1qKzJSYc7xGZzmhwGTIiIo7orEEmrAJeRLg2d\nnCsfDHwVODQi7srKjgSmSto5Ih4E9gG2APaMiLeAxySdDJwh6dSIWNxdcZqZmfUXZU1OOtxykn3Q\n/wZYqZtjuQC4PiLuKJTvSEqibs/F8BTwMrBrVjQKeCxLTComAHXA1t0cp5mZWb/Qb5KTzIPADt0V\nhKRDge2BH1ZZPBRYGBFzCuXTgWHZz8Oy18Xl5OqYmZlZTlmTk87Oc3Ih8EtJ6wGTgHn5hRHxaHs3\nlG3jHOATEbGorfr5VWnfsGUPbTYzM6uivyUnl2fP+bsPB00Jw4AObGskad6USdIHp2gAMFrSMcC+\nwIqSBhdaT4bQ1DoyDdipsN2h2XOxRaWZsWPHUldX16ysvr6e+vr6DhyCmZlZ39Oe5KShoYGGhoZm\nZbNnz+7BqDqfnHTnXYlvAz5aKLsEmAqcAbwGLAL2Aq4GkDScdD+f+7L69wMnSlor1+9kDDAbeKK1\nnY8bN44RI0Z0/SjMzMz6mMbGtidhq/aFffLkyYwcObLH4upUchIRL3VXABExj0ICIWke8HZETM1e\nXwycLWkm8C6pxebeiHgoW+WWbBuXSjoBWBs4HTi/g5eKzMzMlhn96rKOpK+0tjwi/ty5cJo2UXg9\nFlgCXAmsCNwMfCe3v0ZJBwC/JrWmzCO1vpzSxTjMzMz6rX6VnAC/KrxeARgELATeA7qUnETEfxZe\nLwCOzR4trfMKcEBX9mtmZrYs6VfJSUSsXiyTtBmp5eIXXQ3KzMzMel5Zk5Nuu/dMRDwD/IClW1XM\nzMyshPp9cpJZTLrvjZmZmZVcWZOTznaIPahYRBohcwxwb1eDMjMzs57Xr5IT4JrC6wDeBO4Avtel\niMzMzKxXLFnSj5KTiOjuy0FmZmbWi6ZNg0mT4HOfq3UkS3OSYWZmtgy6+up0WefII2sdydI6lZxI\nulLSD6qU/4+kv3c9LDMzM+tJb78Na6wBqy81OUjtdbblZA/gxirlNwOjOx+OmZmZ9YZ582DllWsd\nRXWdTU5WIc0GW7QIGNz5cMzMzKw3zJ0Lq6xS6yiq62xy8hjwhSrlh9LGXYDNzMys9ubNK29y0tmh\nxKcD/5C0CWn4MMBeQD1Qwn6/ZmZmljd3bnkv63R2KPH1kg4GTgQ+C8wHHgX2joi7ujE+MzMz6wH9\nseWEiLiR6p1izczMrOTmzoX11691FNV1dijxTpJ2qVK+i6Qdux6WmZmZ9aQyt5x0tkPsBUC1fGvd\nbJmZmZmVWJn7nHQ2OdkKmFyl/OFsmZmZmZVYfxxKvAAYWqV8bWBx58MxMzOz3tAfL+vcAvxMUl2l\nQNJqwE+BW7sjMDMzM+s5Zb6s09nROscDdwMvSXo4K9semA58uTsCMzMzs56xcCEsXlzelpPOznPy\nmqRtgS8C25HmOfkj0BARi7oxPjMzM+tmc+em5/7WckJEzAMu6sZYzMzMrBfMnp2eB5f0bnidSk4k\nfY40Vf1wIIBngL9GxJXdGJuZmZn1gDffTM9DhtQ2jpZ0qEOspOUk/Q34G2nI8LPA88DWwBWSLpek\n7g/TzMzMusuMGem5rMlJR1tOjgP2Bg6KiBvyCyQdROp3chxwTveEZ2ZmZt2tkpystVZt42hJR4cS\nHwn8TzExAYiI64DvA1/tjsDMzMysZ8yYAWusASusUOtIqutocrIZcFsry2/L6piZmVlJzZhR3ks6\n0PHkZD6wWivLBwPvd2SDkr4l6RFJs7PHfZL2zS1fUdIFkt6S9K6kKyUNKWxjfUk3SponaZqkMyV1\ndoI5MzOzfq2/JSf3A99uZfl3sjod8QpwAjAye9wBXCtpy2z5OcD+wCHAaGAd4KrKylkSMp7Uf2YU\ncDhwBHBaB+MwMzNbJpQ9Oeloh9ifAHdKWhM4C3gSELAl8D3gU8CeHd
lgRNxYKDpJ0reBUZJeI/Vh\nOTQi7gKQdCQwVdLOEfEgsA+wBbBnRLwFPCbpZOAMSadGhO/1Y2ZmlvPyy7DffrWOomUdajmJiPuA\nL5ASkPuBmcA7wL1ZWX1E3NvZYLKhyocCg7LtjyQlULfnYngKeBnYNSsaBTyWJSYVE4A60hBnMzMz\nyyxeDM8/D5uVuIdohydhi4irJU0AxpAmYQN4GrglIt7rTBCStiElIysB7wKfjognJe0ALIyIOYVV\npgPDsp+HZa+LyyvLHulMTGZmZv3Ryy/DokWw6aa1jqRlnb23znuS9gb+X0S80w1xPEm6R89qpL4l\nf5Y0upX6Is1M25b21DEzM1tmPPNMeu43yYmk9SLi1ezlYcCZwDuSHgM+GRGvdCaIrF/I89nLyZJ2\nJk3mdgUwUNLgQuvJEJpaR6YBOxU2OTR7LraoLGXs2LHU1dU1K6uvr6e+vr5jB2FmZtYHPP10mt9k\ngw3aV7+hoYGGhoZmZbMrN+fpIR1tOXlS0tukPiYrAeuT+n9sBHTnVC7LASsCk4DFwF7A1QCShgMb\nAPdlde8HTpS0Vq7fyRhgNvBEWzsaN24cI0aM6MbQzczMyuuhh2C77WD5dmYA1b6wT548mZEjR/ZA\ndElHhxLXAZ8jJQ3LAeMlPU1KJPaRNKy1lauR9BNJu0naUNI2kn4G7AFclrWWXAycLenjkkaSpsi/\nNyIeyjZxCykJuVTStpL2AU4Hzo+IRR2Nx8zMrD978EHYZZdaR9G6jiYnK0TEgxHxS9KEbDuQprRf\nQhry+5ykpzq4zaHAn0n9Tm4jjdAZExF3ZMvHAjcAVwJ3Aq+T+qUAEBGNwAFZDPdl27oEOKWDcZiZ\nmfVrs2bBU0+VPznp6GWdOZIeJl3WGQgMioh7JS0mDTF+Fdi5IxuMiK+3sXwBcGz2aKnOK6QExczM\nzFowZUp63n772sbRlo62nKwD/BhYQEps/i1pIilRGQFERNzTvSGamZlZd5gyJfU12XzzWkfSuo5O\nwvZWRFwfET8E3iONkjmPNGT3LFLLyl3dH6aZmZl11ZQpMHw4DBxY60ha19Wb482OiCuARcB/AhsD\nF3Y5KjMzM+t2jz4KW/eBudO7kpxsS+pjAvASsCgipkXE37oelpmZmXWn999PI3U+9rFaR9K2Ts0Q\nCx90Qq38vE33hGNmZmY94aGHYMEC2GOPWkfStq5e1jEzM7OSW7IETjwR1lkHtt221tG0rdMtJ2Zm\nZtY3XHcd3HMP3HknDBhQ62ja5pYTMzOzfu4Pf4Bdd+0bl3TAyYmZmVm/98wzKTnpK5ycmJmZ9WMR\n8OqrsO66tY6k/ZycmJmZ9WNz5sC8eU5OzMzMrCRezWYkW2+92sbREU5OzMzM+rHXXkvPbjkxMzOz\nUnjxxfS8zjo1DaNDnJyYmZn1UxFw2WWw227lv9lfnidhMzMz64cWL4YvfAEmToRrrql1NB3j5MTM\nzKyfmTkTDjwQHngArrwSPvWpWkfUMU5OzMzM+pmjj4apU+GOO2D06FpH03Huc2JmZtaPvP8+XHst\nfP/7fTMxAScnZmZm/crdd8P8+fDJT9Y6ks5zcmJmZtaP/OMfsOGGsM02tY6k85ycmJmZ9RMXXwy/\n/S0ceihItY6m85ycmJmZ9RPjxqXnb32rtnF0lZMTMzOzfmDhQnjqKbjwQthoo1pH0zVOTszMzPqB\nZ55JE69tvXWtI+k6JydmZmb9wKOPpmcnJ2ZmZlYK110HH/0orLlmrSPpOicnZmZmfdz48WnitUMP\nrXUk3aPmyYmkH0p6UNIcSdMlXS1peKHOipIukPSWpHclXSlpSKHO+pJulDRP0jRJZ0qq+fGZmZn1\npMmT4dOfhj32SNPW9wdl+PDeHTgP2AXYG1gBuEXSh3J1zgH2Bw4BRgPrAFdVFmZJyHjSvYJGAYcD\nRwCn9Xz4ZmZmtXPhhbDuuqnlZL
XVah1N96j5jf8iotkEu5KOAGYAI4F7JA0GvgocGhF3ZXWOBKZK\n2jkiHgT2AbYA9oyIt4DHJJ0MnCHp1IhY3HtHZGZm1jveey/NCHv00TBwYK2j6T5laDkpWg0I4J3s\n9UhSEnV7pUJEPAW8DOyaFY0CHssSk4oJQB3QD/otm5mZLe2ss2DePPja12odSfcqVXIiSaRLOPdE\nxBNZ8TBgYUTMKVSfni2r1JleZTm5OmZmZv1GBPzxj3DkkbDxxrWOpnvV/LJOwYXAVsBu7agrUgtL\nW9pTx8zMrE956il48UU48MBaR9L9SpOcSDof+CSwe0S8nls0DRgoaXCh9WQITa0j04CdCpscmj0X\nW1SaGTt2LHV1dc3K6uvrqa+v7+ARmJmZ9Y4XX4QxY2CNNWDPPXt2Xw0NDTQ0NDQrmz17do/uUxG1\nb1jIEpNPAXtExPOFZYOBN0kdYq/OyoYDTwK7RMRDkvYFrgfWrvQ7kfQN4OfAkIhYVGWfI4BJkyZN\nYsSIET14dGZmZt3nscdgr71glVXg9ttrc0ln8uTJjBw5EmBkREzu7u3XvOVE0oVAPXAQME9SpcVj\ndkS8HxFzJF0MnC1pJvAucC5wb0Q8lNW9BXgCuFTSCcDawOnA+dUSEzMzs77q4oth+eXh3nth7bVr\nHU3PqHlyAnyL1C/kzkL5kcCfs5/HAkuAK4EVgZuB71QqRkSjpAOAXwP3AfOAS4BTejBuMzOzXrVw\nYZoN9oAD+m9iAiVITiKizRFDEbEAODZ7tFTnFeCAbgzNzMysVI49Nt19+KKLah1JzyrVUGIzMzNr\n2YMPwuGHw8c/XutIepaTEzMzsz7ixRdhiy1qHUXPc3JiZmbWB8yeDbNmwYYb1jqSnufkxMzMrA94\n6aX0vNFGNQ2jVzg5MTMz6wMeeCA9LwstJzUfrWNmZmbVLVgA48bBH/6QRunsvTcMWwbuGOeWEzMz\nsxKKgEMOgZNOglGjoKEBJkyA5ZaBT263nJiZmZXQ3/4GN94IV10Fn/lMraPpXU5OzMzMSmLqVJg8\nGR56CC64AOrr4dOfrnVUvc/JiZmZWY0tWQLf+x6cdx40Nqab+p12WiqTah1d73NyYmZmVkMzZsBx\nx8EVV8DPfw7f+hastFK6ud+yahk+dDMzs9qaOxe22y6Nyrn0UjjssFpHVA5OTszMzGrkwgvhzTfT\nMOGNN651NOWxDAxIMjMzK5+rr4YTToCvfc2JSZGTEzMzs1727LPws5/BHnvAb35T62jKx8mJmZlZ\nL2lsTJOqbbYZTJkCp5yybI7GaYuTEzMzs15y3HHwk5/Ad78Lzz8Pe+5Z64jKyR1izczMesHLL8Pv\nfpfmLzn55FpHU25OTszMzHrAwoUwcWIajfOPf8DNN8Nqq8HRR9c6svJzcmJmZtYN3n0XfvWrlIxc\ney3Mng2zZqVl22wD3/gGnHgirLFGbePsC5ycmJmZdYPzz4dTT4X11oMxY2D99WHffdPrYcPc8bUj\nnJyYmZl1wRNPpHvg3HxzmrPk97+vdUR9n0frmJmZddK4cbDttmmG1zPPTCNxrOvccmJmZtYBS5bA\nAw/AZZelCdTGjk0Tqq24Yq0j6z+cnJiZmbVh9mx4+2044wy47jqYPh3q6uCcc+C//sv9SbqbkxMz\nM7MWvPZauv/NX/6SXq+xBnz5y7D//rDLLjB4cG3j66+cnJiZmeVcdhk89hjceis8+mhKSH71K9ho\nI9htNw8F7g1OTszMbJkXATfckDq0/utf6S7Bw4fD2WfDV76SJk+z3lOK0TqSdpd0naTXJDVKOqhK\nndMkvS7pPUm3Stq0sHx1SX+RNFvSTEm/l7Ry7x2FmZn1Fe+9B//8Z7o8s8IK6XHQQTBgAJx7brrv\nzc03p/4kTkx6X1laTlYG/g/4A3BVcaGkE4BjgMOBF4AfAxMkbRkRC7NqfwWGAnsBA4FLgN8CX+rp
\n4M3MrJwWLEgzt159dUpInn8+JSVvvAFvvZVaSM48Ez70IdhkE9h7b3duLYNSJCcRcTNwM4BU9dfi\nOOD0iLg+q/MVYDpwMHCFpC2BfYCREfFwVudY4EZJx0fEtF44DDMzq7FZs+AXv4DnnoP582H8eFi8\nGJZbDlZaKSUhBx8Ma64Jn/oUjBzpIcBlVIrkpDWSNgaGAbdXyiJijqR/AbsCVwCjgJmVxCRzGxDA\nLsC1vRexmZn1ln//O42meeaZ9HrmzNR/ZNSo1AJyyimw6aapI+t669U2Vmu/0icnpMQkSC0ledOz\nZZU6M/ILI2KJpHdydczMrA9bsgQmTICbbkrJyPvvw913w9Zbw+GHp2RkpZXgiCNgnXVqHa11RV9I\nTloiUtLS1TpmZlYis2al+UUWLkx9Rd56C665JpXPnw+rrw7/+Z+w6qpwwQVw1FGwfF/+NLOl9IW3\ncxopyRhK89aTIcDDuTpD8itJGgCsztItLs2MHTuWurq6ZmX19fXU19d3LWozM2uXCHj11TTz6jXX\nwP33w7x5aVldHay7LhxwAGyxBey6K+y4YxpdY72joaGBhoaGZmWzZ8/u0X0qolwNC5IagYMj4rpc\n2evALyJiXPZ6MCnp+EpE/F3SFsDjwI65DrFjgPHAetU6xEoaAUyaNGkSI0aM6PHjMjMzmDIlTQUP\n6f40U6ak5yefTGW77AJjxsB++6XLNFtt5VlYy2jy5MmMHDkS0kCUyd29/VK0nGTzkWxKaiEB+Iik\n7YB3IuIV4BzgJEnPAi8CpwOvknV0jYgnJU0Afifp26ShxOcBDR6pY2ZWO889l1pDXn8drr02va5Y\nfvnUCrLVVqlT66c/nVpKzEqRnAA7Av8k9Q8J4JdZ+Z+Ar0bEmZIGkeYtWQ2YCOyXm+ME4DDgfNIo\nnUbgStIQZDMz6yXz5sHEiSkZueaa1IF1ueVS/5D990937/3oR1PdNdaAIUNa354tm0qRnETEXbQx\nW21EnAqc2sryWXjCNTOzXhEBL7+cRtBMmpRmU7355nTJptJfZKed4Ec/guOPh0GDahuv9S2lSE7M\nzKxcZs9OycbixU1lS5bA9den0TNvvZX6i1RsuCF8/vMwbFjqvDp0KKy1Vu/Hbf2DkxMzs2VQYyM8\n/jgsWpR+vvHGdAlmcta1cfHilIwUbbEFjBgB668PJ50EH/5wmm11u+16N37r35ycmJn1I0uWpIQj\nLyJN4/766+l52rTUMvLCC011Bg2CnXeGn/88dVQdMCCNmBk6tPm2VlzR956xnufkxMysD3nqqTQn\nyC23pOe8iFT+9tvV111xxTSb6sc+ljqp7rsvrL12WrbRRqmDqlkZODkxM6uh+fOb5v2A1Jn0mmvS\n1OwAc+c2vY6Al15K5XV1sMMOS2/vc59L95Ep2nLLdDnGrC9wcmJm1o0eeaT5XB4V8+enJOO995rK\nItJsqLNmNa+70kpN831Iaar2jTZKrzfbDEaPTn09Vl21Rw7BrOacnJiZVTFnTuqjAek+LxMmpGSi\nYsGCdN+XuXObr1dMNPK22SbdITfvyCNT8lHpxyGlWVLXXLPrx2DWVzk5MbNl1rRpabhsY2N6/frr\nKQlpbEzDZOfMaaq77rqwyirN1x8zJo1eyVt/ffjEJ6p3Gl19dXcmNWsPJydm1mctXgwPP5xGqCxZ\nkm4c9847S9ebPj2NUqk2NDZvwAA48EBYbTXYYw/45CdTMjFwIIwcmTqRmlnPc3JiZjW3cGHqk1H0\nzjtp0q/KnBs33JAm/6qYNavp0guk0Sabbbb0dpZfHn7yk6VHo6ywQkpGVl+9qcwJiFntOTkxs241\nY0bTRF7VPP54ugttRWMj3HorvPtu9forrJA6iEKa6GvMmKZlAwak2Ug//OH0esMNl770YmZ9j5MT\ns2XUjBnVWysqIuD225uGrj7zDNx9d9vbfeedpmGw1Sy3XLpk
MnBgU9lRR8GoUUvXHTAgdRZdbbW2\n92tm/YeTE7M+4s034Z//bOq8Wc3EifD0021va/58uPfetutJsN566edBg+ArX2lqxWjJqqvCwQen\nCb+qGTSo+WUUM7MiJydmPWj2bHjxxZaXP/ggPPbY0uWLFsG11zafnGvBgrY7dK65Juy1V/tGhFx4\n4dLDWos23BCGD297W2Zm3cnJiVmmsbHlD/9HH4WHHmp53Ycfrt7P4plnmicYRRJstVX1Tpj775/m\nxagYOBA+9ammybmqWXHF1PnTzKwv878x69dmzkyJRVEE3HQTvPFGet3YmOa7aOmeJJASiJZGcqyx\nBhx0UOojkTdmTBoNUiyvWGst2Hjjto/DzGxZ4uTESmXOnHT5oui55+DOO1ter3LDs+KN0KZNW3oG\nz4q6Oth226bXn/tc9U6ZkDpkHnBAy0mGmZl1Hycn1mOeeCJd1ii66y549tmly+fPT6ND8lOE5w0e\n3HyER9Fmm8EhhzQvW2WVljtnDhvme5OYmZWRkxOratq0pnknHnggzU1RTbV7jlS8/Xb18ro62H33\npTttfuhDcNZZsMkmS68zaFAaUuqWCzOz/s/JyTKgsTFN3V2tP8X06anvRb4j6KJF8K9/NU8sPvKR\n6v0tBg6Er32teifN9dZLfS6KSciqq7Y9HNXMzJZdTk5KorExddxctKjtuq2NHHnrrTTFdz7ZiGh5\nFIqU7h9SnNb7i1+Ej340/Tx06NI3NzMzM+spTk46acmSpWfBfOaZ1J+iJU8+2fLEV3PmNM3E2R7b\nb199yOiAAXDiiTBkSPPyESNghx2Wri956KmZmZWLP5Za8PTTTclCRJqZ84UXml7feWea/rto4MCW\nP+xb65y53HLpEsi667YdW10dbLRRe47CzMys71nmk5PzzksdPufNayqLSB09830uVl0Vdt65qf/E\nwQfDnns239bKK8O++6YblZmZmVnnLPPJyQ03wN57L32b9Q03bD4N+Jpr+m6nZmZmvWGZT04mTEj9\nMczMzKwcWpiM28zMzKw2nJyYmZlZqfSr5ETSdyS9IGm+pAck7VTrmPqChoaGWodQGj4Xic9DE5+L\nxOch8XnoHf0mOZH0BeCXwCnADsAjwARJa9U0sD7Af2xNfC4Sn4cmPheJz0Pi89A7+k1yAowFfhsR\n/7+9ew+2sirjOP79oQKKgzh5YSwUlbybF1AoRUFFUkcdsnEcTC2qwazJrElzqrEspzvjDadGbfJa\nUU1pqaMiBylvjGLmhYsp4gUPieIRARXh6Y+1trxsz0Hw7LP3Pvv9fWb2DO9a691nrYd3r/3s9117\nv9dHxDzgbGAlMKmx3TIzM7NN0RLJiaQtgOHAPZWyiAhgOvDJRvXLzMzMNl1LJCfAdsBmwJKq8iXA\n4EHfvXcAAAoVSURBVPp3x8zMzD6sVv+dEwHRRV1/gLlz59avN02qo6ODOXPmNLobTcGxSByHdRyL\nxHFIHIek8N7ZI/eYV0RX7929R76ssxI4JSJuLZT/DtgmIiZ0ss9E4Ka6ddLMzKz1nB4RN9f6SVvi\nzElErJb0CHA0cCuAJOXty7vY7U7gdOA54K0u2piZmdn79QeGkt5La64lzpwASDoVuA6YDMwmfXvn\ns8BeEfFKI/tmZmZmG68lzpwARMS0/JsmFwM7Av8GxjsxMTMz611a5syJmZmZtYZW+SqxmZmZtYhS\nJidluAePpNGSbpX0kqS1kk7qpM3FkhZLWinpbknDquq3lXSTpA5JyyRdI2lA/UbRfZIulDRb0huS\nlkj6q6Q9qtr0kzRV0lJJyyX9WdIOVW2GSLpN0gpJ7ZJ+LqnXvH4knS3psfx/2SHpfkmfLtS3fAw6\nk4+PtZKmFMpKEQtJF+WxFx9PFepLEQcASTtJuiGPdWV+rRxc1aYM8+XCTo6JtZKuyPV1OyZ63UHU\nXSrPPXgGkNbdfJVOfutF
0gXA10gLiA8FVpDi0LfQ7GZgb9K3nk4AjgB+07PdrrnRwBXASOAYYAvg\nLklbFtpcShrfKaQx7gT8pVKZX1i3k9ZojQLOAj5PWt/UW7wAXED6JeXhwAzgFkl75/oyxGA9Sh9K\nvkyaA4rKFIsnSGv0BufH4YW6UsRB0iDgPuBtYDxpzvsWsKzQpizz5QjWHQuDgXGk949pub5+x0RE\nlOoBPAhcVtgW8CJwfqP71oNjXgucVFW2GDivsD0QWAWcmrf3zvsdVGgzHngXGNzoMXUjFtvlcR1e\nGPfbwIRCmz1zm0Pz9nHAamC7QpvJpMlr80aPqRuxeBX4QhljAGwNzAeOAtqAKWU7Hkgf0OZ0UVem\nOPwUuPcD2pR1vrwUWNCIY6JUZ07ke/AAIGlXUlZcjMMbwEOsi8MoYFlEPFrYdTopix5Zp672hEGk\nMbyWt4eTsvxiLOYDz7N+LB6PiKWF57kT2AbYt6c7XGuS+kg6DdgKeIASxgCYCvw9ImZUlY+gXLH4\nuNKl32ck3ShpSC4v0zFxIvCwpGn50u8cSV+qVJZ1vszvl6cD1+aiur42SpWc4HvwVAwmvWg2FIfB\nwP+KlRGxhvSm3itjJUmkTwL/iojKtfXBwDt5simqjkVnsYJeFAtJ+0laTvr0cxXpE9A8ShQDgJyY\nHQhc2En1jpQnFg+STrmPJ93FfVdgVl4nUaZjYjfgK6QzaccCvwYul/S5XF/K+RKYQEoqrsvbdX1t\ntMzvnHTThu7BUyYbE4feHKurgH1Y/7p6VzZ2nL0pFvOAA0hnj04Brpd0xAbat1wMJH2MlKCOi4jV\nm7IrLRaLiCj+sucTkmYDi4BT6fpXs1suDqQP6bMj4vt5+zFJ+5ISlhs3sF+rz5eTgDsiov0D2vXI\nMVG2MydLgTWkDLBoB96f7bWydtIBtaE4tOft90jaDNiWXhgrSVcCxwNjImJxoaod6CtpYNUu1bGo\njlVlu9fEIiLejYhnI2JORHyXtBD0XEoUA9Lliu2BRyStlrQaOBI4V9I7pLH0K0ks1hMRHcACYBjl\nOiZeBqrvADsX2Dn/u4zz5c6kLxBcXSiu6zFRquQkf1Kq3IMHWO8ePPc3ql/1FhELSQdRMQ4DSddG\nK3F4ABgk6aDCrkeTXqQP1amrNZETk5OBsRHxfFX1I6RFa8VY7EGamIqx2L/qG13HAh3AU/RefYB+\nlCsG04H9SZd1DsiPh0mfkCv/Xk05YrEeSVsDu5MWf5bpmLiPtLCzaE/SWaTSzZfZJFIycXuhrL7H\nRKNXAzdg9fGppFXWZwJ7kb7q9SqwfaP7VuNxDiBNtgeSVlN/I28PyfXn53GfSJqs/wY8DfQtPMft\npMn6EOAw0jXZGxo9tk2Mw1WkleKjSRl85dG/qs1CYAzpk/V9wD8L9X1IZxnuAD5Buka/BPhRo8e3\nCXG4hHQ5axdgP+AnpInmqLLEYAOxee/bOmWKBfAL0tdBdwE+Bdydx/GRksVhBGkd1oWk5GwisBw4\nrdCmFPNlHodIN8S9pJO6uh0TDQ9Eg4J/Tg7+KlKmN6LRfeqBMR5JSkrWVD1+W2jzA9KnpJWkFdXD\nqp5jEOkTZQfpDf5qYKtGj20T49BZDNYAZxba9CP9FsrSPCn9Cdih6nmGAP8A3swvtp8BfRo9vk2I\nwzXAs/mYbwfuIicmZYnBBmIzg/WTk1LEAvg96WcUVpG+cXEzsGvZ4pDHcTzwnzwXPglM6qRNy8+X\neRzj8hw5rJO6uh0TvreOmZmZNZVSrTkxMzOz5ufkxMzMzJqKkxMzMzNrKk5OzMzMrKk4OTEzM7Om\n4uTEzMzMmoqTEzMzM2sqTk7MzMysqTg5MTMzs6bi5MTMWoKkNklTGt0PM+s+Jydm1m2SJkt6Q1Kf\nQtkASasl3VPVdqyktZKG1rufZtY7ODkxs1poI90Je0ShbDTwMjBKUt9C+ZHAooh4blP/iK
TNu9NJ\nM+sdnJyYWbdFxAJSIjKmUDyGdGv5hcCoqvI2AElDJN0iabmkDkl/lLRDpaGkiyQ9KumLkp4F3srl\nW0m6Pu/3kqRvVvdJ0jmSFkhaJald0rTajtrMeoqTEzOrlZnA2ML22Fx2b6VcUj9gJDAjt7mFdKv5\n0cAxwO7AH6qedxjwGWACcGAu+2Xe50TgWFLCM7yyg6QRwGXA94A9gPHArG6Oz8zqxKdIzaxWZgJT\n8rqTAaREYhbQF5gM/BA4LG/PlDQO2A8YGhGLASSdATwpaXhEPJKfdwvgjIh4LbcZAEwCJkbEzFx2\nFvBioS9DgDeB2yJiBfAC8FgPjdvMasxnTsysVirrTg4BDgcWRMRS0pmTkXndyRjgmYh4EdgLeKGS\nmABExFzgdWDvwvMuqiQm2e6khGV2Yb9lwPxCm7uBRcDCfPlnoqQtazZSM+tRTk7MrCYi4hngJdIl\nnLGkpISIeJl05uIwCutNAAHRyVNVl6/opJ4u9q305U3gYOA0YDHprM1jkgZu9IDMrGGcnJhZLbWR\nEpMxpMs8FbOA44BDWZecPAXsLOmjlUaS9gG2yXVd+S/wLoVFtpK2Ja0teU9ErI2IGRHxHeAAYChw\n1IcYk5nVmdecmFkttQFTSXPLvYXyWcCVpMsxMwEiYrqkx4GbJJ2X66YCbRHxaFd/ICJWSLoW+IWk\n14BXgB8DayptJJ0A7Jb/7jLgBNIZl/nvf0YzazZOTsysltqA/sDciHilUH4vsDUwLyLaC+UnA1fk\n+rXAHcDXN+LvfJu0vuVWYDnwK6B4yeZ10jd8Lsr9eRo4La9pMbMmp4guL9uamZmZ1Z3XnJiZmVlT\ncXJiZmZmTcXJiZmZmTUVJydmZmbWVJycmJmZWVNxcmJmZmZNxcmJmZmZNRUnJ2ZmZtZUnJyYmZlZ\nU3FyYmZmZk3FyYmZmZk1FScnZmZm1lT+DwrH78/1pfXIAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -356,7 +348,7 @@ }, { "cell_type": "code", - "execution_count": 96, + "execution_count": 15, "metadata": { "collapsed": true }, @@ -370,7 +362,7 @@ }, { "cell_type": "code", - "execution_count": 97, + "execution_count": 16, "metadata": { "collapsed": false }, @@ -460,7 +452,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Online AT VB 2" + "## Train model" ] }, { @@ -471,13 +463,13 @@ }, "outputs": [], "source": [ - "reload(onlineatvb2)\n", - "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" + "reload(atmodel)\n", + "AuthorTopicModel = atmodel.AuthorTopicModel" ] }, { "cell_type": "code", - "execution_count": 102, + "execution_count": 17, "metadata": { "collapsed": false }, @@ -486,37 +478,18 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 10.9 s, sys: 12 ms, total: 10.9 s\n", - "Wall time: 10.9 s\n" + "CPU times: user 1.84 s, sys: 0 ns, total: 1.84 s\n", + "Wall time: 1.85 s\n" ] } ], "source": 
[ - "%time model_online2 = OnlineAtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", " eval_every=1, random_state=1, var_lambda=None)" ] }, - { - "cell_type": "code", - "execution_count": 100, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Speed improvement from new algorithm: 4.709677!\n" - ] - } - ], - "source": [ - "print(\"Speed improvement from new algorithm: %f!\" %((2 * 60 + 26) / 31))" - ] - }, { "cell_type": "code", "execution_count": 218, @@ -555,7 +528,7 @@ } ], "source": [ - "model_online2.show_topics(num_topics=10)" + "model.show_topics(num_topics=10)" ] }, { @@ -589,7 +562,7 @@ "name = id2author[114]\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))" + "pprint(model.get_author_topics(author2id[name]))" ] }, { @@ -636,208 +609,22 @@ "name = 'Yaser S.Abu-Mostafa'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Geoffrey E. Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'James M. 
Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))" - ] - }, - { - "cell_type": "code", - "execution_count": 162, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Speed improvement from new algorithm: 5.503876!\n" - ] - } - ], - "source": [ - "print(\"Speed improvement from new algorithm: %f!\" %(28.4 / 5.16))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Online AT VB" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 13.6 s, sys: 16 ms, total: 13.6 s\n", - "Wall time: 13.6 s\n" - ] - } - ], - "source": [ - "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", - " iterations=100, alpha='auto', eta='symmetric', random_state=1)\n", - "var_lambda = lda.state.get_lambda()" - ] - }, - { - "cell_type": "code", - "execution_count": 118, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(onlineatvb)\n", - "OnlineAtVb = onlineatvb.OnlineAtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 157, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 28.3 s, sys: 12 ms, total: 28.4 s\n", - "Wall time: 28.4 s\n" - ] - } - ], - "source": [ - "%time model_online = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=200, random_state=2, var_lambda=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "metadata": { - "collapsed": false, 
- "scrolled": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.075*image + 0.037*field + 0.034*visual + 0.031*position + 0.029*move + 0.025*map + 0.025*location + 0.021*center + 0.021*search + 0.019*human'),\n", - " (1,\n", - " '0.044*bit + 0.038*code + 0.030*hopfield + 0.029*matrix + 0.024*eq + 0.019*stored + 0.017*minimum + 0.016*stage + 0.014*optimization + 0.013*column'),\n", - " (2,\n", - " '0.031*iv + 0.025*differential + 0.023*code + 0.023*scheme + 0.020*adaptive + 0.017*find + 0.016*criterion + 0.015*he + 0.014*bound + 0.014*half'),\n", - " (3,\n", - " '0.035*activity + 0.033*array + 0.027*cell + 0.023*synaptic + 0.020*low + 0.018*rate + 0.017*synapsis + 0.016*region + 0.016*storage + 0.016*distribution'),\n", - " (4,\n", - " '0.052*role + 0.049*loop + 0.046*processor + 0.037*sequence + 0.029*gain + 0.021*product + 0.018*activation + 0.018*multiple + 0.018*edge + 0.017*address'),\n", - " (5,\n", - " '0.028*stimulus + 0.024*classification + 0.024*shape + 0.020*circuit + 0.018*fully + 0.018*design + 0.015*power + 0.015*pp + 0.014*sample + 0.014*experiment'),\n", - " (6,\n", - " '0.042*capacity + 0.034*associative_memory + 0.019*feedback + 0.018*cell + 0.017*phase + 0.016*interaction + 0.015*delay + 0.014*recall + 0.014*sequence + 0.014*matrix'),\n", - " (7,\n", - " '0.061*node + 0.049*hidden + 0.036*convergence + 0.033*energy + 0.030*gradient + 0.030*dynamic + 0.019*back_propagation + 0.016*back + 0.016*propagation + 0.016*learning_algorithm'),\n", - " (8,\n", - " '0.060*training + 0.039*representation + 0.029*connectionist + 0.028*trained + 0.020*context + 0.017*learned + 0.017*target + 0.015*mcclelland + 0.015*hidden_unit + 0.015*rumelhart'),\n", - " (9,\n", - " '0.074*firing + 0.056*stimulus + 0.056*cell + 0.037*connectivity + 0.033*path + 0.030*potential + 0.027*temporal + 0.027*control + 0.021*synaptic + 0.019*inhibition')]" - ] - }, - "execution_count": 40, - "metadata": {}, - "output_type": "execute_result" - } - ], - 
"source": [ - "model_online.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 273, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [21]\n", - "[(0, 0.16188318876615412), (1, 0.80823920909246583), (3, 0.021312448059559796)]\n", - "\n", - "Geoffrey E. Hinton\n", - "Docs: [146, 276, 235, 270]\n", - "[(0, 0.14004630013032807),\n", - " (1, 0.23772038268835666),\n", - " (2, 0.5640333145036398),\n", - " (3, 0.058200002677675597)]\n", - "\n", - "Michael I. Jordan\n", - "Docs: [205]\n", - "[(0, 0.26951795605324808),\n", - " (1, 0.1612862641672847),\n", - " (2, 0.4872153771544665),\n", - " (3, 0.081980402625000656)]\n", - "\n", - "James M. Bower\n", - "Docs: [150, 128, 162, 101, 188, 251, 244]\n", - "[(0, 0.67413384788621999),\n", - " (1, 0.071583305581578827),\n", - " (2, 0.06345028631865203),\n", - " (3, 0.19083256021354914)]\n" - ] - } - ], - "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online.get_author_topics(author2id[name]))\n", + "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'Geoffrey E. Hinton'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online.get_author_topics(author2id[name]))\n", + "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'Michael I. Jordan'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online.get_author_topics(author2id[name]))\n", + "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'James M. 
Bower'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online.get_author_topics(author2id[name]))" + "pprint(model.get_author_topics(author2id[name]))" ] }, { @@ -952,109 +739,29 @@ ] }, { - "cell_type": "code", - "execution_count": 42, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(onlineatvb)\n", - "OnlineAtVb = onlineatvb.OnlineAtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 212, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 34.6 s, sys: 4 ms, total: 34.6 s\n", - "Wall time: 34.6 s\n" - ] - } - ], - "source": [ - "%time model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-3, \\\n", - " iterations=1, passes=200, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=10, random_state=1, var_lambda=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 133, + "cell_type": "markdown", "metadata": { - "collapsed": false + "collapsed": true }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Amir F.Atiya\n", - "Docs: [5]\n", - "[(0, 0.26236663424329809),\n", - " (1, 0.055837758145413023),\n", - " (2, 0.32385947243135804),\n", - " (4, 0.031231118362347546),\n", - " (5, 0.049702348068489471),\n", - " (6, 0.063277167602715914),\n", - " (7, 0.11515798924424819),\n", - " (9, 0.098115022122885684)]\n", - "\n", - "FrankWilczek\n", - "Docs: [1]\n", - "[(0, 0.21018310687516228),\n", - " (1, 0.39886126379385306),\n", - " (2, 0.18071281961456737),\n", - " (3, 0.052218386110533886),\n", - " (5, 0.039636353968810233),\n", - " (8, 0.032375816267307712),\n", - " (9, 0.073725725628590477)]\n" - ] - } - ], - "source": [ - "name = 'Amir F.Atiya'\n", - "print('\\n%s' % name)\n", - "print('Docs:', 
model.author2doc[model.author2id[name]])\n", - "pprint(model.get_author_topics(model.author2id[name]))\n", - "\n", - "name = 'FrankWilczek'\n", - "print('\\n%s' % name)\n", - "print('Docs:', model.author2doc[model.author2id[name]])\n", - "pprint(model.get_author_topics(model.author2id[name]))\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, "source": [ - "## Mini-batch" + "## LDA" ] }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 131, "metadata": { "collapsed": true }, "outputs": [], "source": [ - "reload(minibatchatvb)\n", - "MinibatchAtVb = minibatchatvb.MinibatchAtVb" + "reload(gensim.models.ldamodel)\n", + "LdaModel = gensim.models.ldamodel.LdaModel" ] }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 151, "metadata": { "collapsed": false }, @@ -1063,40 +770,19 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 2min 1s, sys: 24 ms, total: 2min 1s\n", - "Wall time: 2min 1s\n" + "CPU times: user 2.48 s, sys: 524 ms, total: 3 s\n", + "Wall time: 2.43 s\n" ] } ], "source": [ - "%time model_online = MinibatchAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None, chunksize=1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Offline AT VB 2" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(atvb2)\n", - "AtVb2 = atvb2.AtVb2" + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=1, \\\n", + " iterations=1, alpha='symmetric', eta='symmetric', eval_every=0)" ] }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 154, "metadata": { "collapsed": false }, @@ -1105,21 +791,20 @@ 
"name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 21min 58s, sys: 376 ms, total: 21min 58s\n", - "Wall time: 21min 58s\n" + "CPU times: user 288 ms, sys: 0 ns, total: 288 ms\n", + "Wall time: 290 ms\n", + "Bound: -3.588e+05\n" ] } ], "source": [ - "%time model_offline2 = AtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=100, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", - " eval_every=10, random_state=1)" + "%time lda_bound = lda.bound(sample(corpus, 10))\n", + "print('Bound: %.3e' % lda_bound)" ] }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 155, "metadata": { "collapsed": false }, @@ -1128,208 +813,39 @@ "data": { "text/plain": [ "[(0,\n", - " '0.018*path + 0.014*center + 0.013*constraint + 0.011*map + 0.011*activity + 0.010*array + 0.010*rate + 0.010*cycle + 0.010*visual + 0.010*iv'),\n", + " '0.004*neuron + 0.003*image + 0.003*layer + 0.003*field + 0.003*class + 0.003*cell + 0.003*signal + 0.003*noise + 0.003*hidden + 0.002*node'),\n", " (1,\n", - " '0.019*matrix + 0.016*delay + 0.013*associative_memory + 0.013*capacity + 0.012*potential + 0.010*storage + 0.010*classification + 0.010*dynamic + 0.010*synaptic + 0.009*rate'),\n", + " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*class + 0.003*signal + 0.003*matrix + 0.003*layer + 0.003*noise + 0.002*hidden + 0.002*recognition'),\n", " (2,\n", - " '0.044*cell + 0.020*stimulus + 0.014*probability + 0.010*region + 0.009*training + 0.008*noise + 0.007*field + 0.007*node + 0.007*actual + 0.007*area'),\n", + " '0.004*cell + 0.003*neuron + 0.003*matrix + 0.003*signal + 0.003*image + 0.003*hidden + 0.002*rule + 0.002*response + 0.002*field + 0.002*dynamic'),\n", " (3,\n", - " '0.026*code + 0.025*hopfield + 0.015*sequence + 0.015*image + 0.013*energy + 0.013*length + 0.013*machine + 0.012*field + 0.012*matrix + 
0.011*minimum'),\n", + " '0.005*neuron + 0.003*layer + 0.003*image + 0.003*cell + 0.002*class + 0.002*net + 0.002*hidden + 0.002*control + 0.002*sequence + 0.002*response'),\n", " (4,\n", - " '0.032*processor + 0.023*activation + 0.012*dynamic + 0.012*operation + 0.012*hidden + 0.011*energy + 0.011*edge + 0.010*machine + 0.010*update + 0.009*training'),\n", + " '0.004*layer + 0.003*image + 0.003*neuron + 0.003*cell + 0.003*hidden + 0.003*signal + 0.003*component + 0.002*recognition + 0.002*net + 0.002*node'),\n", " (5,\n", - " '0.024*hidden + 0.016*hidden_unit + 0.013*matrix + 0.012*sequence + 0.012*adaptive + 0.012*action + 0.010*multiple + 0.009*training + 0.009*back + 0.008*learn'),\n", + " '0.005*image + 0.004*neuron + 0.004*layer + 0.003*hidden + 0.003*cell + 0.002*control + 0.002*class + 0.002*net + 0.002*noise + 0.002*signal'),\n", " (6,\n", - " '0.026*training + 0.015*stage + 0.014*bit + 0.013*optimization + 0.012*convergence + 0.011*eq + 0.011*surface + 0.011*node + 0.011*neural_net + 0.010*code'),\n", + " '0.005*neuron + 0.005*layer + 0.004*hidden + 0.003*image + 0.003*cell + 0.003*class + 0.003*rule + 0.002*noise + 0.002*net + 0.002*matrix'),\n", " (7,\n", - " '0.049*cell + 0.015*node + 0.014*feature + 0.013*region + 0.011*map + 0.011*control + 0.011*back + 0.010*temporal + 0.008*cycle + 0.008*decision'),\n", + " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*hidden + 0.003*recognition + 0.003*field + 0.003*layer + 0.002*noise + 0.002*node + 0.002*component'),\n", " (8,\n", - " '0.023*cell + 0.014*probability + 0.012*current + 0.012*position + 0.012*image + 0.011*principle + 0.010*noise + 0.008*dimensional + 0.008*shape + 0.007*firing'),\n", + " '0.004*neuron + 0.003*image + 0.003*signal + 0.003*recognition + 0.003*cell + 0.003*layer + 0.003*noise + 0.003*rule + 0.002*class + 0.002*hidden'),\n", " (9,\n", - " '0.042*representation + 0.033*activity + 0.029*role + 0.026*firing + 0.023*cell + 0.014*stimulus + 0.014*variable + 0.013*product + 
0.012*potential + 0.010*synaptic')]" + " '0.005*neuron + 0.004*class + 0.003*layer + 0.003*image + 0.003*cell + 0.002*hidden + 0.002*signal + 0.002*control + 0.002*field + 0.002*net')]" ] }, - "execution_count": 27, + "execution_count": 155, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "model_offline2.show_topics()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## \"Offline\" AT VB" + "lda.show_topics()" ] }, { "cell_type": "code", - "execution_count": 356, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "phi is 286 x 2245 x 10 (6420700 elements)\n", - "mu is 286 x 2245 x 578 (371116460 elements)\n" - ] - } - ], - "source": [ - "print('phi is %d x %d x %d (%d elements)' %(len(corpus), len(dictionary.id2token), 10,\n", - " len(corpus) * len(dictionary.id2token) * 10))\n", - "print('mu is %d x %d x %d (%d elements)' %(len(corpus), len(dictionary.id2token), len(author2doc),\n", - " len(corpus) * len(dictionary.id2token) * len(author2doc)))" - ] - }, - { - "cell_type": "code", - "execution_count": 238, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 7.81 s, sys: 0 ns, total: 7.81 s\n", - "Wall time: 7.81 s\n" - ] - } - ], - "source": [ - "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", - " iterations=100, alpha='auto', eta='symmetric', random_state=1)\n", - "var_lambda = lda.state.get_lambda()" - ] - }, - { - "cell_type": "code", - "execution_count": 185, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(atvb)\n", - "AtVb = atvb.AtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 245, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 2min 34s, sys: 104 ms, total: 2min 34s\n", - 
"Wall time: 2min 34s\n" - ] - } - ], - "source": [ - "%time model_offline = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", - " eval_every=1, random_state=1)" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.019*path + 0.015*center + 0.014*constraint + 0.011*rate + 0.011*map + 0.011*cycle + 0.010*array + 0.010*visual + 0.009*activity + 0.009*iv'),\n", - " (1,\n", - " '0.018*matrix + 0.016*delay + 0.013*associative_memory + 0.013*potential + 0.012*capacity + 0.011*synaptic + 0.010*classification + 0.010*dynamic + 0.010*storage + 0.008*circuit'),\n", - " (2,\n", - " '0.040*cell + 0.015*stimulus + 0.014*probability + 0.010*region + 0.010*training + 0.009*noise + 0.008*convergence + 0.007*field + 0.007*node + 0.007*positive'),\n", - " (3,\n", - " '0.026*code + 0.024*hopfield + 0.015*sequence + 0.015*image + 0.013*length + 0.012*matrix + 0.012*energy + 0.012*field + 0.012*machine + 0.011*current'),\n", - " (4,\n", - " '0.032*processor + 0.023*activation + 0.013*dynamic + 0.013*energy + 0.012*operation + 0.011*edge + 0.010*hidden + 0.010*machine + 0.010*update + 0.009*matrix'),\n", - " (5,\n", - " '0.022*hidden + 0.016*hidden_unit + 0.014*matrix + 0.013*sequence + 0.012*adaptive + 0.012*action + 0.010*multiple + 0.009*training + 0.008*back + 0.008*stored'),\n", - " (6,\n", - " '0.025*training + 0.015*stage + 0.014*bit + 0.013*optimization + 0.012*convergence + 0.011*eq + 0.011*surface + 0.010*neural_net + 0.010*code + 0.010*hidden'),\n", - " (7,\n", - " '0.056*cell + 0.017*node + 0.015*region + 0.013*feature + 0.013*map + 0.012*back + 0.011*control + 0.010*temporal + 0.009*decision + 0.008*activity'),\n", - " (8,\n", - " '0.023*cell + 0.013*probability + 
0.013*image + 0.012*position + 0.012*current + 0.011*principle + 0.010*noise + 0.008*dimensional + 0.007*shape + 0.007*firing'),\n", - " (9,\n", - " '0.042*representation + 0.034*activity + 0.029*role + 0.025*firing + 0.021*cell + 0.017*stimulus + 0.014*variable + 0.014*product + 0.012*potential + 0.010*synaptic')]" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_offline.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 142, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.015*dynamic + 0.014*delay + 0.012*frequency + 0.011*phase + 0.010*noise + 0.008*temporal + 0.007*filter + 0.007*oscillation + 0.007*target + 0.007*controller'),\n", - " (1,\n", - " '0.017*memory + 0.017*vector + 0.015*matrix + 0.013*hopfield + 0.011*probability + 0.008*capacity + 0.008*let + 0.008*fig + 0.007*code + 0.007*distribution'),\n", - " (2,\n", - " '0.035*cell + 0.018*response + 0.012*region + 0.012*stimulus + 0.011*cortex + 0.009*fig + 0.009*sensory + 0.009*motor + 0.009*control + 0.009*velocity'),\n", - " (3,\n", - " '0.041*image + 0.038*field + 0.023*visual + 0.016*map + 0.015*receptive + 0.014*receptive_field + 0.014*motion + 0.012*eye + 0.011*direction + 0.008*vision'),\n", - " (4,\n", - " '0.030*hidden + 0.017*hidden_unit + 0.016*activation + 0.012*propagation + 0.010*processor + 0.009*back_propagation + 0.008*gradient + 0.007*hidden_layer + 0.007*bit + 0.006*internal'),\n", - " (5,\n", - " '0.018*vector + 0.016*sequence + 0.016*object + 0.014*memory + 0.009*adaptive + 0.009*matrix + 0.008*recurrent + 0.008*action + 0.008*self + 0.008*view'),\n", - " (6,\n", - " '0.025*classifier + 0.024*recognition + 0.023*speech + 0.014*classification + 0.013*trained + 0.011*class + 0.010*test + 0.010*noise + 0.010*hidden + 0.009*word'),\n", - " (7,\n", - " '0.033*node + 0.008*position + 0.007*connectionist + 0.005*neural_net + 0.005*tree + 
0.005*character + 0.004*move + 0.004*generalization + 0.004*search + 0.004*human'),\n", - " (8,\n", - " '0.036*circuit + 0.024*analog + 0.024*chip + 0.020*voltage + 0.020*current + 0.014*synapse + 0.010*transistor + 0.010*vlsi + 0.009*device + 0.009*implementation'),\n", - " (9,\n", - " '0.030*cell + 0.021*firing + 0.019*synaptic + 0.017*activity + 0.016*potential + 0.010*synapsis + 0.010*spike + 0.009*stimulus + 0.009*memory + 0.009*membrane')]" - ] - }, - "execution_count": 142, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 149, + "execution_count": 150, "metadata": { "collapsed": false }, @@ -1338,436 +854,45 @@ "name": "stdout", "output_type": "stream", "text": [ + "Document 5\n", + "[(0, 0.11806384798431847),\n", + " (1, 0.099612053680607937),\n", + " (2, 0.076668193975964943),\n", + " (3, 0.075072909998916373),\n", + " (4, 0.067243477696594139),\n", + " (5, 0.1004083782314163),\n", + " (6, 0.1049567779188061),\n", + " (7, 0.10291505408912022),\n", + " (8, 0.12682229186467239),\n", + " (9, 0.12823701455958317)]\n", "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [21]\n" - ] - }, - { - "ename": "NameError", - "evalue": "name 'model' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Geoffrey E. Hinton'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'model' is not defined" + "Document 50\n", + "[(0, 0.12019310780479558),\n", + " (1, 0.11241507965934601),\n", + " (2, 0.084261861610351887),\n", + " (3, 0.074722708722277847),\n", + " (4, 0.089536455599529025),\n", + " (5, 0.11951468917677081),\n", + " (6, 0.077140801257090358),\n", + " (7, 0.086592729473957755),\n", + " (8, 0.12048290979429044),\n", + " (9, 0.11513965690159025)]\n" ] } ], "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Geoffrey E. Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", + "d = 5\n", + "print('Document %d' %d)\n", + "pprint(lda[corpus[d]])\n", "\n", - "name = 'James M. 
Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n" + "d = 50\n", + "print('\\nDocument %d' %d)\n", + "pprint(lda[corpus[d]])" ] }, { "cell_type": "code", - "execution_count": 31, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.019*cell + 0.008*matrix + 0.008*representation + 0.008*training + 0.007*activity + 0.007*node + 0.006*dynamic + 0.006*field + 0.006*probability + 0.005*hopfield'),\n", - " (1,\n", - " '0.016*cell + 0.007*matrix + 0.007*capacity + 0.006*feature + 0.006*activity + 0.006*node + 0.006*field + 0.006*dynamic + 0.006*training + 0.006*stimulus'),\n", - " (2,\n", - " '0.012*cell + 0.010*training + 0.008*matrix + 0.007*stimulus + 0.007*hopfield + 0.006*image + 0.006*noise + 0.006*representation + 0.006*hidden + 0.006*convergence'),\n", - " (3,\n", - " '0.011*cell + 0.008*hopfield + 0.007*activity + 0.007*rate + 0.006*matrix + 0.006*hidden + 0.006*field + 0.006*training + 0.005*node + 0.005*representation'),\n", - " (4,\n", - " '0.012*cell + 0.008*activity + 0.007*matrix + 0.007*training + 0.006*field + 0.006*code + 0.006*representation + 0.006*firing + 0.006*current + 0.005*synaptic'),\n", - " (5,\n", - " '0.014*cell + 0.008*hidden + 0.007*sequence + 0.007*training + 0.006*field + 0.006*noise + 0.006*node + 0.006*dynamic + 0.006*hopfield + 0.006*representation'),\n", - " (6,\n", - " '0.025*cell + 0.011*matrix + 0.009*training + 0.006*activity + 0.006*probability + 0.006*hopfield + 0.006*synaptic + 0.005*node + 0.005*stimulus + 0.005*representation'),\n", - " (7,\n", - " '0.016*cell + 0.008*training + 0.007*activity + 0.007*representation + 0.007*matrix + 0.007*hidden + 0.007*noise + 0.006*hopfield + 0.006*probability + 0.006*firing'),\n", - " (8,\n", - " '0.012*cell + 0.008*image + 0.007*training + 0.006*feature + 0.006*hopfield + 0.006*representation + 0.006*probability + 0.006*firing + 
0.006*activity + 0.005*synaptic'),\n", - " (9,\n", - " '0.012*cell + 0.008*matrix + 0.008*activity + 0.007*representation + 0.007*training + 0.006*image + 0.006*capacity + 0.006*rate + 0.006*hopfield + 0.006*node')]" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 118, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [21]\n", - "[(0, 0.090225715808980797),\n", - " (1, 0.014047723409152875),\n", - " (3, 0.38971799227229242),\n", - " (4, 0.30695125800680684),\n", - " (5, 0.11680215128570454),\n", - " (7, 0.012641840087616362),\n", - " (8, 0.069095036605336377)]\n", - "\n", - "Geoffrey E. Hinton\n", - "Docs: [276, 235, 270]\n", - "[(0, 0.17326190127690461),\n", - " (2, 0.062709625689712375),\n", - " (3, 0.023215349136065065),\n", - " (4, 0.096803072840719678),\n", - " (5, 0.1267901905748583),\n", - " (6, 0.47635551675437715),\n", - " (7, 0.025581291656655011),\n", - " (9, 0.013530262666658776)]\n", - "\n", - "Michael I. Jordan\n", - "Docs: [205]\n", - "[(0, 0.22189029162114421),\n", - " (2, 0.033072831647105602),\n", - " (4, 0.051509519512663651),\n", - " (5, 0.63361728214218349),\n", - " (7, 0.045992411979857574),\n", - " (9, 0.012757930948596466)]\n", - "\n", - "James M. Bower\n", - "Docs: [188, 251, 244]\n", - "[(1, 0.29194178492747924),\n", - " (2, 0.47740737076112999),\n", - " (3, 0.023636461735819542),\n", - " (4, 0.010413505064807139),\n", - " (7, 0.018554608959817139),\n", - " (9, 0.17063597622983562)]\n" - ] - } - ], - "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Geoffrey E. 
Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'James M. Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Test on small corpus" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "lda = LdaModel(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, passes=10)\n", - "var_lambda = lda.state.get_lambda()" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(atvb)\n", - "AtVb = atvb.AtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 210, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 1min 25s, sys: 0 ns, total: 1min 25s\n", - "Wall time: 1min 25s\n" - ] - } - ], - "source": [ - "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", - " iterations=100, alpha='symmetric', eta='symmetric', \\\n", - " eval_every=10, random_state=1, var_lambda=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.071*group + 0.039*matrix + 0.032*feedback + 0.027*whose + 0.018*obtain + 0.016*scheme + 0.015*constraint + 0.015*expression + 0.014*unique + 
0.013*computational'),\n", - " (1,\n", - " '0.041*map + 0.040*field + 0.034*location + 0.033*brain + 0.030*node + 0.021*requires + 0.020*propagation + 0.016*back_propagation + 0.016*distribution + 0.014*mechanism'),\n", - " (2,\n", - " '0.084*processor + 0.075*edge + 0.052*activation + 0.034*update + 0.021*column + 0.020*run + 0.019*implementation + 0.018*control + 0.018*operation + 0.017*content'),\n", - " (3,\n", - " '0.046*image + 0.038*gradient + 0.027*flow + 0.025*field + 0.024*analog + 0.023*circuit + 0.022*constraint + 0.018*square + 0.017*vision + 0.017*technique'),\n", - " (4,\n", - " '0.023*dynamic + 0.021*phase + 0.018*cell + 0.018*variable + 0.017*with_respect + 0.017*respect + 0.016*path + 0.015*noise + 0.014*energy + 0.011*limit'),\n", - " (5,\n", - " '0.080*processor + 0.061*activation + 0.040*edge + 0.040*update + 0.021*store + 0.020*operation + 0.018*required + 0.018*address + 0.017*stored + 0.016*machine'),\n", - " (6,\n", - " '0.038*map + 0.037*brain + 0.033*stimulus + 0.024*functional + 0.021*noise + 0.020*associative_memory + 0.020*recall + 0.017*series + 0.015*scale + 0.015*associated'),\n", - " (7,\n", - " '0.049*potential + 0.044*cell + 0.035*connectivity + 0.026*synaptic + 0.025*artificial + 0.023*architecture + 0.015*temporal + 0.014*brain + 0.014*computational + 0.013*action'),\n", - " (8,\n", - " '0.075*image + 0.032*log + 0.024*dimensional + 0.018*mapping + 0.017*matrix + 0.016*center + 0.015*node + 0.014*recall + 0.013*back + 0.013*th'),\n", - " (9,\n", - " '0.058*scheme + 0.048*capacity + 0.047*probability + 0.040*representation + 0.030*stored + 0.028*binary + 0.025*represented + 0.023*code + 0.022*relationship + 0.021*bound')]" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0, 
0.55485121572041607),\n", - " (4, 0.17897884328936686),\n", - " (6, 0.14414251935372879),\n", - " (8, 0.11957893769069983)]" - ] - }, - "execution_count": 35, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.get_author_topics(0)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "## LDA" - ] - }, - { - "cell_type": "code", - "execution_count": 131, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(gensim.models.ldamodel)\n", - "LdaModel = gensim.models.ldamodel.LdaModel" - ] - }, - { - "cell_type": "code", - "execution_count": 151, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 2.48 s, sys: 524 ms, total: 3 s\n", - "Wall time: 2.43 s\n" - ] - } - ], - "source": [ - "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=1, \\\n", - " iterations=1, alpha='symmetric', eta='symmetric', eval_every=0)" - ] - }, - { - "cell_type": "code", - "execution_count": 154, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 288 ms, sys: 0 ns, total: 288 ms\n", - "Wall time: 290 ms\n", - "Bound: -3.588e+05\n" - ] - } - ], - "source": [ - "%time lda_bound = lda.bound(sample(corpus, 10))\n", - "print('Bound: %.3e' % lda_bound)" - ] - }, - { - "cell_type": "code", - "execution_count": 155, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.004*neuron + 0.003*image + 0.003*layer + 0.003*field + 0.003*class + 0.003*cell + 0.003*signal + 0.003*noise + 0.003*hidden + 0.002*node'),\n", - " (1,\n", - " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*class + 0.003*signal + 0.003*matrix + 0.003*layer + 0.003*noise + 0.002*hidden + 0.002*recognition'),\n", - " (2,\n", - " '0.004*cell + 0.003*neuron + 0.003*matrix + 
0.003*signal + 0.003*image + 0.003*hidden + 0.002*rule + 0.002*response + 0.002*field + 0.002*dynamic'),\n", - " (3,\n", - " '0.005*neuron + 0.003*layer + 0.003*image + 0.003*cell + 0.002*class + 0.002*net + 0.002*hidden + 0.002*control + 0.002*sequence + 0.002*response'),\n", - " (4,\n", - " '0.004*layer + 0.003*image + 0.003*neuron + 0.003*cell + 0.003*hidden + 0.003*signal + 0.003*component + 0.002*recognition + 0.002*net + 0.002*node'),\n", - " (5,\n", - " '0.005*image + 0.004*neuron + 0.004*layer + 0.003*hidden + 0.003*cell + 0.002*control + 0.002*class + 0.002*net + 0.002*noise + 0.002*signal'),\n", - " (6,\n", - " '0.005*neuron + 0.005*layer + 0.004*hidden + 0.003*image + 0.003*cell + 0.003*class + 0.003*rule + 0.002*noise + 0.002*net + 0.002*matrix'),\n", - " (7,\n", - " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*hidden + 0.003*recognition + 0.003*field + 0.003*layer + 0.002*noise + 0.002*node + 0.002*component'),\n", - " (8,\n", - " '0.004*neuron + 0.003*image + 0.003*signal + 0.003*recognition + 0.003*cell + 0.003*layer + 0.003*noise + 0.003*rule + 0.002*class + 0.002*hidden'),\n", - " (9,\n", - " '0.005*neuron + 0.004*class + 0.003*layer + 0.003*image + 0.003*cell + 0.002*hidden + 0.002*signal + 0.002*control + 0.002*field + 0.002*net')]" - ] - }, - "execution_count": 155, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "lda.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 150, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Document 5\n", - "[(0, 0.11806384798431847),\n", - " (1, 0.099612053680607937),\n", - " (2, 0.076668193975964943),\n", - " (3, 0.075072909998916373),\n", - " (4, 0.067243477696594139),\n", - " (5, 0.1004083782314163),\n", - " (6, 0.1049567779188061),\n", - " (7, 0.10291505408912022),\n", - " (8, 0.12682229186467239),\n", - " (9, 0.12823701455958317)]\n", - "\n", - "Document 50\n", - "[(0, 
0.12019310780479558),\n", - " (1, 0.11241507965934601),\n", - " (2, 0.084261861610351887),\n", - " (3, 0.074722708722277847),\n", - " (4, 0.089536455599529025),\n", - " (5, 0.11951468917677081),\n", - " (6, 0.077140801257090358),\n", - " (7, 0.086592729473957755),\n", - " (8, 0.12048290979429044),\n", - " (9, 0.11513965690159025)]\n" - ] - } - ], - "source": [ - "d = 5\n", - "print('Document %d' %d)\n", - "pprint(lda[corpus[d]])\n", - "\n", - "d = 50\n", - "print('\\nDocument %d' %d)\n", - "pprint(lda[corpus[d]])" - ] - }, - { - "cell_type": "code", - "execution_count": 145, + "execution_count": 145, "metadata": { "collapsed": false }, @@ -1785,663 +910,51 @@ " 'ronald',\n", " 'rosenfeld',\n", " 'david',\n", - " 'touretzky',\n", - " 'computer',\n", - " 'science',\n", - " 'department',\n", - " 'carnegie',\n", - " 'mellon',\n", - " 'university',\n", - " 'pittsburgh',\n", - " 'pennsylvania',\n", - " 'abstract']" - ] - }, - "execution_count": 145, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "docs[0][:20]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Convergence and speed plots" - ] - }, - { - "cell_type": "code", - "execution_count": 45, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from bokeh.io import output_notebook\n", - "from bokeh.models.layouts import Row, Column\n", - "from bokeh.models import Title, Legend\n", - "from bokeh.plotting import figure, output_file, show" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "
\n", - " \n", - " Loading BokehJS ...\n", - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/javascript": [ - "\n", - "(function(global) {\n", - " function now() {\n", - " return new Date();\n", - " }\n", - "\n", - " var force = \"1\";\n", - "\n", - " if (typeof (window._bokeh_onload_callbacks) === \"undefined\" || force !== \"\") {\n", - " window._bokeh_onload_callbacks = [];\n", - " window._bokeh_is_loading = undefined;\n", - " }\n", - "\n", - "\n", - " \n", - " if (typeof (window._bokeh_timeout) === \"undefined\" || force !== \"\") {\n", - " window._bokeh_timeout = Date.now() + 5000;\n", - " window._bokeh_failed_load = false;\n", - " }\n", - "\n", - " var NB_LOAD_WARNING = {'data': {'text/html':\n", - " \"
\\n\"+\n", - " \"

\\n\"+\n", - " \"BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this \\n\"+\n", - " \"may be due to a slow or bad network connection. Possible fixes:\\n\"+\n", - " \"

\\n\"+\n", - " \"
    \\n\"+\n", - " \"
  • re-rerun `output_notebook()` to attempt to load from CDN again, or
  • \\n\"+\n", - " \"
  • use INLINE resources instead, as so:
  • \\n\"+\n", - " \"
\\n\"+\n", - " \"\\n\"+\n", - " \"from bokeh.resources import INLINE\\n\"+\n", - " \"output_notebook(resources=INLINE)\\n\"+\n", - " \"\\n\"+\n", - " \"
\"}};\n", - "\n", - " function display_loaded() {\n", - " if (window.Bokeh !== undefined) {\n", - " Bokeh.$(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").text(\"BokehJS successfully loaded.\");\n", - " } else if (Date.now() < window._bokeh_timeout) {\n", - " setTimeout(display_loaded, 100)\n", - " }\n", - " }\n", - "\n", - " function run_callbacks() {\n", - " window._bokeh_onload_callbacks.forEach(function(callback) { callback() });\n", - " delete window._bokeh_onload_callbacks\n", - " console.info(\"Bokeh: all callbacks have finished\");\n", - " }\n", - "\n", - " function load_libs(js_urls, callback) {\n", - " window._bokeh_onload_callbacks.push(callback);\n", - " if (window._bokeh_is_loading > 0) {\n", - " console.log(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n", - " return null;\n", - " }\n", - " if (js_urls == null || js_urls.length === 0) {\n", - " run_callbacks();\n", - " return null;\n", - " }\n", - " console.log(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n", - " window._bokeh_is_loading = js_urls.length;\n", - " for (var i = 0; i < js_urls.length; i++) {\n", - " var url = js_urls[i];\n", - " var s = document.createElement('script');\n", - " s.src = url;\n", - " s.async = false;\n", - " s.onreadystatechange = s.onload = function() {\n", - " window._bokeh_is_loading--;\n", - " if (window._bokeh_is_loading === 0) {\n", - " console.log(\"Bokeh: all BokehJS libraries loaded\");\n", - " run_callbacks()\n", - " }\n", - " };\n", - " s.onerror = function() {\n", - " console.warn(\"failed to load library \" + url);\n", - " };\n", - " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", - " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", - " }\n", - " };var element = document.getElementById(\"d982e20b-e5a9-4239-8121-81cecd38c4d7\");\n", - " if (element == null) {\n", - " console.log(\"Bokeh: ERROR: autoload.js configured with elementid 'd982e20b-e5a9-4239-8121-81cecd38c4d7' but 
no matching script tag was found. \")\n", - " return false;\n", - " }\n", - "\n", - " var js_urls = ['https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.js', 'https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.js'];\n", - "\n", - " var inline_js = [\n", - " function(Bokeh) {\n", - " Bokeh.set_log_level(\"info\");\n", - " },\n", - " \n", - " function(Bokeh) {\n", - " \n", - " Bokeh.$(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").text(\"BokehJS is loading...\");\n", - " },\n", - " function(Bokeh) {\n", - " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", - " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", - " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", - " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", - " }\n", - " ];\n", - "\n", - " function run_inline_js() {\n", - " \n", - " if ((window.Bokeh !== undefined) || (force === \"1\")) {\n", - " for (var i = 0; i < inline_js.length; i++) {\n", - " inline_js[i](window.Bokeh);\n", - " }if (force === \"1\") {\n", - " display_loaded();\n", - " }} else if (Date.now() < window._bokeh_timeout) {\n", - " setTimeout(run_inline_js, 100);\n", - " } else if (!window._bokeh_failed_load) {\n", - " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", - " window._bokeh_failed_load = true;\n", - " } else if (!force) {\n", - " var cell = $(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").parents('.cell').data().cell;\n", - " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", - " }\n", - "\n", - " }\n", - "\n", - " if (window._bokeh_is_loading === 0) {\n", - " console.log(\"Bokeh: BokehJS loaded, going straight to plotting\");\n", - " run_inline_js();\n", - " } else {\n", - " load_libs(js_urls, function() {\n", - " console.log(\"Bokeh: BokehJS plotting callback run at\", now());\n", - " 
run_inline_js();\n", - " });\n", - " }\n", - "}(this));" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "output_notebook()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 10 iterations (passes)" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# NOTE: the times of both offline and online are *without* vectorization!\n", - "\n", - "offline = [-3.958e+05, -3.430e+05, -3.428e+05, -3.426e+05, -3.423e+05, -3.417e+05, -3.406e+05, -3.388e+05, -3.361e+05, -3.326e+05, -3.285e+05]\n", - "\n", - "online_1iter = [-3.958e+05, -3.471e+05, -3.456e+05, -3.417e+05, -3.338e+05, -3.244e+05, -3.165e+05, -3.111e+05, -3.075e+05, -3.051e+05, -3.036e+05]\n", - "\n", - "online_10iter = [-3.958e+05, -3.343e+05, -3.223e+05, -3.128e+05, -3.072e+05, -3.041e+05, -3.023e+05, -3.011e+05, -3.003e+05, -2.997e+05, -2.993e+05]" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "iterations = range(10)" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "offline_time = [20 * 60 + 49, 21 * 60 + 8, 21 * 60 + 25, 21 * 60 + 41, 21 * 60 + 56, 22 * 60 + 11, 22 * 60 + 25, 22 * 60 + 41, 22 * 60 + 56, 23 * 60 + 11, 23 * 60 + 26]\n", - "offline_time = np.array(offline_time) - offline_time[0]\n", - "\n", - "online_1iter_time = [23 * 60 + 54, 23 * 60 + 55, 23 * 60 + 55, 23 * 60 + 56, 23 * 60 + 58, 23 * 60 + 59, 24 * 60 + 0, 24 * 60 + 1, 24 * 60 + 2, 24 * 60 + 3, 24 * 60 + 4]\n", - "online_1iter_time = np.array(online_1iter_time) - online_1iter_time[0]\n", - " \n", - "online_10iter_time = [24 * 60 + 59, 25 * 60 + 0, 25 * 60 + 2, 25 * 60 + 3, 25 * 60 + 4, 25 * 60 + 5, 25 * 60 + 6, 25 * 60 + 7, 25 * 60 + 8, 25 * 60 + 8, 25 * 60 + 9]\n", - "online_10iter_time = np.array(online_10iter_time) - 
online_10iter_time[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p = figure(title=(\"Variational lower bound (initial bound at %.3e)\" % offline[0]), x_axis_label='Iterations', y_axis_label='Bound')\n", - "p.circle(iterations[1:], offline[1:], legend=\"offline\", size=5, color='red')\n", - "p.circle(iterations[1:], online_1iter[1:], legend=\"online 1 iter\", size=5, color='green')\n", - "p.circle(iterations[1:], online_10iter[1:], legend=\"online 10 iter.\", size=5, color='blue')\n", - "p.plot_height=400\n", - "p.plot_width=600\n", - "p.toolbar_location = None\n", - "show(p)" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" + " 'touretzky',\n", + " 'computer',\n", + " 'science',\n", + " 'department',\n", + " 'carnegie',\n", + " 'mellon',\n", + " 'university',\n", + " 'pittsburgh',\n", + " 'pennsylvania',\n", + " 'abstract']" ] }, + "execution_count": 145, "metadata": {}, - "output_type": "display_data" + "output_type": "execute_result" } ], "source": [ - "p1 = figure(title=(\"Offline (initial bound at %.3e)\" % offline[0]), x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "p1.circle(offline_time[1:], offline[1:], size=5, color='red')\n", - "p1.plot_height=400\n", - "p1.plot_width=400\n", - "p1.toolbar_location = None\n", - "\n", - "p2 = figure(title=\"Online\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "s1 = p2.circle(online_1iter_time[1:], online_1iter[1:], size=5, line_color='green')\n", - "s2 = p2.circle(online_10iter_time[1:], online_10iter[1:], size=5, line_color='blue')\n", - "p2.plot_height=400\n", - "p2.plot_width=400\n", - "p2.toolbar_location = None\n", - "\n", - "legend = Legend(items=[('1 iter', [s1]), ('10 iter', [s2])], location=(-100, -200))\n", - "p2.add_layout(legend, 'right')\n", - "\n", - "p3 = Row(p1, p2)\n", - "\n", - "show(p3)" + "docs[0][:20]" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### 100 iterations (passes)" - ] - }, - { - "cell_type": "code", - "execution_count": 47, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# NOTE: the times of both offline and online are *without* vectorization!\n", - "\n", - "offline100 = [-3.957e+05, -3.304e+05, -3.049e+05, -3.005e+05, -2.989e+05, -2.981e+05, -2.976e+05, -2.973e+05, -2.970e+05, -2.968e+05, -2.966e+05]\n", - "\n", - "online_1iter = [-3.957e+05, -3.072e+05, -3.008e+05, -2.997e+05, -2.991e+05, -2.986e+05, -2.983e+05, -2.981e+05, -2.979e+05, -2.977e+05, -2.976e+05]\n", - "\n", - "online_10iter = [-3.957e+05, -3.001e+05, -2.975e+05, -2.965e+05, -2.961e+05, -2.958e+05, -2.955e+05, -2.954e+05, -2.953e+05, -2.952e+05, -2.951e+05]" - ] - }, - { - 
"cell_type": "code", - "execution_count": 53, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "offline_time = [38 * 60 + 8, 40 * 60 + 18, 42 * 60 + 36, 44 * 60 + 44, 46 * 60 + 57, 49 * 60 + 12, 51 * 60 + 19, 53 * 60 + 29, 55 * 60 + 40, 57 * 60 + 56, 60 * 60 + 6]\n", - "offline_time = np.array(offline_time) - offline_time[0]\n", - "\n", - "online_1iter_time = [3 * 60 + 36, 3 * 60 + 59, 4 * 60 + 20, 4 * 60 + 43, 5 * 60 + 6, 5 * 60 + 28, 5 * 60 + 51, 6 * 60 + 14, 6 * 60 + 36, 6 * 60 + 56, 7 * 60 + 16]\n", - "online_1iter_time = np.array(online_1iter_time) - online_1iter_time[0]\n", - "\n", - "online_10iter_time = [8 * 60 + 1, 10 * 60 + 28, 12 * 60 + 50, 15 * 60 + 15, 17 * 60 + 40, 20 * 60 + 10, 22 * 60 + 35, 25 * 60 + 7, 27 * 60 + 31, 29 * 60 + 54, 32 * 60 + 13]\n", - "online_10iter_time = np.array(online_10iter_time) - online_10iter_time[0]" + "## Plots" ] }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 45, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "iterations = range(0, 100, 10)" + "from bokeh.io import output_notebook\n", + "from bokeh.models.layouts import Row, Column\n", + "from bokeh.models import Title, Legend\n", + "from bokeh.plotting import figure, output_file, show" ] }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 18, "metadata": { "collapsed": false }, @@ -2449,307 +962,153 @@ { "data": { "text/html": [ - "\n", "\n", "
\n", - "
\n", - "
\n", - "" + " \n", + " Loading BokehJS ...\n", + " " ] }, "metadata": {}, "output_type": "display_data" - } - ], - "source": [ - "p = figure(title=(\"Variational lower bound (initial bound at %.3e)\" % offline[0]), x_axis_label='Iterations', y_axis_label='Bound')\n", - "p.circle(iterations[1:], offline[1:], legend=\"offline\", size=5, color='red')\n", - "p.circle(iterations[1:], online_1iter[1:], legend=\"online 1 iter\", size=5, color='green')\n", - "p.circle(iterations[1:], online_10iter[1:], legend=\"online 10 iter.\", size=5, color='blue')\n", - "p.plot_height=400\n", - "p.plot_width=600\n", - "p.toolbar_location = None\n", - "show(p)" - ] - }, - { - "cell_type": "code", - "execution_count": 105, - "metadata": { - "collapsed": false - }, - "outputs": [ + }, { "data": { - "text/html": [ + "application/javascript": [ + "\n", + "(function(global) {\n", + " function now() {\n", + " return new Date();\n", + " }\n", + "\n", + " var force = \"1\";\n", + "\n", + " if (typeof (window._bokeh_onload_callbacks) === \"undefined\" || force !== \"\") {\n", + " window._bokeh_onload_callbacks = [];\n", + " window._bokeh_is_loading = undefined;\n", + " }\n", "\n", "\n", - "
\n", - "
\n", - "
\n", - "" + "\n", + " }\n", + "\n", + " if (window._bokeh_is_loading === 0) {\n", + " console.log(\"Bokeh: BokehJS loaded, going straight to plotting\");\n", + " run_inline_js();\n", + " } else {\n", + " load_libs(js_urls, function() {\n", + " console.log(\"Bokeh: BokehJS plotting callback run at\", now());\n", + " run_inline_js();\n", + " });\n", + " }\n", + "}(this));" ] }, "metadata": {}, @@ -2757,51 +1116,7 @@ } ], "source": [ - "p1 = figure(title=\"Offline\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "p1.circle(offline_time[1:], offline[1:], size=5, color='red')\n", - "p1.plot_height=400\n", - "p1.plot_width=300\n", - "p1.toolbar_location = None\n", - "\n", - "p2 = figure(title=\"Online 1 iter\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "s1 = p2.circle(online_1iter_time[1:], online_1iter[1:], size=5, line_color='green')\n", - "p2.plot_height=400\n", - "p2.plot_width=300\n", - "p2.toolbar_location = None\n", - "\n", - "p3 = figure(title=\"Online 10 iter\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "s3 = p3.circle(online_10iter_time[1:], online_10iter[1:], size=5, line_color='blue')\n", - "p3.plot_height=400\n", - "p3.plot_width=300\n", - "p3.toolbar_location = None\n", - "\n", - "\n", - "p4 = Row(p1, p2, p3)\n", - "\n", - "show(p4)" - ] - }, - { - "cell_type": "code", - "execution_count": 108, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 6.39130435, 6.56818182, 6.47761194, 6.43333333, 6.50892857,\n", - " 6.47407407, 6.49367089, 6.5 , 6.565 , 6.6 ])" - ] - }, - "execution_count": 108, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "online_10iter_time[1:] / online_1iter_time[1:]" + "output_notebook()" ] } ], diff --git a/docs/notebooks/at_with_nips_old.ipynb b/docs/notebooks/at_with_nips_old.ipynb new file mode 100644 index 0000000000..bdbb96d3df --- /dev/null +++ b/docs/notebooks/at_with_nips_old.ipynb @@ -0,0 +1,2825 @@ +{ + "cells": [ 
+ { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "application/javascript": [ + "// Run for table of contents.\n", + "$.getScript('https://kmahelona.github.io/ipython_notebook_goodies/ipython_notebook_toc.js')\n", + "\n", + "// https://github.com/kmahelona/ipython_notebook_goodies" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%%javascript\n", + "// Run for table of contents.\n", + "$.getScript('https://kmahelona.github.io/ipython_notebook_goodies/ipython_notebook_toc.js')\n", + "\n", + "// https://github.com/kmahelona/ipython_notebook_goodies" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tests with NIPS data\n", + "\n", + "

Table of Contents

\n", + "
\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from nltk.tokenize import RegexpTokenizer\n", + "from nltk.stem.wordnet import WordNetLemmatizer\n", + "import gensim\n", + "from gensim.models import Phrases\n", + "from gensim.corpora import Dictionary\n", + "from gensim.models import LdaModel\n", + "from imp import reload\n", + "from pprint import pprint\n", + "from random import sample\n", + "import bokeh\n", + "\n", + "import logging\n", + "\n", + "from gensim.models import AuthorTopicModel\n", + "from gensim.models import atmodel\n", + "\n", + "from time import time\n", + "\n", + "%matplotlib inline" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Configure logging.\n", + "\n", + "log_dir = '../../../log_files/log.log' # On my own machine.\n", + "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", + "\n", + "logger = logging.getLogger()\n", + "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", + "formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", + "fhandler.setFormatter(formatter)\n", + "logger.addHandler(fhandler)\n", + "logger.setLevel(logging.DEBUG)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load and prepare data structure" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import os\n", + "import re\n", + "\n", + "# Folder containing all NIPS papers.\n", + "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", + "#data_dir = '../../../nipstxt/' # On Hetzner.\n", + "\n", + "# Folders containin individual NIPS papers.\n", + "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', 
'12']\n", + "yrs = ['00']\n", + "dirs = ['nips' + yr for yr in yrs]\n", + "\n", + "# Get all document texts and their corresponding IDs.\n", + "docs = []\n", + "doc_ids = []\n", + "for yr_dir in dirs:\n", + " files = os.listdir(data_dir + yr_dir) # List of filenames.\n", + " for filen in files:\n", + " # Get document ID.\n", + " (idx1, idx2) = re.search('[0-9]+', filen).span() # Matches the indexes of the start end end of the ID.\n", + " doc_ids.append(yr_dir[4:] + '_' + str(int(filen[idx1:idx2])))\n", + " \n", + " # Read document text.\n", + " # Note: ignoring characters that cause encoding errors.\n", + " with open(data_dir + yr_dir + '/' + filen, errors='ignore', encoding='utf-8') as fid:\n", + " txt = fid.read()\n", + " docs.append(txt)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "filenames = [data_dir + 'idx/a' + yr + '.txt' for yr in yrs] # Using the years defined in previous cell.\n", + "\n", + "# Get all author names and their corresponding document IDs.\n", + "author2id = dict()\n", + "author2doc = dict()\n", + "i = 0\n", + "for yr in yrs:\n", + " filename = data_dir + 'idx/a' + yr + '.txt'\n", + " for line in open(filename, errors='ignore', encoding='utf-8'):\n", + " contents = re.split(',', line)\n", + " author_name = (contents[1] + contents[0]).strip()\n", + " ids = [c.strip() for c in contents[2:]]\n", + " if not author2id.get(author_name):\n", + " author2id[author_name] = i\n", + " author2doc[i] = []\n", + " i += 1\n", + " \n", + " author_id = author2id[author_name]\n", + " author2doc[author_id].extend([yr + '_' + id for id in ids])\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Make a mapping from author ID to author name.\n", + "id2author = dict(zip(author2id.values(), author2id.keys()))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + 
"metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Use an integer ID in author2doc, instead of the IDs provided in the NIPS dataset.\n", + "\n", + "# Mapping from ID of document in NIPS datast, to an integer ID.\n", + "doc_id_dict = dict(zip(doc_ids, range(len(doc_ids))))\n", + "\n", + "for a, a_doc_ids in author2doc.items():\n", + " for i, doc_id in enumerate(a_doc_ids):\n", + " author2doc[a][i] = doc_id_dict[doc_id]" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Make a mapping from document IDs to author IDs.\n", + "# Same as in the atvb code.\n", + "doc2author = {}\n", + "for d, _ in enumerate(docs):\n", + " author_ids = []\n", + " for a, a_doc_ids in author2doc.items():\n", + " if d in a_doc_ids:\n", + " author_ids.append(a)\n", + " doc2author[d] = author_ids" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Pre-process and vectorize data" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Tokenize the documents.\n", + "\n", + "# Split the documents into tokens.\n", + "tokenizer = RegexpTokenizer(r'\\w+')\n", + "for idx in range(len(docs)):\n", + " docs[idx] = docs[idx].lower() # Convert to lowercase.\n", + " docs[idx] = tokenizer.tokenize(docs[idx]) # Split into words.\n", + "\n", + "# Remove numbers, but not words that contain numbers.\n", + "docs = [[token for token in doc if not token.isnumeric()] for doc in docs]\n", + "\n", + "# Remove words that are only one character.\n", + "docs = [[token for token in doc if len(token) > 1] for doc in docs]" + ] + }, + { + "cell_type": "code", + "execution_count": 91, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Lemmatize the documents.\n", + "\n", + "# Lemmatize all words in documents.\n", + "lemmatizer = WordNetLemmatizer()\n", + "docs = 
[[lemmatizer.lemmatize(token) for token in doc] for doc in docs]" + ] + }, + { + "cell_type": "code", + "execution_count": 92, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Compute bigrams.\n", + "\n", + "# Add bigrams and trigrams to docs (only ones that appear 20 times or more).\n", + "bigram = Phrases(docs, min_count=20)\n", + "for idx in range(len(docs)):\n", + " for token in bigram[docs[idx]]:\n", + " if '_' in token:\n", + " # Token is a bigram, add to document.\n", + " docs[idx].append(token)" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Create a dictionary representation of the documents.\n", + "dictionary = Dictionary(docs)" + ] + }, + { + "cell_type": "code", + "execution_count": 94, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Remove rare and common tokens.\n", + "\n", + "# Filter out words that occur too frequently or too rarely.\n", + "max_freq = 0.5\n", + "min_wordcount = 20\n", + "dictionary.filter_extremes(no_below=min_wordcount, no_above=max_freq)\n", + "\n", + "dict0 = dictionary[0] # This sort of \"initializes\" dictionary.id2token." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 95, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAicAAAGcCAYAAAACtQD2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XecXVW5//HPl0DAABmaSeggEKqUhBK8EOSCAaSIYmGw\nACoWhMuNckURfnDBgogEaSqKoqCDCNIDoQmEJphwgUDovSShpJAQ0ub5/bH2YfbsnOnl7Jl836/X\neZ05a6+997P3mZnznLXXWlsRgZmZmVlZLFfrAMzMzMzynJyYmZlZqTg5MTMzs1JxcmJmZmal4uTE\nzMzMSsXJiZmZmZWKkxMzMzMrFScnZmZmVipOTszMzKxUnJyYWTOSXpV0Ue71XpIaJX2sF/b9Y0mL\ncq8HZPs+u6f3ne3v69n+1umN/XWWpB9Iel7SYkkP1jqe9pK0SXZ+D6t1LFZuTk6sFCQdnv3Tqvb4\naa3jW8ZUu6dFh+9zIelHkg7sxL4bO7qvjmoltqATx9qbJH0S+CnwT+AI4OSaBmTWA5avdQBmOUH6\nR/tioXxK74diFRFxu6QPRcTCDq56EnApcH0H1jkFOK2D++mMlmL7A3BpJ461N+0JLAK+Hr45mvVT\nTk6sbG6OiMntrSxJwMCIWNCDMS3zevrDWtKgiHgvIhrphZaTlmQf9mVOTACGAvPKmJj479G6iy/r\nWJ+R738g6cuSHgfeB/bKlkvSdyU9Lul9SW9IulDS4MJ2JOn/ZX0r5kq6TdIWkl4p9LVo1v8hV161\nX4Kk/SVNzLY5W9J1krYo1LlM0kxJ62XL35U0Q9IZVfYjSWMlPSppflZvvKTts+X3Svp3C+fqOUmt\ntli0dB6q1Fuqz4mk4ZL+IWlaFtvLkv4iaeXK+wQMBCrnqrFybrPz2pht42+SZpIuUbR4zrNlX5b0\nVLa/B4t9YLJz+0yV9T7YZjtia+m9PTb3e/WapHOr/F7dI2mypK0l/VPSe9m5/W5r70Nu/eUlnZK9\nd+8r9Sk5TdIKhdi/CNRlcS5RC/03st+dRZJWzpWdkK13Rq5s+ez9Py1XtoqkcdnfxPuSpkr678L2\n2/p7XF3SnyXNkvSOpIuBZucsq7e2pD9l5+p9Sa9LulrSeu05b9Y/ueXEyqZO0pr5goh4u1BnDHAo\ncAHwDvByVv4HoD57Pgf4CHAssJ2k3bNv5ZCu158AXAdMAEYCtwAfKuynpf4HS5VLOgK4GBgPfB9Y\nGTgamChph4h4Nbfu8tn+JgLfy47nfyQ9ExEX5zb7Z9IH0fXARaQP1NHALsD/ZcsvlDQ8Ip7OxbIr\nsDHwwyqx57X3PFTirmx/xazecqTzPB1YDzgQGBwR8yR9CfgjcE92XgCeLWzrH8CTwA9yZS2d872A\nw4BzSZc0vgNMkLRjRDzVxroflEfEknbEVnxvfwycCNxM+p3bkvTejiz8XgWwFnAT8HfgcuDzwC8k\nPRIRt1eJLe+S7BgvJ/1ujCJdftoc+EIu9qOB7YBvAALubWF7E0nv0X+Q3i+A3YAlwO65eiNJ7/nd\n2fEKuDFb73fAo8B+wNmS1o6IEwr7WervMdvG9aTf1QuBp4BDSOe9+B5dA2xKem9fJrUMjSH9Tr2K\nLZsiwg8/av4ADic15xcfS3J1BmRlC4FNC+t/PFt2SKF8v6z8s9nrIdn6VxXqnZHVuyhXdjqwsEqs\nXyP9g18ne70qMAs4r1BvaFZ+fq7s0mzd7xfq/h9wX+71J7J4zmzlnK0GzAdOK5RfkO13pVbW7ch5\n2CuL+WPZ65FZnQPbeE/n57dTOK+NwCUtLFuYe115zxcD2+TKNyR9S7+8
cG6fbmubbcRWfG+HZufp\nukK9/8rqfTFXNjEr+3yubCApeftrG+dqRHacFxTKz862+R+F43ynHX9TA4B3gdNzZe+Qkp/3K78f\nwP9kx7hK9vqQLJbjC9u7ipQYbtCOv8fKNv4rV7YcKSFcAhyWla1RrOeHHxHhyzpWKgF8G9g79/hE\nlXq3R8SzhbLPkv7x3ilpzcoD+Dfpg2jPrN4+pH+q5xXWP6cLce9LSlAuL+x7CfBQbt95FxVe30Nq\n6ak4hPSBfHpLO42IWcANpG/bQGpqBz5HSjrebyXmMXT+PMzKnveTtFI76lcTwG86UH9iRHzQMToi\nXiJ9M9+3k/tvr0+QzlPxvPwWeA/Yv1A+OyKuqLyI1FfnIZq/t9V8knROikOmf0lqHSnup00RsQS4\nn9TahqRtgTrgZ8AKpFYNSK0pj0TE3Oz1fqSE44LCJs8mnYviOa/297gfsIDc73mkFqbzs+OpeI+U\n8Owpqa6Dh2j9mJMTK5uHIuKO/KNKnRerlG1G+hb2ZuExHViJ1FIAsEH23OyfaURMI33L7IxNSf9w\nJxb2PQP4z9y+K+ZmiUXeTGD13OuPAK9GRFsx/RnYWNKo7PW+wJqkb9et2TB77vB5iIjngF8B3wTe\nlnSTpG9LWrWNfRa90IG6xQ8/gKeBVSWtXmVZd6mcp6fzhZE6fL6QW17xSpVtFN/blvazODu3+f28\nRno/ivtpr3uAnbJ+K7sDr0TEI6QRcJVLO/9B+t3Nx/JqRMwvbGtqbnnei1X2uyHwWpUE+an8i2z5\nicABwAxJd0o6XlLxb8aWMe5zYn1R8Z8mpET7deDLNP9mVjEje64sa89Ih5bqDKiy7yD1d3mrSv1i\nB88lLWxXLfzcmpuyfX4JeCB7fi0i7mxjvY6ch6VExNisg+OnSK0w5wMnSBqVJTjtUe197IjiOWrv\n+9WVfbSlPe9tR5d3NIa8iaTh2buQWkgm5sp3l7Q1Kam/uwv7q/Y+iurvx1LbjohfSroaOJjUsvlj\n4IeS9si3ltmyxS0n1l88R+qMeE+x5SV7VP7JvZg9D8+vLGkY6dJM3kxggKRBhfKNquwbYEYL+55I\nxz0LrFccEVIUEYvJOl5KWo3UKfUv7dj+i9lze85DS/ueEhE/iYg9gD1IrVLfyFdpz3baabMqZcOB\ndyNiZvZ6JqkfTtFGVcraG9uL2fPm+UJJA7PtvtTO7bRnP8tL2qSwn3WAVbqwnwdIlwdHk1pKKr+L\ndwMfI11yDFILSz6W9SQVO0ZvmT23J5bKNoqX/TavUpeIeD4izo6IfYCPkjrotmuUk/VPTk6sv7iC\n1PnwpOKCbKhk5UP+VtK322ML1cZW2eZzpG96o3PbWoXUOpN3EzAX+FHW56O4/7XaeQx5V5FaNtsz\n++elpMTst6R/6u1JTjpyHpqRNFhS8X/HFNKH3Iq5snlUTxY6Y7esz0Qlho1IlwJuztV5DlhT0pa5\neuuSErai9sZWOU/HFcq/SRqRdUM7ttEe40m/a/9dKP8e6bze2JmNZpdmJpN+Z9emecvJysAxwFMR\nkW/xG0/6Wzq6sLmxpHNxUzt2PZ70u/DNSkH2t3EMzUd+fSgb/ZX3POnvacVcvWGSNq/ye2f9lC/r\nWJl0uvk6Iu7ILjOcJGkEcBvpG+NwUmfZb5NGXEyXNA44XtJ1pH+0O5I6375T2OxNwGvAJZLOysq+\nCrwBfDAPRkTMlnQMaQjzZEmXky61bEjqyPhPOvgtMCJuk9QAfFdp7pFbSJcndgcmRES+o+G/JU0l\ndYR9tD1N4R08D9D8vfkEME7S34FnSJ0rDyddvvpHrt4kYEw2P8YbwHMRUXVelnaYAtwi6TzS+3p0\n9vy/uTp/JQ2Pvi6rtwrwLdJw5e0K22tXbNl5+jlwoqTxpGRky2y795NarbosIiZL+gtwdNaZeiKw\nK+ky3RUR0dJw4faYCBwPvB0RU7P9
vSHpOdLfx+8K9a8mtaz8XNKmNA0l3h/4RURU61dTdDWp1eas\nrDWoMpS42Aq5FXCzpCuAJ0jJz2dJ/aYacvXOInX8Xo90+db6u1oPF/LDj4gPhhIvAUa0UmdAVueX\nrdQ5ijQ6Yi6pmf9h4CfAkEK9/0dKPOaSvh1vTurMeFGh3gjSh9B80je671AYbpqr+3HSN/mZ2Xaf\nAn4PbJ+rcynpQ6IY9+nAgkKZSB8qT2T7n0YaobJtlfV/kMX03Q6e92rn4WXgt7k6xaHEH8mO6xlS\nC8SMbN3RhW1vAdyZbXtJ5dxmx7qENCdKq+ch/56TPqifzs7Fg5V4CuuPAR4jDZV9nDTPSLWhxC3F\n1tJ7+51se+9n5+tXwKqFOhOBSVViupTUOtHWezEgez+ey/bzAin5Wr7K9pb6HWpluwdmx3R1ofwP\nFIZD55atTBqd82oWy5PAcR35eyR1Av4zaXTX26Q5ZXag+VDitUgjxp4A5pAS43uBg6sc8+Li++JH\n/30oe+PNlnmSXgFuiohvtFm5ZCR9jzRHyQYR8Uat4zEz6wpfvzPrH75Kmm/CiYmZ9Xnuc2LWRynd\nM+UgUj+RLfDoBjPrJ5ycmDVp6d4sZTWMNDLnHdIU9hNqHI+ZWbdwnxMzMzMrFfc5MTMzs1JxcmJm\nZmal4uTEzFok6ceSivcG6u0YBkhqlFS8Y29XtrlXts2DumubHdj3ZZKe6e39mvUlTk7MOknS4dkH\nXOUxX9JTks7rR3dV7WudhDuiVscVQGON9m3WJ3i0jlnXBOn+Ny8CK5Hu/PptYD9J28TSt4y38ujK\n3X674oga7tusT3ByYtZ1N0fE5OznP0h6h3STtE8Bf6tdWG2TNCgi3qt1HMuSiFhSi/36vba+xJd1\nzLrfHaRvxhtXCiRtLOnvkt6WNE/S/ZI+mV9J0pu5GwyiZJakRbm7KiPphKxsUK5sc0lXZtufL+kh\nSQcWtl+5DDVa0oWSppPuJ9Qhkr4m6XZJ07N9TZF0VKHOryRNK5T9Otv/t3Jl62RlX23nvr+cXTqb\nL+lBSR+rUmddSZdImibpfUmPSTq8yuYCWE7SyZJelfSepFslbVzY3h7Ze/dytr2XJJ2Vv5uupB9I\nWiJpneJOsrrzJa2avV6qz4mkVSSNk/RKto+p2U0J83U2yc7VYYXySp+cE3NlP87Khkv6m6SZpBtQ\nmvUJTk7Mut+m2fPbAFn/k/tJd/M9HziRdDv46yV9KrfevcDo3OttgUpS8h+58t2AyZVvwZK2Jt0B\ndnPgZ6SZYucC1xS2X3EhaUbZ/yXdj6ejvk26CeJPgO+RboT320KCMhH4sKThhbiXkO6sXDGalCRM\nbMd+9wJ+AfyJdIO8IcAESZtXKkgaRrop4B7AucBxWax/lHR0YXsiXZLbH/h59vgY6WZ1eZ8nvV/n\nA8eQbnJ4HOnGeRWXZ9v7XJW4PwuMj4h3s9fN+vFIEnAjcCzprsdjSTdVPFvpjsidUdn+P0g36PsB\n6cZ7Zn1Dre886IcfffVB052U9yTd4n1d4AvAm6TkYO2s3ris3q65dVcm3X32uVzZ94CFwMrZ62NI\nH6z3Az/N1XsHOCv3+jbS3ZeLd6+9B3iyEG8j6W68aucxVruj74pV6t0KTM29Hprt62vZ69Wzc3A5\n8HKu3vnAtDZiGJBtazGwTa58Q9Idcy/PlV1CuqtyXWEbVwBvAStkr/fKtvkIMCBXb2wW5/A2jvdH\nWTxr58r+BdxXqLdrtp/P58ouBZ7OvT4kq3N8Yd2rgEWkmzkCbJLVO6yF83Ni4X1rBC6p9d+JH350\n5uGWE7OuEXA7KSF5Bfgr6dbvB0fTTfj2Ax6MiPsrK0XEPOAiYCNJW2XFE0n9wCqXKnbPyiZmPyNp\nW2C1rAxJq5OSo78DdZLWrDyAW4DNJK2dizeA30VEp0eqRMSCDw5eGpzt6y5guKQPZXWmA8/S1BK0\n
O7AA+CWwnqQNC8fYHhMjYkoujpeA64F9s1gEfBq4Fli+yrlYHdi+sM2Lo3kfkImk9/QjLRzvoGx7\n92X18tv7G7CLpA1yZV8A3iO1iLRkP1JSekGh/GxS4rFvK+u2JoDfdHJds5pycmLWNUG6zLE38HFg\nq4jYJCJuy9XZEHiqyrpTc8sBJpM+yCqXPXajKTnZUdLAbFmQWkUgXUIS6Zvym4XHqVmd4rDmF/Mv\nJK0gaWj+0doBS9pd0h2S5gKzsn2dli2uy1W9p3AsDwL/BmYDu0uqA7ah/cnJs1XKngZWzZK0YcCq\nwNEsfS4uyuoXz0Wxz83M7Hn1SoGkDSX9WdLbpBaxN0kJKTQ/3iuy58/nyg4BbojWO6JuCLwaEfML\n5cXfj854oQvrmtWMR+uYdd1D0TRap9MiYrGkfwGjJW0CrA3cTfowXAHYhfQhPzUi3s5Wq3zBOAto\n6cZ/xQ/14ofgaNJlmSAlOiFp/Yh4vbghSZtldaeQLoG8QvrWfxCpz0T+C89E4HBJ65OSlNsiIiTd\nm72uJAJ3txB3e+SH5Fb2/SfgshbqP1J43dLIGUHqbEq6bLYq8FNSkvkesAGpz8kHxxsRr0q6n5Sc\nnCVpd9Klvss7cAytaam1a0Ar6xTfa7M+wcmJWc97idRZtWjL3PKKicD3SZ1n34yIpwEkPU5KInYn\nXcqoeD57XhQRd3Qyvkmklp+8N1uoexApUdo/u3RDFt8+VepWWkT2AUYAp2Sv7waOJCUn77J0wtCS\nzaqUDQfejYiZkuYA84DlunAuirYn9fWoj4gPhoVLaulSy+XAryR9hHRJ513gpjb28SKwm6QPFVpP\nir8flWRutcL6XWlZMSslX9Yx63njgZ0l7VIpkLQy8A3ghYh4Ild3Imkyt+NounRD9vOXSa0pH1wG\niYg3SR1cv5mNVGlG0lptBRcRsyLijsKjpSnrKy0NH/zvyC6pfKXKdp8FppM6+i5H6qdROcbNSf1D\n7utA/5fdsj43lf1uBBwA3JztbwlwNfB5SVsWV65yLtqz32rHK9L7U239v5N1WiVd0rku32elBeOB\ngaTLUXmVzrk3AUTETNJltNGFese0EEtVkuqUhp6v0t51zHqbW07MuqY9TfJnAPXAzZLOJY22OYL0\njfczhbr3k0aBDAd+myu/m9S3pdqw2+9kZY9J+h2pNWUoaaTIusAOHYy3NRNIQ27HZ/saDBwFvMHS\n/TkgJVWfJQ19npuVPUS63LApaXRNe00BbpF0HukcHZ09/2+uzvdJH94PZvFNBdYAdiS1OuUTuPac\ni8dJ/TbOyTrxzs2OZ3C1yhExXdJE4H+AVWjfJHxXk97fn0vaFHiU1El2f+AXEZHvF/N74HhJs0l9\nlD5OatnpyPt6KPDr7PmKNuqa1YRbTsy6ps1vrBExg5Qo3EL6lvtT0hDYAyLiukLd90jDgvOdXiEl\nH0EahvtKYZ2ppA/fG0jDhc8Hvkn61n0azXVmlM4H62T7+izpf8dZwNeB80hzp1RTiTvf2rOYNOy2\nvfObVGK4HTiedIynklplxmQxVbY9DdiJ1O/kM1ls/0VKJk5o6bhaKs9akA4gJQwnAieREpYjW4n1\nb6TEZBYt9wPK7yNIici5wIGkoefDge9GxA8K651C6uvyeVKSuDiLr6P3QOqv90uyfkJdGFFoZmZm\n1u1q3nIiaTlJp0t6Pps++llJJ1Wpd5qk13NTTG9aWL66pL9Imi1ppqTfZ9f1zczMrA+peXJCmlb5\nm6Trx1uQrhl/X9IxlQqSTiA1h38T2JnUI39CNu9DxV9Jvdv3IjWRjqb5NXszMzPrA2p+WUfS9aTp\nq4/KlV0JvBcRX8lev07qGDYuez2YdL358Ii4IuuZ/zgwMiIezursQ7pfxXrZdWgzMzPrA8rQcnIf\nsFc2uROStiPd5Gx89npjUg/7yoyMRMQcUoe6XbOiUcDMSmKSuY
3U6WsXzMzMrM8ow1DiM0g96Z+U\ntISUMP0oIiqzKg4jJRnTC+tNp2lY4DBgRn5hRCyR9A7Nhw6amZlZyZUhOfkCacKiQ4EnSDMy/krS\n6xFxaSvribaHw7VYJ7t51z6k2Rnf72DMZmZmy7KVgI2ACbnbaXSbMiQnZ5JuB//37PXj2cyPPyTd\nWnwaKckYSvPWkyGk+SDI6jSbACq7J8bqLN3iUrEP8Jeuh29mZrbM+iJpQEq3KkNyMoilWzcayfrD\nRMQLkqaRRuE8Ch90iN2FpluM3w+sJmmHXL+TvUhJzb9a2O+LAJdddhlbbrnUTNfLlLFjxzJu3Lha\nh1EKPheJz0MTn4vE5yHxeUimTp3Kl770JSjc5by7lCE5uR74kaRXSCNuRpDuKfH7XJ1zgJMkPUs6\nEacDrwLXAkTEk5ImAL+T9G3SfSrOAxpaGanzPsCWW27JiBEjuv2g+pK6urpl/hxU+FwkPg9NfC4S\nn4fE52EpPdItogzJyTGkZOMC0qWZ10n3fTi9UiEizpQ0iDRvyWqkKa/3i4iFue0cRpq2+zZSy8uV\npJtzmZmZWR9S8+QkIuYB380erdU7lXQ/jZaWzwK+1J2xmZmZWe8rwzwnZmZmZh9wcmLU19fXOoTS\n8LlIfB6a+FwkPg+Jz0PvqPn09bUiaQQwadKkSe7cZGZm1gGTJ09m5MiRkG4bM7m7t++WEzMzMysV\nJydmZmZWKk5OzMzMrFScnJiZmVmpODkxMzOzUnFyYmZmtoyaPh023RTuuqvWkTTn5MTMzGwZtWgR\nPPccvN8jd8jpPCcnZmZmy6jGxvQs1TaOIicnZmZmy6jKPKxOTszMzKwUnJyYmZlZqTg5MTMzs1Jx\ncmJmZmal4uTEzMzMSsXJiZmZmZVKJTlZrmTZQMnCMTMzs97ieU7MzMysVHxZx8zMzErFyYmZmZmV\nipMTMzMzKxUnJ2ZmZlYqTk7MzMysVJycmJmZWal4KLGZmZmViidha4GkFyQ1Vnmcly1fUdIFkt6S\n9K6kKyUNKWxjfUk3SponaZqkMyXV/NjMzMzKzJd1WrYjMCz3+AQQwBXZ8nOA/YFDgNHAOsBVlZWz\nJGQ8sDwwCjgcOAI4rVeiNzMz66PKmpwsX+sAIuLt/GtJBwLPRcRESYOBrwKHRsRd2fIjgamSdo6I\nB4F9gC2APSPiLeAxSScDZ0g6NSIW9+oBmZmZ9RFlTU7K0HLyAUkrAF8ELs6KdiQlULdX6kTEU8DL\nwK5Z0SjgsSwxqZgA1AFb93TMZmZmfZWTk/b5NCmp+FP2eiiwMCLmFOpNJ10CInueXmU5uTpmZmZW\n4OSkfb4K3BQR09qoJ1K/lLa0p46ZmdkyqazJSc37nFRI2gDYGzg4VzwNGChpcKH1ZAhNrSPTgJ0K\nmxuaPRdbVJYyduxY6urqmpXV19dTX1/fgejNzMz6nvbMc9LQ0EBDQ0OzstmzZ/dgVCVKTkitJtNJ\nI28qJgGLgb2AqwEkDQc2AO7L6twPnChprVy/kzHAbOCJtnY6btw4RowY0S0HYGZm1pe0Z56Tal/Y\nJ0+ezMiRI3ssrlIkJ5JEGv57SUQ0VsojYo6ki4GzJc0E3gXOBe6NiIeyareQkpBLJZ0ArA2cDpwf\nEYt68TDMzMz6FF/Wad3ewPrAH6ssGwssAa4EVgRuBr5TWRgRjZIOAH5Nak2ZB1wCnNKzIZuZmfVt\nTk5aERG3AgNaWLYAODZ7tLT+K8ABPROdmZlZ/1TW5KRso3XMzMyslzg5MTMzs1JxcmJmZmal4uTE\nzMzMSqU985zUgpMTMzOzZVR75jmphZKFY2ZmZr3Fl3XMzMysVJycmJmZWak4OTEzM7NScXJiZmZm\npeLkxMzMzErFyYmZmZmViuc5MTMzs1Jxy4mZmZmViidhMzMzs1Jxy4mZmZmVipMTMzMzKxUnJ2Zm\nZlYqTk7MzMysVJycmJmZWa
l4nhMzMzMrFbecmJmZWal4nhMzMzMrFbecmJmZWak4OTEzM7NScXJi\nZmZmpeLkxMzMzErFyYmZmZmViuc5aYWkdSRdKuktSe9JekTSiEKd0yS9ni2/VdKmheWrS/qLpNmS\nZkr6vaSVe/dIzMzM+g63nLRA0mrAvcACYB9gS+B7wMxcnROAY4BvAjsD84AJkgbmNvXXbN29gP2B\n0cBve+EQzMzM+qSyznOyfK0DAH4AvBwRX8+VvVSocxxwekRcDyDpK8B04GDgCklbkhKbkRHxcFbn\nWOBGScdHxLSePggzM7O+xi0nLTsQ+LekKyRNlzRZ0geJiqSNgWHA7ZWyiJgD/AvYNSsaBcysJCaZ\n24AAdunpAzAzM+uLnJy07CPAt4GngDHAb4BzJX0pWz6MlGRML6w3PVtWqTMjvzAilgDv5OqYmZlZ\nTlmTkzJc1lkOeDAiTs5ePyJpa1LCclkr64mUtLSmPXXMzMyWSU5OWvYGMLVQNhX4TPbzNFKSMZTm\nrSdDgIdzdYbkNyBpALA6S7e4NDN27Fjq6uqaldXX11NfX9/+IzAzM+uD2pOcNDQ00NDQ0Kxs9uzZ\nPRhVOZKTe4HNC2Wbk3WKjYgXJE0jjcJ5FEDSYFJfkguy+vcDq0naIdfvZC9SUvOv1nY+btw4RowY\n0VoVMzOzfqk985xU+8I+efJkRo4c2WNxlSE5GQfcK+mHwBWkpOPrwFG5OucAJ0l6FngROB14FbgW\nICKelDQB+J2kbwMDgfOABo/UMTMzqy5K2vGh5slJRPxb0qeBM4CTgReA4yLi8lydMyUNIs1bshow\nEdgvIhbmNnUYcD5plE4jcCVpCLKZmZlVEVG+/iZQguQEICLGA+PbqHMqcGory2cBX2ppuZmZmTUX\nUb4J2KAcQ4nNzMysBsracuLkxMzMbBnl5MTMzMxKxcmJmZmZlUpjo5MTMzMzKxG3nJiZmVmpODkx\nMzOzUnFyYmZmZqXieU7MzMysVNxyYmZmZqXi5MTMzMxKxcmJmZmZlYrnOTEzM7NSccuJmZmZlYqT\nEzMzMysVJydmZmZWKp7nxMzMzErFLSdmZmZWKk5OzMzMrFScnJiZmVmpeJ4TMzMzKxW3nJiZmVmp\nODkxMzOzUnFyYmZmZqXi5MTMzMxKxZOwmZmZWam45cTMzMxKxcmJmZmZlYrnOWmBpFMkNRYeT+SW\nryjpAklvSXpX0pWShhS2sb6kGyXNkzRN0pmSan5sZmZmZdavW04kDZC0vaTVO7mJKcBQYFj22C23\n7Bxgf+AQYDSwDnBVbt/LAeOB5YFRwOHAEcBpnYzFzMxsmdCvkhNJ50j6WvbzAOAuYDLwiqSPd2KT\niyPizYiYkT3eybY9GPgqMDYi7oqIh4Ejgf+QtHO27j7AFsAXI+KxiJgAnAx8R9LynTk+MzOzZUG/\nSk6AzwJImAt3AAAgAElEQVSPZD8fCGxMShDGAT/pxPY2k/SapOckXSZp/ax8JKlF5PZKxYh4CngZ\n2DUrGgU8FhFv5bY3AagDtu5ELGZmZsuE/pacrAVMy37+JPD3iHga+APw0Q5u6wHSZZh9gG+REp27\nJa1MusSzMCLmFNaZni0je55eZTm5OmZmZlZQ1nlOOnvZYzqwlaQ3gH2Bo7PyQcCSjmwouwxTMUXS\ng8BLwOeB91tYTUC0Z/MdicXMzGxZUtaWk84mJ38ErgDeICUAt2bluwBPdiWgiJgt6WlgU+A2YKCk\nwYXWkyE0tY5MA3YqbGZo9lxsUVnK2LFjqaura1ZWX19PfX19Z8I3MzPrM9qTnDQ0NNDQ0NCsbPbs\n2T0YVSeTk4g4VdIUYH3SJZ0F2aIlwBldCUjSKsAmwJ+AScBiYC/g6mz5cGAD4L5slfuBEyWtlet3\nMgaYDTxBG8aNG8eIESO6ErKZmVmf1J55Tqp9YZ88eTIjR47ssbg6PZolIq4EkLRSruxPHd2O
pF8A\n15Mu5awL/C8pIbk8IuZIuhg4W9JM4F3gXODeiHgo28QtpCTkUkknAGsDpwPnR8Sizh6fmZlZf1fW\nyzqdHUo8QNLJkl4D5kr6SFZ+emWIcQesB/yVdDnocuBNYFREvJ0tHwvcAFwJ3Am8TprzBICIaAQO\nILXa3Af8GbgEOKUzx2ZmZrasKGty0tmWkx+RJjv7PvC7XPkU4L+Bi9u7oYhotXNHdsno2OzRUp1X\nSAmKmZmZtVNZk5PODiD6CvCNiPgLzUfnPEKa78TMzMxKrr8lJ+sCz7awvRU6H46ZmZn1lrLOc9LZ\nkJ4Adq9S/lng4c6HY2ZmZr2lrC0nne1zchrwJ0nrkhKcz0janHS5x30/zMzM+oCyJiedajmJiGtJ\nScjewDxSsrIlcGBE3NraumZmZlYO7ZnnpBa6Ms/JPcAnujEWMzMz60X9quVE0k6SdqlSvoukHbse\nlpmZmfW0fpWcABeQpq4vWjdbZmZmZiXX35KTrYDJVcofzpaZmZlZyfW35GQBTXf+zVubdF8cMzMz\nK7n+lpzcAvxMUl2lQNJqwE8Bj9YxMzPrA8o6CVtnR+scD9wNvCSpMuna9sB04MvdEZiZmZn1rLK2\nnHQqOYmI1yRtC3wR2A6YD/wRaIiIRd0Yn5mZmfWQ/jjPyTzgom6MxczMzHpRv2o5AZA0HPg4MIRC\n35WIOK1rYZmZmVlP61fJiaSjgF8DbwHTgMgtDtJ09mZmZlZi/So5AU4CfhQRP+/OYMzMzKz3lDU5\n6ewAotWBv3dnIGZmZta7+lty8ndgTHcGYmZmZr2rv81z8ixwuqRRwGNAs+HDEXFuVwMzMzOzntXf\nhhJ/A5gL7JE98gJwcmJmZlZyZb2s09lJ2Dbu7kDMzMysd5U1OenSlSZJAyVtLqnT86WYmZlZbfSr\n5ETSIEkXA+8BjwMbZOXnSfpBN8ZnZmZmPaRfJSfAz0j31Pk48H6u/DbgC12MyczMzHpBWZOTzl6O\nORj4QkQ8ICk/O+zjwCZdD8vMzMx6WlmTk862nHwYmFGlfGWaT2VvZmZmJVXWeU46G9K/gf1zrysJ\nydeB+7sUkZmZmfWKss5z0tnk5ETgp5J+Tbo0dJykW4EjgR91JSBJP5TUKOnsXNmKki6Q9JakdyVd\nKWlIYb31Jd0oaZ6kaZLOlFTCfNDMzKwc+tVlnYi4h9QhdnnSDLFjgOnArhExqbPBSNoJOAp4pLDo\nHFJLzSHAaGAd4KrcessB47N4RgGHA0fguyObmZm1qKzJSYc7xGZzmhwGTIiIo7orEEmrAJeRLg2d\nnCsfDHwVODQi7srKjgSmSto5Ih4E9gG2APaMiLeAxySdDJwh6dSIWNxdcZqZmfUXZU1OOtxykn3Q\n/wZYqZtjuQC4PiLuKJTvSEqibs/F8BTwMrBrVjQKeCxLTComAHXA1t0cp5mZWb/Qb5KTzIPADt0V\nhKRDge2BH1ZZPBRYGBFzCuXTgWHZz8Oy18Xl5OqYmZlZTlmTk87Oc3Ih8EtJ6wGTgHn5hRHxaHs3\nlG3jHOATEbGorfr5VWnfsGUPbTYzM6uivyUnl2fP+bsPB00Jw4AObGskad6USdIHp2gAMFrSMcC+\nwIqSBhdaT4bQ1DoyDdipsN2h2XOxRaWZsWPHUldX16ysvr6e+vr6DhyCmZlZ39Oe5KShoYGGhoZm\nZbNnz+7BqDqfnHTnXYlvAz5aKLsEmAqcAbwGLAL2Aq4GkDScdD+f+7L69wMnSlor1+9kDDAbeKK1\nnY8bN44RI0Z0/SjMzMz6mMbGtidhq/aFffLkyYwcObLH4upUchIRL3VXABExj0ICIWke8HZETM1e\nXwycLWkm8C6pxebeiHgoW+WWbBuXSjoBWBs4HTi/g5eKzMzMlhn96rKOpK+0tjwi/ty5cJo2UXg9\nFlgCXAmsCNwMfCe3v0ZJBwC/JrWmzCO1vpzSxTjMzMz6
rX6VnAC/KrxeARgELATeA7qUnETEfxZe\nLwCOzR4trfMKcEBX9mtmZrYs6VfJSUSsXiyTtBmp5eIXXQ3KzMzMel5Zk5Nuu/dMRDwD/IClW1XM\nzMyshPp9cpJZTLrvjZmZmZVcWZOTznaIPahYRBohcwxwb1eDMjMzs57Xr5IT4JrC6wDeBO4Avtel\niMzMzKxXLFnSj5KTiOjuy0FmZmbWi6ZNg0mT4HOfq3UkS3OSYWZmtgy6+up0WefII2sdydI6lZxI\nulLSD6qU/4+kv3c9LDMzM+tJb78Na6wBqy81OUjtdbblZA/gxirlNwOjOx+OmZmZ9YZ582DllWsd\nRXWdTU5WIc0GW7QIGNz5cMzMzKw3zJ0Lq6xS6yiq62xy8hjwhSrlh9LGXYDNzMys9ubNK29y0tmh\nxKcD/5C0CWn4MMBeQD1Qwn6/ZmZmljd3bnkv63R2KPH1kg4GTgQ+C8wHHgX2joi7ujE+MzMz6wH9\nseWEiLiR6p1izczMrOTmzoX11691FNV1dijxTpJ2qVK+i6Qdux6WmZmZ9aQyt5x0tkPsBUC1fGvd\nbJmZmZmVWJn7nHQ2OdkKmFyl/OFsmZmZmZVYfxxKvAAYWqV8bWBx58MxMzOz3tAfL+vcAvxMUl2l\nQNJqwE+BW7sjMDMzM+s5Zb6s09nROscDdwMvSXo4K9semA58uTsCMzMzs56xcCEsXlzelpPOznPy\nmqRtgS8C25HmOfkj0BARi7oxPjMzM+tmc+em5/7WckJEzAMu6sZYzMzMrBfMnp2eB5f0bnidSk4k\nfY40Vf1wIIBngL9GxJXdGJuZmZn1gDffTM9DhtQ2jpZ0qEOspOUk/Q34G2nI8LPA88DWwBWSLpek\n7g/TzMzMusuMGem5rMlJR1tOjgP2Bg6KiBvyCyQdROp3chxwTveEZ2ZmZt2tkpystVZt42hJR4cS\nHwn8TzExAYiI64DvA1/tjsDMzMysZ8yYAWusASusUOtIqutocrIZcFsry2/L6piZmVlJzZhR3ks6\n0PHkZD6wWivLBwPvd2SDkr4l6RFJs7PHfZL2zS1fUdIFkt6S9K6kKyUNKWxjfUk3SponaZqkMyV1\ndoI5MzOzfq2/JSf3A99uZfl3sjod8QpwAjAye9wBXCtpy2z5OcD+wCHAaGAd4KrKylkSMp7Uf2YU\ncDhwBHBaB+MwMzNbJpQ9Oeloh9ifAHdKWhM4C3gSELAl8D3gU8CeHdlgRNxYKDpJ0reBUZJeI/Vh\nOTQi7gKQdCQwVdLOEfEgsA+wBbBnRLwFPCbpZOAMSadGhO/1Y2ZmlvPyy7DffrWOomUdajmJiPuA\nL5ASkPuBmcA7wL1ZWX1E3NvZYLKhyocCg7LtjyQlULfnYngKeBnYNSsaBTyWJSYVE4A60hBnMzMz\nyyxeDM8/D5uVuIdohydhi4irJU0AxpAmYQN4GrglIt7rTBCStiElIysB7wKfjognJe0ALIyIOYVV\npgPDsp+HZa+LyyvLHulMTGZmZv3Ryy/DokWw6aa1jqRlnb23znuS9gb+X0S80w1xPEm6R89qpL4l\nf5Y0upX6Is1M25b21DEzM1tmPPNMeu43yYmk9SLi1ezlYcCZwDuSHgM+GRGvdCaIrF/I89nLyZJ2\nJk3mdgUwUNLgQuvJEJpaR6YBOxU2OTR7LraoLGXs2LHU1dU1K6uvr6e+vr5jB2FmZtYHPP10mt9k\ngw3aV7+hoYGGhoZmZbMrN+fpIR1tOXlS0tukPiYrAeuT+n9sBHTnVC7LASsCk4DFwF7A1QCShgMb\nAPdlde8HTpS0Vq7fyRhgNvBEWzsaN24cI0aM6MbQzczMyuuhh2C77WD5dmYA1b6wT548mZEjR/ZA\ndElHhxLXAZ8jJQ3LAeMlPU1KJPaRNKy1lauR9BNJu0naUNI2kn4G7AFclrWWXAycLenjkkaSpsi/\nNyIeyjZxCykJuVTS
tpL2AU4Hzo+IRR2Nx8zMrD978EHYZZdaR9G6jiYnK0TEgxHxS9KEbDuQprRf\nQhry+5ykpzq4zaHAn0n9Tm4jjdAZExF3ZMvHAjcAVwJ3Aq+T+qUAEBGNwAFZDPdl27oEOKWDcZiZ\nmfVrs2bBU0+VPznp6GWdOZIeJl3WGQgMioh7JS0mDTF+Fdi5IxuMiK+3sXwBcGz2aKnOK6QExczM\nzFowZUp63n772sbRlo62nKwD/BhYQEps/i1pIilRGQFERNzTvSGamZlZd5gyJfU12XzzWkfSuo5O\nwvZWRFwfET8E3iONkjmPNGT3LFLLyl3dH6aZmZl11ZQpMHw4DBxY60ha19Wb482OiCuARcB/AhsD\nF3Y5KjMzM+t2jz4KW/eBudO7kpxsS+pjAvASsCgipkXE37oelpmZmXWn999PI3U+9rFaR9K2Ts0Q\nCx90Qq38vE33hGNmZmY94aGHYMEC2GOPWkfStq5e1jEzM7OSW7IETjwR1lkHtt221tG0rdMtJ2Zm\nZtY3XHcd3HMP3HknDBhQ62ja5pYTMzOzfu4Pf4Bdd+0bl3TAyYmZmVm/98wzKTnpK5ycmJmZ9WMR\n8OqrsO66tY6k/ZycmJmZ9WNz5sC8eU5OzMzMrCRezWYkW2+92sbREU5OzMzM+rHXXkvPbjkxMzOz\nUnjxxfS8zjo1DaNDnJyYmZn1UxFw2WWw227lv9lfnidhMzMz64cWL4YvfAEmToRrrql1NB3j5MTM\nzKyfmTkTDjwQHngArrwSPvWpWkfUMU5OzMzM+pmjj4apU+GOO2D06FpH03Huc2JmZtaPvP8+XHst\nfP/7fTMxAScnZmZm/crdd8P8+fDJT9Y6ks5zcmJmZtaP/OMfsOGGsM02tY6k85ycmJmZ9RMXXwy/\n/S0ceihItY6m85ycmJmZ9RPjxqXnb32rtnF0lZMTMzOzfmDhQnjqKbjwQthoo1pH0zVOTszMzPqB\nZ55JE69tvXWtI+k6JydmZmb9wKOPpmcnJ2ZmZlYK110HH/0orLlmrSPpOicnZmZmfdz48WnitUMP\nrXUk3aPmyYmkH0p6UNIcSdMlXS1peKHOipIukPSWpHclXSlpSKHO+pJulDRP0jRJZ0qq+fGZmZn1\npMmT4dOfhj32SNPW9wdl+PDeHTgP2AXYG1gBuEXSh3J1zgH2Bw4BRgPrAFdVFmZJyHjSvYJGAYcD\nRwCn9Xz4ZmZmtXPhhbDuuqnlZLXVah1N96j5jf8iotkEu5KOAGYAI4F7JA0GvgocGhF3ZXWOBKZK\n2jkiHgT2AbYA9oyIt4DHJJ0MnCHp1IhY3HtHZGZm1jveey/NCHv00TBwYK2j6T5laDkpWg0I4J3s\n9UhSEnV7pUJEPAW8DOyaFY0CHssSk4oJQB3QD/otm5mZLe2ss2DePPja12odSfcqVXIiSaRLOPdE\nxBNZ8TBgYUTMKVSfni2r1JleZTm5OmZmZv1GBPzxj3DkkbDxxrWOpnvV/LJOwYXAVsBu7agrUgtL\nW9pTx8zMrE956il48UU48MBaR9L9SpOcSDof+CSwe0S8nls0DRgoaXCh9WQITa0j04CdCpscmj0X\nW1SaGTt2LHV1dc3K6uvrqa+v7+ARmJmZ9Y4XX4QxY2CNNWDPPXt2Xw0NDTQ0NDQrmz17do/uUxG1\nb1jIEpNPAXtExPOFZYOBN0kdYq/OyoYDTwK7RMRDkvYFrgfWrvQ7kfQN4OfAkIhYVGWfI4BJkyZN\nYsSIET14dGZmZt3nscdgr71glVXg9ttrc0ln8uTJjBw5EmBkREzu7u3XvOVE0oVAPXAQME9SpcVj\ndkS8HxFzJF0MnC1pJvAucC5wb0Q8lNW9BXgCuFTSCcDawOnA+dUSEzMzs77q4oth+eXh3nth7bVr\nHU3PqHlyAnyL1C/kzkL5kcCfs5/HAkuAK4EVgZuB71QqRkSjpAOAXwP3AfOAS4BTej
BuMzOzXrVw\nYZoN9oAD+m9iAiVITiKizRFDEbEAODZ7tFTnFeCAbgzNzMysVI49Nt19+KKLah1JzyrVUGIzMzNr\n2YMPwuGHw8c/XutIepaTEzMzsz7ixRdhiy1qHUXPc3JiZmbWB8yeDbNmwYYb1jqSnufkxMzMrA94\n6aX0vNFGNQ2jVzg5MTMz6wMeeCA9LwstJzUfrWNmZmbVLVgA48bBH/6QRunsvTcMWwbuGOeWEzMz\nsxKKgEMOgZNOglGjoKEBJkyA5ZaBT263nJiZmZXQ3/4GN94IV10Fn/lMraPpXU5OzMzMSmLqVJg8\nGR56CC64AOrr4dOfrnVUvc/JiZmZWY0tWQLf+x6cdx40Nqab+p12WiqTah1d73NyYmZmVkMzZsBx\nx8EVV8DPfw7f+hastFK6ud+yahk+dDMzs9qaOxe22y6Nyrn0UjjssFpHVA5OTszMzGrkwgvhzTfT\nMOGNN651NOWxDAxIMjMzK5+rr4YTToCvfc2JSZGTEzMzs1727LPws5/BHnvAb35T62jKx8mJmZlZ\nL2lsTJOqbbYZTJkCp5yybI7GaYuTEzMzs15y3HHwk5/Ad78Lzz8Pe+5Z64jKyR1izczMesHLL8Pv\nfpfmLzn55FpHU25OTszMzHrAwoUwcWIajfOPf8DNN8Nqq8HRR9c6svJzcmJmZtYN3n0XfvWrlIxc\ney3Mng2zZqVl22wD3/gGnHgirLFGbePsC5ycmJmZdYPzz4dTT4X11oMxY2D99WHffdPrYcPc8bUj\nnJyYmZl1wRNPpHvg3HxzmrPk97+vdUR9n0frmJmZddK4cbDttmmG1zPPTCNxrOvccmJmZtYBS5bA\nAw/AZZelCdTGjk0Tqq24Yq0j6z+cnJiZmbVh9mx4+2044wy47jqYPh3q6uCcc+C//sv9SbqbkxMz\nM7MWvPZauv/NX/6SXq+xBnz5y7D//rDLLjB4cG3j66+cnJiZmeVcdhk89hjceis8+mhKSH71K9ho\nI9htNw8F7g1OTszMbJkXATfckDq0/utf6S7Bw4fD2WfDV76SJk+z3lOK0TqSdpd0naTXJDVKOqhK\nndMkvS7pPUm3Stq0sHx1SX+RNFvSTEm/l7Ry7x2FmZn1Fe+9B//8Z7o8s8IK6XHQQTBgAJx7brrv\nzc03p/4kTkx6X1laTlYG/g/4A3BVcaGkE4BjgMOBF4AfAxMkbRkRC7NqfwWGAnsBA4FLgN8CX+rp\n4M3MrJwWLEgzt159dUpInn8+JSVvvAFvvZVaSM48Ez70IdhkE9h7b3duLYNSJCcRcTNwM4BU9dfi\nOOD0iLg+q/MVYDpwMHCFpC2BfYCREfFwVudY4EZJx0fEtF44DDMzq7FZs+AXv4DnnoP582H8eFi8\nGJZbDlZaKSUhBx8Ma64Jn/oUjBzpIcBlVIrkpDWSNgaGAbdXyiJijqR/AbsCVwCjgJmVxCRzGxDA\nLsC1vRexmZn1ln//O42meeaZ9HrmzNR/ZNSo1AJyyimw6aapI+t669U2Vmu/0icnpMQkSC0ledOz\nZZU6M/ILI2KJpHdydczMrA9bsgQmTICbbkrJyPvvw913w9Zbw+GHp2RkpZXgiCNgnXVqHa11RV9I\nTloiUtLS1TpmZlYis2al+UUWLkx9Rd56C665JpXPnw+rrw7/+Z+w6qpwwQVw1FGwfF/+NLOl9IW3\ncxopyRhK89aTIcDDuTpD8itJGgCsztItLs2MHTuWurq6ZmX19fXU19d3LWozM2uXCHj11TTz6jXX\nwP33w7x5aVldHay7LhxwAGyxBey6K+y4YxpdY72joaGBhoaGZmWzZ8/u0X0qolwNC5IagYMj4rpc\n2evALyJiXPZ6MCnp+EpE/F3SFsDjwI65DrFjgPHAetU6xEoaAUyaNGkSI0aM6PHjMjMzmDIlTQUP\n6f40U6ak5yefTGW77AJjxsB++6XLNFtt5VlYy2
jy5MmMHDkS0kCUyd29/VK0nGTzkWxKaiEB+Iik\n7YB3IuIV4BzgJEnPAi8CpwOvknV0jYgnJU0Afifp26ShxOcBDR6pY2ZWO889l1pDXn8drr02va5Y\nfvnUCrLVVqlT66c/nVpKzEqRnAA7Av8k9Q8J4JdZ+Z+Ar0bEmZIGkeYtWQ2YCOyXm+ME4DDgfNIo\nnUbgStIQZDMz6yXz5sHEiSkZueaa1IF1ueVS/5D990937/3oR1PdNdaAIUNa354tm0qRnETEXbQx\nW21EnAqc2sryWXjCNTOzXhEBL7+cRtBMmpRmU7355nTJptJfZKed4Ec/guOPh0GDahuv9S2lSE7M\nzKxcZs9OycbixU1lS5bA9den0TNvvZX6i1RsuCF8/vMwbFjqvDp0KKy1Vu/Hbf2DkxMzs2VQYyM8\n/jgsWpR+vvHGdAlmcta1cfHilIwUbbEFjBgB668PJ50EH/5wmm11u+16N37r35ycmJn1I0uWpIQj\nLyJN4/766+l52rTUMvLCC011Bg2CnXeGn/88dVQdMCCNmBk6tPm2VlzR956xnufkxMysD3nqqTQn\nyC23pOe8iFT+9tvV111xxTSb6sc+ljqp7rsvrL12WrbRRqmDqlkZODkxM6uh+fOb5v2A1Jn0mmvS\n1OwAc+c2vY6Al15K5XV1sMMOS2/vc59L95Ep2nLLdDnGrC9wcmJm1o0eeaT5XB4V8+enJOO995rK\nItJsqLNmNa+70kpN831Iaar2jTZKrzfbDEaPTn09Vl21Rw7BrOacnJiZVTFnTuqjAek+LxMmpGSi\nYsGCdN+XuXObr1dMNPK22SbdITfvyCNT8lHpxyGlWVLXXLPrx2DWVzk5MbNl1rRpabhsY2N6/frr\nKQlpbEzDZOfMaaq77rqwyirN1x8zJo1eyVt/ffjEJ6p3Gl19dXcmNWsPJydm1mctXgwPP5xGqCxZ\nkm4c9847S9ebPj2NUqk2NDZvwAA48EBYbTXYYw/45CdTMjFwIIwcmTqRmlnPc3JiZjW3cGHqk1H0\nzjtp0q/KnBs33JAm/6qYNavp0guk0Sabbbb0dpZfHn7yk6VHo6ywQkpGVl+9qcwJiFntOTkxs241\nY0bTRF7VPP54ugttRWMj3HorvPtu9forrJA6iEKa6GvMmKZlAwak2Ug//OH0esMNl770YmZ9j5MT\ns2XUjBnVWysqIuD225uGrj7zDNx9d9vbfeedpmGw1Sy3XLpkMnBgU9lRR8GoUUvXHTAgdRZdbbW2\n92tm/YeTE7M+4s034Z//bOq8Wc3EifD0021va/58uPfetutJsN566edBg+ArX2lqxWjJqqvCwQen\nCb+qGTSo+WUUM7MiJydmPWj2bHjxxZaXP/ggPPbY0uWLFsG11zafnGvBgrY7dK65Juy1V/tGhFx4\n4dLDWos23BCGD297W2Zm3cnJiVmmsbHlD/9HH4WHHmp53Ycfrt7P4plnmicYRRJstVX1Tpj775/m\nxagYOBA+9ammybmqWXHF1PnTzKwv878x69dmzkyJRVEE3HQTvPFGet3YmOa7aOmeJJASiJZGcqyx\nBhx0UOojkTdmTBoNUiyvWGst2Hjjto/DzGxZ4uTESmXOnHT5oui55+DOO1ter3LDs+KN0KZNW3oG\nz4q6Oth226bXn/tc9U6ZkDpkHnBAy0mGmZl1Hycn1mOeeCJd1ii66y549tmly+fPT6ND8lOE5w0e\n3HyER9Fmm8EhhzQvW2WVljtnDhvme5OYmZWRkxOratq0pnknHnggzU1RTbV7jlS8/Xb18ro62H33\npTttfuhDcNZZsMkmS68zaFAaUuqWCzOz/s/JyTKgsTFN3V2tP8X06anvRb4j6KJF8K9/NU8sPvKR\n6v0tBg6Er32teifN9dZLfS6KSciqq7Y9HNXMzJZdTk5KorExddxctKjtuq2NHHnrrTTFdz7ZiGh5\nFIqU7h9SnN
b7i1+Ej340/Tx06NI3NzMzM+spTk46acmSpWfBfOaZ1J+iJU8+2fLEV3PmNM3E2R7b\nb199yOiAAXDiiTBkSPPyESNghx2Wri956KmZmZWLP5Za8PTTTclCRJqZ84UXml7feWea/rto4MCW\nP+xb65y53HLpEsi667YdW10dbLRRe47CzMys71nmk5PzzksdPufNayqLSB09830uVl0Vdt65qf/E\nwQfDnns239bKK8O++6YblZmZmVnnLPPJyQ03wN57L32b9Q03bD4N+Jpr+m6nZmZmvWGZT04mTEj9\nMczMzKwcWpiM28zMzKw2nJyYmZlZqfSr5ETSdyS9IGm+pAck7VTrmPqChoaGWodQGj4Xic9DE5+L\nxOch8XnoHf0mOZH0BeCXwCnADsAjwARJa9U0sD7Af2xNfC4Sn4cmPheJz0Pi89A7+k1yAowFfhsR\n/7+9ew+2sirjOP79oQKKgzh5YSwUlbybF1AoRUFFUkcdsnEcTC2qwazJrElzqrEspzvjDadGbfJa\nUU1pqaMiBylvjGLmhYsp4gUPieIRARXh6Y+1trxsz0Hw7LP3Pvv9fWb2DO9a691nrYd3r/3s9117\nv9dHxDzgbGAlMKmx3TIzM7NN0RLJiaQtgOHAPZWyiAhgOvDJRvXLzMzMNl1LJCfAdsBmwJKq8iXA\n4EHfvXcAAAoVSURBVPp3x8zMzD6sVv+dEwHRRV1/gLlz59avN02qo6ODOXPmNLobTcGxSByHdRyL\nxHFIHIek8N7ZI/eYV0RX7929R76ssxI4JSJuLZT/DtgmIiZ0ss9E4Ka6ddLMzKz1nB4RN9f6SVvi\nzElErJb0CHA0cCuAJOXty7vY7U7gdOA54K0u2piZmdn79QeGkt5La64lzpwASDoVuA6YDMwmfXvn\ns8BeEfFKI/tmZmZmG68lzpwARMS0/JsmFwM7Av8GxjsxMTMz611a5syJmZmZtYZW+SqxmZmZtYhS\nJidluAePpNGSbpX0kqS1kk7qpM3FkhZLWinpbknDquq3lXSTpA5JyyRdI2lA/UbRfZIulDRb0huS\nlkj6q6Q9qtr0kzRV0lJJyyX9WdIOVW2GSLpN0gpJ7ZJ+LqnXvH4knS3psfx/2SHpfkmfLtS3fAw6\nk4+PtZKmFMpKEQtJF+WxFx9PFepLEQcASTtJuiGPdWV+rRxc1aYM8+XCTo6JtZKuyPV1OyZ63UHU\nXSrPPXgGkNbdfJVOfutF0gXA10gLiA8FVpDi0LfQ7GZgb9K3nk4AjgB+07PdrrnRwBXASOAYYAvg\nLklbFtpcShrfKaQx7gT8pVKZX1i3k9ZojQLOAj5PWt/UW7wAXED6JeXhwAzgFkl75/oyxGA9Sh9K\nvkyaA4rKFIsnSGv0BufH4YW6UsRB0iDgPuBtYDxpzvsWsKzQpizz5QjWHQuDgXGk949pub5+x0RE\nlOoBPAhcVtgW8CJwfqP71oNjXgucVFW2GDivsD0QWAWcmrf3zvsdVGgzHngXGNzoMXUjFtvlcR1e\nGPfbwIRCmz1zm0Pz9nHAamC7QpvJpMlr80aPqRuxeBX4QhljAGwNzAeOAtqAKWU7Hkgf0OZ0UVem\nOPwUuPcD2pR1vrwUWNCIY6JUZ07ke/AAIGlXUlZcjMMbwEOsi8MoYFlEPFrYdTopix5Zp672hEGk\nMbyWt4eTsvxiLOYDz7N+LB6PiKWF57kT2AbYt6c7XGuS+kg6DdgKeIASxgCYCvw9ImZUlY+gXLH4\nuNKl32ck3ShpSC4v0zFxIvCwpGn50u8cSV+qVJZ1vszvl6cD1+aiur42SpWc4HvwVAwmvWg2FIfB\nwP+KlRGxhvSm3itjJUmkTwL/iojKtfXBwDt5simqjkVnsYJeFAtJ+0laTvr0cxXpE9A8ShQDgJyY\nHQhc2En1jpQnFg+STrmPJ93FfVdgVl4nUaZjYjfgK6QzaccCvwYul/S5XF/K
+RKYQEoqrsvbdX1t\ntMzvnHTThu7BUyYbE4feHKurgH1Y/7p6VzZ2nL0pFvOAA0hnj04Brpd0xAbat1wMJH2MlKCOi4jV\nm7IrLRaLiCj+sucTkmYDi4BT6fpXs1suDqQP6bMj4vt5+zFJ+5ISlhs3sF+rz5eTgDsiov0D2vXI\nMVG2MydLgTWkDLBoB96f7bWydtIBtaE4tOft90jaDNiWXhgrSVcCxwNjImJxoaod6CtpYNUu1bGo\njlVlu9fEIiLejYhnI2JORHyXtBD0XEoUA9Lliu2BRyStlrQaOBI4V9I7pLH0K0ks1hMRHcACYBjl\nOiZeBqrvADsX2Dn/u4zz5c6kLxBcXSiu6zFRquQkf1Kq3IMHWO8ePPc3ql/1FhELSQdRMQ4DSddG\nK3F4ABgk6aDCrkeTXqQP1amrNZETk5OBsRHxfFX1I6RFa8VY7EGamIqx2L/qG13HAh3AU/RefYB+\nlCsG04H9SZd1DsiPh0mfkCv/Xk05YrEeSVsDu5MWf5bpmLiPtLCzaE/SWaTSzZfZJFIycXuhrL7H\nRKNXAzdg9fGppFXWZwJ7kb7q9SqwfaP7VuNxDiBNtgeSVlN/I28PyfXn53GfSJqs/wY8DfQtPMft\npMn6EOAw0jXZGxo9tk2Mw1WkleKjSRl85dG/qs1CYAzpk/V9wD8L9X1IZxnuAD5Buka/BPhRo8e3\nCXG4hHQ5axdgP+AnpInmqLLEYAOxee/bOmWKBfAL0tdBdwE+Bdydx/GRksVhBGkd1oWk5GwisBw4\nrdCmFPNlHodIN8S9pJO6uh0TDQ9Eg4J/Tg7+KlKmN6LRfeqBMR5JSkrWVD1+W2jzA9KnpJWkFdXD\nqp5jEOkTZQfpDf5qYKtGj20T49BZDNYAZxba9CP9FsrSPCn9Cdih6nmGAP8A3swvtp8BfRo9vk2I\nwzXAs/mYbwfuIicmZYnBBmIzg/WTk1LEAvg96WcUVpG+cXEzsGvZ4pDHcTzwnzwXPglM6qRNy8+X\neRzj8hw5rJO6uh0TvreOmZmZNZVSrTkxMzOz5ufkxMzMzJqKkxMzMzNrKk5OzMzMrKk4OTEzM7Om\n4uTEzMzMmoqTEzMzM2sqTk7MzMysqTg5MTMzs6bi5MTMWoKkNklTGt0PM+s+Jydm1m2SJkt6Q1Kf\nQtkASasl3VPVdqyktZKG1rufZtY7ODkxs1poI90Je0ShbDTwMjBKUt9C+ZHAooh4blP/iKTNu9NJ\nM+sdnJyYWbdFxAJSIjKmUDyGdGv5hcCoqvI2AElDJN0iabmkDkl/lLRDpaGkiyQ9KumLkp4F3srl\nW0m6Pu/3kqRvVvdJ0jmSFkhaJald0rTajtrMeoqTEzOrlZnA2ML22Fx2b6VcUj9gJDAjt7mFdKv5\n0cAxwO7AH6qedxjwGWACcGAu+2Xe50TgWFLCM7yyg6QRwGXA94A9gPHArG6Oz8zqxKdIzaxWZgJT\n8rqTAaREYhbQF5gM/BA4LG/PlDQO2A8YGhGLASSdATwpaXhEPJKfdwvgjIh4LbcZAEwCJkbEzFx2\nFvBioS9DgDeB2yJiBfAC8FgPjdvMasxnTsysVirrTg4BDgcWRMRS0pmTkXndyRjgmYh4EdgLeKGS\nmABExFzgdWDvwvMuqiQm2e6khGV2Yb9lwPxCm7uBRcDCfPlnoqQtazZSM+tRTk7MrCYi4hngJdIl\nnLGkpISIeJl05uIwCutNAAHRyVNVl6/opJ4u9q305U3gYOA0YDHprM1jkgZu9IDMrGGcnJhZLbWR\nEpMxpMs8FbOA44BDWZecPAXsLOmjlUaS9gG2yXVd+S/wLoVFtpK2Ja0teU9ErI2IGRHxHeAAYChw\n1IcYk5nVmdecmFkttQFTSXPLvYXyWcCVpMsxMwEiYrqkx4GbJJ2X66YCbRHxaFd/ICJWSLoW+IWk\n14BXgB8DayptJJ0A7Jb/7jLgBNIZl/nv
f0YzazZOTsysltqA/sDciHilUH4vsDUwLyLaC+UnA1fk\n+rXAHcDXN+LvfJu0vuVWYDnwK6B4yeZ10jd8Lsr9eRo4La9pMbMmp4guL9uamZmZ1Z3XnJiZmVlT\ncXJiZmZmTcXJiZmZmTUVJydmZmbWVJycmJmZWVNxcmJmZmZNxcmJmZmZNRUnJ2ZmZtZUnJyYmZlZ\nU3FyYmZmZk3FyYmZmZk1FScnZmZm1lT+DwrH78/1pfXIAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Frequency distribution of words.\n", + "\n", + "one_doc = []\n", + "for doc in docs:\n", + " one_doc.extend(doc)\n", + "\n", + "bow = dictionary.doc2bow(one_doc)\n", + "word_freq = [cnt for _, cnt in bow]\n", + "\n", + "plt.plot(sorted(word_freq))\n", + "plt.xlabel('Words')\n", + "plt.ylabel('#Occurences')\n", + "plt.title('Frequency distribution of words.\\nPower-law behaviour.')\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 96, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Vectorize data.\n", + "\n", + "# Bag-of-words representation of the documents.\n", + "corpus = [dictionary.doc2bow(doc) for doc in docs]" + ] + }, + { + "cell_type": "code", + "execution_count": 97, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of authors: 166\n", + "Number of unique tokens: 681\n", + "Number of documents: 90\n" + ] + } + ], + "source": [ + "print('Number of authors: %d' % len(author2doc))\n", + "print('Number of unique tokens: %d' % len(dictionary))\n", + "print('Number of documents: %d' % len(corpus))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Disjoint set stuff" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def find_disjoint_sets(d):\n", + " while True:\n", + " for tuple_, set1 in d.items():\n", + " try:\n", + " match = next(k for k, set2 in d.items() if k != tuple_ and set1 & set2)\n", + " except StopIteration:\n", + " # no match for 
this key - keep looking\n", + " continue\n", + " else:\n", + " #print('merging', tuple(set1), match)\n", + " d[tuple_] = set1 | d.pop(match)\n", + " break\n", + " else:\n", + " # no match for any key - we are done!\n", + " break\n", + "\n", + " output = sorted(tuple(s) for s in d.values())\n", + " \n", + " return output" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false, + "scrolled": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[(0,), (1,), (2,), (3,), (4,), (6,), (7,), (8,), (9,), (10,), (11,), (12,), (13,), (14,), (15,), (16, 63, 39), (18,), (19, 59), (20,), (21,), (22,), (23,), (24, 53), (25, 84), (26,), (27,), (28,), (29,), (30,), (32,), (33,), (34,), (35,), (36,), (37,), (38,), (40,), (41,), (42,), (43,), (44,), (45,), (46,), (47,), (48, 17, 58, 5), (49,), (50,), (51,), (52,), (54,), (55,), (56,), (57,), (60,), (61,), (62,), (64,), (65,), (66,), (67,), (68,), (69,), (70,), (71,), (72,), (73, 31), (74,), (75,), (76,), (77,), (78,), (79,), (80,), (81,), (82,), (83,), (85,), (86,), (87,), (88,), (89,)]\n", + "81\n", + "0.0870358943939209\n" + ] + } + ], + "source": [ + "start = time()\n", + "\n", + "thing = {a: set(_list) for a, _list in author2doc.items()}\n", + "disjoint_authors = find_disjoint_sets(thing)\n", + "print(disjoint_authors)\n", + "print(len(disjoint_authors))\n", + "\n", + "print(time() - start)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Online AT VB 2" + ] + }, + { + "cell_type": "code", + "execution_count": 101, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(onlineatvb2)\n", + "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 10.9 s, sys: 12 ms, total: 10.9 s\n", + "Wall time: 10.9 s\n" + ] + } 
+ ], + "source": [ + "%time model_online2 = OnlineAtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Speed improvement from new algorithm: 4.709677!\n" + ] + } + ], + "source": [ + "print(\"Speed improvement from new algorithm: %f!\" %((2 * 60 + 26) / 31))" + ] + }, + { + "cell_type": "code", + "execution_count": 218, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.007*rule + 0.005*class + 0.005*classifier + 0.004*probability + 0.004*cue + 0.004*distribution + 0.004*sample + 0.003*sequence + 0.003*tree + 0.003*evidence'),\n", + " (1,\n", + " '0.056*motion + 0.052*velocity + 0.051*muscle + 0.044*robot + 0.040*reinforcement + 0.035*controller + 0.029*obstacle + 0.028*command + 0.028*reinforcement_learning + 0.027*movement'),\n", + " (2,\n", + " '0.049*cell + 0.027*spike + 0.024*stimulus + 0.022*eye + 0.020*firing + 0.019*response + 0.017*burst + 0.016*inhibition + 0.016*fiber + 0.016*wave'),\n", + " (3,\n", + " '0.029*attractor + 0.026*vc + 0.024*theorem + 0.019*bound + 0.019*xt + 0.017*fixed_point + 0.016*eigenvalue + 0.016*threshold + 0.015*let + 0.014*capacity'),\n", + " (4,\n", + " '0.039*hmm + 0.032*tdnn + 0.030*speech + 0.030*mlp + 0.028*phonetic + 0.026*speaker + 0.024*segmentation + 0.021*recognition + 0.021*hybrid + 0.021*phoneme'),\n", + " (5,\n", + " '0.055*chip + 0.055*word + 0.043*circuit + 0.033*analog + 0.031*vlsi + 0.030*pulse + 0.028*voltage + 0.027*board + 0.027*perturbation + 0.024*processor'),\n", + " (6,\n", + " '0.027*rbf + 0.023*spline + 0.015*schedule + 
0.015*basis_function + 0.012*weight_decay + 0.012*approximation + 0.010*regression + 0.010*validation + 0.009*stochastic + 0.009*prediction'),\n", + " (7,\n", + " '0.071*depth + 0.068*node + 0.056*contour + 0.050*projection + 0.042*polynomial + 0.039*proof + 0.032*gate + 0.028*hidden_node + 0.027*boolean + 0.027*boolean_function'),\n", + " (8,\n", + " '0.005*image + 0.005*object + 0.004*neuron + 0.004*eq + 0.004*character + 0.003*filter + 0.003*field + 0.003*dynamic + 0.003*receptive + 0.003*receptive_field'),\n", + " (9,\n", + " '0.031*grammar + 0.027*module + 0.023*expert + 0.021*string + 0.020*symbol + 0.019*recurrent + 0.017*language + 0.014*automaton + 0.014*giles + 0.014*mozer')]" + ] + }, + "execution_count": 218, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_online2.show_topics(num_topics=10)" + ] + }, + { + "cell_type": "code", + "execution_count": 214, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Sheila \t Kannappan\n", + "Docs: [100]\n", + "[(0, 0.29470045213299129),\n", + " (1, 0.018773780023831975),\n", + " (2, 0.071451542822641448),\n", + " (3, 0.026741158302140633),\n", + " (4, 0.018099032024313566),\n", + " (5, 0.015363132745463916),\n", + " (6, 0.089347751415205109),\n", + " (7, 0.020278388465418653),\n", + " (8, 0.31198092387189108),\n", + " (9, 0.1332638381961023)]\n" + ] + } + ], + "source": [ + "name = id2author[114]\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))" + ] + }, + { + "cell_type": "code", + "execution_count": 200, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [357]\n", + "[(0, 0.16874400828774647),\n", + " (1, 0.05776392793070604),\n", + " (2, 0.018385851898290052),\n", + " (3, 
0.090073600218074618),\n", + " (4, 0.12243813551115512),\n", + " (5, 0.048550522852509548),\n", + " (6, 0.1728010777698884),\n", + " (7, 0.19524400649884482),\n", + " (8, 0.056488897891914927),\n", + " (9, 0.069509971140870139)]\n", + "\n", + "Geoffrey E. Hinton\n" + ] + }, + { + "ename": "KeyError", + "evalue": "'Geoffrey E. Hinton'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Geoffrey E. Hinton'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 9\u001b[0m \u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_online2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 10\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyError\u001b[0m: 'Geoffrey E. 
Hinton'" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online2.get_author_topics(author2id[name]))" + ] + }, + { + "cell_type": "code", + "execution_count": 162, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Speed improvement from new algorithm: 5.503876!\n" + ] + } + ], + "source": [ + "print(\"Speed improvement from new algorithm: %f!\" %(28.4 / 5.16))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Online AT VB" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 13.6 s, sys: 16 ms, total: 13.6 s\n", + "Wall time: 13.6 s\n" + ] + } + ], + "source": [ + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", + " iterations=100, alpha='auto', eta='symmetric', random_state=1)\n", + "var_lambda = lda.state.get_lambda()" + ] + }, + { + "cell_type": "code", + "execution_count": 118, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(onlineatvb)\n", + "OnlineAtVb = onlineatvb.OnlineAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 157, + "metadata": { + "collapsed": false + }, 
+ "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 28.3 s, sys: 12 ms, total: 28.4 s\n", + "Wall time: 28.4 s\n" + ] + } + ], + "source": [ + "%time model_online = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=200, random_state=2, var_lambda=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": { + "collapsed": false, + "scrolled": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.075*image + 0.037*field + 0.034*visual + 0.031*position + 0.029*move + 0.025*map + 0.025*location + 0.021*center + 0.021*search + 0.019*human'),\n", + " (1,\n", + " '0.044*bit + 0.038*code + 0.030*hopfield + 0.029*matrix + 0.024*eq + 0.019*stored + 0.017*minimum + 0.016*stage + 0.014*optimization + 0.013*column'),\n", + " (2,\n", + " '0.031*iv + 0.025*differential + 0.023*code + 0.023*scheme + 0.020*adaptive + 0.017*find + 0.016*criterion + 0.015*he + 0.014*bound + 0.014*half'),\n", + " (3,\n", + " '0.035*activity + 0.033*array + 0.027*cell + 0.023*synaptic + 0.020*low + 0.018*rate + 0.017*synapsis + 0.016*region + 0.016*storage + 0.016*distribution'),\n", + " (4,\n", + " '0.052*role + 0.049*loop + 0.046*processor + 0.037*sequence + 0.029*gain + 0.021*product + 0.018*activation + 0.018*multiple + 0.018*edge + 0.017*address'),\n", + " (5,\n", + " '0.028*stimulus + 0.024*classification + 0.024*shape + 0.020*circuit + 0.018*fully + 0.018*design + 0.015*power + 0.015*pp + 0.014*sample + 0.014*experiment'),\n", + " (6,\n", + " '0.042*capacity + 0.034*associative_memory + 0.019*feedback + 0.018*cell + 0.017*phase + 0.016*interaction + 0.015*delay + 0.014*recall + 0.014*sequence + 0.014*matrix'),\n", + " (7,\n", + " '0.061*node + 0.049*hidden + 0.036*convergence + 0.033*energy 
+ 0.030*gradient + 0.030*dynamic + 0.019*back_propagation + 0.016*back + 0.016*propagation + 0.016*learning_algorithm'),\n", + " (8,\n", + " '0.060*training + 0.039*representation + 0.029*connectionist + 0.028*trained + 0.020*context + 0.017*learned + 0.017*target + 0.015*mcclelland + 0.015*hidden_unit + 0.015*rumelhart'),\n", + " (9,\n", + " '0.074*firing + 0.056*stimulus + 0.056*cell + 0.037*connectivity + 0.033*path + 0.030*potential + 0.027*temporal + 0.027*control + 0.021*synaptic + 0.019*inhibition')]" + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_online.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 273, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [21]\n", + "[(0, 0.16188318876615412), (1, 0.80823920909246583), (3, 0.021312448059559796)]\n", + "\n", + "Geoffrey E. Hinton\n", + "Docs: [146, 276, 235, 270]\n", + "[(0, 0.14004630013032807),\n", + " (1, 0.23772038268835666),\n", + " (2, 0.5640333145036398),\n", + " (3, 0.058200002677675597)]\n", + "\n", + "Michael I. Jordan\n", + "Docs: [205]\n", + "[(0, 0.26951795605324808),\n", + " (1, 0.1612862641672847),\n", + " (2, 0.4872153771544665),\n", + " (3, 0.081980402625000656)]\n", + "\n", + "James M. Bower\n", + "Docs: [150, 128, 162, 101, 188, 251, 244]\n", + "[(0, 0.67413384788621999),\n", + " (1, 0.071583305581578827),\n", + " (2, 0.06345028631865203),\n", + " (3, 0.19083256021354914)]\n" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. 
Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model_online.get_author_topics(author2id[name]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Test on a small dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 202, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "n_docs = 10\n", + "\n", + "from copy import deepcopy\n", + "\n", + "small_doc2author = deepcopy(dict(list(doc2author.items())[:n_docs]))\n", + "small_doc2author = dict(small_doc2author)\n", + "\n", + "small_corpus = corpus[:n_docs]" + ] + }, + { + "cell_type": "code", + "execution_count": 203, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "authors_ids = set()\n", + "for d, a_doc_ids in small_doc2author.items():\n", + " for a in a_doc_ids:\n", + " authors_ids.add(a)\n", + "\n", + "authors_ids = list(authors_ids)\n", + "author_id_dict = dict(zip(authors_ids, range(len(authors_ids))))" + ] + }, + { + "cell_type": "code", + "execution_count": 204, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "for d, a_ids in small_doc2author.items():\n", + " for i, a in enumerate(a_ids):\n", + " small_doc2author[d][i] = author_id_dict[a]" + ] + }, + { + "cell_type": "code", + "execution_count": 205, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Make a mapping from author IDs to document IDs.\n", + "small_author2doc = {}\n", + "for a in range(len(author_id_dict)):\n", + " small_author2doc[a] = []\n", + " for d, a_ids in small_doc2author.items():\n", + 
" if a in a_ids:\n", + " small_author2doc[a].append(d)" + ] + }, + { + "cell_type": "code", + "execution_count": 206, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "author_id_dict_rev = dict(zip(range(len(authors_ids)), authors_ids))\n", + "\n", + "small_id2author = {}\n", + "for a, a_id in author_id_dict_rev.items():\n", + " small_id2author[a] = id2author[a_id]" + ] + }, + { + "cell_type": "code", + "execution_count": 207, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "phi is 10 x 681 x 10 (68100 elements)\n", + "mu is 10 x 681 x 21 (143010 elements)\n" + ] + } + ], + "source": [ + "print('phi is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), 10,\n", + " len(small_corpus) * len(dictionary.id2token) * 10))\n", + "print('mu is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), len(small_author2doc),\n", + " len(small_corpus) * len(dictionary.id2token) * len(small_author2doc)))" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(onlineatvb)\n", + "OnlineAtVb = onlineatvb.OnlineAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 212, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 34.6 s, sys: 4 ms, total: 34.6 s\n", + "Wall time: 34.6 s\n" + ] + } + ], + "source": [ + "%time model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-3, \\\n", + " iterations=1, passes=200, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=10, random_state=1, var_lambda=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 133, + "metadata": { + "collapsed": false + }, + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Amir F.Atiya\n", + "Docs: [5]\n", + "[(0, 0.26236663424329809),\n", + " (1, 0.055837758145413023),\n", + " (2, 0.32385947243135804),\n", + " (4, 0.031231118362347546),\n", + " (5, 0.049702348068489471),\n", + " (6, 0.063277167602715914),\n", + " (7, 0.11515798924424819),\n", + " (9, 0.098115022122885684)]\n", + "\n", + "FrankWilczek\n", + "Docs: [1]\n", + "[(0, 0.21018310687516228),\n", + " (1, 0.39886126379385306),\n", + " (2, 0.18071281961456737),\n", + " (3, 0.052218386110533886),\n", + " (5, 0.039636353968810233),\n", + " (8, 0.032375816267307712),\n", + " (9, 0.073725725628590477)]\n" + ] + } + ], + "source": [ + "name = 'Amir F.Atiya'\n", + "print('\\n%s' % name)\n", + "print('Docs:', model.author2doc[model.author2id[name]])\n", + "pprint(model.get_author_topics(model.author2id[name]))\n", + "\n", + "name = 'FrankWilczek'\n", + "print('\\n%s' % name)\n", + "print('Docs:', model.author2doc[model.author2id[name]])\n", + "pprint(model.get_author_topics(model.author2id[name]))\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Mini-batch" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(minibatchatvb)\n", + "MinibatchAtVb = minibatchatvb.MinibatchAtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 2min 1s, sys: 24 ms, total: 2min 1s\n", + "Wall time: 2min 1s\n" + ] + } + ], + "source": [ + "%time model_online = MinibatchAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, 
var_lambda=None, chunksize=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Offline AT VB 2" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(atvb2)\n", + "AtVb2 = atvb2.AtVb2" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 21min 58s, sys: 376 ms, total: 21min 58s\n", + "Wall time: 21min 58s\n" + ] + } + ], + "source": [ + "%time model_offline2 = AtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", + " iterations=100, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", + " eval_every=10, random_state=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.018*path + 0.014*center + 0.013*constraint + 0.011*map + 0.011*activity + 0.010*array + 0.010*rate + 0.010*cycle + 0.010*visual + 0.010*iv'),\n", + " (1,\n", + " '0.019*matrix + 0.016*delay + 0.013*associative_memory + 0.013*capacity + 0.012*potential + 0.010*storage + 0.010*classification + 0.010*dynamic + 0.010*synaptic + 0.009*rate'),\n", + " (2,\n", + " '0.044*cell + 0.020*stimulus + 0.014*probability + 0.010*region + 0.009*training + 0.008*noise + 0.007*field + 0.007*node + 0.007*actual + 0.007*area'),\n", + " (3,\n", + " '0.026*code + 0.025*hopfield + 0.015*sequence + 0.015*image + 0.013*energy + 0.013*length + 0.013*machine + 0.012*field + 0.012*matrix + 0.011*minimum'),\n", + " (4,\n", + " '0.032*processor + 0.023*activation + 0.012*dynamic + 0.012*operation + 0.012*hidden + 0.011*energy + 0.011*edge + 0.010*machine + 0.010*update + 0.009*training'),\n", + " (5,\n", + " '0.024*hidden + 
0.016*hidden_unit + 0.013*matrix + 0.012*sequence + 0.012*adaptive + 0.012*action + 0.010*multiple + 0.009*training + 0.009*back + 0.008*learn'),\n", + " (6,\n", + " '0.026*training + 0.015*stage + 0.014*bit + 0.013*optimization + 0.012*convergence + 0.011*eq + 0.011*surface + 0.011*node + 0.011*neural_net + 0.010*code'),\n", + " (7,\n", + " '0.049*cell + 0.015*node + 0.014*feature + 0.013*region + 0.011*map + 0.011*control + 0.011*back + 0.010*temporal + 0.008*cycle + 0.008*decision'),\n", + " (8,\n", + " '0.023*cell + 0.014*probability + 0.012*current + 0.012*position + 0.012*image + 0.011*principle + 0.010*noise + 0.008*dimensional + 0.008*shape + 0.007*firing'),\n", + " (9,\n", + " '0.042*representation + 0.033*activity + 0.029*role + 0.026*firing + 0.023*cell + 0.014*stimulus + 0.014*variable + 0.013*product + 0.012*potential + 0.010*synaptic')]" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_offline2.show_topics()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## \"Offline\" AT VB" + ] + }, + { + "cell_type": "code", + "execution_count": 356, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "phi is 286 x 2245 x 10 (6420700 elements)\n", + "mu is 286 x 2245 x 578 (371116460 elements)\n" + ] + } + ], + "source": [ + "print('phi is %d x %d x %d (%d elements)' %(len(corpus), len(dictionary.id2token), 10,\n", + " len(corpus) * len(dictionary.id2token) * 10))\n", + "print('mu is %d x %d x %d (%d elements)' %(len(corpus), len(dictionary.id2token), len(author2doc),\n", + " len(corpus) * len(dictionary.id2token) * len(author2doc)))" + ] + }, + { + "cell_type": "code", + "execution_count": 238, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 7.81 s, sys: 0 ns, total: 7.81 s\n", + "Wall time: 7.81 s\n" + ] + } 
+ ], + "source": [ + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", + " iterations=100, alpha='auto', eta='symmetric', random_state=1)\n", + "var_lambda = lda.state.get_lambda()" + ] + }, + { + "cell_type": "code", + "execution_count": 185, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(atvb)\n", + "AtVb = atvb.AtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 245, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 2min 34s, sys: 104 ms, total: 2min 34s\n", + "Wall time: 2min 34s\n" + ] + } + ], + "source": [ + "%time model_offline = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", + " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", + " eval_every=1, random_state=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.019*path + 0.015*center + 0.014*constraint + 0.011*rate + 0.011*map + 0.011*cycle + 0.010*array + 0.010*visual + 0.009*activity + 0.009*iv'),\n", + " (1,\n", + " '0.018*matrix + 0.016*delay + 0.013*associative_memory + 0.013*potential + 0.012*capacity + 0.011*synaptic + 0.010*classification + 0.010*dynamic + 0.010*storage + 0.008*circuit'),\n", + " (2,\n", + " '0.040*cell + 0.015*stimulus + 0.014*probability + 0.010*region + 0.010*training + 0.009*noise + 0.008*convergence + 0.007*field + 0.007*node + 0.007*positive'),\n", + " (3,\n", + " '0.026*code + 0.024*hopfield + 0.015*sequence + 0.015*image + 0.013*length + 0.012*matrix + 0.012*energy + 0.012*field + 0.012*machine + 0.011*current'),\n", + " (4,\n", + " '0.032*processor + 0.023*activation + 0.013*dynamic + 0.013*energy + 0.012*operation + 0.011*edge + 
0.010*hidden + 0.010*machine + 0.010*update + 0.009*matrix'),\n", + " (5,\n", + " '0.022*hidden + 0.016*hidden_unit + 0.014*matrix + 0.013*sequence + 0.012*adaptive + 0.012*action + 0.010*multiple + 0.009*training + 0.008*back + 0.008*stored'),\n", + " (6,\n", + " '0.025*training + 0.015*stage + 0.014*bit + 0.013*optimization + 0.012*convergence + 0.011*eq + 0.011*surface + 0.010*neural_net + 0.010*code + 0.010*hidden'),\n", + " (7,\n", + " '0.056*cell + 0.017*node + 0.015*region + 0.013*feature + 0.013*map + 0.012*back + 0.011*control + 0.010*temporal + 0.009*decision + 0.008*activity'),\n", + " (8,\n", + " '0.023*cell + 0.013*probability + 0.013*image + 0.012*position + 0.012*current + 0.011*principle + 0.010*noise + 0.008*dimensional + 0.007*shape + 0.007*firing'),\n", + " (9,\n", + " '0.042*representation + 0.034*activity + 0.029*role + 0.025*firing + 0.021*cell + 0.017*stimulus + 0.014*variable + 0.014*product + 0.012*potential + 0.010*synaptic')]" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_offline.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 142, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.015*dynamic + 0.014*delay + 0.012*frequency + 0.011*phase + 0.010*noise + 0.008*temporal + 0.007*filter + 0.007*oscillation + 0.007*target + 0.007*controller'),\n", + " (1,\n", + " '0.017*memory + 0.017*vector + 0.015*matrix + 0.013*hopfield + 0.011*probability + 0.008*capacity + 0.008*let + 0.008*fig + 0.007*code + 0.007*distribution'),\n", + " (2,\n", + " '0.035*cell + 0.018*response + 0.012*region + 0.012*stimulus + 0.011*cortex + 0.009*fig + 0.009*sensory + 0.009*motor + 0.009*control + 0.009*velocity'),\n", + " (3,\n", + " '0.041*image + 0.038*field + 0.023*visual + 0.016*map + 0.015*receptive + 0.014*receptive_field + 0.014*motion + 0.012*eye + 0.011*direction + 0.008*vision'),\n", + " (4,\n", + " 
'0.030*hidden + 0.017*hidden_unit + 0.016*activation + 0.012*propagation + 0.010*processor + 0.009*back_propagation + 0.008*gradient + 0.007*hidden_layer + 0.007*bit + 0.006*internal'),\n", + " (5,\n", + " '0.018*vector + 0.016*sequence + 0.016*object + 0.014*memory + 0.009*adaptive + 0.009*matrix + 0.008*recurrent + 0.008*action + 0.008*self + 0.008*view'),\n", + " (6,\n", + " '0.025*classifier + 0.024*recognition + 0.023*speech + 0.014*classification + 0.013*trained + 0.011*class + 0.010*test + 0.010*noise + 0.010*hidden + 0.009*word'),\n", + " (7,\n", + " '0.033*node + 0.008*position + 0.007*connectionist + 0.005*neural_net + 0.005*tree + 0.005*character + 0.004*move + 0.004*generalization + 0.004*search + 0.004*human'),\n", + " (8,\n", + " '0.036*circuit + 0.024*analog + 0.024*chip + 0.020*voltage + 0.020*current + 0.014*synapse + 0.010*transistor + 0.010*vlsi + 0.009*device + 0.009*implementation'),\n", + " (9,\n", + " '0.030*cell + 0.021*firing + 0.019*synaptic + 0.017*activity + 0.016*potential + 0.010*synapsis + 0.010*spike + 0.009*stimulus + 0.009*memory + 0.009*membrane')]" + ] + }, + "execution_count": 142, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 149, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [21]\n" + ] + }, + { + "ename": "NameError", + "evalue": "name 'model' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m \u001b[0;34m%\u001b[0m 
\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Geoffrey E. Hinton'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'model' is not defined" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. 
Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.019*cell + 0.008*matrix + 0.008*representation + 0.008*training + 0.007*activity + 0.007*node + 0.006*dynamic + 0.006*field + 0.006*probability + 0.005*hopfield'),\n", + " (1,\n", + " '0.016*cell + 0.007*matrix + 0.007*capacity + 0.006*feature + 0.006*activity + 0.006*node + 0.006*field + 0.006*dynamic + 0.006*training + 0.006*stimulus'),\n", + " (2,\n", + " '0.012*cell + 0.010*training + 0.008*matrix + 0.007*stimulus + 0.007*hopfield + 0.006*image + 0.006*noise + 0.006*representation + 0.006*hidden + 0.006*convergence'),\n", + " (3,\n", + " '0.011*cell + 0.008*hopfield + 0.007*activity + 0.007*rate + 0.006*matrix + 0.006*hidden + 0.006*field + 0.006*training + 0.005*node + 0.005*representation'),\n", + " (4,\n", + " '0.012*cell + 0.008*activity + 0.007*matrix + 0.007*training + 0.006*field + 0.006*code + 0.006*representation + 0.006*firing + 0.006*current + 0.005*synaptic'),\n", + " (5,\n", + " '0.014*cell + 0.008*hidden + 0.007*sequence + 0.007*training + 0.006*field + 0.006*noise + 0.006*node + 0.006*dynamic + 0.006*hopfield + 0.006*representation'),\n", + " (6,\n", + " '0.025*cell + 0.011*matrix + 0.009*training + 0.006*activity + 0.006*probability + 0.006*hopfield + 0.006*synaptic + 0.005*node + 0.005*stimulus + 0.005*representation'),\n", + " (7,\n", + " '0.016*cell + 0.008*training + 0.007*activity + 0.007*representation + 0.007*matrix + 0.007*hidden + 0.007*noise + 0.006*hopfield + 0.006*probability + 0.006*firing'),\n", + " (8,\n", + " '0.012*cell + 0.008*image + 0.007*training + 0.006*feature + 0.006*hopfield + 0.006*representation + 0.006*probability + 0.006*firing + 0.006*activity + 0.005*synaptic'),\n", + " (9,\n", + " '0.012*cell + 
0.008*matrix + 0.008*activity + 0.007*representation + 0.007*training + 0.006*image + 0.006*capacity + 0.006*rate + 0.006*hopfield + 0.006*node')]" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 118, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [21]\n", + "[(0, 0.090225715808980797),\n", + " (1, 0.014047723409152875),\n", + " (3, 0.38971799227229242),\n", + " (4, 0.30695125800680684),\n", + " (5, 0.11680215128570454),\n", + " (7, 0.012641840087616362),\n", + " (8, 0.069095036605336377)]\n", + "\n", + "Geoffrey E. Hinton\n", + "Docs: [276, 235, 270]\n", + "[(0, 0.17326190127690461),\n", + " (2, 0.062709625689712375),\n", + " (3, 0.023215349136065065),\n", + " (4, 0.096803072840719678),\n", + " (5, 0.1267901905748583),\n", + " (6, 0.47635551675437715),\n", + " (7, 0.025581291656655011),\n", + " (9, 0.013530262666658776)]\n", + "\n", + "Michael I. Jordan\n", + "Docs: [205]\n", + "[(0, 0.22189029162114421),\n", + " (2, 0.033072831647105602),\n", + " (4, 0.051509519512663651),\n", + " (5, 0.63361728214218349),\n", + " (7, 0.045992411979857574),\n", + " (9, 0.012757930948596466)]\n", + "\n", + "James M. Bower\n", + "Docs: [188, 251, 244]\n", + "[(1, 0.29194178492747924),\n", + " (2, 0.47740737076112999),\n", + " (3, 0.023636461735819542),\n", + " (4, 0.010413505064807139),\n", + " (7, 0.018554608959817139),\n", + " (9, 0.17063597622983562)]\n" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. 
Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Test on small corpus" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "lda = LdaModel(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, passes=10)\n", + "var_lambda = lda.state.get_lambda()" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(atvb)\n", + "AtVb = atvb.AtVb" + ] + }, + { + "cell_type": "code", + "execution_count": 210, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 1min 25s, sys: 0 ns, total: 1min 25s\n", + "Wall time: 1min 25s\n" + ] + } + ], + "source": [ + "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", + " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", + " iterations=100, alpha='symmetric', eta='symmetric', \\\n", + " eval_every=10, random_state=1, var_lambda=None)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.071*group + 0.039*matrix + 0.032*feedback + 0.027*whose + 0.018*obtain + 0.016*scheme + 0.015*constraint + 0.015*expression + 0.014*unique + 
0.013*computational'),\n", + " (1,\n", + " '0.041*map + 0.040*field + 0.034*location + 0.033*brain + 0.030*node + 0.021*requires + 0.020*propagation + 0.016*back_propagation + 0.016*distribution + 0.014*mechanism'),\n", + " (2,\n", + " '0.084*processor + 0.075*edge + 0.052*activation + 0.034*update + 0.021*column + 0.020*run + 0.019*implementation + 0.018*control + 0.018*operation + 0.017*content'),\n", + " (3,\n", + " '0.046*image + 0.038*gradient + 0.027*flow + 0.025*field + 0.024*analog + 0.023*circuit + 0.022*constraint + 0.018*square + 0.017*vision + 0.017*technique'),\n", + " (4,\n", + " '0.023*dynamic + 0.021*phase + 0.018*cell + 0.018*variable + 0.017*with_respect + 0.017*respect + 0.016*path + 0.015*noise + 0.014*energy + 0.011*limit'),\n", + " (5,\n", + " '0.080*processor + 0.061*activation + 0.040*edge + 0.040*update + 0.021*store + 0.020*operation + 0.018*required + 0.018*address + 0.017*stored + 0.016*machine'),\n", + " (6,\n", + " '0.038*map + 0.037*brain + 0.033*stimulus + 0.024*functional + 0.021*noise + 0.020*associative_memory + 0.020*recall + 0.017*series + 0.015*scale + 0.015*associated'),\n", + " (7,\n", + " '0.049*potential + 0.044*cell + 0.035*connectivity + 0.026*synaptic + 0.025*artificial + 0.023*architecture + 0.015*temporal + 0.014*brain + 0.014*computational + 0.013*action'),\n", + " (8,\n", + " '0.075*image + 0.032*log + 0.024*dimensional + 0.018*mapping + 0.017*matrix + 0.016*center + 0.015*node + 0.014*recall + 0.013*back + 0.013*th'),\n", + " (9,\n", + " '0.058*scheme + 0.048*capacity + 0.047*probability + 0.040*representation + 0.030*stored + 0.028*binary + 0.025*represented + 0.023*code + 0.022*relationship + 0.021*bound')]" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0, 
0.55485121572041607),\n", + " (4, 0.17897884328936686),\n", + " (6, 0.14414251935372879),\n", + " (8, 0.11957893769069983)]" + ] + }, + "execution_count": 35, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.get_author_topics(0)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "## LDA" + ] + }, + { + "cell_type": "code", + "execution_count": 131, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(gensim.models.ldamodel)\n", + "LdaModel = gensim.models.ldamodel.LdaModel" + ] + }, + { + "cell_type": "code", + "execution_count": 151, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 2.48 s, sys: 524 ms, total: 3 s\n", + "Wall time: 2.43 s\n" + ] + } + ], + "source": [ + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=1, \\\n", + " iterations=1, alpha='symmetric', eta='symmetric', eval_every=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 154, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 288 ms, sys: 0 ns, total: 288 ms\n", + "Wall time: 290 ms\n", + "Bound: -3.588e+05\n" + ] + } + ], + "source": [ + "%time lda_bound = lda.bound(sample(corpus, 10))\n", + "print('Bound: %.3e' % lda_bound)" + ] + }, + { + "cell_type": "code", + "execution_count": 155, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.004*neuron + 0.003*image + 0.003*layer + 0.003*field + 0.003*class + 0.003*cell + 0.003*signal + 0.003*noise + 0.003*hidden + 0.002*node'),\n", + " (1,\n", + " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*class + 0.003*signal + 0.003*matrix + 0.003*layer + 0.003*noise + 0.002*hidden + 0.002*recognition'),\n", + " (2,\n", + " '0.004*cell + 0.003*neuron + 0.003*matrix + 
0.003*signal + 0.003*image + 0.003*hidden + 0.002*rule + 0.002*response + 0.002*field + 0.002*dynamic'),\n", + " (3,\n", + " '0.005*neuron + 0.003*layer + 0.003*image + 0.003*cell + 0.002*class + 0.002*net + 0.002*hidden + 0.002*control + 0.002*sequence + 0.002*response'),\n", + " (4,\n", + " '0.004*layer + 0.003*image + 0.003*neuron + 0.003*cell + 0.003*hidden + 0.003*signal + 0.003*component + 0.002*recognition + 0.002*net + 0.002*node'),\n", + " (5,\n", + " '0.005*image + 0.004*neuron + 0.004*layer + 0.003*hidden + 0.003*cell + 0.002*control + 0.002*class + 0.002*net + 0.002*noise + 0.002*signal'),\n", + " (6,\n", + " '0.005*neuron + 0.005*layer + 0.004*hidden + 0.003*image + 0.003*cell + 0.003*class + 0.003*rule + 0.002*noise + 0.002*net + 0.002*matrix'),\n", + " (7,\n", + " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*hidden + 0.003*recognition + 0.003*field + 0.003*layer + 0.002*noise + 0.002*node + 0.002*component'),\n", + " (8,\n", + " '0.004*neuron + 0.003*image + 0.003*signal + 0.003*recognition + 0.003*cell + 0.003*layer + 0.003*noise + 0.003*rule + 0.002*class + 0.002*hidden'),\n", + " (9,\n", + " '0.005*neuron + 0.004*class + 0.003*layer + 0.003*image + 0.003*cell + 0.002*hidden + 0.002*signal + 0.002*control + 0.002*field + 0.002*net')]" + ] + }, + "execution_count": 155, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lda.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 150, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Document 5\n", + "[(0, 0.11806384798431847),\n", + " (1, 0.099612053680607937),\n", + " (2, 0.076668193975964943),\n", + " (3, 0.075072909998916373),\n", + " (4, 0.067243477696594139),\n", + " (5, 0.1004083782314163),\n", + " (6, 0.1049567779188061),\n", + " (7, 0.10291505408912022),\n", + " (8, 0.12682229186467239),\n", + " (9, 0.12823701455958317)]\n", + "\n", + "Document 50\n", + "[(0, 
0.12019310780479558),\n", + " (1, 0.11241507965934601),\n", + " (2, 0.084261861610351887),\n", + " (3, 0.074722708722277847),\n", + " (4, 0.089536455599529025),\n", + " (5, 0.11951468917677081),\n", + " (6, 0.077140801257090358),\n", + " (7, 0.086592729473957755),\n", + " (8, 0.12048290979429044),\n", + " (9, 0.11513965690159025)]\n" + ] + } + ], + "source": [ + "d = 5\n", + "print('Document %d' %d)\n", + "pprint(lda[corpus[d]])\n", + "\n", + "d = 50\n", + "print('\\nDocument %d' %d)\n", + "pprint(lda[corpus[d]])" + ] + }, + { + "cell_type": "code", + "execution_count": 145, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "['scaling',\n", + " 'property',\n", + " 'of',\n", + " 'coarse',\n", + " 'coded',\n", + " 'symbol',\n", + " 'memory',\n", + " 'ronald',\n", + " 'rosenfeld',\n", + " 'david',\n", + " 'touretzky',\n", + " 'computer',\n", + " 'science',\n", + " 'department',\n", + " 'carnegie',\n", + " 'mellon',\n", + " 'university',\n", + " 'pittsburgh',\n", + " 'pennsylvania',\n", + " 'abstract']" + ] + }, + "execution_count": 145, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "docs[0][:20]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Convergence and speed plots" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from bokeh.io import output_notebook\n", + "from bokeh.models.layouts import Row, Column\n", + "from bokeh.layouts import gridplot\n", + "from bokeh.models import Title, Legend\n", + "from bokeh.plotting import figure, output_file, show" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " Loading BokehJS ...\n", + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/javascript": [ + "\n", + "(function(global) {\n", + " function now() {\n", + " return new Date();\n", + " }\n", + "\n", + " var force = \"1\";\n", + "\n", + " if (typeof (window._bokeh_onload_callbacks) === \"undefined\" || force !== \"\") {\n", + " window._bokeh_onload_callbacks = [];\n", + " window._bokeh_is_loading = undefined;\n", + " }\n", + "\n", + "\n", + " \n", + " if (typeof (window._bokeh_timeout) === \"undefined\" || force !== \"\") {\n", + " window._bokeh_timeout = Date.now() + 5000;\n", + " window._bokeh_failed_load = false;\n", + " }\n", + "\n", + " var NB_LOAD_WARNING = {'data': {'text/html':\n", + " \"
\\n\"+\n", + " \"

\\n\"+\n", + " \"BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this \\n\"+\n", + " \"may be due to a slow or bad network connection. Possible fixes:\\n\"+\n", + " \"

\\n\"+\n", + " \"
    \\n\"+\n", + " \"
  • re-rerun `output_notebook()` to attempt to load from CDN again, or
  • \\n\"+\n", + " \"
  • use INLINE resources instead, as so:
  • \\n\"+\n", + " \"
\\n\"+\n", + " \"\\n\"+\n", + " \"from bokeh.resources import INLINE\\n\"+\n", + " \"output_notebook(resources=INLINE)\\n\"+\n", + " \"\\n\"+\n", + " \"
\"}};\n", + "\n", + " function display_loaded() {\n", + " if (window.Bokeh !== undefined) {\n", + " Bokeh.$(\"#e54e8713-405d-40ce-b938-8dcb097d7df2\").text(\"BokehJS successfully loaded.\");\n", + " } else if (Date.now() < window._bokeh_timeout) {\n", + " setTimeout(display_loaded, 100)\n", + " }\n", + " }\n", + "\n", + " function run_callbacks() {\n", + " window._bokeh_onload_callbacks.forEach(function(callback) { callback() });\n", + " delete window._bokeh_onload_callbacks\n", + " console.info(\"Bokeh: all callbacks have finished\");\n", + " }\n", + "\n", + " function load_libs(js_urls, callback) {\n", + " window._bokeh_onload_callbacks.push(callback);\n", + " if (window._bokeh_is_loading > 0) {\n", + " console.log(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n", + " return null;\n", + " }\n", + " if (js_urls == null || js_urls.length === 0) {\n", + " run_callbacks();\n", + " return null;\n", + " }\n", + " console.log(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n", + " window._bokeh_is_loading = js_urls.length;\n", + " for (var i = 0; i < js_urls.length; i++) {\n", + " var url = js_urls[i];\n", + " var s = document.createElement('script');\n", + " s.src = url;\n", + " s.async = false;\n", + " s.onreadystatechange = s.onload = function() {\n", + " window._bokeh_is_loading--;\n", + " if (window._bokeh_is_loading === 0) {\n", + " console.log(\"Bokeh: all BokehJS libraries loaded\");\n", + " run_callbacks()\n", + " }\n", + " };\n", + " s.onerror = function() {\n", + " console.warn(\"failed to load library \" + url);\n", + " };\n", + " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", + " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", + " }\n", + " };var element = document.getElementById(\"e54e8713-405d-40ce-b938-8dcb097d7df2\");\n", + " if (element == null) {\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid 'e54e8713-405d-40ce-b938-8dcb097d7df2' but 
no matching script tag was found. \")\n", + " return false;\n", + " }\n", + "\n", + " var js_urls = ['https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.js', 'https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.js'];\n", + "\n", + " var inline_js = [\n", + " function(Bokeh) {\n", + " Bokeh.set_log_level(\"info\");\n", + " },\n", + " \n", + " function(Bokeh) {\n", + " \n", + " Bokeh.$(\"#e54e8713-405d-40ce-b938-8dcb097d7df2\").text(\"BokehJS is loading...\");\n", + " },\n", + " function(Bokeh) {\n", + " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", + " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", + " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", + " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", + " }\n", + " ];\n", + "\n", + " function run_inline_js() {\n", + " \n", + " if ((window.Bokeh !== undefined) || (force === \"1\")) {\n", + " for (var i = 0; i < inline_js.length; i++) {\n", + " inline_js[i](window.Bokeh);\n", + " }if (force === \"1\") {\n", + " display_loaded();\n", + " }} else if (Date.now() < window._bokeh_timeout) {\n", + " setTimeout(run_inline_js, 100);\n", + " } else if (!window._bokeh_failed_load) {\n", + " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", + " window._bokeh_failed_load = true;\n", + " } else if (!force) {\n", + " var cell = $(\"#e54e8713-405d-40ce-b938-8dcb097d7df2\").parents('.cell').data().cell;\n", + " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", + " }\n", + "\n", + " }\n", + "\n", + " if (window._bokeh_is_loading === 0) {\n", + " console.log(\"Bokeh: BokehJS loaded, going straight to plotting\");\n", + " run_inline_js();\n", + " } else {\n", + " load_libs(js_urls, function() {\n", + " console.log(\"Bokeh: BokehJS plotting callback run at\", now());\n", + " 
run_inline_js();\n", + " });\n", + " }\n", + "}(this));" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "output_notebook()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 10 iterations (passes)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# NOTE: the times of both offline and online are *without* vectorization!\n", + "\n", + "offline = [-3.958e+05, -3.430e+05, -3.428e+05, -3.426e+05, -3.423e+05, -3.417e+05, -3.406e+05, -3.388e+05, -3.361e+05, -3.326e+05, -3.285e+05]\n", + "\n", + "online_1iter = [-3.958e+05, -3.471e+05, -3.456e+05, -3.417e+05, -3.338e+05, -3.244e+05, -3.165e+05, -3.111e+05, -3.075e+05, -3.051e+05, -3.036e+05]\n", + "\n", + "online_10iter = [-3.958e+05, -3.343e+05, -3.223e+05, -3.128e+05, -3.072e+05, -3.041e+05, -3.023e+05, -3.011e+05, -3.003e+05, -2.997e+05, -2.993e+05]" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "iterations = range(10)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "offline_time = [20 * 60 + 49, 21 * 60 + 8, 21 * 60 + 25, 21 * 60 + 41, 21 * 60 + 56, 22 * 60 + 11, 22 * 60 + 25, 22 * 60 + 41, 22 * 60 + 56, 23 * 60 + 11, 23 * 60 + 26]\n", + "offline_time = np.array(offline_time) - offline_time[0]\n", + "\n", + "online_1iter_time = [23 * 60 + 54, 23 * 60 + 55, 23 * 60 + 55, 23 * 60 + 56, 23 * 60 + 58, 23 * 60 + 59, 24 * 60 + 0, 24 * 60 + 1, 24 * 60 + 2, 24 * 60 + 3, 24 * 60 + 4]\n", + "online_1iter_time = np.array(online_1iter_time) - online_1iter_time[0]\n", + " \n", + "online_10iter_time = [24 * 60 + 59, 25 * 60 + 0, 25 * 60 + 2, 25 * 60 + 3, 25 * 60 + 4, 25 * 60 + 5, 25 * 60 + 6, 25 * 60 + 7, 25 * 60 + 8, 25 * 60 + 8, 25 * 60 + 9]\n", + "online_10iter_time = np.array(online_10iter_time) - 
online_10iter_time[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p = figure(title=(\"Variational lower bound (initial bound at %.3e)\" % offline[0]), x_axis_label='Iterations', y_axis_label='Bound')\n", + "p.circle(iterations[1:], offline[1:], legend=\"offline\", size=5, color='red')\n", + "p.circle(iterations[1:], online_1iter[1:], legend=\"online 1 iter\", size=5, color='green')\n", + "p.circle(iterations[1:], online_10iter[1:], legend=\"online 10 iter.\", size=5, color='blue')\n", + "p.plot_height=400\n", + "p.plot_width=600\n", + "p.toolbar_location = None\n", + "show(p)" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title=(\"Offline (initial bound at %.3e)\" % offline[0]), x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "p1.circle(offline_time[1:], offline[1:], size=5, color='red')\n", + "p1.plot_height=400\n", + "p1.plot_width=400\n", + "p1.toolbar_location = None\n", + "\n", + "p2 = figure(title=\"Online\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "s1 = p2.circle(online_1iter_time[1:], online_1iter[1:], size=5, line_color='green')\n", + "s2 = p2.circle(online_10iter_time[1:], online_10iter[1:], size=5, line_color='blue')\n", + "p2.plot_height=400\n", + "p2.plot_width=400\n", + "p2.toolbar_location = None\n", + "\n", + "legend = Legend(items=[('1 iter', [s1]), ('10 iter', [s2])], location=(-100, -200))\n", + "p2.add_layout(legend, 'right')\n", + "\n", + "p3 = Row(p1, p2)\n", + "\n", + "show(p3)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 100 iterations (passes)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# NOTE: the times of both offline and online are *without* vectorization!\n", + "\n", + "offline = [-3.957e+05, -3.304e+05, -3.049e+05, -3.005e+05, -2.989e+05, -2.981e+05, -2.976e+05, -2.973e+05, -2.970e+05, -2.968e+05, -2.966e+05]\n", + "\n", + "online_1iter = [-3.957e+05, -3.072e+05, -3.008e+05, -2.997e+05, -2.991e+05, -2.986e+05, -2.983e+05, -2.981e+05, -2.979e+05, -2.977e+05, -2.976e+05]\n", + "\n", + "online_10iter = [-3.957e+05, -3.001e+05, -2.975e+05, -2.965e+05, -2.961e+05, -2.958e+05, -2.955e+05, -2.954e+05, -2.953e+05, -2.952e+05, -2.951e+05]" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "offline_time = [38 * 60 + 8, 40 * 60 + 18, 42 * 60 + 36, 44 * 60 + 44, 46 * 60 + 57, 49 * 60 + 12, 51 * 60 + 19, 53 * 60 + 29, 55 * 60 + 
40, 57 * 60 + 56, 60 * 60 + 6]\n", + "offline_time = np.array(offline_time) - offline_time[0]\n", + "\n", + "online_1iter_time = [3 * 60 + 36, 3 * 60 + 59, 4 * 60 + 20, 4 * 60 + 43, 5 * 60 + 6, 5 * 60 + 28, 5 * 60 + 51, 6 * 60 + 14, 6 * 60 + 36, 6 * 60 + 56, 7 * 60 + 16]\n", + "online_1iter_time = np.array(online_1iter_time) - online_1iter_time[0]\n", + "\n", + "online_10iter_time = [8 * 60 + 1, 10 * 60 + 28, 12 * 60 + 50, 15 * 60 + 15, 17 * 60 + 40, 20 * 60 + 10, 22 * 60 + 35, 25 * 60 + 7, 27 * 60 + 31, 29 * 60 + 54, 32 * 60 + 13]\n", + "online_10iter_time = np.array(online_10iter_time) - online_10iter_time[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "iterations = range(0, 100, 10)" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p = figure(title=\"Variational lower bound\", x_axis_label='Iteration (pass)', y_axis_label='Bound')\n", + "s1 = p.circle(iterations[1:], offline[1:], size=5, color='red')\n", + "s2 = p.circle(iterations[1:], online_1iter[1:], size=5, color='green')\n", + "s3 = p.circle(iterations[1:], online_10iter[1:],size=5, color='blue')\n", + "p.plot_height=400\n", + "p.plot_width=600\n", + "#p.toolbar_location = None\n", + "\n", + "legend = Legend(items=[('offline', [s1]), ('online 1 iter', [s2]), ('online 10 iter', [s3])], location=(-150, -200))\n", + "p.add_layout(legend, 'right')\n", + "\n", + "show(p)" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title=\"Offline\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "p1.circle(offline_time[1:], offline[1:], size=5, color='red')\n", + "p1.plot_height=400\n", + "p1.plot_width=300\n", + "#p1.toolbar_location = None\n", + "\n", + "p2 = figure(title=\"Online 1 iter\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "s1 = p2.circle(online_1iter_time[1:], online_1iter[1:], size=5, line_color='green')\n", + "p2.plot_height=400\n", + "p2.plot_width=300\n", + "#p2.toolbar_location = None\n", + "\n", + "p3 = figure(title=\"Online 10 iter\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", + "s3 = p3.circle(online_10iter_time[1:], online_10iter[1:], size=5, line_color='blue')\n", + "p3.plot_height=400\n", + "p3.plot_width=300\n", + "#p3.toolbar_location = None\n", + "\n", + "p4 = gridplot([[p1, p2, p3]], toolbar_location='below')\n", + "\n", + "show(p4)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 6.39130435, 6.56818182, 6.47761194, 6.43333333, 6.50892857,\n", + " 6.47407407, 6.49367089, 6.5 , 6.565 , 6.6 ])" + ] + }, + "execution_count": 108, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "online_10iter_time[1:] / online_1iter_time[1:]" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.2" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/docs/notebooks/plots.html b/docs/notebooks/plots.html new file mode 100644 index 0000000000..fa802351d5 --- /dev/null +++ 
b/docs/notebooks/plots.html @@ -0,0 +1,43 @@ + + + + + + Bokeh Plot + + + + + + + + + +
+
+
+ + + + \ No newline at end of file diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index 3063de7956..253faa5417 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -16,11 +16,7 @@ from .ldamulticore import LdaMulticore from .phrases import Phrases from .normmodel import NormModel -from .onlineatvb import OnlineAtVb -from .minibatchatvb import MinibatchAtVb -from .atvb import AtVb -from .atvb2 import AtVb2 -from .onlineatvb2 import OnlineAtVb2 +from .atmodel import AuthorTopicModel from . import wrappers diff --git a/gensim/models/onlineatvb2.py b/gensim/models/atmodel.py similarity index 99% rename from gensim/models/onlineatvb2.py rename to gensim/models/atmodel.py index ae0d8efd56..0ccf52bd01 100644 --- a/gensim/models/onlineatvb2.py +++ b/gensim/models/atmodel.py @@ -38,7 +38,7 @@ logger = logging.getLogger(__name__) -class OnlineAtVb2(LdaModel): +class AuthorTopicModel(LdaModel): """ Train the author-topic model using online variational Bayes. """ From 4286e907c9f255ea19de98ed9b0f285cb42c932b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 16 Nov 2016 14:14:56 +0100 Subject: [PATCH 044/100] Cleaning up code. Removed or changed a lot of comments. Removed option of computing log probabilities, although the method still exists. Added a method of computing all the terms of the bound at once. --- gensim/models/atmodel.py | 112 +++++++++++++-------------------------- 1 file changed, 38 insertions(+), 74 deletions(-) diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index 0ccf52bd01..477501158b 100644 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -9,9 +9,6 @@ Author-topic model. """ -# NOTE: from what I understand, my name as well as Radim's should be attributed copyright above? 
- -from time import time import pdb from pdb import set_trace as st @@ -27,7 +24,7 @@ from pprint import pprint -# log(sum(exp(x))) that tries to avoid overflow +# log(sum(exp(x))) that tries to avoid overflow. NOTE: not used at the moment. try: # try importing from here if older scipy is installed from scipy.maxentropy import logsumexp @@ -42,7 +39,6 @@ class AuthorTopicModel(LdaModel): """ Train the author-topic model using online variational Bayes. """ - # TODO: inherit interfaces.TransformationABC. def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, @@ -53,7 +49,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') - # NOTE: this stuff is confusing to me (from LDA code). Why would id2word not be none, but have length 0? + # NOTE: Why would id2word not be none, but have length 0? (From LDA code) if self.id2word is None: logger.warning("no word id mapping provided; initializing from corpus, assuming identity") self.id2word = utils.dict_from_corpus(corpus) @@ -127,7 +123,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.eval_every = eval_every self.random_state = random_state - # NOTE: I don't think this necessarily is a good way to initialize the topics. + # NOTE: this is not necessarily a good way to initialize the topics. self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) @@ -141,17 +137,13 @@ def rho(self, t): def inference(self, corpus=None, var_lambda=None): if corpus is None: - # TODO: I can't remember why I used "copy()" here. + # TODO: is copy necessary here? 
corpus = self.corpus.copy() self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. logger.info('Starting inference. Training on %d documents.', len(corpus)) - # Whether or not to evaluate bound and log probability, respectively. - bound_eval = True - logprob_eval = False # TODO: remove log prob evaluation, but keep the method. - vectorized = True # FIXME: set to True. if var_lambda is None: @@ -178,7 +170,7 @@ def inference(self, corpus=None, var_lambda=None): self.var_lambda = var_lambda - var_phi = dict() + var_phi = dict() # TODO: remove once non-vectorized code is not used anymore. # Initialize dirichlet expectations. Elogtheta = dirichlet_expectation(var_gamma) @@ -186,21 +178,17 @@ def inference(self, corpus=None, var_lambda=None): Elogbeta = dirichlet_expectation(var_lambda) expElogbeta = numpy.exp(Elogbeta) - t = 0 if self.eval_every > 0: - if bound_eval: - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - if logprob_eval: - logprob = self.eval_logprob() - logger.info('Log prob: %.3e.', logprob) + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) for _pass in xrange(self.passes): converged = 0 # Number of documents converged for current pass over corpus. - start = time() for d, doc in enumerate(corpus): + # TODO: a smarter of computing rho may be necessary. In ldamodel, + # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). 
rhot = self.rho(d + _pass) ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. @@ -209,7 +197,6 @@ def inference(self, corpus=None, var_lambda=None): expElogthetad = expElogtheta[authors_d, :] expElogbetad = expElogbeta[:, ids] - if vectorized: phinorm = numpy.zeros(len(ids)) for a in authors_d: @@ -252,13 +239,9 @@ def inference(self, corpus=None, var_lambda=None): # Update gamma and lambda. # Interpolation between document d's "local" gamma (tilde_gamma), # and "global" gamma (var_gamma). Same goes for lambda. - # TODO: I may need to be smarter about computing rho. In ldamodel, - # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). var_gamma_temp = (1 - rhot) * var_gamma + rhot * tilde_gamma # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. - # FIXME: I don't need to update the entire gamma, as I only updated a few rows of it, - # corresponding to the authors in the document. The same goes for Elogtheta. Elogtheta[authors_d, :] = dirichlet_expectation(var_gamma_temp[authors_d, :]) expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :]) @@ -320,20 +303,18 @@ def inference(self, corpus=None, var_lambda=None): if self.eval_every > 0 and (_pass + 1) % self.eval_every == 0: self.var_gamma = var_gamma self.var_lambda = var_lambda - if bound_eval: - prev_bound = bound - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - if logprob_eval: - logprob = self.eval_logprob() - logger.info('Log prob: %.3e.', logprob) + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + # NOTE: bound can be computed as below. We compute each term for now because it can be useful for debugging. + bound = self.eval_bound(Elogtheta, Elogbeta) #logger.info('Converged documents: %d/%d', converged, self.num_docs) - # TODO: consider whether to include somthing like this: + # TODO: consider whether to include bound convergence criterion, something like this: #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold: # break # End of pass over corpus loop. @@ -344,16 +325,12 @@ def inference(self, corpus=None, var_lambda=None): # then compute the bound. self.var_gamma = var_gamma self.var_lambda = var_lambda - if bound_eval: - prev_bound = bound - word_bound = self.word_bound(Elogtheta, Elogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - if logprob_eval: - logprob = self.eval_logprob() - logger.info('Log prob: %.3e.', logprob) + prev_bound = bound + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) self.var_lambda = var_lambda @@ -361,6 +338,13 @@ def inference(self, corpus=None, var_lambda=None): return var_gamma, var_lambda + def eval_bound(self, Elogtheta, Elogbeta, doc_ids=None): + word_bound = self.word_bound(Elogtheta, Elogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + return bound + def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): """ Compute the expectation of the log conditional likelihood of the data, @@ -371,16 +355,16 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): """ # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + # NOTE: computing bound is very very computationally intensive. I could, for example, + # only use a portion of the data to do that (even a held-out set). if doc_ids is None: docs = self.corpus else: docs = [self.corpus[d] for d in doc_ids] - # NOTE: computing the bound this way is very numerically unstable, which is why + # NOTE: computing the bound this way may be numerically unstable, which is why # "logsumexp" is used in the LDA code. - # NOTE: computing bound is very very computationally intensive. I could, for example, - # only use a portion of the data to do that (even a held-out set). bound= 0.0 for d, doc in enumerate(docs): authors_d = self.doc2author[d] @@ -395,24 +379,9 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): bound_d += cts[vi] * numpy.log(bound_v) bound += numpy.log(1.0 / len(authors_d)) + bound_d - # For per-word likelihood, do: + # TODO: consider using per-word bound, i.e. 
# likelihood *= 1 /sum(len(doc) for doc in docs) - # TODO: can I do something along the lines of (as in ldamodel): - # likelihood += numpy.sum(cnt * logsumexp(Elogthetad + Elogbeta[:, id]) for id, cnt in doc) - # If I computed the LDA bound the way I compute the author-topic bound above: - # bound = 0.0 - # for d, doc in enumerate(docs): - # ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - # cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - # bound_d = 0.0 - # for vi, v in enumerate(ids): - # bound_v = 0.0 - # for k in xrange(self.num_topics): - # bound_v += numpy.exp(Elogtheta[d, k] + Elogbeta[k, v]) - # bound_d += cts[vi] * numpy.log(bound_v) - # bound += bound_d - return bound def theta_bound(self, Elogtheta): @@ -420,7 +389,6 @@ def theta_bound(self, Elogtheta): for a in xrange(self.num_authors): var_gamma_a = self.var_gamma[a, :] Elogtheta_a = Elogtheta[a, :] - # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) @@ -446,8 +414,6 @@ def eval_logprob(self, doc_ids=None): summing over all documents, and dividing by the number of documents. """ - # TODO: if var_lambda is supplied from LDA, normalizing it every time - # is unnecessary. 
norm_gamma = self.var_gamma.copy() for a in xrange(self.num_authors): norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] @@ -508,8 +474,6 @@ def get_author_topics(self, author_id, minimum_probability=None): author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) if topicvalue >= minimum_probability] - # author_name = self.id2author[author_id] - return author_topics From 12f231c665f2330ca7a1aa19752269d41af5bc2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Mon, 21 Nov 2016 11:27:39 +0100 Subject: [PATCH 045/100] Was computing the norm of phi incorrectly, fixed that, speed-up not as large as first thought. Made a method for computing phinorm. Updating lambda in a different way. Implemented a numerically stable softmax (phi is a softmax). --- gensim/models/atmodel.py | 89 ++++++++++++++++++++++++++++------------ 1 file changed, 62 insertions(+), 27 deletions(-) diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index 477501158b..621cd7d575 100644 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -132,9 +132,26 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, if corpus is not None: self.inference(corpus, var_lambda=var_lambda) + def __str__(self): + return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s)" % \ + (self.num_terms, self.num_topics, self.num_authors, self.decay) + def rho(self, t): return pow(self.offset + t, -self.decay) + def compute_phinorm(self, ids, authors_d, Elogtheta, Elogbeta, maxElogtheta=None, maxElogbeta=None): + phinorm = numpy.zeros(len(ids)) + if self.numstable_sm: + for a in authors_d: + for k in xrange(self.num_topics): + phinorm += numpy.exp(Elogtheta[a, :] - maxElogtheta) * numpy.exp(Elogbeta[:, ids] - maxElogbeta) + else: + for a in authors_d: + for k in xrange(self.num_topics): + phinorm += numpy.exp(Elogtheta[a, :]) * numpy.exp(Elogbeta[:, ids]) + + return phinorm + def 
inference(self, corpus=None, var_lambda=None): if corpus is None: # TODO: is copy necessary here? @@ -145,6 +162,8 @@ def inference(self, corpus=None, var_lambda=None): logger.info('Starting inference. Training on %d documents.', len(corpus)) vectorized = True # FIXME: set to True. + numstable_sm = False # FIXME: set to True. + self.numstable_sm = numstable_sm if var_lambda is None: self.optimize_lambda = True @@ -174,9 +193,15 @@ def inference(self, corpus=None, var_lambda=None): # Initialize dirichlet expectations. Elogtheta = dirichlet_expectation(var_gamma) - expElogtheta = numpy.exp(Elogtheta) Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) + if numstable_sm: + maxElogtheta = Elogtheta.max() + maxElogbeta = Elogbeta.max(axis=0) + expElogtheta = numpy.exp(Elogtheta - maxElogtheta) + expElogbeta = numpy.exp(Elogbeta - maxElogbeta) + else: + expElogtheta = numpy.exp(Elogtheta) + expElogbeta = numpy.exp(Elogbeta) if self.eval_every > 0: word_bound = self.word_bound(Elogtheta, Elogbeta) @@ -194,20 +219,21 @@ def inference(self, corpus=None, var_lambda=None): cts = numpy.array([cnt for _, cnt in doc]) # Word counts. authors_d = self.doc2author[d] # List of author IDs for document d. - expElogthetad = expElogtheta[authors_d, :] - expElogbetad = expElogbeta[:, ids] - if vectorized: - phinorm = numpy.zeros(len(ids)) - for a in authors_d: - phinorm += numpy.dot(expElogtheta[a, :], expElogbetad) + if not numstable_sm: + maxElogbeta = None + maxElogtheta = None + phinorm = self.compute_phinorm(ids, authors_d, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta) else: var_phi = dict() + expElogthetad = expElogtheta[authors_d, :] + expElogbetad = expElogbeta[:, ids] + for iteration in xrange(self.iterations): #logger.info('iteration %i', iteration) - lastgamma = tilde_gamma.copy() + lastgamma = tilde_gamma[authors_d, :] ## Update phi. 
if not vectorized: @@ -236,24 +262,28 @@ def inference(self, corpus=None, var_lambda=None): for a in authors_d: tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) + # Update gamma and lambda. # Interpolation between document d's "local" gamma (tilde_gamma), # and "global" gamma (var_gamma). Same goes for lambda. - var_gamma_temp = (1 - rhot) * var_gamma + rhot * tilde_gamma + tilde_gamma[authors_d, :] = (1 - rhot) * var_gamma[authors_d, :] + rhot * tilde_gamma[authors_d, :] # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. - Elogtheta[authors_d, :] = dirichlet_expectation(var_gamma_temp[authors_d, :]) - expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :]) + Elogtheta[authors_d, :] = dirichlet_expectation(tilde_gamma[authors_d, :]) + if numstable_sm: + temp_max = Elogtheta[authors_d, :].max() + maxElogtheta = temp_max if temp_max > maxElogtheta else maxElogtheta + expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :] - maxElogtheta) + else: + expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :]) if vectorized: - phinorm = numpy.zeros(len(ids)) - for a in authors_d: - phinorm += numpy.dot(expElogtheta[a, :], expElogbetad) - + phinorm = self.compute_phinorm(ids, authors_d, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta) + # Check for convergence. # Criterion is mean change in "local" gamma and lambda. if iteration > 0: - meanchange_gamma = numpy.mean(abs(var_gamma_temp - lastgamma)) + meanchange_gamma = numpy.mean(abs(tilde_gamma[authors_d, :] - lastgamma)) gamma_condition = meanchange_gamma < self.threshold # logger.info('Mean change in gamma: %.3e', meanchange_gamma) if gamma_condition: @@ -263,21 +293,19 @@ def inference(self, corpus=None, var_lambda=None): # End of iterations loop. # FIXME: there are too many different gamma variables! 
- var_gamma = var_gamma_temp.copy() + var_gamma = tilde_gamma.copy() if self.optimize_lambda: # Update lambda. # only one update per document). if vectorized: - phi_sum = numpy.zeros((self.num_topics, len(ids))) - phinorm_rep = numpy.tile(phinorm, [self.num_topics, 1]) - for a in authors_d: - expElogtheta_a_rep = numpy.tile(expElogtheta[a, :], [len(ids), 1]) - phi_sum += expElogtheta_a_rep.T * expElogbetad / phinorm_rep + # NOTE: summing up sstats style of updating lambda. + expElogtheta_sum_a = expElogtheta[authors_d, :].sum(axis=0) + sstats = numpy.outer(expElogtheta_sum_a.T, cts/phinorm) + sstats *= expElogbeta[:, ids] eta_rep = numpy.tile(self.eta[ids], [self.num_topics, 1]) - cts_rep = numpy.tile(cts, [self.num_topics, 1]) - tilde_lambda[:, ids] = eta_rep + self.num_docs * cts_rep * phi_sum + tilde_lambda[:, ids] = eta_rep + self.num_docs * sstats else: for k in xrange(self.num_topics): for vi, v in enumerate(ids): @@ -292,14 +320,19 @@ def inference(self, corpus=None, var_lambda=None): # the words in document d, hence the [:, ids] indexing. var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids] Elogbeta = dirichlet_expectation(var_lambda) - expElogbeta = numpy.exp(Elogbeta) + if numstable_sm: + temp_max = Elogbeta[:, ids].max(axis=0) + maxElogbeta[ids][temp_max > maxElogbeta[ids]] = temp_max[temp_max > maxElogbeta[ids]] + expElogbeta = numpy.exp(Elogbeta - maxElogbeta) + else: + expElogbeta = numpy.exp(Elogbeta) var_lambda = var_lambda.copy() # Print topics: # pprint(self.show_topics()) - # End of corpus loop. 
+ if self.eval_every > 0 and (_pass + 1) % self.eval_every == 0: self.var_gamma = var_gamma self.var_lambda = var_lambda @@ -376,6 +409,8 @@ def word_bound(self, Elogtheta, Elogbeta, doc_ids=None): for k in xrange(self.num_topics): for a in authors_d: bound_v += numpy.exp(Elogtheta[a, k] + Elogbeta[k, v]) + #if bound_v == 0.0: + # st() bound_d += cts[vi] * numpy.log(bound_v) bound += numpy.log(1.0 / len(authors_d)) + bound_d From 76764ff02e906bad07ff493ad5ee5a3ece837280 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Tue, 22 Nov 2016 12:33:50 +0100 Subject: [PATCH 046/100] Working on numerically stable phi update and bound computation. Is not converging the same way, so it is an option for now. --- docs/notebooks/at_with_nips.ipynb | 641 ++++++++++++++++++++++++-- docs/notebooks/at_with_nips_old.ipynb | 119 +++-- gensim/models/atmodel.py | 74 ++- 3 files changed, 718 insertions(+), 116 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 1459e154cc..a3c7735bf7 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 18, + "execution_count": 1, "metadata": { "collapsed": false }, @@ -62,6 +62,7 @@ "from pprint import pprint\n", "from random import sample\n", "import bokeh\n", + "import line_profiler\n", "\n", "import logging\n", "\n", @@ -322,7 +323,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAicAAAGcCAYAAAACtQD2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XecXVW5//HPl0DAABmaSeggEKqUhBK8EOSCAaSIYmGw\nACoWhMuNckURfnDBgogEaSqKoqCDCNIDoQmEJphwgUDovSShpJAQ0ub5/bH2YfbsnOnl7Jl836/X\neZ05a6+997P3mZnznLXXWlsRgZmZmVlZLFfrAMzMzMzynJyYmZlZqTg5MTMzs1JxcmJmZmal4uTE\nzMzMSsXJiZmZmZWKkxMzMzMrFScnZmZmVipOTszMzKxUnJyYWTOSXpV0Ue71XpIaJX2sF/b9Y0mL\ncq8HZPs+u6f3ne3v69n+1umN/XWWpB9Iel7SYkkP1jqe9pK0SXZ+D6t1LFZuTk6sFCQdnv3Tqvb4\naa3jW8ZUu6dFh+9zIelHkg7sxL4bO7qvjmoltqATx9qbJH0S+CnwT+AI4OSaBmTWA5avdQBmOUH6\nR/tioXxK74diFRFxu6QPRcTCDq56EnApcH0H1jkFOK2D++mMlmL7A3BpJ461N+0JLAK+Hr45mvVT\nTk6sbG6OiMntrSxJwMCIWNCDMS3zevrDWtKgiHgvIhrphZaTlmQf9mVOTACGAvPKmJj479G6iy/r\nWJ+R738g6cuSHgfeB/bKlkvSdyU9Lul9SW9IulDS4MJ2JOn/ZX0r5kq6TdIWkl4p9LVo1v8hV161\nX4Kk/SVNzLY5W9J1krYo1LlM0kxJ62XL35U0Q9IZVfYjSWMlPSppflZvvKTts+X3Svp3C+fqOUmt\ntli0dB6q1Fuqz4mk4ZL+IWlaFtvLkv4iaeXK+wQMBCrnqrFybrPz2pht42+SZpIuUbR4zrNlX5b0\nVLa/B4t9YLJz+0yV9T7YZjtia+m9PTb3e/WapHOr/F7dI2mypK0l/VPSe9m5/W5r70Nu/eUlnZK9\nd+8r9Sk5TdIKhdi/CNRlcS5RC/03st+dRZJWzpWdkK13Rq5s+ez9Py1XtoqkcdnfxPuSpkr678L2\n2/p7XF3SnyXNkvSOpIuBZucsq7e2pD9l5+p9Sa9LulrSeu05b9Y/ueXEyqZO0pr5goh4u1BnDHAo\ncAHwDvByVv4HoD57Pgf4CHAssJ2k3bNv5ZCu158AXAdMAEYCtwAfKuynpf4HS5VLOgK4GBgPfB9Y\nGTgamChph4h4Nbfu8tn+JgLfy47nfyQ9ExEX5zb7Z9IH0fXARaQP1NHALsD/ZcsvlDQ8Ip7OxbIr\nsDHwwyqx57X3PFTirmx/xazecqTzPB1YDzgQGBwR8yR9CfgjcE92XgCeLWzrH8CTwA9yZS2d872A\nw4BzSZc0vgNMkLRjRDzVxroflEfEknbEVnxvfwycCNxM+p3bkvTejiz8XgWwFnAT8HfgcuDzwC8k\nPRIRt1eJLe+S7BgvJ/1ujCJdftoc+EIu9qOB7YBvAALubWF7E0nv0X+Q3i+A3YAlwO65eiNJ7/nd\n2fEKuDFb73fAo8B+wNmS1o6IEwr7WervMdvG9aTf1QuBp4BDSOe9+B5dA2xKem9fJrUMjSH9Tr2K\nLZsiwg8/av4ADic15xcfS3J1BmRlC4FNC+t/PFt2SKF8v6z8s9nrIdn6VxXqnZHVuyhXdjqwsEqs\nXyP9g18ne70qMAs4r1BvaFZ+fq7s0mzd7xfq/h9wX+71J7J4zmzlnK0GzAdOK5RfkO13pVbW7ch5\n2CuL+WPZ65FZnQPbeE/n57dTOK+NwCUtLFuYe115zxcD2+TKNyR9S7+8cG6fbmubbcRWfG+HZufp\nukK9/8rqfTFXNjEr+3yubCApeftrG+dqRHacFxTKz862+R+F43ynHX9TA4B3gdNzZe+Qkp/3K78f\nwP9kx7hK9vqQLJbjC9u7ipQYbtCOv8fKNv4rV7YcKSFcAhyWl
a1RrOeHHxHhyzpWKgF8G9g79/hE\nlXq3R8SzhbLPkv7x3ilpzcoD+Dfpg2jPrN4+pH+q5xXWP6cLce9LSlAuL+x7CfBQbt95FxVe30Nq\n6ak4hPSBfHpLO42IWcANpG/bQGpqBz5HSjrebyXmMXT+PMzKnveTtFI76lcTwG86UH9iRHzQMToi\nXiJ9M9+3k/tvr0+QzlPxvPwWeA/Yv1A+OyKuqLyI1FfnIZq/t9V8knROikOmf0lqHSnup00RsQS4\nn9TahqRtgTrgZ8AKpFYNSK0pj0TE3Oz1fqSE44LCJs8mnYviOa/297gfsIDc73mkFqbzs+OpeI+U\n8Owpqa6Dh2j9mJMTK5uHIuKO/KNKnRerlG1G+hb2ZuExHViJ1FIAsEH23OyfaURMI33L7IxNSf9w\nJxb2PQP4z9y+K+ZmiUXeTGD13OuPAK9GRFsx/RnYWNKo7PW+wJqkb9et2TB77vB5iIjngF8B3wTe\nlnSTpG9LWrWNfRa90IG6xQ8/gKeBVSWtXmVZd6mcp6fzhZE6fL6QW17xSpVtFN/blvazODu3+f28\nRno/ivtpr3uAnbJ+K7sDr0TEI6QRcJVLO/9B+t3Nx/JqRMwvbGtqbnnei1X2uyHwWpUE+an8i2z5\nicABwAxJd0o6XlLxb8aWMe5zYn1R8Z8mpET7deDLNP9mVjEje64sa89Ih5bqDKiy7yD1d3mrSv1i\nB88lLWxXLfzcmpuyfX4JeCB7fi0i7mxjvY6ch6VExNisg+OnSK0w5wMnSBqVJTjtUe197IjiOWrv\n+9WVfbSlPe9tR5d3NIa8iaTh2buQWkgm5sp3l7Q1Kam/uwv7q/Y+iurvx1LbjohfSroaOJjUsvlj\n4IeS9si3ltmyxS0n1l88R+qMeE+x5SV7VP7JvZg9D8+vLGkY6dJM3kxggKRBhfKNquwbYEYL+55I\nxz0LrFccEVIUEYvJOl5KWo3UKfUv7dj+i9lze85DS/ueEhE/iYg9gD1IrVLfyFdpz3baabMqZcOB\ndyNiZvZ6JqkfTtFGVcraG9uL2fPm+UJJA7PtvtTO7bRnP8tL2qSwn3WAVbqwnwdIlwdHk1pKKr+L\ndwMfI11yDFILSz6W9SQVO0ZvmT23J5bKNoqX/TavUpeIeD4izo6IfYCPkjrotmuUk/VPTk6sv7iC\n1PnwpOKCbKhk5UP+VtK322ML1cZW2eZzpG96o3PbWoXUOpN3EzAX+FHW56O4/7XaeQx5V5FaNtsz\n++elpMTst6R/6u1JTjpyHpqRNFhS8X/HFNKH3Iq5snlUTxY6Y7esz0Qlho1IlwJuztV5DlhT0pa5\neuuSErai9sZWOU/HFcq/SRqRdUM7ttEe40m/a/9dKP8e6bze2JmNZpdmJpN+Z9emecvJysAxwFMR\nkW/xG0/6Wzq6sLmxpHNxUzt2PZ70u/DNSkH2t3EMzUd+fSgb/ZX3POnvacVcvWGSNq/ye2f9lC/r\nWJl0uvk6Iu7ILjOcJGkEcBvpG+NwUmfZb5NGXEyXNA44XtJ1pH+0O5I6375T2OxNwGvAJZLOysq+\nCrwBfDAPRkTMlnQMaQjzZEmXky61bEjqyPhPOvgtMCJuk9QAfFdp7pFbSJcndgcmRES+o+G/JU0l\ndYR9tD1N4R08D9D8vfkEME7S34FnSJ0rDyddvvpHrt4kYEw2P8YbwHMRUXVelnaYAtwi6TzS+3p0\n9vy/uTp/JQ2Pvi6rtwrwLdJw5e0K22tXbNl5+jlwoqTxpGRky2y795NarbosIiZL+gtwdNaZeiKw\nK+ky3RUR0dJw4faYCBwPvB0RU7P9vSHpOdLfx+8K9a8mtaz8XNKmNA0l3h/4RURU61dTdDWp1eas\nrDWoMpS42Aq5FXCzpCuAJ0jJz2dJ/aYacvXOInX8Xo90+db6u1oPF/LDj4gPhhIvAUa0UmdAVueX\nrdQ5ijQ6Yi6pmf9h4CfAk
EK9/0dKPOaSvh1vTurMeFGh3gjSh9B80je671AYbpqr+3HSN/mZ2Xaf\nAn4PbJ+rcynpQ6IY9+nAgkKZSB8qT2T7n0YaobJtlfV/kMX03Q6e92rn4WXgt7k6xaHEH8mO6xlS\nC8SMbN3RhW1vAdyZbXtJ5dxmx7qENCdKq+ch/56TPqifzs7Fg5V4CuuPAR4jDZV9nDTPSLWhxC3F\n1tJ7+51se+9n5+tXwKqFOhOBSVViupTUOtHWezEgez+ey/bzAin5Wr7K9pb6HWpluwdmx3R1ofwP\nFIZD55atTBqd82oWy5PAcR35eyR1Av4zaXTX26Q5ZXag+VDitUgjxp4A5pAS43uBg6sc8+Li++JH\n/30oe+PNlnmSXgFuiohvtFm5ZCR9jzRHyQYR8Uat4zEz6wpfvzPrH75Kmm/CiYmZ9Xnuc2LWRynd\nM+UgUj+RLfDoBjPrJ5ycmDVp6d4sZTWMNDLnHdIU9hNqHI+ZWbdwnxMzMzMrFfc5MTMzs1JxcmJm\nZmal4uTEzFok6ceSivcG6u0YBkhqlFS8Y29XtrlXts2DumubHdj3ZZKe6e39mvUlTk7MOknS4dkH\nXOUxX9JTks7rR3dV7WudhDuiVscVQGON9m3WJ3i0jlnXBOn+Ny8CK5Hu/PptYD9J28TSt4y38ujK\n3X674oga7tusT3ByYtZ1N0fE5OznP0h6h3STtE8Bf6tdWG2TNCgi3qt1HMuSiFhSi/36vba+xJd1\nzLrfHaRvxhtXCiRtLOnvkt6WNE/S/ZI+mV9J0pu5GwyiZJakRbm7KiPphKxsUK5sc0lXZtufL+kh\nSQcWtl+5DDVa0oWSppPuJ9Qhkr4m6XZJ07N9TZF0VKHOryRNK5T9Otv/t3Jl62RlX23nvr+cXTqb\nL+lBSR+rUmddSZdImibpfUmPSTq8yuYCWE7SyZJelfSepFslbVzY3h7Ze/dytr2XJJ2Vv5uupB9I\nWiJpneJOsrrzJa2avV6qz4mkVSSNk/RKto+p2U0J83U2yc7VYYXySp+cE3NlP87Khkv6m6SZpBtQ\nmvUJTk7Mut+m2fPbAFn/k/tJd/M9HziRdDv46yV9KrfevcDo3OttgUpS8h+58t2AyZVvwZK2Jt0B\ndnPgZ6SZYucC1xS2X3EhaUbZ/yXdj6ejvk26CeJPgO+RboT320KCMhH4sKThhbiXkO6sXDGalCRM\nbMd+9wJ+AfyJdIO8IcAESZtXKkgaRrop4B7AucBxWax/lHR0YXsiXZLbH/h59vgY6WZ1eZ8nvV/n\nA8eQbnJ4HOnGeRWXZ9v7XJW4PwuMj4h3s9fN+vFIEnAjcCzprsdjSTdVPFvpjsidUdn+P0g36PsB\n6cZ7Zn1Dre886IcfffVB052U9yTd4n1d4AvAm6TkYO2s3ris3q65dVcm3X32uVzZ94CFwMrZ62NI\nH6z3Az/N1XsHOCv3+jbS3ZeLd6+9B3iyEG8j6W68aucxVruj74pV6t0KTM29Hprt62vZ69Wzc3A5\n8HKu3vnAtDZiGJBtazGwTa58Q9Idcy/PlV1CuqtyXWEbVwBvAStkr/fKtvkIMCBXb2wW5/A2jvdH\nWTxr58r+BdxXqLdrtp/P58ouBZ7OvT4kq3N8Yd2rgEWkmzkCbJLVO6yF83Ni4X1rBC6p9d+JH350\n5uGWE7OuEXA7KSF5Bfgr6dbvB0fTTfj2Ax6MiPsrK0XEPOAiYCNJW2XFE0n9wCqXKnbPyiZmPyNp\nW2C1rAxJq5OSo78DdZLWrDyAW4DNJK2dizeA30VEp0eqRMSCDw5eGpzt6y5guKQPZXWmA8/S1BK0\nO7AA+CWwnqQNC8fYHhMjYkoujpeA64F9s1gEfBq4Fli+yrlYHdi+sM2Lo3kfkImk9/QjLRzvoGx7\n92X18tv7G7CLpA1yZV8A3iO1iLRkP1JSekGh/GxS4rFvK+u2JoDfdHJds5pycmLWNUG6zLE
38HFg\nq4jYJCJuy9XZEHiqyrpTc8sBJpM+yCqXPXajKTnZUdLAbFmQWkUgXUIS6Zvym4XHqVmd4rDmF/Mv\nJK0gaWj+0doBS9pd0h2S5gKzsn2dli2uy1W9p3AsDwL/BmYDu0uqA7ah/cnJs1XKngZWzZK0YcCq\nwNEsfS4uyuoXz0Wxz83M7Hn1SoGkDSX9WdLbpBaxN0kJKTQ/3iuy58/nyg4BbojWO6JuCLwaEfML\n5cXfj854oQvrmtWMR+uYdd1D0TRap9MiYrGkfwGjJW0CrA3cTfowXAHYhfQhPzUi3s5Wq3zBOAto\n6cZ/xQ/14ofgaNJlmSAlOiFp/Yh4vbghSZtldaeQLoG8QvrWfxCpz0T+C89E4HBJ65OSlNsiIiTd\nm72uJAJ3txB3e+SH5Fb2/SfgshbqP1J43dLIGUHqbEq6bLYq8FNSkvkesAGpz8kHxxsRr0q6n5Sc\nnCVpd9Klvss7cAytaam1a0Ar6xTfa7M+wcmJWc97idRZtWjL3PKKicD3SZ1n34yIpwEkPU5KInYn\nXcqoeD57XhQRd3Qyvkmklp+8N1uoexApUdo/u3RDFt8+VepWWkT2AUYAp2Sv7waOJCUn77J0wtCS\nzaqUDQfejYiZkuYA84DlunAuirYn9fWoj4gPhoVLaulSy+XAryR9hHRJ513gpjb28SKwm6QPFVpP\nir8flWRutcL6XWlZMSslX9Yx63njgZ0l7VIpkLQy8A3ghYh4Ild3Imkyt+NounRD9vOXSa0pH1wG\niYg3SR1cv5mNVGlG0lptBRcRsyLijsKjpSnrKy0NH/zvyC6pfKXKdp8FppM6+i5H6qdROcbNSf1D\n7utA/5fdsj43lf1uBBwA3JztbwlwNfB5SVsWV65yLtqz32rHK9L7U239v5N1WiVd0rku32elBeOB\ngaTLUXmVzrk3AUTETNJltNGFese0EEtVkuqUhp6v0t51zHqbW07MuqY9TfJnAPXAzZLOJY22OYL0\njfczhbr3k0aBDAd+myu/m9S3pdqw2+9kZY9J+h2pNWUoaaTIusAOHYy3NRNIQ27HZ/saDBwFvMHS\n/TkgJVWfJQ19npuVPUS63LApaXRNe00BbpF0HukcHZ09/2+uzvdJH94PZvFNBdYAdiS1OuUTuPac\ni8dJ/TbOyTrxzs2OZ3C1yhExXdJE4H+AVWjfJHxXk97fn0vaFHiU1El2f+AXEZHvF/N74HhJs0l9\nlD5OatnpyPt6KPDr7PmKNuqa1YRbTsy6ps1vrBExg5Qo3EL6lvtT0hDYAyLiukLd90jDgvOdXiEl\nH0EahvtKYZ2ppA/fG0jDhc8Hvkn61n0azXVmlM4H62T7+izpf8dZwNeB80hzp1RTiTvf2rOYNOy2\nvfObVGK4HTiedIynklplxmQxVbY9DdiJ1O/kM1ls/0VKJk5o6bhaKs9akA4gJQwnAieREpYjW4n1\nb6TEZBYt9wPK7yNIici5wIGkoefDge9GxA8K651C6uvyeVKSuDiLr6P3QOqv90uyfkJdGFFoZmZm\n1u1q3nIiaTlJp0t6Pps++llJJ1Wpd5qk13NTTG9aWL66pL9Imi1ppqTfZ9f1zczMrA+peXJCmlb5\nm6Trx1uQrhl/X9IxlQqSTiA1h38T2JnUI39CNu9DxV9Jvdv3IjWRjqb5NXszMzPrA2p+WUfS9aTp\nq4/KlV0JvBcRX8lev07qGDYuez2YdL358Ii4IuuZ/zgwMiIezursQ7pfxXrZdWgzMzPrA8rQcnIf\nsFc2uROStiPd5Gx89npjUg/7yoyMRMQcUoe6XbOiUcDMSmKSuY3U6WsXzMzMrM8ow1DiM0g96Z+U\ntISUMP0oIiqzKg4jJRnTC+tNp2lY4DBgRn5hRCyR9A7Nhw6amZlZyZUhOfkCacKiQ4EnSDMy/krS\n6xFxaSvribaHw7VYJ7t51z6k2Rnf72DMZmZmy7KVgI2
ACbnbaXSbMiQnZ5JuB//37PXj2cyPPyTd\nWnwaKckYSvPWkyGk+SDI6jSbACq7J8bqLN3iUrEP8Jeuh29mZrbM+iJpQEq3KkNyMoilWzcayfrD\nRMQLkqaRRuE8Ch90iN2FpluM3w+sJmmHXL+TvUhJzb9a2O+LAJdddhlbbrnUTNfLlLFjxzJu3Lha\nh1EKPheJz0MTn4vE5yHxeUimTp3Kl770JSjc5by7lCE5uR74kaRXSCNuRpDuKfH7XJ1zgJMkPUs6\nEacDrwLXAkTEk5ImAL+T9G3SfSrOAxpaGanzPsCWW27JiBEjuv2g+pK6urpl/hxU+FwkPg9NfC4S\nn4fE52EpPdItogzJyTGkZOMC0qWZ10n3fTi9UiEizpQ0iDRvyWqkKa/3i4iFue0cRpq2+zZSy8uV\npJtzmZmZWR9S8+QkIuYB380erdU7lXQ/jZaWzwK+1J2xmZmZWe8rwzwnZmZmZh9wcmLU19fXOoTS\n8LlIfB6a+FwkPg+Jz0PvqPn09bUiaQQwadKkSe7cZGZm1gGTJ09m5MiRkG4bM7m7t++WEzMzMysV\nJydmZmZWKk5OzMzMrFScnJiZmVmpODkxMzOzUnFyYmZmtoyaPh023RTuuqvWkTTn5MTMzGwZtWgR\nPPccvN8jd8jpPCcnZmZmy6jGxvQs1TaOIicnZmZmy6jKPKxOTszMzKwUnJyYmZlZqTg5MTMzs1Jx\ncmJmZmal4uTEzMzMSsXJiZmZmZVKJTlZrmTZQMnCMTMzs97ieU7MzMysVHxZx8zMzErFyYmZmZmV\nipMTMzMzKxUnJ2ZmZlYqTk7MzMysVJycmJmZWal4KLGZmZmViidha4GkFyQ1Vnmcly1fUdIFkt6S\n9K6kKyUNKWxjfUk3SponaZqkMyXV/NjMzMzKzJd1WrYjMCz3+AQQwBXZ8nOA/YFDgNHAOsBVlZWz\nJGQ8sDwwCjgcOAI4rVeiNzMz66PKmpwsX+sAIuLt/GtJBwLPRcRESYOBrwKHRsRd2fIjgamSdo6I\nB4F9gC2APSPiLeAxSScDZ0g6NSIW9+oBmZmZ9RFlTU7K0HLyAUkrAF8ELs6KdiQlULdX6kTEU8DL\nwK5Z0SjgsSwxqZgA1AFb93TMZmZmfZWTk/b5NCmp+FP2eiiwMCLmFOpNJ10CInueXmU5uTpmZmZW\n4OSkfb4K3BQR09qoJ1K/lLa0p46ZmdkyqazJSc37nFRI2gDYGzg4VzwNGChpcKH1ZAhNrSPTgJ0K\nmxuaPRdbVJYyduxY6urqmpXV19dTX1/fgejNzMz6nvbMc9LQ0EBDQ0OzstmzZ/dgVCVKTkitJtNJ\nI28qJgGLgb2AqwEkDQc2AO7L6twPnChprVy/kzHAbOCJtnY6btw4RowY0S0HYGZm1pe0Z56Tal/Y\nJ0+ezMiRI3ssrlIkJ5JEGv57SUQ0VsojYo6ki4GzJc0E3gXOBe6NiIeyareQkpBLJZ0ArA2cDpwf\nEYt68TDMzMz6FF/Wad3ewPrAH6ssGwssAa4EVgRuBr5TWRgRjZIOAH5Nak2ZB1wCnNKzIZuZmfVt\nTk5aERG3AgNaWLYAODZ7tLT+K8ABPROdmZlZ/1TW5KRso3XMzMyslzg5MTMzs1JxcmJmZmal4uTE\nzMzMSqU985zUgpMTMzOzZVR75jmphZKFY2ZmZr3Fl3XMzMysVJycmJmZWak4OTEzM7NScXJiZmZm\npeLkxMzMzErFyYmZmZmViuc5MTMzs1Jxy4mZmZmViidhMzMzs1Jxy4mZmZmVipMTMzMzKxUnJ2Zm\nZlYqTk7MzMysVJycmJmZWal4nhMzMzMrFbecmJmZWal4nhMzMzMrFbecmJmZWak4OTEzM7NScXJi\nZmZmpeLkxMzMzErFyYmZmZmViuc5aYWkdSRdKuktSe9JekTSiEKd0yS9ni2/VdKmheWrS/qLpNmS\nZkr6vaSVe/dIzMz
M+g63nLRA0mrAvcACYB9gS+B7wMxcnROAY4BvAjsD84AJkgbmNvXXbN29gP2B\n0cBve+EQzMzM+qSyznOyfK0DAH4AvBwRX8+VvVSocxxwekRcDyDpK8B04GDgCklbkhKbkRHxcFbn\nWOBGScdHxLSePggzM7O+xi0nLTsQ+LekKyRNlzRZ0geJiqSNgWHA7ZWyiJgD/AvYNSsaBcysJCaZ\n24AAdunpAzAzM+uLnJy07CPAt4GngDHAb4BzJX0pWz6MlGRML6w3PVtWqTMjvzAilgDv5OqYmZlZ\nTlmTkzJc1lkOeDAiTs5ePyJpa1LCclkr64mUtLSmPXXMzMyWSU5OWvYGMLVQNhX4TPbzNFKSMZTm\nrSdDgIdzdYbkNyBpALA6S7e4NDN27Fjq6uqaldXX11NfX9/+IzAzM+uD2pOcNDQ00NDQ0Kxs9uzZ\nPRhVOZKTe4HNC2Wbk3WKjYgXJE0jjcJ5FEDSYFJfkguy+vcDq0naIdfvZC9SUvOv1nY+btw4RowY\n0VoVMzOzfqk985xU+8I+efJkRo4c2WNxlSE5GQfcK+mHwBWkpOPrwFG5OucAJ0l6FngROB14FbgW\nICKelDQB+J2kbwMDgfOABo/UMTMzqy5K2vGh5slJRPxb0qeBM4CTgReA4yLi8lydMyUNIs1bshow\nEdgvIhbmNnUYcD5plE4jcCVpCLKZmZlVEVG+/iZQguQEICLGA+PbqHMqcGory2cBX2ppuZmZmTUX\nUb4J2KAcQ4nNzMysBsracuLkxMzMbBnl5MTMzMxKxcmJmZmZlUpjo5MTMzMzKxG3nJiZmVmpODkx\nMzOzUnFyYmZmZqXieU7MzMysVNxyYmZmZqXi5MTMzMxKxcmJmZmZlYrnOTEzM7NSccuJmZmZlYqT\nEzMzMysVJydmZmZWKp7nxMzMzErFLSdmZmZWKk5OzMzMrFScnJiZmVmpeJ4TMzMzKxW3nJiZmVmp\nODkxMzOzUnFyYmZmZqXi5MTMzMxKxZOwmZmZWam45cTMzMxKxcmJmZmZlYrnOWmBpFMkNRYeT+SW\nryjpAklvSXpX0pWShhS2sb6kGyXNkzRN0pmSan5sZmZmZdavW04kDZC0vaTVO7mJKcBQYFj22C23\n7Bxgf+AQYDSwDnBVbt/LAeOB5YFRwOHAEcBpnYzFzMxsmdCvkhNJ50j6WvbzAOAuYDLwiqSPd2KT\niyPizYiYkT3eybY9GPgqMDYi7oqIh4Ejgf+QtHO27j7AFsAXI+KxiJgAnAx8R9LynTk+MzOzZUG/\nSk6AzwJImAt3AAAgAElEQVSPZD8fCGxMShDGAT/pxPY2k/SapOckXSZp/ax8JKlF5PZKxYh4CngZ\n2DUrGgU8FhFv5bY3AagDtu5ELGZmZsuE/pacrAVMy37+JPD3iHga+APw0Q5u6wHSZZh9gG+REp27\nJa1MusSzMCLmFNaZni0je55eZTm5OmZmZlZQ1nlOOnvZYzqwlaQ3gH2Bo7PyQcCSjmwouwxTMUXS\ng8BLwOeB91tYTUC0Z/MdicXMzGxZUtaWk84mJ38ErgDeICUAt2bluwBPdiWgiJgt6WlgU+A2YKCk\nwYXWkyE0tY5MA3YqbGZo9lxsUVnK2LFjqaura1ZWX19PfX19Z8I3MzPrM9qTnDQ0NNDQ0NCsbPbs\n2T0YVSeTk4g4VdIUYH3SJZ0F2aIlwBldCUjSKsAmwJ+AScBiYC/g6mz5cGAD4L5slfuBEyWtlet3\nMgaYDTxBG8aNG8eIESO6ErKZmVmf1J55Tqp9YZ88eTIjR47ssbg6PZolIq4EkLRSruxPHd2OpF8A\n15Mu5awL/C8pIbk8IuZIuhg4W9JM4F3gXODeiHgo28QtpCTkUkknAGsDpwPnR8Sizh6fmZlZf1fW\nyzqdHUo8QNLJkl4D5kr6SFZ+emWIcQesB/yVdDnocuBNYFREvJ0tHwvcAFwJ3Am8T
przBICIaAQO\nILXa3Af8GbgEOKUzx2ZmZrasKGty0tmWkx+RJjv7PvC7XPkU4L+Bi9u7oYhotXNHdsno2OzRUp1X\nSAmKmZmZtVNZk5PODiD6CvCNiPgLzUfnPEKa78TMzMxKrr8lJ+sCz7awvRU6H46ZmZn1lrLOc9LZ\nkJ4Adq9S/lng4c6HY2ZmZr2lrC0nne1zchrwJ0nrkhKcz0janHS5x30/zMzM+oCyJiedajmJiGtJ\nScjewDxSsrIlcGBE3NraumZmZlYO7ZnnpBa6Ms/JPcAnujEWMzMz60X9quVE0k6SdqlSvoukHbse\nlpmZmfW0fpWcABeQpq4vWjdbZmZmZiXX35KTrYDJVcofzpaZmZlZyfW35GQBTXf+zVubdF8cMzMz\nK7n+lpzcAvxMUl2lQNJqwE8Bj9YxMzPrA8o6CVtnR+scD9wNvCSpMuna9sB04MvdEZiZmZn1rLK2\nnHQqOYmI1yRtC3wR2A6YD/wRaIiIRd0Yn5mZmfWQ/jjPyTzgom6MxczMzHpRv2o5AZA0HPg4MIRC\n35WIOK1rYZmZmVlP61fJiaSjgF8DbwHTgMgtDtJ09mZmZlZi/So5AU4CfhQRP+/OYMzMzKz3lDU5\n6ewAotWBv3dnIGZmZta7+lty8ndgTHcGYmZmZr2rv81z8ixwuqRRwGNAs+HDEXFuVwMzMzOzntXf\nhhJ/A5gL7JE98gJwcmJmZlZyZb2s09lJ2Dbu7kDMzMysd5U1OenSlSZJAyVtLqnT86WYmZlZbfSr\n5ETSIEkXA+8BjwMbZOXnSfpBN8ZnZmZmPaRfJSfAz0j31Pk48H6u/DbgC12MyczMzHpBWZOTzl6O\nORj4QkQ8ICk/O+zjwCZdD8vMzMx6WlmTk862nHwYmFGlfGWaT2VvZmZmJVXWeU46G9K/gf1zrysJ\nydeB+7sUkZmZmfWKss5z0tnk5ETgp5J+Tbo0dJykW4EjgR91JSBJP5TUKOnsXNmKki6Q9JakdyVd\nKWlIYb31Jd0oaZ6kaZLOlFTCfNDMzKwc+tVlnYi4h9QhdnnSDLFjgOnArhExqbPBSNoJOAp4pLDo\nHFJLzSHAaGAd4KrcessB47N4RgGHA0fguyObmZm1qKzJSYc7xGZzmhwGTIiIo7orEEmrAJeRLg2d\nnCsfDHwVODQi7srKjgSmSto5Ih4E9gG2APaMiLeAxySdDJwh6dSIWNxdcZqZmfUXZU1OOtxykn3Q\n/wZYqZtjuQC4PiLuKJTvSEqibs/F8BTwMrBrVjQKeCxLTComAHXA1t0cp5mZWb/Qb5KTzIPADt0V\nhKRDge2BH1ZZPBRYGBFzCuXTgWHZz8Oy18Xl5OqYmZlZTlmTk87Oc3Ih8EtJ6wGTgHn5hRHxaHs3\nlG3jHOATEbGorfr5VWnfsGUPbTYzM6uivyUnl2fP+bsPB00Jw4AObGskad6USdIHp2gAMFrSMcC+\nwIqSBhdaT4bQ1DoyDdipsN2h2XOxRaWZsWPHUldX16ysvr6e+vr6DhyCmZlZ39Oe5KShoYGGhoZm\nZbNnz+7BqDqfnHTnXYlvAz5aKLsEmAqcAbwGLAL2Aq4GkDScdD+f+7L69wMnSlor1+9kDDAbeKK1\nnY8bN44RI0Z0/SjMzMz6mMbGtidhq/aFffLkyYwcObLH4upUchIRL3VXABExj0ICIWke8HZETM1e\nXwycLWkm8C6pxebeiHgoW+WWbBuXSjoBWBs4HTi/g5eKzMzMlhn96rKOpK+0tjwi/ty5cJo2UXg9\nFlgCXAmsCNwMfCe3v0ZJBwC/JrWmzCO1vpzSxTjMzMz6rX6VnAC/KrxeARgELATeA7qUnETEfxZe\nLwCOzR4trfMKcEBX9mtmZrYs6VfJSUSsXiyTtBmp5eIXXQ3KzMzMel5Zk5Nuu/dMRDwD/IClW1XM\nzMyshPp9cpJZTLrvjZmZmZVcWZOTznaIPahYR
Bohcwxwb1eDMjMzs57Xr5IT4JrC6wDeBO4Avtel\niMzMzKxXLFnSj5KTiOjuy0FmZmbWi6ZNg0mT4HOfq3UkS3OSYWZmtgy6+up0WefII2sdydI6lZxI\nulLSD6qU/4+kv3c9LDMzM+tJb78Na6wBqy81OUjtdbblZA/gxirlNwOjOx+OmZmZ9YZ582DllWsd\nRXWdTU5WIc0GW7QIGNz5cMzMzKw3zJ0Lq6xS6yiq62xy8hjwhSrlh9LGXYDNzMys9ubNK29y0tmh\nxKcD/5C0CWn4MMBeQD1Qwn6/ZmZmljd3bnkv63R2KPH1kg4GTgQ+C8wHHgX2joi7ujE+MzMz6wH9\nseWEiLiR6p1izczMrOTmzoX11691FNV1dijxTpJ2qVK+i6Qdux6WmZmZ9aQyt5x0tkPsBUC1fGvd\nbJmZmZmVWJn7nHQ2OdkKmFyl/OFsmZmZmZVYfxxKvAAYWqV8bWBx58MxMzOz3tAfL+vcAvxMUl2l\nQNJqwE+BW7sjMDMzM+s5Zb6s09nROscDdwMvSXo4K9semA58uTsCMzMzs56xcCEsXlzelpPOznPy\nmqRtgS8C25HmOfkj0BARi7oxPjMzM+tmc+em5/7WckJEzAMu6sZYzMzMrBfMnp2eB5f0bnidSk4k\nfY40Vf1wIIBngL9GxJXdGJuZmZn1gDffTM9DhtQ2jpZ0qEOspOUk/Q34G2nI8LPA88DWwBWSLpek\n7g/TzMzMusuMGem5rMlJR1tOjgP2Bg6KiBvyCyQdROp3chxwTveEZ2ZmZt2tkpystVZt42hJR4cS\nHwn8TzExAYiI64DvA1/tjsDMzMysZ8yYAWusASusUOtIqutocrIZcFsry2/L6piZmVlJzZhR3ks6\n0PHkZD6wWivLBwPvd2SDkr4l6RFJs7PHfZL2zS1fUdIFkt6S9K6kKyUNKWxjfUk3SponaZqkMyV1\ndoI5MzOzfq2/JSf3A99uZfl3sjod8QpwAjAye9wBXCtpy2z5OcD+wCHAaGAd4KrKylkSMp7Uf2YU\ncDhwBHBaB+MwMzNbJpQ9Oeloh9ifAHdKWhM4C3gSELAl8D3gU8CeHdlgRNxYKDpJ0reBUZJeI/Vh\nOTQi7gKQdCQwVdLOEfEgsA+wBbBnRLwFPCbpZOAMSadGhO/1Y2ZmlvPyy7DffrWOomUdajmJiPuA\nL5ASkPuBmcA7wL1ZWX1E3NvZYLKhyocCg7LtjyQlULfnYngKeBnYNSsaBTyWJSYVE4A60hBnMzMz\nyyxeDM8/D5uVuIdohydhi4irJU0AxpAmYQN4GrglIt7rTBCStiElIysB7wKfjognJe0ALIyIOYVV\npgPDsp+HZa+LyyvLHulMTGZmZv3Ryy/DokWw6aa1jqRlnb23znuS9gb+X0S80w1xPEm6R89qpL4l\nf5Y0upX6Is1M25b21DEzM1tmPPNMeu43yYmk9SLi1ezlYcCZwDuSHgM+GRGvdCaIrF/I89nLyZJ2\nJk3mdgUwUNLgQuvJEJpaR6YBOxU2OTR7LraoLGXs2LHU1dU1K6uvr6e+vr5jB2FmZtYHPP10mt9k\ngw3aV7+hoYGGhoZmZbMrN+fpIR1tOXlS0tukPiYrAeuT+n9sBHTnVC7LASsCk4DFwF7A1QCShgMb\nAPdlde8HTpS0Vq7fyRhgNvBEWzsaN24cI0aM6MbQzczMyuuhh2C77WD5dmYA1b6wT548mZEjR/ZA\ndElHhxLXAZ8jJQ3LAeMlPU1KJPaRNKy1lauR9BNJu0naUNI2kn4G7AFclrWWXAycLenjkkaSpsi/\nNyIeyjZxCykJuVTStpL2AU4Hzo+IRR2Nx8zMrD978EHYZZdaR9G6jiYnK0TEgxHxS9KEbDuQprRf\nQhry+5ykpzq4zaHAn0n9Tm4jjdAZExF3ZMvHAjcAVwJ3Aq+T+qUAEBGNwAFZDPdl27oEOKWDcZiZ\nmfVrs2bBU
0+VPznp6GWdOZIeJl3WGQgMioh7JS0mDTF+Fdi5IxuMiK+3sXwBcGz2aKnOK6QExczM\nzFowZUp63n772sbRlo62nKwD/BhYQEps/i1pIilRGQFERNzTvSGamZlZd5gyJfU12XzzWkfSuo5O\nwvZWRFwfET8E3iONkjmPNGT3LFLLyl3dH6aZmZl11ZQpMHw4DBxY60ha19Wb482OiCuARcB/AhsD\nF3Y5KjMzM+t2jz4KW/eBudO7kpxsS+pjAvASsCgipkXE37oelpmZmXWn999PI3U+9rFaR9K2Ts0Q\nCx90Qq38vE33hGNmZmY94aGHYMEC2GOPWkfStq5e1jEzM7OSW7IETjwR1lkHtt221tG0rdMtJ2Zm\nZtY3XHcd3HMP3HknDBhQ62ja5pYTMzOzfu4Pf4Bdd+0bl3TAyYmZmVm/98wzKTnpK5ycmJmZ9WMR\n8OqrsO66tY6k/ZycmJmZ9WNz5sC8eU5OzMzMrCRezWYkW2+92sbREU5OzMzM+rHXXkvPbjkxMzOz\nUnjxxfS8zjo1DaNDnJyYmZn1UxFw2WWw227lv9lfnidhMzMz64cWL4YvfAEmToRrrql1NB3j5MTM\nzKyfmTkTDjwQHngArrwSPvWpWkfUMU5OzMzM+pmjj4apU+GOO2D06FpH03Huc2JmZtaPvP8+XHst\nfP/7fTMxAScnZmZm/crdd8P8+fDJT9Y6ks5zcmJmZtaP/OMfsOGGsM02tY6k85ycmJmZ9RMXXwy/\n/S0ceihItY6m85ycmJmZ9RPjxqXnb32rtnF0lZMTMzOzfmDhQnjqKbjwQthoo1pH0zVOTszMzPqB\nZ55JE69tvXWtI+k6JydmZmb9wKOPpmcnJ2ZmZlYK110HH/0orLlmrSPpOicnZmZmfdz48WnitUMP\nrXUk3aPmyYmkH0p6UNIcSdMlXS1peKHOipIukPSWpHclXSlpSKHO+pJulDRP0jRJZ0qq+fGZmZn1\npMmT4dOfhj32SNPW9wdl+PDeHTgP2AXYG1gBuEXSh3J1zgH2Bw4BRgPrAFdVFmZJyHjSvYJGAYcD\nRwCn9Xz4ZmZmtXPhhbDuuqnlZLXVah1N96j5jf8iotkEu5KOAGYAI4F7JA0GvgocGhF3ZXWOBKZK\n2jkiHgT2AbYA9oyIt4DHJJ0MnCHp1IhY3HtHZGZm1jveey/NCHv00TBwYK2j6T5laDkpWg0I4J3s\n9UhSEnV7pUJEPAW8DOyaFY0CHssSk4oJQB3QD/otm5mZLe2ss2DePPja12odSfcqVXIiSaRLOPdE\nxBNZ8TBgYUTMKVSfni2r1JleZTm5OmZmZv1GBPzxj3DkkbDxxrWOpnvV/LJOwYXAVsBu7agrUgtL\nW9pTx8zMrE956il48UU48MBaR9L9SpOcSDof+CSwe0S8nls0DRgoaXCh9WQITa0j04CdCpscmj0X\nW1SaGTt2LHV1dc3K6uvrqa+v7+ARmJmZ9Y4XX4QxY2CNNWDPPXt2Xw0NDTQ0NDQrmz17do/uUxG1\nb1jIEpNPAXtExPOFZYOBN0kdYq/OyoYDTwK7RMRDkvYFrgfWrvQ7kfQN4OfAkIhYVGWfI4BJkyZN\nYsSIET14dGZmZt3nscdgr71glVXg9ttrc0ln8uTJjBw5EmBkREzu7u3XvOVE0oVAPXAQME9SpcVj\ndkS8HxFzJF0MnC1pJvAucC5wb0Q8lNW9BXgCuFTSCcDawOnA+dUSEzMzs77q4oth+eXh3nth7bVr\nHU3PqHlyAnyL1C/kzkL5kcCfs5/HAkuAK4EVgZuB71QqRkSjpAOAXwP3AfOAS4BTejBuMzOzXrVw\nYZoN9oAD+m9iAiVITiKizRFDEbEAODZ7tFTnFeCAbgzNzMysVI49Nt19+KKLah1JzyrVUGIzMzNr\n2YMPwuGHw8c/XutIepaTEzMzsz7ixRdhiy1qHUXPc3JiZmbWB8yeDbNmwYY
b1jqSnufkxMzMrA94\n6aX0vNFGNQ2jVzg5MTMz6wMeeCA9LwstJzUfrWNmZmbVLVgA48bBH/6QRunsvTcMWwbuGOeWEzMz\nsxKKgEMOgZNOglGjoKEBJkyA5ZaBT263nJiZmZXQ3/4GN94IV10Fn/lMraPpXU5OzMzMSmLqVJg8\nGR56CC64AOrr4dOfrnVUvc/JiZmZWY0tWQLf+x6cdx40Nqab+p12WiqTah1d73NyYmZmVkMzZsBx\nx8EVV8DPfw7f+hastFK6ud+yahk+dDMzs9qaOxe22y6Nyrn0UjjssFpHVA5OTszMzGrkwgvhzTfT\nMOGNN651NOWxDAxIMjMzK5+rr4YTToCvfc2JSZGTEzMzs1727LPws5/BHnvAb35T62jKx8mJmZlZ\nL2lsTJOqbbYZTJkCp5yybI7GaYuTEzMzs15y3HHwk5/Ad78Lzz8Pe+5Z64jKyR1izczMesHLL8Pv\nfpfmLzn55FpHU25OTszMzHrAwoUwcWIajfOPf8DNN8Nqq8HRR9c6svJzcmJmZtYN3n0XfvWrlIxc\ney3Mng2zZqVl22wD3/gGnHgirLFGbePsC5ycmJmZdYPzz4dTT4X11oMxY2D99WHffdPrYcPc8bUj\nnJyYmZl1wRNPpHvg3HxzmrPk97+vdUR9n0frmJmZddK4cbDttmmG1zPPTCNxrOvccmJmZtYBS5bA\nAw/AZZelCdTGjk0Tqq24Yq0j6z+cnJiZmbVh9mx4+2044wy47jqYPh3q6uCcc+C//sv9SbqbkxMz\nM7MWvPZauv/NX/6SXq+xBnz5y7D//rDLLjB4cG3j66+cnJiZmeVcdhk89hjceis8+mhKSH71K9ho\nI9htNw8F7g1OTszMbJkXATfckDq0/utf6S7Bw4fD2WfDV76SJk+z3lOK0TqSdpd0naTXJDVKOqhK\nndMkvS7pPUm3Stq0sHx1SX+RNFvSTEm/l7Ry7x2FmZn1Fe+9B//8Z7o8s8IK6XHQQTBgAJx7brrv\nzc03p/4kTkx6X1laTlYG/g/4A3BVcaGkE4BjgMOBF4AfAxMkbRkRC7NqfwWGAnsBA4FLgN8CX+rp\n4M3MrJwWLEgzt159dUpInn8+JSVvvAFvvZVaSM48Ez70IdhkE9h7b3duLYNSJCcRcTNwM4BU9dfi\nOOD0iLg+q/MVYDpwMHCFpC2BfYCREfFwVudY4EZJx0fEtF44DDMzq7FZs+AXv4DnnoP582H8eFi8\nGJZbDlZaKSUhBx8Ma64Jn/oUjBzpIcBlVIrkpDWSNgaGAbdXyiJijqR/AbsCVwCjgJmVxCRzGxDA\nLsC1vRexmZn1ln//O42meeaZ9HrmzNR/ZNSo1AJyyimw6aapI+t669U2Vmu/0icnpMQkSC0ledOz\nZZU6M/ILI2KJpHdydczMrA9bsgQmTICbbkrJyPvvw913w9Zbw+GHp2RkpZXgiCNgnXVqHa11RV9I\nTloiUtLS1TpmZlYis2al+UUWLkx9Rd56C665JpXPnw+rrw7/+Z+w6qpwwQVw1FGwfF/+NLOl9IW3\ncxopyRhK89aTIcDDuTpD8itJGgCsztItLs2MHTuWurq6ZmX19fXU19d3LWozM2uXCHj11TTz6jXX\nwP33w7x5aVldHay7LhxwAGyxBey6K+y4YxpdY72joaGBhoaGZmWzZ8/u0X0qolwNC5IagYMj4rpc\n2evALyJiXPZ6MCnp+EpE/F3SFsDjwI65DrFjgPHAetU6xEoaAUyaNGkSI0aM6PHjMjMzmDIlTQUP\n6f40U6ak5yefTGW77AJjxsB++6XLNFtt5VlYy2jy5MmMHDkS0kCUyd29/VK0nGTzkWxKaiEB+Iik\n7YB3IuIV4BzgJEnPAi8CpwOvknV0jYgnJU0Afifp26ShxOcBDR6pY2ZWO889l1pDXn8drr02va5Y\nfvnUCrLVVqlT66c/nVpKzEqRnAA7Av8
k9Q8J4JdZ+Z+Ar0bEmZIGkeYtWQ2YCOyXm+ME4DDgfNIo\nnUbgStIQZDMz6yXz5sHEiSkZueaa1IF1ueVS/5D990937/3oR1PdNdaAIUNa354tm0qRnETEXbQx\nW21EnAqc2sryWXjCNTOzXhEBL7+cRtBMmpRmU7355nTJptJfZKed4Ec/guOPh0GDahuv9S2lSE7M\nzKxcZs9OycbixU1lS5bA9den0TNvvZX6i1RsuCF8/vMwbFjqvDp0KKy1Vu/Hbf2DkxMzs2VQYyM8\n/jgsWpR+vvHGdAlmcta1cfHilIwUbbEFjBgB668PJ50EH/5wmm11u+16N37r35ycmJn1I0uWpIQj\nLyJN4/766+l52rTUMvLCC011Bg2CnXeGn/88dVQdMCCNmBk6tPm2VlzR956xnufkxMysD3nqqTQn\nyC23pOe8iFT+9tvV111xxTSb6sc+ljqp7rsvrL12WrbRRqmDqlkZODkxM6uh+fOb5v2A1Jn0mmvS\n1OwAc+c2vY6Al15K5XV1sMMOS2/vc59L95Ep2nLLdDnGrC9wcmJm1o0eeaT5XB4V8+enJOO995rK\nItJsqLNmNa+70kpN831Iaar2jTZKrzfbDEaPTn09Vl21Rw7BrOacnJiZVTFnTuqjAek+LxMmpGSi\nYsGCdN+XuXObr1dMNPK22SbdITfvyCNT8lHpxyGlWVLXXLPrx2DWVzk5MbNl1rRpabhsY2N6/frr\nKQlpbEzDZOfMaaq77rqwyirN1x8zJo1eyVt/ffjEJ6p3Gl19dXcmNWsPJydm1mctXgwPP5xGqCxZ\nkm4c9847S9ebPj2NUqk2NDZvwAA48EBYbTXYYw/45CdTMjFwIIwcmTqRmlnPc3JiZjW3cGHqk1H0\nzjtp0q/KnBs33JAm/6qYNavp0guk0Sabbbb0dpZfHn7yk6VHo6ywQkpGVl+9qcwJiFntOTkxs241\nY0bTRF7VPP54ugttRWMj3HorvPtu9forrJA6iEKa6GvMmKZlAwak2Ug//OH0esMNl770YmZ9j5MT\ns2XUjBnVWysqIuD225uGrj7zDNx9d9vbfeedpmGw1Sy3XLpkMnBgU9lRR8GoUUvXHTAgdRZdbbW2\n92tm/YeTE7M+4s034Z//bOq8Wc3EifD0021va/58uPfetutJsN566edBg+ArX2lqxWjJqqvCwQen\nCb+qGTSo+WUUM7MiJydmPWj2bHjxxZaXP/ggPPbY0uWLFsG11zafnGvBgrY7dK65Juy1V/tGhFx4\n4dLDWos23BCGD297W2Zm3cnJiVmmsbHlD/9HH4WHHmp53Ycfrt7P4plnmicYRRJstVX1Tpj775/m\nxagYOBA+9ammybmqWXHF1PnTzKwv878x69dmzkyJRVEE3HQTvPFGet3YmOa7aOmeJJASiJZGcqyx\nBhx0UOojkTdmTBoNUiyvWGst2Hjjto/DzGxZ4uTESmXOnHT5oui55+DOO1ter3LDs+KN0KZNW3oG\nz4q6Oth226bXn/tc9U6ZkDpkHnBAy0mGmZl1Hycn1mOeeCJd1ii66y549tmly+fPT6ND8lOE5w0e\n3HyER9Fmm8EhhzQvW2WVljtnDhvme5OYmZWRkxOratq0pnknHnggzU1RTbV7jlS8/Xb18ro62H33\npTttfuhDcNZZsMkmS68zaFAaUuqWCzOz/s/JyTKgsTFN3V2tP8X06anvRb4j6KJF8K9/NU8sPvKR\n6v0tBg6Er32teifN9dZLfS6KSciqq7Y9HNXMzJZdTk5KorExddxctKjtuq2NHHnrrTTFdz7ZiGh5\nFIqU7h9SnNb7i1+Ej340/Tx06NI3NzMzM+spTk46acmSpWfBfOaZ1J+iJU8+2fLEV3PmNM3E2R7b\nb199yOiAAXDiiTBkSPPyESNghx2Wri956KmZmZWLP5Za8PTTTclCRJqZ84UXml7feWea/rto4MCW\nP+x
b65y53HLpEsi667YdW10dbLRRe47CzMys71nmk5PzzksdPufNayqLSB09830uVl0Vdt65qf/E\nwQfDnns239bKK8O++6YblZmZmVnnLPPJyQ03wN57L32b9Q03bD4N+Jpr+m6nZmZmvWGZT04mTEj9\nMczMzKwcWpiM28zMzKw2nJyYmZlZqfSr5ETSdyS9IGm+pAck7VTrmPqChoaGWodQGj4Xic9DE5+L\nxOch8XnoHf0mOZH0BeCXwCnADsAjwARJa9U0sD7Af2xNfC4Sn4cmPheJz0Pi89A7+k1yAowFfhsR\n/7+9ew+2sirjOP79oQKKgzh5YSwUlbybF1AoRUFFUkcdsnEcTC2qwazJrElzqrEspzvjDadGbfJa\nUU1pqaMiBylvjGLmhYsp4gUPieIRARXh6Y+1trxsz0Hw7LP3Pvv9fWb2DO9a691nrYd3r/3s9117\nv9dHxDzgbGAlMKmx3TIzM7NN0RLJiaQtgOHAPZWyiAhgOvDJRvXLzMzMNl1LJCfAdsBmwJKq8iXA\n4EHfvXcAAAoVSURBVPp3x8zMzD6sVv+dEwHRRV1/gLlz59avN02qo6ODOXPmNLobTcGxSByHdRyL\nxHFIHIek8N7ZI/eYV0RX7929R76ssxI4JSJuLZT/DtgmIiZ0ss9E4Ka6ddLMzKz1nB4RN9f6SVvi\nzElErJb0CHA0cCuAJOXty7vY7U7gdOA54K0u2piZmdn79QeGkt5La64lzpwASDoVuA6YDMwmfXvn\ns8BeEfFKI/tmZmZmG68lzpwARMS0/JsmFwM7Av8GxjsxMTMz611a5syJmZmZtYZW+SqxmZmZtYhS\nJidluAePpNGSbpX0kqS1kk7qpM3FkhZLWinpbknDquq3lXSTpA5JyyRdI2lA/UbRfZIulDRb0huS\nlkj6q6Q9qtr0kzRV0lJJyyX9WdIOVW2GSLpN0gpJ7ZJ+LqnXvH4knS3psfx/2SHpfkmfLtS3fAw6\nk4+PtZKmFMpKEQtJF+WxFx9PFepLEQcASTtJuiGPdWV+rRxc1aYM8+XCTo6JtZKuyPV1OyZ63UHU\nXSrPPXgGkNbdfJVOfutF0gXA10gLiA8FVpDi0LfQ7GZgb9K3nk4AjgB+07PdrrnRwBXASOAYYAvg\nLklbFtpcShrfKaQx7gT8pVKZX1i3k9ZojQLOAj5PWt/UW7wAXED6JeXhwAzgFkl75/oyxGA9Sh9K\nvkyaA4rKFIsnSGv0BufH4YW6UsRB0iDgPuBtYDxpzvsWsKzQpizz5QjWHQuDgXGk949pub5+x0RE\nlOoBPAhcVtgW8CJwfqP71oNjXgucVFW2GDivsD0QWAWcmrf3zvsdVGgzHngXGNzoMXUjFtvlcR1e\nGPfbwIRCmz1zm0Pz9nHAamC7QpvJpMlr80aPqRuxeBX4QhljAGwNzAeOAtqAKWU7Hkgf0OZ0UVem\nOPwUuPcD2pR1vrwUWNCIY6JUZ07ke/AAIGlXUlZcjMMbwEOsi8MoYFlEPFrYdTopix5Zp672hEGk\nMbyWt4eTsvxiLOYDz7N+LB6PiKWF57kT2AbYt6c7XGuS+kg6DdgKeIASxgCYCvw9ImZUlY+gXLH4\nuNKl32ck3ShpSC4v0zFxIvCwpGn50u8cSV+qVJZ1vszvl6cD1+aiur42SpWc4HvwVAwmvWg2FIfB\nwP+KlRGxhvSm3itjJUmkTwL/iojKtfXBwDt5simqjkVnsYJeFAtJ+0laTvr0cxXpE9A8ShQDgJyY\nHQhc2En1jpQnFg+STrmPJ93FfVdgVl4nUaZjYjfgK6QzaccCvwYul/S5XF/K+RKYQEoqrsvbdX1t\ntMzvnHTThu7BUyYbE4feHKurgH1Y/7p6VzZ2nL0pFvOAA0hnj04Brpd0xAbat1wMJH2MlKCOi4jV\nm7IrLRaLiCj+sucTkmYDi4BT6fpXs1suDqQP6bMj4vt5+zFJ+5ISl
hs3sF+rz5eTgDsiov0D2vXI\nMVG2MydLgTWkDLBoB96f7bWydtIBtaE4tOft90jaDNiWXhgrSVcCxwNjImJxoaod6CtpYNUu1bGo\njlVlu9fEIiLejYhnI2JORHyXtBD0XEoUA9Lliu2BRyStlrQaOBI4V9I7pLH0K0ks1hMRHcACYBjl\nOiZeBqrvADsX2Dn/u4zz5c6kLxBcXSiu6zFRquQkf1Kq3IMHWO8ePPc3ql/1FhELSQdRMQ4DSddG\nK3F4ABgk6aDCrkeTXqQP1amrNZETk5OBsRHxfFX1I6RFa8VY7EGamIqx2L/qG13HAh3AU/RefYB+\nlCsG04H9SZd1DsiPh0mfkCv/Xk05YrEeSVsDu5MWf5bpmLiPtLCzaE/SWaTSzZfZJFIycXuhrL7H\nRKNXAzdg9fGppFXWZwJ7kb7q9SqwfaP7VuNxDiBNtgeSVlN/I28PyfXn53GfSJqs/wY8DfQtPMft\npMn6EOAw0jXZGxo9tk2Mw1WkleKjSRl85dG/qs1CYAzpk/V9wD8L9X1IZxnuAD5Buka/BPhRo8e3\nCXG4hHQ5axdgP+AnpInmqLLEYAOxee/bOmWKBfAL0tdBdwE+Bdydx/GRksVhBGkd1oWk5GwisBw4\nrdCmFPNlHodIN8S9pJO6uh0TDQ9Eg4J/Tg7+KlKmN6LRfeqBMR5JSkrWVD1+W2jzA9KnpJWkFdXD\nqp5jEOkTZQfpDf5qYKtGj20T49BZDNYAZxba9CP9FsrSPCn9Cdih6nmGAP8A3swvtp8BfRo9vk2I\nwzXAs/mYbwfuIicmZYnBBmIzg/WTk1LEAvg96WcUVpG+cXEzsGvZ4pDHcTzwnzwXPglM6qRNy8+X\neRzj8hw5rJO6uh0TvreOmZmZNZVSrTkxMzOz5ufkxMzMzJqKkxMzMzNrKk5OzMzMrKk4OTEzM7Om\n4uTEzMzMmoqTEzMzM2sqTk7MzMysqTg5MTMzs6bi5MTMWoKkNklTGt0PM+s+Jydm1m2SJkt6Q1Kf\nQtkASasl3VPVdqyktZKG1rufZtY7ODkxs1poI90Je0ShbDTwMjBKUt9C+ZHAooh4blP/iKTNu9NJ\nM+sdnJyYWbdFxAJSIjKmUDyGdGv5hcCoqvI2AElDJN0iabmkDkl/lLRDpaGkiyQ9KumLkp4F3srl\nW0m6Pu/3kqRvVvdJ0jmSFkhaJald0rTajtrMeoqTEzOrlZnA2ML22Fx2b6VcUj9gJDAjt7mFdKv5\n0cAxwO7AH6qedxjwGWACcGAu+2Xe50TgWFLCM7yyg6QRwGXA94A9gPHArG6Oz8zqxKdIzaxWZgJT\n8rqTAaREYhbQF5gM/BA4LG/PlDQO2A8YGhGLASSdATwpaXhEPJKfdwvgjIh4LbcZAEwCJkbEzFx2\nFvBioS9DgDeB2yJiBfAC8FgPjdvMasxnTsysVirrTg4BDgcWRMRS0pmTkXndyRjgmYh4EdgLeKGS\nmABExFzgdWDvwvMuqiQm2e6khGV2Yb9lwPxCm7uBRcDCfPlnoqQtazZSM+tRTk7MrCYi4hngJdIl\nnLGkpISIeJl05uIwCutNAAHRyVNVl6/opJ4u9q305U3gYOA0YDHprM1jkgZu9IDMrGGcnJhZLbWR\nEpMxpMs8FbOA44BDWZecPAXsLOmjlUaS9gG2yXVd+S/wLoVFtpK2Ja0teU9ErI2IGRHxHeAAYChw\n1IcYk5nVmdecmFkttQFTSXPLvYXyWcCVpMsxMwEiYrqkx4GbJJ2X66YCbRHxaFd/ICJWSLoW+IWk\n14BXgB8DayptJJ0A7Jb/7jLgBNIZl/nvf0YzazZOTsysltqA/sDciHilUH4vsDUwLyLaC+UnA1fk\n+rXAHcDXN+LvfJu0vuVWYDnwK6B4yeZ10jd8Lsr9eRo4La9pMbMmp4guL9uamZmZ1Z3XnJiZmVlT\ncXJiZmZmTcXJiZmZmTUVJydmZ
mbWVJycmJmZWVNxcmJmZmZNxcmJmZmZNRUnJ2ZmZtZUnJyYmZlZ\nU3FyYmZmZk3FyYmZmZk1FScnZmZm1lT+DwrH78/1pfXIAAAAAElFTkSuQmCC\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -457,7 +458,7 @@ }, { "cell_type": "code", - "execution_count": 101, + "execution_count": 99, "metadata": { "collapsed": false }, @@ -469,7 +470,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 100, "metadata": { "collapsed": false }, @@ -478,51 +479,52 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1.84 s, sys: 0 ns, total: 1.84 s\n", - "Wall time: 1.85 s\n" + "CPU times: user 7.17 s, sys: 8 ms, total: 7.18 s\n", + "Wall time: 7.19 s\n" ] } ], "source": [ "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", " eval_every=1, random_state=1, var_lambda=None)" ] }, { "cell_type": "code", - "execution_count": 218, + "execution_count": 22, "metadata": { - "collapsed": false + "collapsed": false, + "scrolled": false }, "outputs": [ { "data": { "text/plain": [ "[(0,\n", - " '0.007*rule + 0.005*class + 0.005*classifier + 0.004*probability + 0.004*cue + 0.004*distribution + 0.004*sample + 0.003*sequence + 0.003*tree + 0.003*evidence'),\n", + " '0.053*stimulus + 0.042*constraint + 0.028*search + 0.025*region + 0.023*temporal + 0.023*optimization + 0.021*map + 0.019*differential + 0.018*frequency + 0.017*activity'),\n", " (1,\n", - " '0.056*motion + 0.052*velocity + 0.051*muscle + 0.044*robot + 0.040*reinforcement + 0.035*controller + 0.029*obstacle + 0.028*command + 0.028*reinforcement_learning + 0.027*movement'),\n", + " '0.031*loop + 0.023*potential + 0.023*circuit + 0.021*delay + 0.021*feedback + 0.020*interaction + 0.018*eq + 0.014*device + 0.013*storage + 
0.013*current'),\n", " (2,\n", - " '0.049*cell + 0.027*spike + 0.024*stimulus + 0.022*eye + 0.020*firing + 0.019*response + 0.017*burst + 0.016*inhibition + 0.016*fiber + 0.016*wave'),\n", + " '0.041*stage + 0.031*rate + 0.025*noise + 0.023*feature + 0.022*word + 0.020*pp + 0.017*sequence + 0.016*recall + 0.014*xi + 0.012*relation'),\n", " (3,\n", - " '0.029*attractor + 0.026*vc + 0.024*theorem + 0.019*bound + 0.019*xt + 0.017*fixed_point + 0.016*eigenvalue + 0.016*threshold + 0.015*let + 0.014*capacity'),\n", + " '0.074*image + 0.034*field + 0.032*surface + 0.030*eq + 0.030*visual + 0.026*gradient + 0.023*location + 0.019*average + 0.018*computed + 0.017*correlation'),\n", " (4,\n", - " '0.039*hmm + 0.032*tdnn + 0.030*speech + 0.030*mlp + 0.028*phonetic + 0.026*speaker + 0.024*segmentation + 0.021*recognition + 0.021*hybrid + 0.021*phoneme'),\n", + " '0.067*code + 0.045*capacity + 0.041*hopfield + 0.038*processor + 0.037*matrix + 0.030*stored + 0.028*product + 0.026*activation + 0.020*address + 0.020*machine'),\n", " (5,\n", - " '0.055*chip + 0.055*word + 0.043*circuit + 0.033*analog + 0.031*vlsi + 0.030*pulse + 0.028*voltage + 0.027*board + 0.027*perturbation + 0.024*processor'),\n", + " '0.068*hidden + 0.056*training + 0.041*hidden_unit + 0.031*back_propagation + 0.029*back + 0.027*trained + 0.024*sequence + 0.024*propagation + 0.023*convergence + 0.020*dynamic'),\n", " (6,\n", - " '0.027*rbf + 0.023*spline + 0.015*schedule + 0.015*basis_function + 0.012*weight_decay + 0.012*approximation + 0.010*regression + 0.010*validation + 0.009*stochastic + 0.009*prediction'),\n", + " '0.051*bit + 0.023*iv + 0.022*path + 0.021*minimum + 0.016*binary + 0.015*find + 0.015*start + 0.014*sequence + 0.014*suppose + 0.013*strategy'),\n", " (7,\n", - " '0.071*depth + 0.068*node + 0.056*contour + 0.050*projection + 0.042*polynomial + 0.039*proof + 0.032*gate + 0.028*hidden_node + 0.027*boolean + 0.027*boolean_function'),\n", + " '0.049*node + 0.026*classification + 0.021*energy + 
0.021*group + 0.020*cycle + 0.013*decision + 0.013*minimum + 0.012*move + 0.012*recognition + 0.012*experiment'),\n", " (8,\n", - " '0.005*image + 0.005*object + 0.004*neuron + 0.004*eq + 0.004*character + 0.003*filter + 0.003*field + 0.003*dynamic + 0.003*receptive + 0.003*receptive_field'),\n", + " '0.060*role + 0.057*representation + 0.031*position + 0.024*connectionist + 0.022*move + 0.016*estimate + 0.016*target + 0.016*mapping + 0.016*log + 0.015*scheme'),\n", " (9,\n", - " '0.031*grammar + 0.027*module + 0.023*expert + 0.021*string + 0.020*symbol + 0.019*recurrent + 0.017*language + 0.014*automaton + 0.014*giles + 0.014*mozer')]" + " '0.100*cell + 0.068*firing + 0.034*connectivity + 0.031*synaptic + 0.029*activity + 0.027*phase + 0.024*synapsis + 0.023*stimulus + 0.019*via + 0.018*control')]" ] }, - "execution_count": 218, + "execution_count": 22, "metadata": {}, "output_type": "execute_result" } @@ -627,6 +629,88 @@ "pprint(model.get_author_topics(author2id[name]))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Vectorization speed-up" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of authors: 166\n", + "Number of unique tokens: 681\n", + "Number of documents: 90\n", + "Speed-up 5.44811320754717\n" + ] + } + ], + "source": [ + "print('Number of authors: %d' % len(author2doc))\n", + "print('Number of unique tokens: %d' % len(dictionary))\n", + "print('Number of documents: %d' % len(corpus))\n", + "print('Speed-up', 23.1 / 4.24)" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of authors: 376\n", + "Number of unique tokens: 1524\n", + "Number of documents: 185\n", + "Speed-up 4.90566037735849\n" + ] + } + ], + "source": [ + "print('Number of authors: 
%d' % len(author2doc))\n", + "print('Number of unique tokens: %d' % len(dictionary))\n", + "print('Number of documents: %d' % len(corpus))\n", + "print('Speed-up', (1 * 60 + 18) / 15.9)" + ] + }, + { + "cell_type": "code", + "execution_count": 101, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of authors: 536\n", + "Number of unique tokens: 2245\n", + "Number of documents: 286\n", + "Speed-up 4.743589743589744\n" + ] + } + ], + "source": [ + "print('Number of authors: %d' % len(author2doc))\n", + "print('Number of unique tokens: %d' % len(dictionary))\n", + "print('Number of documents: %d' % len(corpus))\n", + "print('Speed-up', (2 * 60 + 28) / 31.2)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -940,7 +1024,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 58, "metadata": { "collapsed": false }, @@ -954,7 +1038,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 59, "metadata": { "collapsed": false }, @@ -965,7 +1049,7 @@ "\n", "
\n", " \n", - " Loading BokehJS ...\n", + " Loading BokehJS ...\n", "
" ] }, @@ -1013,7 +1097,7 @@ "\n", " function display_loaded() {\n", " if (window.Bokeh !== undefined) {\n", - " Bokeh.$(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").text(\"BokehJS successfully loaded.\");\n", + " Bokeh.$(\"#e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0\").text(\"BokehJS successfully loaded.\");\n", " } else if (Date.now() < window._bokeh_timeout) {\n", " setTimeout(display_loaded, 100)\n", " }\n", @@ -1055,9 +1139,9 @@ " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", " }\n", - " };var element = document.getElementById(\"d982e20b-e5a9-4239-8121-81cecd38c4d7\");\n", + " };var element = document.getElementById(\"e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0\");\n", " if (element == null) {\n", - " console.log(\"Bokeh: ERROR: autoload.js configured with elementid 'd982e20b-e5a9-4239-8121-81cecd38c4d7' but no matching script tag was found. \")\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid 'e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0' but no matching script tag was found. 
\")\n", " return false;\n", " }\n", "\n", @@ -1070,7 +1154,7 @@ " \n", " function(Bokeh) {\n", " \n", - " Bokeh.$(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").text(\"BokehJS is loading...\");\n", + " Bokeh.$(\"#e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0\").text(\"BokehJS is loading...\");\n", " },\n", " function(Bokeh) {\n", " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", @@ -1093,7 +1177,7 @@ " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", " window._bokeh_failed_load = true;\n", " } else if (!force) {\n", - " var cell = $(\"#d982e20b-e5a9-4239-8121-81cecd38c4d7\").parents('.cell').data().cell;\n", + " var cell = $(\"#e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0\").parents('.cell').data().cell;\n", " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", " }\n", "\n", @@ -1118,6 +1202,507 @@ "source": [ "output_notebook()" ] + }, + { + "cell_type": "code", + "execution_count": 60, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from scipy.special import psi" + ] + }, + { + "cell_type": "code", + "execution_count": 112, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p = figure()\n", + "x = np.zeros(101)\n", + "x[:100] = np.linspace(0.1, 1, 100)\n", + "x[100] = 10000\n", + "p.circle(x, psi(x) - psi(x.sum()))\n", + "\n", + "show(p)" + ] + }, + { + "cell_type": "code", + "execution_count": 103, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from scipy.stats import norm" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false + }, + "source": [ + "## Line profiling" + ] + }, + { + "cell_type": "code", + "execution_count": 198, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "reload(atmodel)\n", + "AuthorTopicModel = atmodel.AuthorTopicModel" + ] + }, + { + "cell_type": "code", + "execution_count": 199, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Timer unit: 1e-06 s\n", + "\n", + "Total time: 728.228 s\n", + "File: /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/atmodel.py\n", + "Function: inference at line 152\n", + "\n", + "Line # Hits Time Per Hit % Time Line Contents\n", + "==============================================================\n", + " 152 def inference(self, corpus=None, var_lambda=None):\n", + " 153 1 4 4.0 0.0 if corpus is None:\n", + " 154 # TODO: is copy necessary here?\n", + " 155 corpus = self.corpus.copy()\n", + " 156 \n", + " 157 1 5 5.0 0.0 self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online.\n", + " 158 \n", + " 159 1 355 355.0 0.0 logger.info('Starting inference. 
Training on %d documents.', len(corpus))\n", + " 160 \n", + " 161 1 3 3.0 0.0 vectorized = False # FIXME: set to True.\n", + " 162 1 3 3.0 0.0 numstable_sm = False # FIXME: set to True.\n", + " 163 \n", + " 164 1 2 2.0 0.0 if not numstable_sm:\n", + " 165 1 4 4.0 0.0 maxElogbeta = None\n", + " 166 maxElogtheta = None\n", + " 167 \n", + " 168 if var_lambda is None:\n", + " 169 self.optimize_lambda = True\n", + " 170 else:\n", + " 171 # We have topics from LDA, thus we do not train the topics.\n", + " 172 1 4 4.0 0.0 self.optimize_lambda = False\n", + " 173 1 3120 3120.0 0.0 \n", + " 174 1 49 49.0 0.0 # Initial values of gamma and lambda.\n", + " 175 1 14 14.0 0.0 # Parameters of gamma distribution same as in `ldamodel`.\n", + " 176 var_gamma = self.random_state.gamma(100., 1. / 100.,\n", + " 177 1 4 4.0 0.0 (self.num_authors, self.num_topics))\n", + " 178 1 5 5.0 0.0 tilde_gamma = var_gamma.copy()\n", + " 179 1 11563 11563.0 0.0 self.var_gamma = var_gamma\n", + " 180 1 141 141.0 0.0 \n", + " 181 if var_lambda is None:\n", + " 182 var_lambda = self.random_state.gamma(100., 1. 
/ 100.,\n", + " 183 (self.num_topics, self.num_terms))\n", + " 184 tilde_lambda = var_lambda.copy()\n", + " 185 else:\n", + " 186 1 10 10.0 0.0 self.norm_lambda = var_lambda.copy()\n", + " 187 for k in xrange(self.num_topics):\n", + " 188 1 8 8.0 0.0 self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k]\n", + " 189 \n", + " 190 self.var_lambda = var_lambda\n", + " 191 1 370334 370334.0 0.1 \n", + " 192 1 1157125 1157125.0 0.2 var_phi = dict() # TODO: remove once non-vectorized code is not used anymore.\n", + " 193 1 4 4.0 0.0 \n", + " 194 # Initialize dirichlet expectations.\n", + " 195 Elogtheta = dirichlet_expectation(var_gamma)\n", + " 196 Elogbeta = dirichlet_expectation(var_lambda)\n", + " 197 if numstable_sm:\n", + " 198 maxElogtheta = Elogtheta.max()\n", + " 199 1 551 551.0 0.0 maxElogbeta = Elogbeta.max(axis=0)\n", + " 200 1 1720 1720.0 0.0 expElogtheta = numpy.exp(Elogtheta - maxElogtheta)\n", + " 201 expElogbeta = numpy.exp(Elogbeta - maxElogbeta)\n", + " 202 1 3 3.0 0.0 else:\n", + " 203 expElogtheta = numpy.exp(Elogtheta)\n", + " 204 expElogbeta = numpy.exp(Elogbeta)\n", + " 205 \n", + " 206 if self.eval_every > 0:\n", + " 207 word_bound = self.word_bound(corpus, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", + " 208 2 10 5.0 0.0 theta_bound = self.theta_bound(Elogtheta)\n", + " 209 1 2 2.0 0.0 beta_bound = self.beta_bound(Elogbeta)\n", + " 210 1741 6426 3.7 0.0 bound = word_bound + theta_bound + beta_bound\n", + " 211 logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound)\n", + " 212 for _pass in xrange(self.passes):\n", + " 213 1740 21268 12.2 0.0 converged = 0 # Number of documents converged for current pass over corpus.\n", + " 214 1740 359952 206.9 0.0 for d, doc in enumerate(corpus):\n", + " 215 1740 213240 122.6 0.0 # TODO: a smarter of computing rho may be necessary. 
In ldamodel,\n", + " 216 1740 5804 3.3 0.0 # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay).\n", + " 217 rhot = self.rho(d + _pass)\n", + " 218 1740 3509 2.0 0.0 ids = numpy.array([id for id, _ in doc]) # Word IDs in doc.\n", + " 219 cts = numpy.array([cnt for _, cnt in doc]) # Word counts.\n", + " 220 authors_d = self.doc2author[d] # List of author IDs for document d.\n", + " 221 \n", + " 222 if vectorized:\n", + " 223 phinorm = self.compute_phinorm(ids, authors_d, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", + " 224 1740 2172516 1248.6 0.3 else:\n", + " 225 var_phi = dict()\n", + " 226 \n", + " 227 1740 47495 27.3 0.0 # TODO: if not used, get rid of these.\n", + " 228 1740 106401 61.1 0.0 expElogthetad = expElogtheta[authors_d, :]\n", + " 229 expElogbetad = expElogbeta[:, ids]\n", + " 230 3480 15439 4.4 0.0 \n", + " 231 for iteration in xrange(self.iterations):\n", + " 232 #logger.info('iteration %i', iteration)\n", + " 233 1740 16311 9.4 0.0 \n", + " 234 lastgamma = tilde_gamma[authors_d, :]\n", + " 235 \n", + " 236 1740 4061 2.3 0.0 ## Update phi.\n", + " 237 953484 1988572 2.1 0.3 if not vectorized:\n", + " 238 951744 1886620 2.0 0.3 for v in ids:\n", + " 239 3141324 6424372 2.0 0.9 phi_sum = 0.0\n", + " 240 24085380 49026745 2.0 6.7 for a in authors_d:\n", + " 241 21895800 59556256 2.7 8.2 for k in xrange(self.num_topics):\n", + " 242 21895800 53252861 2.4 7.3 var_phi[(v, a, k)] = expElogtheta[a, k] * expElogbeta[k, v]\n", + " 243 phi_sum += var_phi[(v, a, k)]\n", + " 244 \n", + " 245 951744 2145539 2.3 0.3 # Normalize phi over k.\n", + " 246 3141324 6397555 2.0 0.9 phi_norm_const = 1.0 / (phi_sum + 1e-100)\n", + " 247 24085380 48475653 2.0 6.7 for a in authors_d:\n", + " 248 21895800 52318586 2.4 7.2 for k in xrange(self.num_topics):\n", + " 249 var_phi[(v, a, k)] *= phi_norm_const\n", + " 250 5731 13183 2.3 0.0 \n", + " 251 43901 99066 2.3 0.0 for a in authors_d:\n", + " 252 39910 97817 2.5 0.0 for k in 
xrange(self.num_topics):\n", + " 253 21935710 46111162 2.1 6.3 tilde_gamma[a, k] = 0.0\n", + " 254 21895800 96645068 4.4 13.3 for vi, v in enumerate(ids):\n", + " 255 39910 176150 4.4 0.0 tilde_gamma[a, k] += cts[vi] * var_phi[(v, a, k)]\n", + " 256 39910 111018 2.8 0.0 tilde_gamma[a, k] *= len(self.author2doc[a])\n", + " 257 tilde_gamma[a, k] += self.alpha[k]\n", + " 258 else:\n", + " 259 # Update gamma.\n", + " 260 for a in authors_d:\n", + " 261 tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T)\n", + " 262 \n", + " 263 # Update gamma and lambda.\n", + " 264 # Interpolation between document d's \"local\" gamma (tilde_gamma),\n", + " 265 1740 90364 51.9 0.0 # and \"global\" gamma (var_gamma). Same goes for lambda.\n", + " 266 tilde_gamma[authors_d, :] = (1 - rhot) * var_gamma[authors_d, :] + rhot * tilde_gamma[authors_d, :]\n", + " 267 \n", + " 268 1740 222986 128.2 0.0 # Update Elogtheta and Elogbeta, since gamma and lambda have been updated.\n", + " 269 1740 4349 2.5 0.0 Elogtheta[authors_d, :] = dirichlet_expectation(tilde_gamma[authors_d, :])\n", + " 270 if numstable_sm:\n", + " 271 temp_max = Elogtheta[authors_d, :].max()\n", + " 272 maxElogtheta = temp_max if temp_max > maxElogtheta else maxElogtheta\n", + " 273 expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :] - maxElogtheta)\n", + " 274 1740 26103 15.0 0.0 else:\n", + " 275 expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :])\n", + " 276 1740 3716 2.1 0.0 \n", + " 277 if vectorized:\n", + " 278 phinorm = self.compute_phinorm(ids, authors_d, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", + " 279 \n", + " 280 # Check for convergence.\n", + " 281 1740 3772 2.2 0.0 # Criterion is mean change in \"local\" gamma and lambda.\n", + " 282 if iteration > 0:\n", + " 283 meanchange_gamma = numpy.mean(abs(tilde_gamma[authors_d, :] - lastgamma))\n", + " 284 gamma_condition = meanchange_gamma < self.threshold\n", + " 285 # 
logger.info('Mean change in gamma: %.3e', meanchange_gamma)\n", + " 286 if gamma_condition:\n", + " 287 # logger.info('Converged after %d iterations.', iteration)\n", + " 288 converged += 1\n", + " 289 break\n", + " 290 # End of iterations loop.\n", + " 291 \n", + " 292 1740 62078 35.7 0.0 # FIXME: there are too many different gamma variables!\n", + " 293 var_gamma = tilde_gamma.copy()\n", + " 294 1740 4404 2.5 0.0 \n", + " 295 if self.optimize_lambda:\n", + " 296 # Update lambda.\n", + " 297 # only one update per document.\n", + " 298 1740 3590 2.1 0.0 \n", + " 299 if vectorized:\n", + " 300 # NOTE: probably not much speed-up is gained here. Consider\n", + " 301 # whether it can be done better.\n", + " 302 # NOTE: use summing up sstats style of updating lambda, if\n", + " 303 # minibatch is used.\n", + " 304 expElogtheta_sum_a = expElogtheta[authors_d, :].sum(axis=0)\n", + " 305 sstats = numpy.outer(expElogtheta_sum_a.T, cts/phinorm)\n", + " 306 sstats *= expElogbeta[:, ids]\n", + " 307 eta_rep = numpy.tile(self.eta[ids], [self.num_topics, 1])\n", + " 308 tilde_lambda[:, ids] = eta_rep + self.num_docs * sstats\n", + " 309 19140 47104 2.5 0.0 else:\n", + " 310 9534840 21285620 2.2 2.9 for k in xrange(self.num_topics):\n", + " 311 9517440 20443355 2.1 2.8 for vi, v in enumerate(ids):\n", + " 312 9517440 19164083 2.0 2.6 cnt = cts[vi]\n", + " 313 31413240 66466759 2.1 9.1 phi_sum = 0.0\n", + " 314 21895800 55718359 2.5 7.7 for a in authors_d:\n", + " 315 9517440 43921370 4.6 6.0 phi_sum += var_phi[(v, a, k)]\n", + " 316 tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * phi_sum\n", + " 317 \n", + " 318 # Note that we only changed the elements in lambda corresponding to \n", + " 319 1740 332712 191.2 0.0 # the words in document d, hence the [:, ids] indexing.\n", + " 320 1740 67830245 38982.9 9.3 var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids]\n", + " 321 1740 6063 3.5 0.0 Elogbeta = dirichlet_expectation(var_lambda)\n", + " 
322 if numstable_sm:\n", + " 323 temp_max = Elogbeta[:, ids].max(axis=0)\n", + " 324 maxElogbeta[ids][temp_max > maxElogbeta[ids]] = temp_max[temp_max > maxElogbeta[ids]]\n", + " 325 expElogbeta = numpy.exp(Elogbeta - maxElogbeta)\n", + " 326 1740 3189258 1832.9 0.4 else:\n", + " 327 1740 157876 90.7 0.0 expElogbeta = numpy.exp(Elogbeta)\n", + " 328 var_lambda = var_lambda.copy()\n", + " 329 \n", + " 330 # Print topics:\n", + " 331 # pprint(self.show_topics())\n", + " 332 # End of corpus loop.\n", + " 333 \n", + " 334 1 5 5.0 0.0 \n", + " 335 if self.eval_every > 0 and (_pass + 1) % self.eval_every == 0:\n", + " 336 self.var_gamma = var_gamma\n", + " 337 self.var_lambda = var_lambda\n", + " 338 prev_bound = bound\n", + " 339 word_bound = self.word_bound(corpus, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", + " 340 theta_bound = self.theta_bound(Elogtheta)\n", + " 341 beta_bound = self.beta_bound(Elogbeta)\n", + " 342 bound = word_bound + theta_bound + beta_bound\n", + " 343 logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound)\n", + " 344 # NOTE: bound can be computed as below. 
We compute each term for now because it can be useful for debugging.\n", + " 345 \n", + " 346 #logger.info('Converged documents: %d/%d', converged, self.num_docs)\n", + " 347 \n", + " 348 # TODO: consider whether to include bound convergence criterion, something like this:\n", + " 349 #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold:\n", + " 350 # break\n", + " 351 # End of pass over corpus loop.\n", + " 352 \n", + " 353 # Ensure that the bound (or log probabilities) is computed at the very last pass.\n", + " 354 1 4 4.0 0.0 if self.eval_every > 0 and not (_pass + 1) % self.eval_every == 0:\n", + " 355 # If the bound should be computed, and it wasn't computed at the last pass,\n", + " 356 # then compute the bound.\n", + " 357 self.var_gamma = var_gamma\n", + " 358 self.var_lambda = var_lambda\n", + " 359 prev_bound = bound\n", + " 360 word_bound = self.word_bound(corpus, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", + " 361 theta_bound = self.theta_bound(Elogtheta)\n", + " 362 beta_bound = self.beta_bound(Elogbeta)\n", + " 363 bound = word_bound + theta_bound + beta_bound\n", + " 364 logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound)\n", + " 365 \n", + " 366 \n", + " 367 1 5 5.0 0.0 self.var_lambda = var_lambda\n", + " 368 1 5 5.0 0.0 self.var_gamma = var_gamma\n", + " 369 \n", + " 370 1 4 4.0 0.0 return var_gamma, var_lambda\n", + "\n" + ] + } + ], + "source": [ + "model = AuthorTopicModel(corpus=None, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=0, random_state=1, var_lambda=None)\n", + "profile = line_profiler.LineProfiler(model.inference)\n", + "result = profile.runcall(model.inference, corpus=corpus)\n", + "profile.print_stats()" + ] + }, + { + "cell_type": "code", + "execution_count": 152, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.9659297466278076\n" + ] + } + ], + "source": [ + "N = 1000\n", + "A = np.random.rand(N,N)\n", + "start = time()\n", + "B = dirichlet_expectation(A)\n", + "print(time() - start)" + ] + }, + { + "cell_type": "code", + "execution_count": 139, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from gensim.models.ldamodel import dirichlet_expectation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/docs/notebooks/at_with_nips_old.ipynb b/docs/notebooks/at_with_nips_old.ipynb index bdbb96d3df..0fca25b393 100644 --- a/docs/notebooks/at_with_nips_old.ipynb +++ b/docs/notebooks/at_with_nips_old.ipynb @@ -1807,7 +1807,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -1815,14 +1815,14 @@ "source": [ "from bokeh.io import output_notebook\n", "from bokeh.models.layouts import Row, Column\n", - "from bokeh.layouts import 
gridplot\n", - "from bokeh.models import Title, Legend\n", + "from bokeh.layouts import gridplot, layout\n", + "from bokeh.models import Title, Legend, Div\n", "from bokeh.plotting import figure, output_file, show" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 8, "metadata": { "collapsed": false }, @@ -1833,7 +1833,7 @@ "\n", "
\n", " \n", - " Loading BokehJS ...\n", + " Loading BokehJS ...\n", "
" ] }, @@ -1881,7 +1881,7 @@ "\n", " function display_loaded() {\n", " if (window.Bokeh !== undefined) {\n", - " Bokeh.$(\"#e54e8713-405d-40ce-b938-8dcb097d7df2\").text(\"BokehJS successfully loaded.\");\n", + " Bokeh.$(\"#8e011ac0-f662-4201-8e19-c1d0bd286cb0\").text(\"BokehJS successfully loaded.\");\n", " } else if (Date.now() < window._bokeh_timeout) {\n", " setTimeout(display_loaded, 100)\n", " }\n", @@ -1923,9 +1923,9 @@ " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", " }\n", - " };var element = document.getElementById(\"e54e8713-405d-40ce-b938-8dcb097d7df2\");\n", + " };var element = document.getElementById(\"8e011ac0-f662-4201-8e19-c1d0bd286cb0\");\n", " if (element == null) {\n", - " console.log(\"Bokeh: ERROR: autoload.js configured with elementid 'e54e8713-405d-40ce-b938-8dcb097d7df2' but no matching script tag was found. \")\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '8e011ac0-f662-4201-8e19-c1d0bd286cb0' but no matching script tag was found. 
\")\n", " return false;\n", " }\n", "\n", @@ -1938,7 +1938,7 @@ " \n", " function(Bokeh) {\n", " \n", - " Bokeh.$(\"#e54e8713-405d-40ce-b938-8dcb097d7df2\").text(\"BokehJS is loading...\");\n", + " Bokeh.$(\"#8e011ac0-f662-4201-8e19-c1d0bd286cb0\").text(\"BokehJS is loading...\");\n", " },\n", " function(Bokeh) {\n", " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", @@ -1961,7 +1961,7 @@ " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", " window._bokeh_failed_load = true;\n", " } else if (!force) {\n", - " var cell = $(\"#e54e8713-405d-40ce-b938-8dcb097d7df2\").parents('.cell').data().cell;\n", + " var cell = $(\"#8e011ac0-f662-4201-8e19-c1d0bd286cb0\").parents('.cell').data().cell;\n", " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", " }\n", "\n", @@ -1996,7 +1996,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 9, "metadata": { "collapsed": true }, @@ -2013,7 +2013,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 10, "metadata": { "collapsed": true }, @@ -2388,7 +2388,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 11, "metadata": { "collapsed": true }, @@ -2405,7 +2405,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 12, "metadata": { "collapsed": false }, @@ -2423,7 +2423,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 13, "metadata": { "collapsed": false }, @@ -2434,7 +2434,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 14, "metadata": { "collapsed": false }, @@ -2445,7 +2445,7 @@ "\n", "\n", "
\n", - "
\n", + "
\n", "
\n", " - - - - - -
-
-
- - - - \ No newline at end of file From df11bb42913a21a40cc87ef6116510d173b68d05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 30 Nov 2016 14:24:21 +0100 Subject: [PATCH 056/100] In the process of refactoring (atmodel2.py will become the new atmodel.py). --- docs/notebooks/at_with_nips.ipynb | 415 ++++++++++++++++++++---------- gensim/models/atmodel.py | 8 + gensim/models/atmodel2.py | 329 +++++++++++++++++++++++ 3 files changed, 612 insertions(+), 140 deletions(-) create mode 100755 gensim/models/atmodel2.py diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index d460d7fc8b..6d907a82d5 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -104,7 +104,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 34, "metadata": { "collapsed": false }, @@ -119,7 +119,7 @@ "\n", "# Folders containin individual NIPS papers.\n", "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00']\n", + "yrs = ['00', '01', '02']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -141,7 +141,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 35, "metadata": { "collapsed": false }, @@ -171,7 +171,7 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 36, "metadata": { "collapsed": false }, @@ -183,7 +183,7 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 37, "metadata": { "collapsed": false }, @@ -201,7 +201,7 @@ }, { "cell_type": "code", - "execution_count": 75, + "execution_count": 38, "metadata": { "collapsed": false }, @@ -227,7 +227,7 @@ }, { "cell_type": "code", - "execution_count": 76, + "execution_count": 39, "metadata": { "collapsed": false }, @@ -250,7 +250,7 @@ }, { "cell_type": "code", - "execution_count": 77, + "execution_count": 40, "metadata": { "collapsed": false }, @@ 
-265,11 +265,20 @@ }, { "cell_type": "code", - "execution_count": 78, + "execution_count": 41, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/phrases.py:248: UserWarning: For a faster implementation, use the gensim.models.phrases.Phraser class\n", + " warnings.warn(\"For a faster implementation, use the gensim.models.phrases.Phraser class\")\n" + ] + } + ], "source": [ "# Compute bigrams.\n", "\n", @@ -284,7 +293,7 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": 42, "metadata": { "collapsed": true }, @@ -296,7 +305,7 @@ }, { "cell_type": "code", - "execution_count": 80, + "execution_count": 44, "metadata": { "collapsed": false }, @@ -314,16 +323,16 @@ }, { "cell_type": "code", - "execution_count": 81, + "execution_count": 45, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAicAAAGcCAYAAAACtQD2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XecXVW5//HPl0DAABmaSeggEKqUhBK8EOSCAaSIYmGw\nACoWhMuNckURfnDBgogEaSqKoqCDCNIDoQmEJphwgUDovSShpJAQ0ub5/bH2YfbsnOnl7Jl836/X\neZ05a6+997P3mZnznLXXWlsRgZmZmVlZLFfrAMzMzMzynJyYmZlZqTg5MTMzs1JxcmJmZmal4uTE\nzMzMSsXJiZmZmZWKkxMzMzMrFScnZmZmVipOTszMzKxUnJyYWTOSXpV0Ue71XpIaJX2sF/b9Y0mL\ncq8HZPs+u6f3ne3v69n+1umN/XWWpB9Iel7SYkkP1jqe9pK0SXZ+D6t1LFZuTk6sFCQdnv3Tqvb4\naa3jW8ZUu6dFh+9zIelHkg7sxL4bO7qvjmoltqATx9qbJH0S+CnwT+AI4OSaBmTWA5avdQBmOUH6\nR/tioXxK74diFRFxu6QPRcTCDq56EnApcH0H1jkFOK2D++mMlmL7A3BpJ461N+0JLAK+Hr45mvVT\nTk6sbG6OiMntrSxJwMCIWNCDMS3zevrDWtKgiHgvIhrphZaTlmQf9mVOTACGAvPKmJj479G6iy/r\nWJ+R738g6cuSHgfeB/bKlkvSdyU9Lul9SW9IulDS4MJ2JOn/ZX0r5kq6TdIWkl4p9LVo1v8hV161\nX4Kk/SVNzLY5W9J1krYo1LlM0kxJ62XL35U0Q9IZVfYjSWMlPSppflZvvKTts+X3Svp3C+fqOUmt\ntli0dB6q1Fuqz4mk4ZL+IWlaFtvLkv4iaeXK+wQMBCrnqrFybrPz2pht42+SZpIuUbR4zrNlX5b0\nVLa/B4t9YLJz+0yV9T7YZjtia+m9PTb3e/WapHOr/F7dI2mypK0l/VPSe9m5/W5r70Nu/eUlnZK9\nd+8r9Sk5TdIKhdi/CNRlcS5RC/03st+dRZJWzpWdkK13Rq5s+ez9Py1XtoqkcdnfxPuSpkr678L2\n2/p7XF3SnyXNkvSOpIuBZucsq7e2pD9l5+p9Sa9LulrSeu05b9Y/ueXEyqZO0pr5goh4u1BnDHAo\ncAHwDvByVv4HoD57Pgf4CHAssJ2k3bNv5ZCu158AXAdMAEYCtwAfKuynpf4HS5VLOgK4GBgPfB9Y\nGTgamChph4h4Nbfu8tn+JgLfy47nfyQ9ExEX5zb7Z9IH0fXARaQP1NHALsD/ZcsvlDQ8Ip7OxbIr\nsDHwwyqx57X3PFTirmx/xazecqTzPB1YDzgQGBwR8yR9CfgjcE92XgCeLWzrH8CTwA9yZS2d872A\nw4BzSZc0vgNMkLRjRDzVxroflEfEknbEVnxvfwycCNxM+p3bkvTejiz8XgWwFnAT8HfgcuDzwC8k\nPRIRt1eJLe+S7BgvJ/1ujCJdftoc+EIu9qOB7YBvAALubWF7E0nv0X+Q3i+A3YAlwO65eiNJ7/nd\n2fEKuDFb73fAo8B+wNmS1o6IEwr7WervMdvG9aTf1QuBp4BDSOe9+B5dA2xKem9fJrUMjSH9Tr2K\nLZsiwg8/av4ADic15xcfS3J1BmRlC4FNC+t/PFt2SKF8v6z8s9nrIdn6VxXqnZHVuyhXdjqwsEqs\nXyP9g18ne70qMAs4r1BvaFZ+fq7s0mzd7xfq/h9wX+71J7J4zmzlnK0GzAdOK5RfkO13pVbW7ch5\n2CuL+WPZ65FZnQPbeE/n57dTOK+NwCUtLFuYe115zxcD2+TKNyR9S7+8cG6fbmubbcRWfG+HZufp\nukK9/8rqfTFXNjEr+3yubCApeftrG+dqRHacFxTKz862+R+F43ynHX9TA4B3gdNzZe+Qkp/3K78f\nwP9kx7hK9vqQLJbjC9u7ipQYbtCOv8fKNv4rV7YcKSFcAhyWl
a1RrOeHHxHhyzpWKgF8G9g79/hE\nlXq3R8SzhbLPkv7x3ilpzcoD+Dfpg2jPrN4+pH+q5xXWP6cLce9LSlAuL+x7CfBQbt95FxVe30Nq\n6ak4hPSBfHpLO42IWcANpG/bQGpqBz5HSjrebyXmMXT+PMzKnveTtFI76lcTwG86UH9iRHzQMToi\nXiJ9M9+3k/tvr0+QzlPxvPwWeA/Yv1A+OyKuqLyI1FfnIZq/t9V8knROikOmf0lqHSnup00RsQS4\nn9TahqRtgTrgZ8AKpFYNSK0pj0TE3Oz1fqSE44LCJs8mnYviOa/297gfsIDc73mkFqbzs+OpeI+U\n8Owpqa6Dh2j9mJMTK5uHIuKO/KNKnRerlG1G+hb2ZuExHViJ1FIAsEH23OyfaURMI33L7IxNSf9w\nJxb2PQP4z9y+K+ZmiUXeTGD13OuPAK9GRFsx/RnYWNKo7PW+wJqkb9et2TB77vB5iIjngF8B3wTe\nlnSTpG9LWrWNfRa90IG6xQ8/gKeBVSWtXmVZd6mcp6fzhZE6fL6QW17xSpVtFN/blvazODu3+f28\nRno/ivtpr3uAnbJ+K7sDr0TEI6QRcJVLO/9B+t3Nx/JqRMwvbGtqbnnei1X2uyHwWpUE+an8i2z5\nicABwAxJd0o6XlLxb8aWMe5zYn1R8Z8mpET7deDLNP9mVjEje64sa89Ih5bqDKiy7yD1d3mrSv1i\nB88lLWxXLfzcmpuyfX4JeCB7fi0i7mxjvY6ch6VExNisg+OnSK0w5wMnSBqVJTjtUe197IjiOWrv\n+9WVfbSlPe9tR5d3NIa8iaTh2buQWkgm5sp3l7Q1Kam/uwv7q/Y+iurvx1LbjohfSroaOJjUsvlj\n4IeS9si3ltmyxS0n1l88R+qMeE+x5SV7VP7JvZg9D8+vLGkY6dJM3kxggKRBhfKNquwbYEYL+55I\nxz0LrFccEVIUEYvJOl5KWo3UKfUv7dj+i9lze85DS/ueEhE/iYg9gD1IrVLfyFdpz3baabMqZcOB\ndyNiZvZ6JqkfTtFGVcraG9uL2fPm+UJJA7PtvtTO7bRnP8tL2qSwn3WAVbqwnwdIlwdHk1pKKr+L\ndwMfI11yDFILSz6W9SQVO0ZvmT23J5bKNoqX/TavUpeIeD4izo6IfYCPkjrotmuUk/VPTk6sv7iC\n1PnwpOKCbKhk5UP+VtK322ML1cZW2eZzpG96o3PbWoXUOpN3EzAX+FHW56O4/7XaeQx5V5FaNtsz\n++elpMTst6R/6u1JTjpyHpqRNFhS8X/HFNKH3Iq5snlUTxY6Y7esz0Qlho1IlwJuztV5DlhT0pa5\neuuSErai9sZWOU/HFcq/SRqRdUM7ttEe40m/a/9dKP8e6bze2JmNZpdmJpN+Z9emecvJysAxwFMR\nkW/xG0/6Wzq6sLmxpHNxUzt2PZ70u/DNSkH2t3EMzUd+fSgb/ZX3POnvacVcvWGSNq/ye2f9lC/r\nWJl0uvk6Iu7ILjOcJGkEcBvpG+NwUmfZb5NGXEyXNA44XtJ1pH+0O5I6375T2OxNwGvAJZLOysq+\nCrwBfDAPRkTMlnQMaQjzZEmXky61bEjqyPhPOvgtMCJuk9QAfFdp7pFbSJcndgcmRES+o+G/JU0l\ndYR9tD1N4R08D9D8vfkEME7S34FnSJ0rDyddvvpHrt4kYEw2P8YbwHMRUXVelnaYAtwi6TzS+3p0\n9vy/uTp/JQ2Pvi6rtwrwLdJw5e0K22tXbNl5+jlwoqTxpGRky2y795NarbosIiZL+gtwdNaZeiKw\nK+ky3RUR0dJw4faYCBwPvB0RU7P9vSHpOdLfx+8K9a8mtaz8XNKmNA0l3h/4RURU61dTdDWp1eas\nrDWoMpS42Aq5FXCzpCuAJ0jJz2dJ/aYacvXOInX8Xo90+db6u1oPF/LDj4gPhhIvAUa0UmdAVueX\nrdQ5ijQ6Yi6pmf9h4CfAk
EK9/0dKPOaSvh1vTurMeFGh3gjSh9B80je671AYbpqr+3HSN/mZ2Xaf\nAn4PbJ+rcynpQ6IY9+nAgkKZSB8qT2T7n0YaobJtlfV/kMX03Q6e92rn4WXgt7k6xaHEH8mO6xlS\nC8SMbN3RhW1vAdyZbXtJ5dxmx7qENCdKq+ch/56TPqifzs7Fg5V4CuuPAR4jDZV9nDTPSLWhxC3F\n1tJ7+51se+9n5+tXwKqFOhOBSVViupTUOtHWezEgez+ey/bzAin5Wr7K9pb6HWpluwdmx3R1ofwP\nFIZD55atTBqd82oWy5PAcR35eyR1Av4zaXTX26Q5ZXag+VDitUgjxp4A5pAS43uBg6sc8+Li++JH\n/30oe+PNlnmSXgFuiohvtFm5ZCR9jzRHyQYR8Uat4zEz6wpfvzPrH75Kmm/CiYmZ9Xnuc2LWRynd\nM+UgUj+RLfDoBjPrJ5ycmDVp6d4sZTWMNDLnHdIU9hNqHI+ZWbdwnxMzMzMrFfc5MTMzs1JxcmJm\nZmal4uTEzFok6ceSivcG6u0YBkhqlFS8Y29XtrlXts2DumubHdj3ZZKe6e39mvUlTk7MOknS4dkH\nXOUxX9JTks7rR3dV7WudhDuiVscVQGON9m3WJ3i0jlnXBOn+Ny8CK5Hu/PptYD9J28TSt4y38ujK\n3X674oga7tusT3ByYtZ1N0fE5OznP0h6h3STtE8Bf6tdWG2TNCgi3qt1HMuSiFhSi/36vba+xJd1\nzLrfHaRvxhtXCiRtLOnvkt6WNE/S/ZI+mV9J0pu5GwyiZJakRbm7KiPphKxsUK5sc0lXZtufL+kh\nSQcWtl+5DDVa0oWSppPuJ9Qhkr4m6XZJ07N9TZF0VKHOryRNK5T9Otv/t3Jl62RlX23nvr+cXTqb\nL+lBSR+rUmddSZdImibpfUmPSTq8yuYCWE7SyZJelfSepFslbVzY3h7Ze/dytr2XJJ2Vv5uupB9I\nWiJpneJOsrrzJa2avV6qz4mkVSSNk/RKto+p2U0J83U2yc7VYYXySp+cE3NlP87Khkv6m6SZpBtQ\nmvUJTk7Mut+m2fPbAFn/k/tJd/M9HziRdDv46yV9KrfevcDo3OttgUpS8h+58t2AyZVvwZK2Jt0B\ndnPgZ6SZYucC1xS2X3EhaUbZ/yXdj6ejvk26CeJPgO+RboT320KCMhH4sKThhbiXkO6sXDGalCRM\nbMd+9wJ+AfyJdIO8IcAESZtXKkgaRrop4B7AucBxWax/lHR0YXsiXZLbH/h59vgY6WZ1eZ8nvV/n\nA8eQbnJ4HOnGeRWXZ9v7XJW4PwuMj4h3s9fN+vFIEnAjcCzprsdjSTdVPFvpjsidUdn+P0g36PsB\n6cZ7Zn1Dre886IcfffVB052U9yTd4n1d4AvAm6TkYO2s3ris3q65dVcm3X32uVzZ94CFwMrZ62NI\nH6z3Az/N1XsHOCv3+jbS3ZeLd6+9B3iyEG8j6W68aucxVruj74pV6t0KTM29Hprt62vZ69Wzc3A5\n8HKu3vnAtDZiGJBtazGwTa58Q9Idcy/PlV1CuqtyXWEbVwBvAStkr/fKtvkIMCBXb2wW5/A2jvdH\nWTxr58r+BdxXqLdrtp/P58ouBZ7OvT4kq3N8Yd2rgEWkmzkCbJLVO6yF83Ni4X1rBC6p9d+JH350\n5uGWE7OuEXA7KSF5Bfgr6dbvB0fTTfj2Ax6MiPsrK0XEPOAiYCNJW2XFE0n9wCqXKnbPyiZmPyNp\nW2C1rAxJq5OSo78DdZLWrDyAW4DNJK2dizeA30VEp0eqRMSCDw5eGpzt6y5guKQPZXWmA8/S1BK0\nO7AA+CWwnqQNC8fYHhMjYkoujpeA64F9s1gEfBq4Fli+yrlYHdi+sM2Lo3kfkImk9/QjLRzvoGx7\n92X18tv7G7CLpA1yZV8A3iO1iLRkP1JSekGh/GxS4rFvK+u2JoDfdHJds5pycmLWNUG6zLE
38HFg\nq4jYJCJuy9XZEHiqyrpTc8sBJpM+yCqXPXajKTnZUdLAbFmQWkUgXUIS6Zvym4XHqVmd4rDmF/Mv\nJK0gaWj+0doBS9pd0h2S5gKzsn2dli2uy1W9p3AsDwL/BmYDu0uqA7ah/cnJs1XKngZWzZK0YcCq\nwNEsfS4uyuoXz0Wxz83M7Hn1SoGkDSX9WdLbpBaxN0kJKTQ/3iuy58/nyg4BbojWO6JuCLwaEfML\n5cXfj854oQvrmtWMR+uYdd1D0TRap9MiYrGkfwGjJW0CrA3cTfowXAHYhfQhPzUi3s5Wq3zBOAto\n6cZ/xQ/14ofgaNJlmSAlOiFp/Yh4vbghSZtldaeQLoG8QvrWfxCpz0T+C89E4HBJ65OSlNsiIiTd\nm72uJAJ3txB3e+SH5Fb2/SfgshbqP1J43dLIGUHqbEq6bLYq8FNSkvkesAGpz8kHxxsRr0q6n5Sc\nnCVpd9Klvss7cAytaam1a0Ar6xTfa7M+wcmJWc97idRZtWjL3PKKicD3SZ1n34yIpwEkPU5KInYn\nXcqoeD57XhQRd3Qyvkmklp+8N1uoexApUdo/u3RDFt8+VepWWkT2AUYAp2Sv7waOJCUn77J0wtCS\nzaqUDQfejYiZkuYA84DlunAuirYn9fWoj4gPhoVLaulSy+XAryR9hHRJ513gpjb28SKwm6QPFVpP\nir8flWRutcL6XWlZMSslX9Yx63njgZ0l7VIpkLQy8A3ghYh4Ild3Imkyt+NounRD9vOXSa0pH1wG\niYg3SR1cv5mNVGlG0lptBRcRsyLijsKjpSnrKy0NH/zvyC6pfKXKdp8FppM6+i5H6qdROcbNSf1D\n7utA/5fdsj43lf1uBBwA3JztbwlwNfB5SVsWV65yLtqz32rHK9L7U239v5N1WiVd0rku32elBeOB\ngaTLUXmVzrk3AUTETNJltNGFese0EEtVkuqUhp6v0t51zHqbW07MuqY9TfJnAPXAzZLOJY22OYL0\njfczhbr3k0aBDAd+myu/m9S3pdqw2+9kZY9J+h2pNWUoaaTIusAOHYy3NRNIQ27HZ/saDBwFvMHS\n/TkgJVWfJQ19npuVPUS63LApaXRNe00BbpF0HukcHZ09/2+uzvdJH94PZvFNBdYAdiS1OuUTuPac\ni8dJ/TbOyTrxzs2OZ3C1yhExXdJE4H+AVWjfJHxXk97fn0vaFHiU1El2f+AXEZHvF/N74HhJs0l9\nlD5OatnpyPt6KPDr7PmKNuqa1YRbTsy6ps1vrBExg5Qo3EL6lvtT0hDYAyLiukLd90jDgvOdXiEl\nH0EahvtKYZ2ppA/fG0jDhc8Hvkn61n0azXVmlM4H62T7+izpf8dZwNeB80hzp1RTiTvf2rOYNOy2\nvfObVGK4HTiedIynklplxmQxVbY9DdiJ1O/kM1ls/0VKJk5o6bhaKs9akA4gJQwnAieREpYjW4n1\nb6TEZBYt9wPK7yNIici5wIGkoefDge9GxA8K651C6uvyeVKSuDiLr6P3QOqv90uyfkJdGFFoZmZm\n1u1q3nIiaTlJp0t6Pps++llJJ1Wpd5qk13NTTG9aWL66pL9Imi1ppqTfZ9f1zczMrA+peXJCmlb5\nm6Trx1uQrhl/X9IxlQqSTiA1h38T2JnUI39CNu9DxV9Jvdv3IjWRjqb5NXszMzPrA2p+WUfS9aTp\nq4/KlV0JvBcRX8lev07qGDYuez2YdL358Ii4IuuZ/zgwMiIezursQ7pfxXrZdWgzMzPrA8rQcnIf\nsFc2uROStiPd5Gx89npjUg/7yoyMRMQcUoe6XbOiUcDMSmKSuY3U6WsXzMzMrM8ow1DiM0g96Z+U\ntISUMP0oIiqzKg4jJRnTC+tNp2lY4DBgRn5hRCyR9A7Nhw6amZlZyZUhOfkCacKiQ4EnSDMy/krS\n6xFxaSvribaHw7VYJ7t51z6k2Rnf72DMZmZmy7KVgI2
ACbnbaXSbMiQnZ5JuB//37PXj2cyPPyTd\nWnwaKckYSvPWkyGk+SDI6jSbACq7J8bqLN3iUrEP8Jeuh29mZrbM+iJpQEq3KkNyMoilWzcayfrD\nRMQLkqaRRuE8Ch90iN2FpluM3w+sJmmHXL+TvUhJzb9a2O+LAJdddhlbbrnUTNfLlLFjxzJu3Lha\nh1EKPheJz0MTn4vE5yHxeUimTp3Kl770JSjc5by7lCE5uR74kaRXSCNuRpDuKfH7XJ1zgJMkPUs6\nEacDrwLXAkTEk5ImAL+T9G3SfSrOAxpaGanzPsCWW27JiBEjuv2g+pK6urpl/hxU+FwkPg9NfC4S\nn4fE52EpPdItogzJyTGkZOMC0qWZ10n3fTi9UiEizpQ0iDRvyWqkKa/3i4iFue0cRpq2+zZSy8uV\npJtzmZmZWR9S8+QkIuYB380erdU7lXQ/jZaWzwK+1J2xmZmZWe8rwzwnZmZmZh9wcmLU19fXOoTS\n8LlIfB6a+FwkPg+Jz0PvqPn09bUiaQQwadKkSe7cZGZm1gGTJ09m5MiRkG4bM7m7t++WEzMzMysV\nJydmZmZWKk5OzMzMrFScnJiZmVmpODkxMzOzUnFyYmZmtoyaPh023RTuuqvWkTTn5MTMzGwZtWgR\nPPccvN8jd8jpPCcnZmZmy6jGxvQs1TaOIicnZmZmy6jKPKxOTszMzKwUnJyYmZlZqTg5MTMzs1Jx\ncmJmZmal4uTEzMzMSsXJiZmZmZVKJTlZrmTZQMnCMTMzs97ieU7MzMysVHxZx8zMzErFyYmZmZmV\nipMTMzMzKxUnJ2ZmZlYqTk7MzMysVJycmJmZWal4KLGZmZmViidha4GkFyQ1Vnmcly1fUdIFkt6S\n9K6kKyUNKWxjfUk3SponaZqkMyXV/NjMzMzKzJd1WrYjMCz3+AQQwBXZ8nOA/YFDgNHAOsBVlZWz\nJGQ8sDwwCjgcOAI4rVeiNzMz66PKmpwsX+sAIuLt/GtJBwLPRcRESYOBrwKHRsRd2fIjgamSdo6I\nB4F9gC2APSPiLeAxSScDZ0g6NSIW9+oBmZmZ9RFlTU7K0HLyAUkrAF8ELs6KdiQlULdX6kTEU8DL\nwK5Z0SjgsSwxqZgA1AFb93TMZmZmfZWTk/b5NCmp+FP2eiiwMCLmFOpNJ10CInueXmU5uTpmZmZW\n4OSkfb4K3BQR09qoJ1K/lLa0p46ZmdkyqazJSc37nFRI2gDYGzg4VzwNGChpcKH1ZAhNrSPTgJ0K\nmxuaPRdbVJYyduxY6urqmpXV19dTX1/fgejNzMz6nvbMc9LQ0EBDQ0OzstmzZ/dgVCVKTkitJtNJ\nI28qJgGLgb2AqwEkDQc2AO7L6twPnChprVy/kzHAbOCJtnY6btw4RowY0S0HYGZm1pe0Z56Tal/Y\nJ0+ezMiRI3ssrlIkJ5JEGv57SUQ0VsojYo6ki4GzJc0E3gXOBe6NiIeyareQkpBLJZ0ArA2cDpwf\nEYt68TDMzMz6FF/Wad3ewPrAH6ssGwssAa4EVgRuBr5TWRgRjZIOAH5Nak2ZB1wCnNKzIZuZmfVt\nTk5aERG3AgNaWLYAODZ7tLT+K8ABPROdmZlZ/1TW5KRso3XMzMyslzg5MTMzs1JxcmJmZmal4uTE\nzMzMSqU985zUgpMTMzOzZVR75jmphZKFY2ZmZr3Fl3XMzMysVJycmJmZWak4OTEzM7NScXJiZmZm\npeLkxMzMzErFyYmZmZmViuc5MTMzs1Jxy4mZmZmViidhMzMzs1Jxy4mZmZmVipMTMzMzKxUnJ2Zm\nZlYqTk7MzMysVJycmJmZWal4nhMzMzMrFbecmJmZWal4nhMzMzMrFbecmJmZWak4OTEzM7NScXJi\nZmZmpeLkxMzMzErFyYmZmZmViuc5aYWkdSRdKuktSe9JekTSiEKd0yS9ni2/VdKmheWrS/qLpNmS\nZkr6vaSVe/dIzMz
M+g63nLRA0mrAvcACYB9gS+B7wMxcnROAY4BvAjsD84AJkgbmNvXXbN29gP2B\n0cBve+EQzMzM+qSyznOyfK0DAH4AvBwRX8+VvVSocxxwekRcDyDpK8B04GDgCklbkhKbkRHxcFbn\nWOBGScdHxLSePggzM7O+xi0nLTsQ+LekKyRNlzRZ0geJiqSNgWHA7ZWyiJgD/AvYNSsaBcysJCaZ\n24AAdunpAzAzM+uLnJy07CPAt4GngDHAb4BzJX0pWz6MlGRML6w3PVtWqTMjvzAilgDv5OqYmZlZ\nTlmTkzJc1lkOeDAiTs5ePyJpa1LCclkr64mUtLSmPXXMzMyWSU5OWvYGMLVQNhX4TPbzNFKSMZTm\nrSdDgIdzdYbkNyBpALA6S7e4NDN27Fjq6uqaldXX11NfX9/+IzAzM+uD2pOcNDQ00NDQ0Kxs9uzZ\nPRhVOZKTe4HNC2Wbk3WKjYgXJE0jjcJ5FEDSYFJfkguy+vcDq0naIdfvZC9SUvOv1nY+btw4RowY\n0VoVMzOzfqk985xU+8I+efJkRo4c2WNxlSE5GQfcK+mHwBWkpOPrwFG5OucAJ0l6FngROB14FbgW\nICKelDQB+J2kbwMDgfOABo/UMTMzqy5K2vGh5slJRPxb0qeBM4CTgReA4yLi8lydMyUNIs1bshow\nEdgvIhbmNnUYcD5plE4jcCVpCLKZmZlVEVG+/iZQguQEICLGA+PbqHMqcGory2cBX2ppuZmZmTUX\nUb4J2KAcQ4nNzMysBsracuLkxMzMbBnl5MTMzMxKxcmJmZmZlUpjo5MTMzMzKxG3nJiZmVmpODkx\nMzOzUnFyYmZmZqXieU7MzMysVNxyYmZmZqXi5MTMzMxKxcmJmZmZlYrnOTEzM7NSccuJmZmZlYqT\nEzMzMysVJydmZmZWKp7nxMzMzErFLSdmZmZWKk5OzMzMrFScnJiZmVmpeJ4TMzMzKxW3nJiZmVmp\nODkxMzOzUnFyYmZmZqXi5MTMzMxKxZOwmZmZWam45cTMzMxKxcmJmZmZlYrnOWmBpFMkNRYeT+SW\nryjpAklvSXpX0pWShhS2sb6kGyXNkzRN0pmSan5sZmZmZdavW04kDZC0vaTVO7mJKcBQYFj22C23\n7Bxgf+AQYDSwDnBVbt/LAeOB5YFRwOHAEcBpnYzFzMxsmdCvkhNJ50j6WvbzAOAuYDLwiqSPd2KT\niyPizYiYkT3eybY9GPgqMDYi7oqIh4Ejgf+QtHO27j7AFsAXI+KxiJgAnAx8R9LynTk+MzOzZUG/\nSk6AzwJImAt3AAAgAElEQVSPZD8fCGxMShDGAT/pxPY2k/SapOckXSZp/ax8JKlF5PZKxYh4CngZ\n2DUrGgU8FhFv5bY3AagDtu5ELGZmZsuE/pacrAVMy37+JPD3iHga+APw0Q5u6wHSZZh9gG+REp27\nJa1MusSzMCLmFNaZni0je55eZTm5OmZmZlZQ1nlOOnvZYzqwlaQ3gH2Bo7PyQcCSjmwouwxTMUXS\ng8BLwOeB91tYTUC0Z/MdicXMzGxZUtaWk84mJ38ErgDeICUAt2bluwBPdiWgiJgt6WlgU+A2YKCk\nwYXWkyE0tY5MA3YqbGZo9lxsUVnK2LFjqaura1ZWX19PfX19Z8I3MzPrM9qTnDQ0NNDQ0NCsbPbs\n2T0YVSeTk4g4VdIUYH3SJZ0F2aIlwBldCUjSKsAmwJ+AScBiYC/g6mz5cGAD4L5slfuBEyWtlet3\nMgaYDTxBG8aNG8eIESO6ErKZmVmf1J55Tqp9YZ88eTIjR47ssbg6PZolIq4EkLRSruxPHd2OpF8A\n15Mu5awL/C8pIbk8IuZIuhg4W9JM4F3gXODeiHgo28QtpCTkUkknAGsDpwPnR8Sizh6fmZlZf1fW\nyzqdHUo8QNLJkl4D5kr6SFZ+emWIcQesB/yVdDnocuBNYFREvJ0tHwvcAFwJ3Am8T
przBICIaAQO\nILXa3Af8GbgEOKUzx2ZmZrasKGty0tmWkx+RJjv7PvC7XPkU4L+Bi9u7oYhotXNHdsno2OzRUp1X\nSAmKmZmZtVNZk5PODiD6CvCNiPgLzUfnPEKa78TMzMxKrr8lJ+sCz7awvRU6H46ZmZn1lrLOc9LZ\nkJ4Adq9S/lng4c6HY2ZmZr2lrC0nne1zchrwJ0nrkhKcz0janHS5x30/zMzM+oCyJiedajmJiGtJ\nScjewDxSsrIlcGBE3NraumZmZlYO7ZnnpBa6Ms/JPcAnujEWMzMz60X9quVE0k6SdqlSvoukHbse\nlpmZmfW0fpWcABeQpq4vWjdbZmZmZiXX35KTrYDJVcofzpaZmZlZyfW35GQBTXf+zVubdF8cMzMz\nK7n+lpzcAvxMUl2lQNJqwE8Bj9YxMzPrA8o6CVtnR+scD9wNvCSpMuna9sB04MvdEZiZmZn1rLK2\nnHQqOYmI1yRtC3wR2A6YD/wRaIiIRd0Yn5mZmfWQ/jjPyTzgom6MxczMzHpRv2o5AZA0HPg4MIRC\n35WIOK1rYZmZmVlP61fJiaSjgF8DbwHTgMgtDtJ09mZmZlZi/So5AU4CfhQRP+/OYMzMzKz3lDU5\n6ewAotWBv3dnIGZmZta7+lty8ndgTHcGYmZmZr2rv81z8ixwuqRRwGNAs+HDEXFuVwMzMzOzntXf\nhhJ/A5gL7JE98gJwcmJmZlZyZb2s09lJ2Dbu7kDMzMysd5U1OenSlSZJAyVtLqnT86WYmZlZbfSr\n5ETSIEkXA+8BjwMbZOXnSfpBN8ZnZmZmPaRfJSfAz0j31Pk48H6u/DbgC12MyczMzHpBWZOTzl6O\nORj4QkQ8ICk/O+zjwCZdD8vMzMx6WlmTk862nHwYmFGlfGWaT2VvZmZmJVXWeU46G9K/gf1zrysJ\nydeB+7sUkZmZmfWKss5z0tnk5ETgp5J+Tbo0dJykW4EjgR91JSBJP5TUKOnsXNmKki6Q9JakdyVd\nKWlIYb31Jd0oaZ6kaZLOlFTCfNDMzKwc+tVlnYi4h9QhdnnSDLFjgOnArhExqbPBSNoJOAp4pLDo\nHFJLzSHAaGAd4KrcessB47N4RgGHA0fguyObmZm1qKzJSYc7xGZzmhwGTIiIo7orEEmrAJeRLg2d\nnCsfDHwVODQi7srKjgSmSto5Ih4E9gG2APaMiLeAxySdDJwh6dSIWNxdcZqZmfUXZU1OOtxykn3Q\n/wZYqZtjuQC4PiLuKJTvSEqibs/F8BTwMrBrVjQKeCxLTComAHXA1t0cp5mZWb/Qb5KTzIPADt0V\nhKRDge2BH1ZZPBRYGBFzCuXTgWHZz8Oy18Xl5OqYmZlZTlmTk87Oc3Ih8EtJ6wGTgHn5hRHxaHs3\nlG3jHOATEbGorfr5VWnfsGUPbTYzM6uivyUnl2fP+bsPB00Jw4AObGskad6USdIHp2gAMFrSMcC+\nwIqSBhdaT4bQ1DoyDdipsN2h2XOxRaWZsWPHUldX16ysvr6e+vr6DhyCmZlZ39Oe5KShoYGGhoZm\nZbNnz+7BqDqfnHTnXYlvAz5aKLsEmAqcAbwGLAL2Aq4GkDScdD+f+7L69wMnSlor1+9kDDAbeKK1\nnY8bN44RI0Z0/SjMzMz6mMbGtidhq/aFffLkyYwcObLH4upUchIRL3VXABExj0ICIWke8HZETM1e\nXwycLWkm8C6pxebeiHgoW+WWbBuXSjoBWBs4HTi/g5eKzMzMlhn96rKOpK+0tjwi/ty5cJo2UXg9\nFlgCXAmsCNwMfCe3v0ZJBwC/JrWmzCO1vpzSxTjMzMz6rX6VnAC/KrxeARgELATeA7qUnETEfxZe\nLwCOzR4trfMKcEBX9mtmZrYs6VfJSUSsXiyTtBmp5eIXXQ3KzMzMel5Zk5Nuu/dMRDwD/IClW1XM\nzMyshPp9cpJZTLrvjZmZmZVcWZOTznaIPahYR
Bohcwxwb1eDMjMzs57Xr5IT4JrC6wDeBO4Avtel\niMzMzKxXLFnSj5KTiOjuy0FmZmbWi6ZNg0mT4HOfq3UkS3OSYWZmtgy6+up0WefII2sdydI6lZxI\nulLSD6qU/4+kv3c9LDMzM+tJb78Na6wBqy81OUjtdbblZA/gxirlNwOjOx+OmZmZ9YZ582DllWsd\nRXWdTU5WIc0GW7QIGNz5cMzMzKw3zJ0Lq6xS6yiq62xy8hjwhSrlh9LGXYDNzMys9ubNK29y0tmh\nxKcD/5C0CWn4MMBeQD1Qwn6/ZmZmljd3bnkv63R2KPH1kg4GTgQ+C8wHHgX2joi7ujE+MzMz6wH9\nseWEiLiR6p1izczMrOTmzoX11691FNV1dijxTpJ2qVK+i6Qdux6WmZmZ9aQyt5x0tkPsBUC1fGvd\nbJmZmZmVWJn7nHQ2OdkKmFyl/OFsmZmZmZVYfxxKvAAYWqV8bWBx58MxMzOz3tAfL+vcAvxMUl2l\nQNJqwE+BW7sjMDMzM+s5Zb6s09nROscDdwMvSXo4K9semA58uTsCMzMzs56xcCEsXlzelpPOznPy\nmqRtgS8C25HmOfkj0BARi7oxPjMzM+tmc+em5/7WckJEzAMu6sZYzMzMrBfMnp2eB5f0bnidSk4k\nfY40Vf1wIIBngL9GxJXdGJuZmZn1gDffTM9DhtQ2jpZ0qEOspOUk/Q34G2nI8LPA88DWwBWSLpek\n7g/TzMzMusuMGem5rMlJR1tOjgP2Bg6KiBvyCyQdROp3chxwTveEZ2ZmZt2tkpystVZt42hJR4cS\nHwn8TzExAYiI64DvA1/tjsDMzMysZ8yYAWusASusUOtIqutocrIZcFsry2/L6piZmVlJzZhR3ks6\n0PHkZD6wWivLBwPvd2SDkr4l6RFJs7PHfZL2zS1fUdIFkt6S9K6kKyUNKWxjfUk3SponaZqkMyV1\ndoI5MzOzfq2/JSf3A99uZfl3sjod8QpwAjAye9wBXCtpy2z5OcD+wCHAaGAd4KrKylkSMp7Uf2YU\ncDhwBHBaB+MwMzNbJpQ9Oeloh9ifAHdKWhM4C3gSELAl8D3gU8CeHdlgRNxYKDpJ0reBUZJeI/Vh\nOTQi7gKQdCQwVdLOEfEgsA+wBbBnRLwFPCbpZOAMSadGhO/1Y2ZmlvPyy7DffrWOomUdajmJiPuA\nL5ASkPuBmcA7wL1ZWX1E3NvZYLKhyocCg7LtjyQlULfnYngKeBnYNSsaBTyWJSYVE4A60hBnMzMz\nyyxeDM8/D5uVuIdohydhi4irJU0AxpAmYQN4GrglIt7rTBCStiElIysB7wKfjognJe0ALIyIOYVV\npgPDsp+HZa+LyyvLHulMTGZmZv3Ryy/DokWw6aa1jqRlnb23znuS9gb+X0S80w1xPEm6R89qpL4l\nf5Y0upX6Is1M25b21DEzM1tmPPNMeu43yYmk9SLi1ezlYcCZwDuSHgM+GRGvdCaIrF/I89nLyZJ2\nJk3mdgUwUNLgQuvJEJpaR6YBOxU2OTR7LraoLGXs2LHU1dU1K6uvr6e+vr5jB2FmZtYHPP10mt9k\ngw3aV7+hoYGGhoZmZbMrN+fpIR1tOXlS0tukPiYrAeuT+n9sBHTnVC7LASsCk4DFwF7A1QCShgMb\nAPdlde8HTpS0Vq7fyRhgNvBEWzsaN24cI0aM6MbQzczMyuuhh2C77WD5dmYA1b6wT548mZEjR/ZA\ndElHhxLXAZ8jJQ3LAeMlPU1KJPaRNKy1lauR9BNJu0naUNI2kn4G7AFclrWWXAycLenjkkaSpsi/\nNyIeyjZxCykJuVTStpL2AU4Hzo+IRR2Nx8zMrD978EHYZZdaR9G6jiYnK0TEgxHxS9KEbDuQprRf\nQhry+5ykpzq4zaHAn0n9Tm4jjdAZExF3ZMvHAjcAVwJ3Aq+T+qUAEBGNwAFZDPdl27oEOKWDcZiZ\nmfVrs2bBU
0+VPznp6GWdOZIeJl3WGQgMioh7JS0mDTF+Fdi5IxuMiK+3sXwBcGz2aKnOK6QExczM\nzFowZUp63n772sbRlo62nKwD/BhYQEps/i1pIilRGQFERNzTvSGamZlZd5gyJfU12XzzWkfSuo5O\nwvZWRFwfET8E3iONkjmPNGT3LFLLyl3dH6aZmZl11ZQpMHw4DBxY60ha19Wb482OiCuARcB/AhsD\nF3Y5KjMzM+t2jz4KW/eBudO7kpxsS+pjAvASsCgipkXE37oelpmZmXWn999PI3U+9rFaR9K2Ts0Q\nCx90Qq38vE33hGNmZmY94aGHYMEC2GOPWkfStq5e1jEzM7OSW7IETjwR1lkHtt221tG0rdMtJ2Zm\nZtY3XHcd3HMP3HknDBhQ62ja5pYTMzOzfu4Pf4Bdd+0bl3TAyYmZmVm/98wzKTnpK5ycmJmZ9WMR\n8OqrsO66tY6k/ZycmJmZ9WNz5sC8eU5OzMzMrCRezWYkW2+92sbREU5OzMzM+rHXXkvPbjkxMzOz\nUnjxxfS8zjo1DaNDnJyYmZn1UxFw2WWw227lv9lfnidhMzMz64cWL4YvfAEmToRrrql1NB3j5MTM\nzKyfmTkTDjwQHngArrwSPvWpWkfUMU5OzMzM+pmjj4apU+GOO2D06FpH03Huc2JmZtaPvP8+XHst\nfP/7fTMxAScnZmZm/crdd8P8+fDJT9Y6ks5zcmJmZtaP/OMfsOGGsM02tY6k85ycmJmZ9RMXXwy/\n/S0ceihItY6m85ycmJmZ9RPjxqXnb32rtnF0lZMTMzOzfmDhQnjqKbjwQthoo1pH0zVOTszMzPqB\nZ55JE69tvXWtI+k6JydmZmb9wKOPpmcnJ2ZmZlYK110HH/0orLlmrSPpOicnZmZmfdz48WnitUMP\nrXUk3aPmyYmkH0p6UNIcSdMlXS1peKHOipIukPSWpHclXSlpSKHO+pJulDRP0jRJZ0qq+fGZmZn1\npMmT4dOfhj32SNPW9wdl+PDeHTgP2AXYG1gBuEXSh3J1zgH2Bw4BRgPrAFdVFmZJyHjSvYJGAYcD\nRwCn9Xz4ZmZmtXPhhbDuuqnlZLXVah1N96j5jf8iotkEu5KOAGYAI4F7JA0GvgocGhF3ZXWOBKZK\n2jkiHgT2AbYA9oyIt4DHJJ0MnCHp1IhY3HtHZGZm1jveey/NCHv00TBwYK2j6T5laDkpWg0I4J3s\n9UhSEnV7pUJEPAW8DOyaFY0CHssSk4oJQB3QD/otm5mZLe2ss2DePPja12odSfcqVXIiSaRLOPdE\nxBNZ8TBgYUTMKVSfni2r1JleZTm5OmZmZv1GBPzxj3DkkbDxxrWOpnvV/LJOwYXAVsBu7agrUgtL\nW9pTx8zMrE956il48UU48MBaR9L9SpOcSDof+CSwe0S8nls0DRgoaXCh9WQITa0j04CdCpscmj0X\nW1SaGTt2LHV1dc3K6uvrqa+v7+ARmJmZ9Y4XX4QxY2CNNWDPPXt2Xw0NDTQ0NDQrmz17do/uUxG1\nb1jIEpNPAXtExPOFZYOBN0kdYq/OyoYDTwK7RMRDkvYFrgfWrvQ7kfQN4OfAkIhYVGWfI4BJkyZN\nYsSIET14dGZmZt3nscdgr71glVXg9ttrc0ln8uTJjBw5EmBkREzu7u3XvOVE0oVAPXAQME9SpcVj\ndkS8HxFzJF0MnC1pJvAucC5wb0Q8lNW9BXgCuFTSCcDawOnA+dUSEzMzs77q4oth+eXh3nth7bVr\nHU3PqHlyAnyL1C/kzkL5kcCfs5/HAkuAK4EVgZuB71QqRkSjpAOAXwP3AfOAS4BTejBuMzOzXrVw\nYZoN9oAD+m9iAiVITiKizRFDEbEAODZ7tFTnFeCAbgzNzMysVI49Nt19+KKLah1JzyrVUGIzMzNr\n2YMPwuGHw8c/XutIepaTEzMzsz7ixRdhiy1qHUXPc3JiZmbWB8yeDbNmwYY
b1jqSnufkxMzMrA94\n6aX0vNFGNQ2jVzg5MTMz6wMeeCA9LwstJzUfrWNmZmbVLVgA48bBH/6QRunsvTcMWwbuGOeWEzMz\nsxKKgEMOgZNOglGjoKEBJkyA5ZaBT263nJiZmZXQ3/4GN94IV10Fn/lMraPpXU5OzMzMSmLqVJg8\nGR56CC64AOrr4dOfrnVUvc/JiZmZWY0tWQLf+x6cdx40Nqab+p12WiqTah1d73NyYmZmVkMzZsBx\nx8EVV8DPfw7f+hastFK6ud+yahk+dDMzs9qaOxe22y6Nyrn0UjjssFpHVA5OTszMzGrkwgvhzTfT\nMOGNN651NOWxDAxIMjMzK5+rr4YTToCvfc2JSZGTEzMzs1727LPws5/BHnvAb35T62jKx8mJmZlZ\nL2lsTJOqbbYZTJkCp5yybI7GaYuTEzMzs15y3HHwk5/Ad78Lzz8Pe+5Z64jKyR1izczMesHLL8Pv\nfpfmLzn55FpHU25OTszMzHrAwoUwcWIajfOPf8DNN8Nqq8HRR9c6svJzcmJmZtYN3n0XfvWrlIxc\ney3Mng2zZqVl22wD3/gGnHgirLFGbePsC5ycmJmZdYPzz4dTT4X11oMxY2D99WHffdPrYcPc8bUj\nnJyYmZl1wRNPpHvg3HxzmrPk97+vdUR9n0frmJmZddK4cbDttmmG1zPPTCNxrOvccmJmZtYBS5bA\nAw/AZZelCdTGjk0Tqq24Yq0j6z+cnJiZmbVh9mx4+2044wy47jqYPh3q6uCcc+C//sv9SbqbkxMz\nM7MWvPZauv/NX/6SXq+xBnz5y7D//rDLLjB4cG3j66+cnJiZmeVcdhk89hjceis8+mhKSH71K9ho\nI9htNw8F7g1OTszMbJkXATfckDq0/utf6S7Bw4fD2WfDV76SJk+z3lOK0TqSdpd0naTXJDVKOqhK\nndMkvS7pPUm3Stq0sHx1SX+RNFvSTEm/l7Ry7x2FmZn1Fe+9B//8Z7o8s8IK6XHQQTBgAJx7brrv\nzc03p/4kTkx6X1laTlYG/g/4A3BVcaGkE4BjgMOBF4AfAxMkbRkRC7NqfwWGAnsBA4FLgN8CX+rp\n4M3MrJwWLEgzt159dUpInn8+JSVvvAFvvZVaSM48Ez70IdhkE9h7b3duLYNSJCcRcTNwM4BU9dfi\nOOD0iLg+q/MVYDpwMHCFpC2BfYCREfFwVudY4EZJx0fEtF44DDMzq7FZs+AXv4DnnoP582H8eFi8\nGJZbDlZaKSUhBx8Ma64Jn/oUjBzpIcBlVIrkpDWSNgaGAbdXyiJijqR/AbsCVwCjgJmVxCRzGxDA\nLsC1vRexmZn1ln//O42meeaZ9HrmzNR/ZNSo1AJyyimw6aapI+t669U2Vmu/0icnpMQkSC0ledOz\nZZU6M/ILI2KJpHdydczMrA9bsgQmTICbbkrJyPvvw913w9Zbw+GHp2RkpZXgiCNgnXVqHa11RV9I\nTloiUtLS1TpmZlYis2al+UUWLkx9Rd56C665JpXPnw+rrw7/+Z+w6qpwwQVw1FGwfF/+NLOl9IW3\ncxopyRhK89aTIcDDuTpD8itJGgCsztItLs2MHTuWurq6ZmX19fXU19d3LWozM2uXCHj11TTz6jXX\nwP33w7x5aVldHay7LhxwAGyxBey6K+y4YxpdY72joaGBhoaGZmWzZ8/u0X0qolwNC5IagYMj4rpc\n2evALyJiXPZ6MCnp+EpE/F3SFsDjwI65DrFjgPHAetU6xEoaAUyaNGkSI0aM6PHjMjMzmDIlTQUP\n6f40U6ak5yefTGW77AJjxsB++6XLNFtt5VlYy2jy5MmMHDkS0kCUyd29/VK0nGTzkWxKaiEB+Iik\n7YB3IuIV4BzgJEnPAi8CpwOvknV0jYgnJU0Afifp26ShxOcBDR6pY2ZWO889l1pDXn8drr02va5Y\nfvnUCrLVVqlT66c/nVpKzEqRnAA7Av8
k9Q8J4JdZ+Z+Ar0bEmZIGkeYtWQ2YCOyXm+ME4DDgfNIo\nnUbgStIQZDMz6yXz5sHEiSkZueaa1IF1ueVS/5D990937/3oR1PdNdaAIUNa354tm0qRnETEXbQx\nW21EnAqc2sryWXjCNTOzXhEBL7+cRtBMmpRmU7355nTJptJfZKed4Ec/guOPh0GDahuv9S2lSE7M\nzKxcZs9OycbixU1lS5bA9den0TNvvZX6i1RsuCF8/vMwbFjqvDp0KKy1Vu/Hbf2DkxMzs2VQYyM8\n/jgsWpR+vvHGdAlmcta1cfHilIwUbbEFjBgB668PJ50EH/5wmm11u+16N37r35ycmJn1I0uWpIQj\nLyJN4/766+l52rTUMvLCC011Bg2CnXeGn/88dVQdMCCNmBk6tPm2VlzR956xnufkxMysD3nqqTQn\nyC23pOe8iFT+9tvV111xxTSb6sc+ljqp7rsvrL12WrbRRqmDqlkZODkxM6uh+fOb5v2A1Jn0mmvS\n1OwAc+c2vY6Al15K5XV1sMMOS2/vc59L95Ep2nLLdDnGrC9wcmJm1o0eeaT5XB4V8+enJOO995rK\nItJsqLNmNa+70kpN831Iaar2jTZKrzfbDEaPTn09Vl21Rw7BrOacnJiZVTFnTuqjAek+LxMmpGSi\nYsGCdN+XuXObr1dMNPK22SbdITfvyCNT8lHpxyGlWVLXXLPrx2DWVzk5MbNl1rRpabhsY2N6/frr\nKQlpbEzDZOfMaaq77rqwyirN1x8zJo1eyVt/ffjEJ6p3Gl19dXcmNWsPJydm1mctXgwPP5xGqCxZ\nkm4c9847S9ebPj2NUqk2NDZvwAA48EBYbTXYYw/45CdTMjFwIIwcmTqRmlnPc3JiZjW3cGHqk1H0\nzjtp0q/KnBs33JAm/6qYNavp0guk0Sabbbb0dpZfHn7yk6VHo6ywQkpGVl+9qcwJiFntOTkxs241\nY0bTRF7VPP54ugttRWMj3HorvPtu9forrJA6iEKa6GvMmKZlAwak2Ug//OH0esMNl770YmZ9j5MT\ns2XUjBnVWysqIuD225uGrj7zDNx9d9vbfeedpmGw1Sy3XLpkMnBgU9lRR8GoUUvXHTAgdRZdbbW2\n92tm/YeTE7M+4s034Z//bOq8Wc3EifD0021va/58uPfetutJsN566edBg+ArX2lqxWjJqqvCwQen\nCb+qGTSo+WUUM7MiJydmPWj2bHjxxZaXP/ggPPbY0uWLFsG11zafnGvBgrY7dK65Juy1V/tGhFx4\n4dLDWos23BCGD297W2Zm3cnJiVmmsbHlD/9HH4WHHmp53Ycfrt7P4plnmicYRRJstVX1Tpj775/m\nxagYOBA+9ammybmqWXHF1PnTzKwv878x69dmzkyJRVEE3HQTvPFGet3YmOa7aOmeJJASiJZGcqyx\nBhx0UOojkTdmTBoNUiyvWGst2Hjjto/DzGxZ4uTESmXOnHT5oui55+DOO1ter3LDs+KN0KZNW3oG\nz4q6Oth226bXn/tc9U6ZkDpkHnBAy0mGmZl1Hycn1mOeeCJd1ii66y549tmly+fPT6ND8lOE5w0e\n3HyER9Fmm8EhhzQvW2WVljtnDhvme5OYmZWRkxOratq0pnknHnggzU1RTbV7jlS8/Xb18ro62H33\npTttfuhDcNZZsMkmS68zaFAaUuqWCzOz/s/JyTKgsTFN3V2tP8X06anvRb4j6KJF8K9/NU8sPvKR\n6v0tBg6Er32teifN9dZLfS6KSciqq7Y9HNXMzJZdTk5KorExddxctKjtuq2NHHnrrTTFdz7ZiGh5\nFIqU7h9SnNb7i1+Ej340/Tx06NI3NzMzM+spTk46acmSpWfBfOaZ1J+iJU8+2fLEV3PmNM3E2R7b\nb199yOiAAXDiiTBkSPPyESNghx2Wri956KmZmZWLP5Za8PTTTclCRJqZ84UXml7feWea/rto4MCW\nP+x
b65y53HLpEsi667YdW10dbLRRe47CzMys71nmk5PzzksdPufNayqLSB09830uVl0Vdt65qf/E\nwQfDnns239bKK8O++6YblZmZmVnnLPPJyQ03wN57L32b9Q03bD4N+Jpr+m6nZmZmvWGZT04mTEj9\nMczMzKwcWpiM28zMzKw2nJyYmZlZqfSr5ETSdyS9IGm+pAck7VTrmPqChoaGWodQGj4Xic9DE5+L\nxOch8XnoHf0mOZH0BeCXwCnADsAjwARJa9U0sD7Af2xNfC4Sn4cmPheJz0Pi89A7+k1yAowFfhsR\n/7+9ew+2sirjOP79oQKKgzh5YSwUlbybF1AoRUFFUkcdsnEcTC2qwazJrElzqrEspzvjDadGbfJa\nUU1pqaMiBylvjGLmhYsp4gUPieIRARXh6Y+1trxsz0Hw7LP3Pvv9fWb2DO9a691nrYd3r/3s9117\nv9dHxDzgbGAlMKmx3TIzM7NN0RLJiaQtgOHAPZWyiAhgOvDJRvXLzMzMNl1LJCfAdsBmwJKq8iXA\n4EHfvXcAAAoVSURBVPp3x8zMzD6sVv+dEwHRRV1/gLlz59avN02qo6ODOXPmNLobTcGxSByHdRyL\nxHFIHIek8N7ZI/eYV0RX7929R76ssxI4JSJuLZT/DtgmIiZ0ss9E4Ka6ddLMzKz1nB4RN9f6SVvi\nzElErJb0CHA0cCuAJOXty7vY7U7gdOA54K0u2piZmdn79QeGkt5La64lzpwASDoVuA6YDMwmfXvn\ns8BeEfFKI/tmZmZmG68lzpwARMS0/JsmFwM7Av8GxjsxMTMz611a5syJmZmZtYZW+SqxmZmZtYhS\nJidluAePpNGSbpX0kqS1kk7qpM3FkhZLWinpbknDquq3lXSTpA5JyyRdI2lA/UbRfZIulDRb0huS\nlkj6q6Q9qtr0kzRV0lJJyyX9WdIOVW2GSLpN0gpJ7ZJ+LqnXvH4knS3psfx/2SHpfkmfLtS3fAw6\nk4+PtZKmFMpKEQtJF+WxFx9PFepLEQcASTtJuiGPdWV+rRxc1aYM8+XCTo6JtZKuyPV1OyZ63UHU\nXSrPPXgGkNbdfJVOfutF0gXA10gLiA8FVpDi0LfQ7GZgb9K3nk4AjgB+07PdrrnRwBXASOAYYAvg\nLklbFtpcShrfKaQx7gT8pVKZX1i3k9ZojQLOAj5PWt/UW7wAXED6JeXhwAzgFkl75/oyxGA9Sh9K\nvkyaA4rKFIsnSGv0BufH4YW6UsRB0iDgPuBtYDxpzvsWsKzQpizz5QjWHQuDgXGk949pub5+x0RE\nlOoBPAhcVtgW8CJwfqP71oNjXgucVFW2GDivsD0QWAWcmrf3zvsdVGgzHngXGNzoMXUjFtvlcR1e\nGPfbwIRCmz1zm0Pz9nHAamC7QpvJpMlr80aPqRuxeBX4QhljAGwNzAeOAtqAKWU7Hkgf0OZ0UVem\nOPwUuPcD2pR1vrwUWNCIY6JUZ07ke/AAIGlXUlZcjMMbwEOsi8MoYFlEPFrYdTopix5Zp672hEGk\nMbyWt4eTsvxiLOYDz7N+LB6PiKWF57kT2AbYt6c7XGuS+kg6DdgKeIASxgCYCvw9ImZUlY+gXLH4\nuNKl32ck3ShpSC4v0zFxIvCwpGn50u8cSV+qVJZ1vszvl6cD1+aiur42SpWc4HvwVAwmvWg2FIfB\nwP+KlRGxhvSm3itjJUmkTwL/iojKtfXBwDt5simqjkVnsYJeFAtJ+0laTvr0cxXpE9A8ShQDgJyY\nHQhc2En1jpQnFg+STrmPJ93FfVdgVl4nUaZjYjfgK6QzaccCvwYul/S5XF/K+RKYQEoqrsvbdX1t\ntMzvnHTThu7BUyYbE4feHKurgH1Y/7p6VzZ2nL0pFvOAA0hnj04Brpd0xAbat1wMJH2MlKCOi4jV\nm7IrLRaLiCj+sucTkmYDi4BT6fpXs1suDqQP6bMj4vt5+zFJ+5ISl
hs3sF+rz5eTgDsiov0D2vXI\nMVG2MydLgTWkDLBoB96f7bWydtIBtaE4tOft90jaDNiWXhgrSVcCxwNjImJxoaod6CtpYNUu1bGo\njlVlu9fEIiLejYhnI2JORHyXtBD0XEoUA9Lliu2BRyStlrQaOBI4V9I7pLH0K0ks1hMRHcACYBjl\nOiZeBqrvADsX2Dn/u4zz5c6kLxBcXSiu6zFRquQkf1Kq3IMHWO8ePPc3ql/1FhELSQdRMQ4DSddG\nK3F4ABgk6aDCrkeTXqQP1amrNZETk5OBsRHxfFX1I6RFa8VY7EGamIqx2L/qG13HAh3AU/RefYB+\nlCsG04H9SZd1DsiPh0mfkCv/Xk05YrEeSVsDu5MWf5bpmLiPtLCzaE/SWaTSzZfZJFIycXuhrL7H\nRKNXAzdg9fGppFXWZwJ7kb7q9SqwfaP7VuNxDiBNtgeSVlN/I28PyfXn53GfSJqs/wY8DfQtPMft\npMn6EOAw0jXZGxo9tk2Mw1WkleKjSRl85dG/qs1CYAzpk/V9wD8L9X1IZxnuAD5Buka/BPhRo8e3\nCXG4hHQ5axdgP+AnpInmqLLEYAOxee/bOmWKBfAL0tdBdwE+Bdydx/GRksVhBGkd1oWk5GwisBw4\nrdCmFPNlHodIN8S9pJO6uh0TDQ9Eg4J/Tg7+KlKmN6LRfeqBMR5JSkrWVD1+W2jzA9KnpJWkFdXD\nqp5jEOkTZQfpDf5qYKtGj20T49BZDNYAZxba9CP9FsrSPCn9Cdih6nmGAP8A3swvtp8BfRo9vk2I\nwzXAs/mYbwfuIicmZYnBBmIzg/WTk1LEAvg96WcUVpG+cXEzsGvZ4pDHcTzwnzwXPglM6qRNy8+X\neRzj8hw5rJO6uh0TvreOmZmZNZVSrTkxMzOz5ufkxMzMzJqKkxMzMzNrKk5OzMzMrKk4OTEzM7Om\n4uTEzMzMmoqTEzMzM2sqTk7MzMysqTg5MTMzs6bi5MTMWoKkNklTGt0PM+s+Jydm1m2SJkt6Q1Kf\nQtkASasl3VPVdqyktZKG1rufZtY7ODkxs1poI90Je0ShbDTwMjBKUt9C+ZHAooh4blP/iKTNu9NJ\nM+sdnJyYWbdFxAJSIjKmUDyGdGv5hcCoqvI2AElDJN0iabmkDkl/lLRDpaGkiyQ9KumLkp4F3srl\nW0m6Pu/3kqRvVvdJ0jmSFkhaJald0rTajtrMeoqTEzOrlZnA2ML22Fx2b6VcUj9gJDAjt7mFdKv5\n0cAxwO7AH6qedxjwGWACcGAu+2Xe50TgWFLCM7yyg6QRwGXA94A9gPHArG6Oz8zqxKdIzaxWZgJT\n8rqTAaREYhbQF5gM/BA4LG/PlDQO2A8YGhGLASSdATwpaXhEPJKfdwvgjIh4LbcZAEwCJkbEzFx2\nFvBioS9DgDeB2yJiBfAC8FgPjdvMasxnTsysVirrTg4BDgcWRMRS0pmTkXndyRjgmYh4EdgLeKGS\nmABExFzgdWDvwvMuqiQm2e6khGV2Yb9lwPxCm7uBRcDCfPlnoqQtazZSM+tRTk7MrCYi4hngJdIl\nnLGkpISIeJl05uIwCutNAAHRyVNVl6/opJ4u9q305U3gYOA0YDHprM1jkgZu9IDMrGGcnJhZLbWR\nEpMxpMs8FbOA44BDWZecPAXsLOmjlUaS9gG2yXVd+S/wLoVFtpK2Ja0teU9ErI2IGRHxHeAAYChw\n1IcYk5nVmdecmFkttQFTSXPLvYXyWcCVpMsxMwEiYrqkx4GbJJ2X66YCbRHxaFd/ICJWSLoW+IWk\n14BXgB8DayptJJ0A7Jb/7jLgBNIZl/nvf0YzazZOTsysltqA/sDciHilUH4vsDUwLyLaC+UnA1fk\n+rXAHcDXN+LvfJu0vuVWYDnwK6B4yeZ10jd8Lsr9eRo4La9pMbMmp4guL9uamZmZ1Z3XnJiZmVlT\ncXJiZmZmTcXJiZmZmTUVJydmZ
mbWVJycmJmZWVNxcmJmZmZNxcmJmZmZNRUnJ2ZmZtZUnJyYmZlZ\nU3FyYmZmZk3FyYmZmZk1FScnZmZm1lT+DwrH78/1pfXIAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMl
sRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkchlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/H
bwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukSxB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLw
C2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGooTGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UX
aXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2z
YzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKwBOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7q
zCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZgtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tp
aWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLStsDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8l
fQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyY
rqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c
87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIknwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+c
uw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSIlKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34
\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrzzGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3t
A/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/GDir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5
/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -349,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 82, + "execution_count": 46, "metadata": { "collapsed": true }, @@ -363,7 +372,7 @@ }, { "cell_type": "code", - "execution_count": 83, + "execution_count": 47, "metadata": { "collapsed": false }, @@ -372,9 +381,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 166\n", - "Number of unique tokens: 681\n", - "Number of documents: 90\n" + "Number of authors: 536\n", + "Number of unique tokens: 2245\n", + "Number of documents: 286\n" ] } ], @@ -393,7 +402,7 @@ }, { "cell_type": "code", - "execution_count": 128, + "execution_count": 48, "metadata": { "collapsed": false }, @@ -405,7 +414,7 @@ }, { "cell_type": "code", - "execution_count": 129, + "execution_count": 49, "metadata": { "collapsed": false }, @@ -414,21 +423,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 4.65 s, sys: 0 ns, total: 4.65 s\n", - "Wall time: 4.66 s\n" + "CPU times: user 18.1 s, sys: 8 ms, total: 18.2 s\n", + "Wall time: 18.2 s\n" ] } ], "source": [ "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None, chunksize=1)" + " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=0, random_state=1, var_lambda=None, chunksize=2000)" ] }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 104, "metadata": { "collapsed": false, "scrolled": false @@ -438,39 +447,61 @@ "data": { "text/plain": [ "[(0,\n", - " '0.005*class + 0.004*bound + 0.004*hidden + 0.004*approximation + 0.004*gaussian + 0.004*sample + 0.004*estimate + 0.003*optimal + 0.003*threshold + 0.003*generalization'),\n", + " 
'0.014*\"response\" + 0.013*\"frequency\" + 0.011*\"cell\" + 0.011*\"phase\" + 0.008*\"synaptic\" + 0.008*\"oscillation\" + 0.007*\"control\" + 0.006*\"cortex\" + 0.006*\"mode\" + 0.005*\"fig\"'),\n", " (1,\n", - " '0.013*neuron + 0.008*cell + 0.008*motion + 0.007*activity + 0.006*direction + 0.005*visual + 0.004*synaptic + 0.004*layer + 0.004*object + 0.004*frequency'),\n", + " '0.009*\"vector\" + 0.007*\"fig\" + 0.007*\"matrix\" + 0.007*\"activity\" + 0.006*\"memory\" + 0.006*\"node\" + 0.005*\"element\" + 0.005*\"sequence\" + 0.004*\"dynamic\" + 0.004*\"threshold\"'),\n", " (2,\n", - " '0.009*layer + 0.007*net + 0.007*recognition + 0.005*node + 0.005*character + 0.005*eye + 0.004*word + 0.004*field + 0.004*table + 0.003*visual'),\n", + " '0.009*\"node\" + 0.008*\"activation\" + 0.008*\"memory\" + 0.007*\"processor\" + 0.007*\"speech\" + 0.005*\"current\" + 0.005*\"region\" + 0.005*\"recognition\" + 0.005*\"machine\" + 0.005*\"role\"'),\n", " (3,\n", - " '0.013*cell + 0.012*image + 0.009*rule + 0.005*face + 0.004*ii + 0.004*analog + 0.004*visual + 0.004*distance + 0.004*field + 0.003*response'),\n", + " '0.036*\"classifier\" + 0.020*\"memory\" + 0.019*\"vector\" + 0.016*\"capacity\" + 0.014*\"hopfield\" + 0.013*\"matrix\" + 0.013*\"classification\" + 0.012*\"code\" + 0.012*\"stored\" + 0.011*\"chip\"'),\n", " (4,\n", - " '0.007*mixture + 0.006*likelihood + 0.005*gaussian + 0.005*em + 0.005*bayesian + 0.005*density + 0.005*classifier + 0.004*class + 0.004*estimate + 0.004*prior'),\n", + " '0.033*\"field\" + 0.016*\"delay\" + 0.015*\"tree\" + 0.011*\"receptive_field\" + 0.011*\"receptive\" + 0.009*\"region\" + 0.009*\"memory\" + 0.008*\"synaptic\" + 0.008*\"fixed_point\" + 0.008*\"stability\"'),\n", " (5,\n", - " '0.008*neuron + 0.005*spike + 0.004*stimulus + 0.004*noise + 0.004*map + 0.003*layer + 0.003*response + 0.003*let + 0.003*fig + 0.003*solution'),\n", + " '0.047*\"cell\" + 0.025*\"firing\" + 0.016*\"potential\" + 0.012*\"activity\" + 0.011*\"stimulus\" + 
0.010*\"membrane\" + 0.009*\"fig\" + 0.008*\"inhibitory\" + 0.008*\"threshold\" + 0.008*\"response\"'),\n", " (6,\n", - " '0.007*recognition + 0.006*speech + 0.005*matrix + 0.004*word + 0.004*class + 0.004*image + 0.004*sequence + 0.004*optimal + 0.003*object + 0.003*gradient'),\n", + " '0.017*\"cell\" + 0.013*\"map\" + 0.011*\"circuit\" + 0.007*\"field\" + 0.007*\"cortical\" + 0.006*\"cortex\" + 0.006*\"region\" + 0.005*\"response\" + 0.005*\"constraint\" + 0.005*\"visual\"'),\n", " (7,\n", - " '0.008*hidden + 0.007*signal + 0.006*layer + 0.005*net + 0.005*architecture + 0.005*hidden_unit + 0.004*noise + 0.004*trained + 0.004*prediction + 0.004*control'),\n", + " '0.017*\"hidden\" + 0.011*\"hidden_unit\" + 0.009*\"propagation\" + 0.007*\"back_propagation\" + 0.006*\"gradient\" + 0.005*\"internal\" + 0.005*\"probability\" + 0.005*\"procedure\" + 0.004*\"target\" + 0.004*\"node\"'),\n", " (8,\n", - " '0.009*image + 0.007*circuit + 0.005*node + 0.005*analog + 0.004*architecture + 0.004*chip + 0.004*robot + 0.003*connection + 0.003*net + 0.003*component'),\n", + " '0.011*\"noise\" + 0.009*\"activation\" + 0.009*\"node\" + 0.009*\"chip\" + 0.008*\"threshold\" + 0.007*\"analog\" + 0.007*\"match\" + 0.007*\"cycle\" + 0.006*\"pulse\" + 0.006*\"distribution\"'),\n", " (9,\n", - " '0.021*neuron + 0.013*memory + 0.009*cell + 0.009*field + 0.007*dynamic + 0.005*synaptic + 0.005*fig + 0.005*circuit + 0.005*connection + 0.005*phase')]" + " '0.022*\"image\" + 0.014*\"vector\" + 0.010*\"recognition\" + 0.008*\"hidden\" + 0.007*\"noise\" + 0.007*\"object\" + 0.007*\"speech\" + 0.006*\"visual\" + 0.006*\"pixel\" + 0.005*\"frame\"')]" + ] + }, + "execution_count": 104, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.show_topics(num_topics=10)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "197" ] }, - "execution_count": 22, + 
"execution_count": 33, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "model2.show_topics(num_topics=10)" + "author2id['James M. Bower']" ] }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 105, "metadata": { "collapsed": false }, @@ -481,23 +512,33 @@ "text": [ "\n", "Yaser S.Abu-Mostafa\n", - "Docs: [643, 1161]\n", - "[(0, 0.16659016030019738),\n", - " (3, 0.010393726041544113),\n", - " (5, 0.14815620716021766),\n", - " (8, 0.66880000219039293)]\n", + "Docs: [62]\n", + "[(0, 0.019643887783549894),\n", + " (1, 0.03909003995731989),\n", + " (2, 0.16804942558366059),\n", + " (3, 0.10477718721148226),\n", + " (6, 0.021371962138910492),\n", + " (7, 0.22727551202952315),\n", + " (8, 0.31201895712462607),\n", + " (9, 0.1066712694188155)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [143, 284, 230, 197]\n", - "[(0, 0.010692157293087015), (7, 0.61370450917735686), (8, 0.37526934907962062)]\n", + "[(1, 0.027378250046950596),\n", + " (7, 0.28611123303779012),\n", + " (8, 0.01409793422740707),\n", + " (9, 0.67186598917760154)]\n", "\n", "Michael I. Jordan\n", "Docs: [237]\n", - "[(0, 0.052599095186532552), (7, 0.83157959026144268), (8, 0.11420166707185267)]\n", + "[(0, 0.032384767535828737),\n", + " (1, 0.41066501849642167),\n", + " (5, 0.028938355831066891),\n", + " (7, 0.52523422248412255)]\n", "\n", "James M. Bower\n", "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(1, 0.99977071147699681)]\n" + "[(2, 0.029310008934256015), (6, 0.97040282458105698)]\n" ] } ], @@ -505,22 +546,22 @@ "name = 'Yaser S.Abu-Mostafa'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model2.get_author_topics(author2id[name]))\n", + "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'Geoffrey E. 
Hinton'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model2.get_author_topics(author2id[name]))\n", + "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'Michael I. Jordan'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model2.get_author_topics(author2id[name]))\n", + "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'James M. Bower'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model2.get_author_topics(author2id[name]))" + "pprint(model.get_author_topics(author2id[name]))" ] }, { @@ -843,94 +884,6 @@ "lda.show_topics()" ] }, - { - "cell_type": "code", - "execution_count": 150, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Document 5\n", - "[(0, 0.11806384798431847),\n", - " (1, 0.099612053680607937),\n", - " (2, 0.076668193975964943),\n", - " (3, 0.075072909998916373),\n", - " (4, 0.067243477696594139),\n", - " (5, 0.1004083782314163),\n", - " (6, 0.1049567779188061),\n", - " (7, 0.10291505408912022),\n", - " (8, 0.12682229186467239),\n", - " (9, 0.12823701455958317)]\n", - "\n", - "Document 50\n", - "[(0, 0.12019310780479558),\n", - " (1, 0.11241507965934601),\n", - " (2, 0.084261861610351887),\n", - " (3, 0.074722708722277847),\n", - " (4, 0.089536455599529025),\n", - " (5, 0.11951468917677081),\n", - " (6, 0.077140801257090358),\n", - " (7, 0.086592729473957755),\n", - " (8, 0.12048290979429044),\n", - " (9, 0.11513965690159025)]\n" - ] - } - ], - "source": [ - "d = 5\n", - "print('Document %d' %d)\n", - "pprint(lda[corpus[d]])\n", - "\n", - "d = 50\n", - "print('\\nDocument %d' %d)\n", - "pprint(lda[corpus[d]])" - ] - }, - { - "cell_type": "code", - "execution_count": 145, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "['scaling',\n", - " 'property',\n", - " 'of',\n", - " 
'coarse',\n", - " 'coded',\n", - " 'symbol',\n", - " 'memory',\n", - " 'ronald',\n", - " 'rosenfeld',\n", - " 'david',\n", - " 'touretzky',\n", - " 'computer',\n", - " 'science',\n", - " 'department',\n", - " 'carnegie',\n", - " 'mellon',\n", - " 'university',\n", - " 'pittsburgh',\n", - " 'pennsylvania',\n", - " 'abstract']" - ] - }, - "execution_count": 145, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "docs[0][:20]" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -1641,6 +1594,188 @@ "\n", "print(time() - start)" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## LDA author-topic hack" + ] + }, + { + "cell_type": "code", + "execution_count": 132, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "author_corpus = []\n", + "for a, doc_ids in author2doc.items():\n", + " temp = {}\n", + " for d in doc_ids:\n", + " for v, cnt in corpus[d]:\n", + " if temp.get(v):\n", + " temp[v] += cnt\n", + " else:\n", + " temp[v] = cnt\n", + " author_corpus.append(list(temp.items()))" + ] + }, + { + "cell_type": "code", + "execution_count": 133, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "reload(gensim.models.ldamodel)\n", + "LdaModel = gensim.models.ldamodel.LdaModel" + ] + }, + { + "cell_type": "code", + "execution_count": 134, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 4min 2s, sys: 7min 16s, total: 11min 18s\n", + "Wall time: 3min 25s\n" + ] + } + ], + "source": [ + "%time lda = LdaModel(corpus=author_corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", + " iterations=100, alpha='symmetric', eta='symmetric', eval_every=0, chunksize=2000, random_state=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 135, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0,\n", + " '0.019*\"signal\" + 
0.017*\"component\" + 0.015*\"source\" + 0.009*\"independent\" + 0.009*\"ica\" + 0.008*\"noise\" + 0.008*\"eeg\" + 0.008*\"frequency\" + 0.007*\"response\" + 0.007*\"separation\"'),\n", + " (1,\n", + " '0.005*\"policy\" + 0.005*\"optimal\" + 0.005*\"bound\" + 0.005*\"action\" + 0.005*\"kernel\" + 0.005*\"let\" + 0.004*\"xi\" + 0.004*\"class\" + 0.004*\"decision\" + 0.004*\"reinforcement\"'),\n", + " (2,\n", + " '0.010*\"control\" + 0.010*\"cluster\" + 0.009*\"distance\" + 0.008*\"image\" + 0.007*\"clustering\" + 0.007*\"class\" + 0.006*\"nonlinear\" + 0.006*\"classification\" + 0.006*\"controller\" + 0.004*\"measure\"'),\n", + " (3,\n", + " '0.028*\"image\" + 0.013*\"object\" + 0.011*\"visual\" + 0.009*\"motion\" + 0.007*\"position\" + 0.006*\"field\" + 0.006*\"direction\" + 0.005*\"filter\" + 0.005*\"pixel\" + 0.005*\"view\"'),\n", + " (4,\n", + " '0.013*\"layer\" + 0.011*\"hidden\" + 0.008*\"net\" + 0.006*\"node\" + 0.006*\"memory\" + 0.006*\"neuron\" + 0.006*\"hidden_unit\" + 0.005*\"activation\" + 0.005*\"threshold\" + 0.004*\"propagation\"'),\n", + " (5,\n", + " '0.008*\"word\" + 0.007*\"recognition\" + 0.005*\"classifier\" + 0.005*\"rule\" + 0.004*\"class\" + 0.004*\"classification\" + 0.004*\"character\" + 0.004*\"table\" + 0.003*\"trained\" + 0.003*\"language\"'),\n", + " (6,\n", + " '0.010*\"speech\" + 0.006*\"mixture\" + 0.006*\"estimate\" + 0.006*\"recognition\" + 0.005*\"hidden\" + 0.005*\"prediction\" + 0.005*\"sequence\" + 0.005*\"estimation\" + 0.005*\"context\" + 0.005*\"likelihood\"'),\n", + " (7,\n", + " '0.017*\"circuit\" + 0.014*\"chip\" + 0.014*\"neuron\" + 0.013*\"analog\" + 0.010*\"voltage\" + 0.007*\"vlsi\" + 0.007*\"signal\" + 0.006*\"control\" + 0.005*\"cell\" + 0.005*\"implementation\"'),\n", + " (8,\n", + " '0.009*\"gaussian\" + 0.006*\"matrix\" + 0.005*\"noise\" + 0.005*\"prior\" + 0.005*\"field\" + 0.005*\"likelihood\" + 0.005*\"posterior\" + 0.005*\"bayesian\" + 0.004*\"mixture\" + 0.004*\"approximation\"'),\n", + " (9,\n", + " 
'0.020*\"cell\" + 0.020*\"neuron\" + 0.010*\"stimulus\" + 0.010*\"spike\" + 0.009*\"response\" + 0.007*\"synaptic\" + 0.006*\"activity\" + 0.006*\"firing\" + 0.006*\"cortex\" + 0.005*\"orientation\"')]" + ] + }, + "execution_count": 135, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lda.show_topics()" + ] + }, + { + "cell_type": "code", + "execution_count": 136, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [643, 1161]\n", + "[(1, 0.23332003952694552),\n", + " (5, 0.53385075047018016),\n", + " (6, 0.10891675344072629),\n", + " (8, 0.12227386376013714)]\n", + "\n", + "Geoffrey E. Hinton\n", + "Docs: [143, 284, 230, 197]\n", + "[(0, 0.02352470105235863),\n", + " (1, 0.010279793220247807),\n", + " (2, 0.020384798749417784),\n", + " (3, 0.22316974630812836),\n", + " (4, 0.29378098848291623),\n", + " (5, 0.28354005954382777),\n", + " (6, 0.06921176883627865),\n", + " (8, 0.076066638965696737)]\n", + "\n", + "Michael I. Jordan\n", + "Docs: [237]\n", + "[(1, 0.22743516568855809),\n", + " (2, 0.35536065944136824),\n", + " (3, 0.03147447824503067),\n", + " (4, 0.33259716011404672),\n", + " (6, 0.019782536548970251),\n", + " (8, 0.032916511196237168)]\n", + "\n", + "James M. Bower\n", + "Docs: [131, 101, 126, 127, 281, 208, 225]\n", + "[(0, 0.024730774978235743),\n", + " (2, 0.013137901461419016),\n", + " (3, 0.098173137689669399),\n", + " (5, 0.037453180336151123),\n", + " (7, 0.20974998834305741),\n", + " (9, 0.60758868832493407)]\n" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(lda.get_document_topics(author_corpus[author2id[name]]))\n", + "\n", + "name = 'Geoffrey E. 
Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(lda.get_document_topics(author_corpus[author2id[name]]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(lda.get_document_topics(author_corpus[author2id[name]]))\n", + "\n", + "name = 'James M. Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(lda.get_document_topics(author_corpus[author2id[name]]))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index e7083ca7bf..aea8413534 100644 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -34,6 +34,12 @@ logger = logging.getLogger(__name__) +class AuthorTopicState: + def __init__(self, atmodel): + self.atmodel = atmodel + + def get_lambda(self): + return self.atmodel.var_lambda class AuthorTopicModel(LdaModel): """ @@ -129,6 +135,8 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.random_state = get_random_state(random_state) + self.state = AuthorTopicState(self) + if corpus is not None: self.inference(corpus, var_lambda=var_lambda) diff --git a/gensim/models/atmodel2.py b/gensim/models/atmodel2.py new file mode 100755 index 0000000000..24bbf1c252 --- /dev/null +++ b/gensim/models/atmodel2.py @@ -0,0 +1,329 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + + +""" +Author-topic model. +""" + +# TODO: write proper docstrings. + +import pdb +from pdb import set_trace as st +from pprint import pprint + +import logging +import np # for arrays, array broadcasting etc. 
+import numbers + +from gensim import interfaces, utils, matutils +from gensim.models import LdaModel +from gensim.models.ldamodel import dirichlet_expectation, get_random_seed, LdaState +from itertools import chain +from scipy.special import gammaln, psi # gamma function utils +from scipy.special import polygamma +from six.moves import xrange +import six + +# log(sum(exp(x))) that tries to avoid overflow +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger('gensim.models.atmodel') + +# TODO: should there be an AuthorTopicState, instead of just using the LdaState? +#class AutorTopicState(utils.SaveLoad): +# """ +# Encapsulate information for distributed computation of AuthorTopicModel objects. +# +# Objects of this class are sent over the network, so try to keep them lean to +# reduce traffic. +# """ +# +# def __init__(self, eta, shape): +# self.eta = eta +# self.sstats = np.zeros(shape) +# self.numdocs = 0 +# self.lda_state = LdaState(self.eta, shape) +# +# def reset(self): +# self.lda_state.reset() + +class AuthorTopicModel(LdaModel): + """ + """ + def __init__(self, corpus=None, num_topics=100, id2word=None, + author2doc=None, doc2author=None, id2author=None, var_lambda=None, + distributed=False, chunksize=2000, passes=1, update_every=1, + alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, + eval_every=10, iterations=50, gamma_threshold=0.001, + minimum_probability=0.01, random_state=None, ns_conf={}, + minimum_phi_value=0.01, per_word_topics=False): + """ + """ + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + # NOTE: Why would id2word not be none, but have length 0? 
(From LDA code) + if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + logger.info('Vocabulary consists of %d words.', self.num_terms) + + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). + # If either doc2author or author2doc is missing, construct them from the other. + if doc2author is None: + # Make a mapping from document IDs to author IDs. + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + elif author2doc is None: + # Make a mapping from author IDs to document IDs. + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. 
+ author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + + self.author2doc = author2doc + self.doc2author = doc2author + + self.num_authors = len(self.author2doc) + logger.info('Number of authors: %d.', self.num_authors) + + self.id2author = id2author + if self.id2author is None: + logger.warning("no author id mapping provided; initializing from corpus, assuming identity") + author_integer_ids = [str(i) for i in range(len(author2doc))] + self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) + + # Make the reverse mapping, from author names to author IDs. + self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + + self.distributed = False # NOTE: distributed not yet implemented. + self.num_topics = num_topics + self.chunksize = chunksize + self.decay = decay + self.offset = offset + self.minimum_probability = minimum_probability + self.num_updates = 0 + + self.passes = passes + self.update_every = update_every + self.eval_every = eval_every + self.minimum_phi_value = minimum_phi_value + self.per_word_topics = per_word_topics + + self.corpus = corpus + self.iterations = iterations + self.threshold = threshold + self.num_authors = len(author2doc) + self.random_state = random_state + + # NOTE: this is not necessarily a good way to initialize the topics. 
+ self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + + self.random_state = get_random_state(random_state) + + if corpus is not None: + self.update(corpus) + + def init_dir_prior(self, prior, name): + # TODO: all of this + init_prior = None + is_auto = None + return init_prior, is_auto + + def __str__(self): + return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s)" % \ + (self.num_terms, self.num_topics, self.num_authors, self.decay) + + def sync_state(self): + """sync_state not implemented for AuthorTopicModel.""" + pass + + def clear(self): + """clear not implemented for AuthorTopicModel.""" + pass + + def inference(self, chunk, collect_sstats=False): + """ + """ + return gamma, sstats + + def do_estep(self, chunk, state=None): + """ + """ + return gamma + + # TODO: probably just use LdaModel's update_alpha and update_eta (once my PR fixing eta is merged). + def update_alpha(self, gammat, rho): + """ + """ + return self.alpha + + def update_eta(self, lambdat, rho): + """ + """ + return self.eta + + # NOTE: this method can be used directly, but self.bound needs to be updated slightly. 
+ # def log_perplexity(self, chunk, total_docs=None): + + def update(self, corpus, chunksize=None, decay=None, offset=None, + passes=None, update_every=None, eval_every=None, iterations=None, + gamma_threshold=None, chunks_as_numpy=False): + """ + """ + # TODO: this + pass + + def do_mstep(self, rho, other, extra_pass=False): + """ + """ + # TODO: this + pass + + def bound(self, corpus, gamma=None, subsample_ratio=1.0): + """ + """ + # TODO: this + pass + + def print_topics(self, num_topics=10, num_words=10): + # TODO: this + pass + + def show_topics(self, num_topics=10, num_words=10, log=False, formatted=True): + """ + """ + # TODO: this + pass + + def show_topic(self, topicid, topn=10): + """ + """ + # TODO: this + pass + + def get_topic_terms(self, topicid, topn=10): + """ + """ + # TODO: this + pass + + def print_topic(self, topicid, topn=10): + # TODO: this + pass + + def top_topics(self, corpus, num_words=20): + # TODO: this + pass + + def get_term_topics(self, word_id, minimum_probability=None): + # TODO: this + pass + + def __getitem__(self, bow, eps=None): + """ + """ + # TODO: this + pass + + def save(self, fname, ignore=['state', 'dispatcher'], *args, **kwargs): + """ + Save the model to file. + + Large internal arrays may be stored into separate files, with `fname` as prefix. + + `separately` can be used to define which arrays should be stored in separate files. + + `ignore` parameter can be used to define which variables should be ignored, i.e. left + out from the pickled lda model. By default the internal `state` is ignored as it uses + its own serialisation not the one provided by `LdaModel`. The `state` and `dispatcher` + will be added to any ignore parameter defined. + + + Note: do not save as a compressed file if you intend to load the file back with `mmap`. + + Note: If you intend to use models across Python 2/3 versions there are a few things to + keep in mind: + + 1. The pickled Python dictionaries will not work across Python versions + 2. 
The `save` method does not automatically save all NumPy arrays using NumPy, only + those ones that exceed `sep_limit` set in `gensim.utils.SaveLoad.save`. The main + concern here is the `alpha` array if for instance using `alpha='auto'`. + + Please refer to the wiki recipes section (https://github.com/piskvorky/gensim/wiki/Recipes-&-FAQ#q9-how-do-i-load-a-model-in-python-3-that-was-trained-and-saved-using-python-2) + for an example on how to work around these issues. + """ + # TODO: this + if self.state is not None: + self.state.save(utils.smart_extension(fname, '.state'), *args, **kwargs) + + # make sure 'state' and 'dispatcher' are ignored from the pickled object, even if + # someone sets the ignore list themselves + if ignore is not None and ignore: + if isinstance(ignore, six.string_types): + ignore = [ignore] + ignore = [e for e in ignore if e] # make sure None and '' are not in the list + ignore = list(set(['state', 'dispatcher']) | set(ignore)) + else: + ignore = ['state', 'dispatcher'] + super(LdaModel, self).save(fname, *args, ignore=ignore, **kwargs) + + @classmethod + def load(cls, fname, *args, **kwargs): + """ + Load a previously saved object from file (also see `save`). + + Large arrays can be memmap'ed back as read-only (shared memory) by setting `mmap='r'`: + + >>> LdaModel.load(fname, mmap='r') + + """ + # TODO: this + kwargs['mmap'] = kwargs.get('mmap', None) + result = super(LdaModel, cls).load(fname, *args, **kwargs) + state_fname = utils.smart_extension(fname, '.state') + try: + result.state = super(LdaModel, cls).load(state_fname, *args, **kwargs) + except Exception as e: + logging.warning("failed to load state from %s: %s", state_fname, e) + return result +# endclass LdaModel From 9d9da44663755eeb0ceaceb6e5edcefa634a8203 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 30 Nov 2016 17:32:32 +0100 Subject: [PATCH 057/100] Refactoring the code. A lot left to do. 
--- gensim/models/atmodel2.py | 329 ++++++++++++++++++++++++++++---------- 1 file changed, 248 insertions(+), 81 deletions(-) diff --git a/gensim/models/atmodel2.py b/gensim/models/atmodel2.py index 24bbf1c252..e88d1634c8 100755 --- a/gensim/models/atmodel2.py +++ b/gensim/models/atmodel2.py @@ -39,23 +39,6 @@ logger = logging.getLogger('gensim.models.atmodel') -# TODO: should there be an AuthorTopicState, instead of just using the LdaState? -#class AutorTopicState(utils.SaveLoad): -# """ -# Encapsulate information for distributed computation of AuthorTopicModel objects. -# -# Objects of this class are sent over the network, so try to keep them lean to -# reduce traffic. -# """ -# -# def __init__(self, eta, shape): -# self.eta = eta -# self.sstats = np.zeros(shape) -# self.numdocs = 0 -# self.lda_state = LdaState(self.eta, shape) -# -# def reset(self): -# self.lda_state.reset() class AuthorTopicModel(LdaModel): """ @@ -149,58 +132,213 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.per_word_topics = per_word_topics self.corpus = corpus - self.iterations = iterations - self.threshold = threshold self.num_authors = len(author2doc) - self.random_state = random_state - # NOTE: this is not necessarily a good way to initialize the topics. - self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) - self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + self.alpha, self.optimize_alpha = self.init_dir_prior(alpha, 'alpha') + + assert self.alpha.shape == (self.num_topics,), "Invalid alpha shape. 
Got shape %s, but expected (%d, )" % (str(self.alpha.shape), self.num_topics) + + if isinstance(eta, six.string_types): + if eta == 'asymmetric': + raise ValueError("The 'asymmetric' option cannot be used for eta") + + self.eta, self.optimize_eta = self.init_dir_prior(eta, 'eta') self.random_state = get_random_state(random_state) - if corpus is not None: - self.update(corpus) + assert (self.eta.shape == (self.num_terms,) or self.eta.shape == (self.num_topics, self.num_terms)), ( + "Invalid eta shape. Got shape %s, but expected (%d, 1) or (%d, %d)" % + (str(self.eta.shape), self.num_terms, self.num_topics, self.num_terms)) + + if not distributed: + self.dispatcher = None + self.numworkers = 1 + else: + # TODO: implement distributed version. + pass - def init_dir_prior(self, prior, name): - # TODO: all of this - init_prior = None - is_auto = None - return init_prior, is_auto + # VB constants + self.iterations = iterations + self.gamma_threshold = gamma_threshold + + # Initialize the variational distribution q(beta|lambda) + self.state = LdaState(self.eta, (self.num_topics, self.num_terms)) + self.state.sstats = self.random_state.gamma(100., 1. 
/ 100., (self.num_topics, self.num_terms)) + self.expElogbeta = np.exp(dirichlet_expectation(self.state.sstats)) + + # if a training corpus was provided, start estimating the model right away + if corpus is not None: + use_numpy = self.dispatcher is not None + self.update(corpus, chunks_as_numpy=use_numpy) def __str__(self): - return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s)" % \ - (self.num_terms, self.num_topics, self.num_authors, self.decay) + return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s, chunksize=%s)" % \ + (self.num_terms, self.num_topics, self.num_authors, self.decay, self.chunksize) - def sync_state(self): - """sync_state not implemented for AuthorTopicModel.""" - pass + def compute_phinorm(self, ids, authors_d, expElogthetad, expElogbetad): + """Efficiently computes the normalizing factor in phi.""" + phinorm = numpy.zeros(len(ids)) + expElogtheta_sum = numpy.zeros(self.num_topics) + for a in xrange(len(authors_d)): + expElogtheta_sum += expElogthetad[a, :] + phinorm = expElogtheta_sum.dot(expElogbetad) - def clear(self): - """clear not implemented for AuthorTopicModel.""" - pass + return phinorm def inference(self, chunk, collect_sstats=False): """ + Given a chunk of sparse document vectors, estimate gamma (parameters + controlling the topic weights) for each document in the chunk. + + This function does not modify the model (=is read-only aka const). The + whole input chunk of document is assumed to fit in RAM; chunking of a + large corpus must be done earlier in the pipeline. + + If `collect_sstats` is True, also collect sufficient statistics needed + to update the model's topic-word distributions, and return a 2-tuple + `(gamma, sstats)`. Otherwise, return `(gamma, None)`. `gamma` is of shape + `len(chunk) x self.num_topics`. 
+ + Avoids computing the `phi` variational parameter directly using the + optimization presented in **Lee, Seung: Algorithms for non-negative matrix factorization, NIPS 2001**. + """ + try: + _ = len(chunk) + except: + # convert iterators/generators to plain list, so we have len() etc. + chunk = list(chunk) + if len(chunk) > 1: + logger.debug("performing inference on a chunk of %i documents", len(chunk)) + + # Initialize the variational distribution q(theta|gamma) for the chunk + # FIXME: + # num_authors_chunk = ??? + gamma = self.random_state.gamma(100., 1. / 100., (num_authors_chunk, self.num_topics)) + Elogtheta = dirichlet_expectation(gamma) + expElogtheta = np.exp(Elogtheta) + if collect_sstats: + sstats = np.zeros_like(self.expElogbeta) + else: + sstats = None + converged = 0 + + # Now, for each document d update that document's gamma and phi + for d, doc in enumerate(chunk): + # FIXME: + # doc_no = ??? + if doc and not isinstance(doc[0][0], six.integer_types): + # make sure the term IDs are ints, otherwise np will get upset + ids = [int(id) for id, _ in doc] + else: + ids = [id for id, _ in doc] + cts = np.array([cnt for _, cnt in doc]) + authors_d = self.doc2author[doc_no] # List of author IDs for the current document. + + gammad = state.get_gamma(authors_d) # FIXME: implement this method. + tilde_gammad = np.zeros(gammad.shape) + + Elogthetad = dirichlet_expectation(tilde_gammad) + expElogthetad = numpy.exp(Elogthetad) + expElogbetad = expElogbeta[:, ids] + + phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) + + # Iterate between gamma and phi until convergence + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) + + lastgamma = gammad + + # Update gamma. + for a in authors_d: + tilde_gammad[a, :] = self.alpha + len(self.author2doc[a]) * expElogthetad[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) + + # Update gamma and lambda. 
+ # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). Same goes for lambda. + tilde_gamma = (1 - rhot) * gammad + rhot * tilde_gamma + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + Elogthetad = dirichlet_expectation(tilde_gammad) + expElogthetad = numpy.exp(Elogtheta[authors_d, :]) + + phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. + if iteration > 0: + meanchange_gamma = numpy.mean(abs(tilde_gamma - lastgamma)) + gamma_condition = meanchange_gamma < self.threshold + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + if gamma_condition: + # logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + for _ in xrange(self.iterations): + lastgamma = gammad + # We represent phi implicitly to save memory and time. + # Substituting the value of the optimal phi back into + # the update for gamma gives this update. Cf. Lee&Seung 2001. + gammad = self.alpha + expElogthetad * np.dot(cts / phinorm, expElogbetad.T) + Elogthetad = dirichlet_expectation(gammad) + expElogthetad = np.exp(Elogthetad) + phinorm = np.dot(expElogthetad, expElogbetad) + 1e-100 + # If gamma hasn't changed much, we're done. + meanchange = np.mean(abs(gammad - lastgamma)) + if (meanchange < self.gamma_threshold): + converged += 1 + break + if collect_sstats: + # Contribution of document d to the expected sufficient + # statistics for the M step. 
+ sstats[:, ids] += np.outer(expElogthetad.T, cts / phinorm) + + if len(chunk) > 1: + logger.debug("%i/%i documents converged within %i iterations", + converged, len(chunk), self.iterations) + + if collect_sstats: + # This step finishes computing the sufficient statistics for the + # M step, so that + # sstats[k, w] = \sum_d n_{dw} * phi_{dwk} + # = \sum_d n_{dw} * exp{Elogtheta_{dk} + Elogbeta_{kw}} / phinorm_{dw}. + sstats *= self.expElogbeta return gamma, sstats def do_estep(self, chunk, state=None): """ + Perform inference on a chunk of documents, and accumulate the collected + sufficient statistics in `state` (or `self.state` if None). + """ + if state is None: + state = self.state + gamma, sstats = self.inference(chunk, collect_sstats=True) + state.sstats += sstats + state.numdocs += gamma.shape[0] # avoids calling len(chunk) on a generator return gamma - # TODO: probably just use LdaModel's update_alpha and update_eta (once my PR fixing eta is merged). - def update_alpha(self, gammat, rho): + def inference(self, chunk, collect_sstats=False): """ """ - return self.alpha + return gamma, sstats - def update_eta(self, lambdat, rho): + def do_estep(self, chunk, state=None): """ + Perform inference on a chunk of documents, and accumulate the collected + sufficient statistics in `state` (or `self.state` if None). + """ - return self.eta + if state is None: + state = self.state + gamma, sstats = self.inference(chunk, collect_sstats=True) + state.sstats += sstats + # NOTE: why not use chunksize here? + state.numdocs += len(chunk) + return gamma # NOTE: this method can be used directly, but self.bound needs to be updated slightly. 
# def log_perplexity(self, chunk, total_docs=None): @@ -221,43 +359,69 @@ def do_mstep(self, rho, other, extra_pass=False): def bound(self, corpus, gamma=None, subsample_ratio=1.0): """ - """ - # TODO: this - pass - - def print_topics(self, num_topics=10, num_words=10): - # TODO: this - pass + Estimate the variational bound of documents from `corpus`: + E_q[log p(corpus)] - E_q[log q(corpus)] - def show_topics(self, num_topics=10, num_words=10, log=False, formatted=True): - """ - """ - # TODO: this - pass + `gamma` are the variational parameters on topic weights for each `corpus` + document (=2d matrix=what comes out of `inference()`). + If not supplied, will be inferred from the model. - def show_topic(self, topicid, topn=10): """ - """ - # TODO: this - pass - def get_topic_terms(self, topicid, topn=10): - """ - """ - # TODO: this - pass - - def print_topic(self, topicid, topn=10): - # TODO: this - pass - - def top_topics(self, corpus, num_words=20): - # TODO: this - pass - - def get_term_topics(self, word_id, minimum_probability=None): - # TODO: this - pass + _lambda = self.state.get_lambda() + Elogbeta = dirichlet_expectation(_lambda) + + word_score = 0.0 + authors_set = set() # Used in computing theta bound. + theta_score = 0.0 + for d, doc in enumerate(corpus): # stream the input doc-by-doc, in case it's too large to fit in RAM + authors_d = self.doc2author[d] + ids = np.array([id for id, _ in doc]) # Word IDs in doc. + cts = np.array([cnt for _, cnt in doc]) # Word counts. + + if d % self.chunksize == 0: + logger.debug("bound: at document #%i", d) + if gamma is None: + gammad, _ = self.inference([doc]) + else: + gammad = gamma[d] + Elogthetad = dirichlet_expectation(gammad) # Shape (len(authors_d), self.num_topics). + + # Computing the bound requires summing over expElogtheta[a, k] * expElogbeta[k, v], which + # is the same computation as in normalizing phi. 
+ phinorm = self.compute_phinorm(ids, authors_d, np.exp(Elogthetad), np.exp(Elogbeta[:, ids])) + word_score += np.log(1.0 / len(authors_d)) + cts.dot(np.log(phinorm)) + + # E[log p(theta | alpha) - log q(theta | gamma)] + # The code blow ensure we compute the score of each author only once. + for ai, a in enumerate(authors_d): + if a not in authors_set: + theta_score += numpy.sum((self.alpha - gammad[ai, :]) * Elogthetad[ai]) + theta_score += numpy.sum(gammaln(gammad[ai, :]) - gammaln(self.alpha)) + theta_score += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(gammad[ai, :])) + authors_set.add(a) + + # compensate likelihood for when `corpus` above is only a sample of the whole corpus + word_score *= subsample_ratio + + # TODO: theta_score should probably be multiplied by subsample ratio as well. Maybe it + # has to be a different subsample ratio, for example something along the lines of: + # theta_score *= self.num_authors / len(authors_set) + + # E[log p(beta | eta) - log q (beta | lambda)] + beta_score = 0.0 + beta_score += np.sum((self.eta - _lambda) * Elogbeta) + beta_score += np.sum(gammaln(_lambda) - gammaln(self.eta)) + sum_eta = np.sum(self.eta) + beta_score += np.sum(gammaln(sum_eta) - gammaln(np.sum(_lambda, 1))) + + total_score = word_score + theta_score + beta_score + + return total_score + + # NOTE: method `top_topics` is used directly. There is no topic coherence measure for + # the author-topic model. c_v topic coherence is a valid measure of topic quality in + # the author-topic model, although it does not take authorship information into account. def __getitem__(self, bow, eps=None): """ @@ -274,8 +438,8 @@ def save(self, fname, ignore=['state', 'dispatcher'], *args, **kwargs): `separately` can be used to define which arrays should be stored in separate files. `ignore` parameter can be used to define which variables should be ignored, i.e. left - out from the pickled lda model. 
By default the internal `state` is ignored as it uses - its own serialisation not the one provided by `LdaModel`. The `state` and `dispatcher` + out from the pickled author-topic model. By default the internal `state` is ignored as it uses + its own serialisation not the one provided by `AuthorTopicModel`. The `state` and `dispatcher` will be added to any ignore parameter defined. @@ -305,7 +469,10 @@ def save(self, fname, ignore=['state', 'dispatcher'], *args, **kwargs): ignore = list(set(['state', 'dispatcher']) | set(ignore)) else: ignore = ['state', 'dispatcher'] - super(LdaModel, self).save(fname, *args, ignore=ignore, **kwargs) + # TODO: the only difference between this save method and LdaModel's is the use of + # "AuthorTopicModel" below. This should be an easy refactor. + # Same goes for load method below. + super(AuthorTopicModel, self).save(fname, *args, ignore=ignore, **kwargs) @classmethod def load(cls, fname, *args, **kwargs): @@ -314,12 +481,12 @@ def load(cls, fname, *args, **kwargs): Large arrays can be memmap'ed back as read-only (shared memory) by setting `mmap='r'`: - >>> LdaModel.load(fname, mmap='r') + >>> AuthorTopicModel.load(fname, mmap='r') """ # TODO: this kwargs['mmap'] = kwargs.get('mmap', None) - result = super(LdaModel, cls).load(fname, *args, **kwargs) + result = super(AuthorTopicModel, cls).load(fname, *args, **kwargs) state_fname = utils.smart_extension(fname, '.state') try: result.state = super(LdaModel, cls).load(state_fname, *args, **kwargs) From 336ff92ec6e1bcd8a2cedd3ac9b5ac915d05013b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Thu, 1 Dec 2016 16:52:52 +0100 Subject: [PATCH 058/100] The refactored code now runs, converges almost exactly as the old code, produces some decent results, but somehow the results are slightly different. Made some slight changes to LdaModel to make the author-topic model work. 
--- docs/notebooks/at_with_nips.ipynb | 338 +++++++++++++++++++++++------- gensim/models/__init__.py | 1 + gensim/models/atmodel.py | 15 +- gensim/models/atmodel2.py | 233 ++++++++++---------- gensim/models/ldamodel.py | 20 +- 5 files changed, 396 insertions(+), 211 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 6d907a82d5..518ae34244 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -68,6 +68,10 @@ "\n", "from gensim.models import AuthorTopicModel\n", "from gensim.models import atmodel\n", + "from gensim.models import AuthorTopicModel2\n", + "from gensim.models import atmodel2\n", + "from gensim.models import LdaModel\n", + "from gensim.models import ldamodel\n", "\n", "from time import time\n", "\n", @@ -104,7 +108,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 64, "metadata": { "collapsed": false }, @@ -118,8 +122,8 @@ "#data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", - "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "yrs = ['00', '01', '02']\n", + "yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", + "#yrs = ['00', '01', '02']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -141,7 +145,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 65, "metadata": { "collapsed": false }, @@ -171,7 +175,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 66, "metadata": { "collapsed": false }, @@ -183,7 +187,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 67, "metadata": { "collapsed": false }, @@ -201,7 +205,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 68, "metadata": { "collapsed": false }, @@ -227,7 +231,7 @@ }, { "cell_type": "code", - 
"execution_count": 39, + "execution_count": 69, "metadata": { "collapsed": false }, @@ -250,7 +254,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 70, "metadata": { "collapsed": false }, @@ -265,7 +269,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 71, "metadata": { "collapsed": false }, @@ -293,7 +297,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 72, "metadata": { "collapsed": true }, @@ -305,7 +309,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 73, "metadata": { "collapsed": false }, @@ -323,16 +327,16 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 74, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84
GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkchlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFB
Niy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukSxB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpg
L/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGooTGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKET
FH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAY
YCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKwBOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMz
a0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZgtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybp
fEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLStsDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gj
rj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0p
aF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2l
r6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIknwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc
\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSIlKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ct
q4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrzzGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H
5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/GDir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCe
wAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYXFWd//H3J4GgLGn44SSAgIAIggKSCARlnUiQTZ0B\nhcYFFNRBQKaVRRHGDLgAAmFXFBBBbYwoo6xhUwgQRQirhKDsiwlb6IQkEJJ8f3+cU3Bzqd6X6ur6\nvJ6nnkqde+6551Z1p791VkUEZmZmZvVsWK0rYGZmZtZbDmjMzMys7jmgMTMzs7rngMbMzMzqngMa\nMzMzq3sOaMzMzKzuOaAxMzOzuueAxszMzOqeAxozMzOrew5ozKzbJD0j6aeF1+MlLZX0kQG49vck\nvVF4PTxf+/T+vna+3sH5emsNxPV6StK3JD0mabGkO2tdn66S9N78/u5f67pYfXFAY3VD0gH5P7pq\njx/Uun4NptqeKd3eR0XSdyTt1YNrL+3utbqrg7oFPbjXgSRpd+AHwJ+AA4Hja1ohswGwXK0rYNZN\nQfrP+YlS+oMDXxWriIibJL0zIhZ189TjgEuBK7txzneBE7p5nZ5or24XAZf24F4H0s7AG8DB4Q37\nrEE4oLF6dF1ETO9qZkkCRkTE6/1Yp4bX33/gJa0YEQsiYikD0ELTnhwgDOZgBmA0MH8wBjP+fbT+\n4i4nG1KK4ykkfV7S34HXgPH5uCR9Q9LfJb0m6V+SzpM0slSOJP1PHivyqqQbJb1f0tOlsSPLjOco\npFcdZyFpD0lTc5ltkv4o6f2lPL+UNEfS2vn4PEnPSzqpynUkqUXS/ZIW5nzXSPpQPn67pLvaea8e\nldRhy0h770OVfG8bQyNpI0m/lzQr1+0pSb+StFLlcwJGAJX3amnlvc3v69Jcxm8kzSF1n7T7nudj\nn5c0M1/vzvKYnvze/qPKeW+W2YW6tffZHl74uXpW0llVfq5ukzRd0gck/UnSgvzefqOjz6Fw/nKS\nvps/u9eUxsicIGn5Ut0/CzTlei5RO+NR8s/OG5JWKqQdk887qZC2XP78TyikrSxpUv6deE3SDEn/\nXSq/s9/H1SRdIukVSS9LuhBY5j3L+daU9Iv8Xr0m6TlJV0hauyvvmzUGt9BYPWqStHoxISJeKuWZ\nAOwHnAu8DDyV0y8CmvPzGcAGwOHAFpK2z9/+IY0/OAb4IzAFGAtcD7yzdJ32xlO8LV3SgcCFwDXA\n0cBKwNeAqZK2jIhnCucul683Ffhmvp+jJP0jIi4sFHsJ6Y/
XlcBPSX+EdwC2Ae7Nx8+TtFFEPFKo\ny7bA+sC3q9S9qKvvQ6XelfJXyPmGkd7n2cDawF7AyIiYL+lzwM+B2/L7AvDPUlm/Bx4GvlVIa+89\nHw/sD5xF6m45FJgi6cMRMbOTc99Mj4glXahb+bP9HnAscB3pZ24T0mc7tvRzFcC7gGuB3wKXAZ8B\nfiTpvoi4qUrdii7O93gZ6WdjHKlrbGNg30LdvwZsAXwFEHB7O+VNJX1GHyV9XgDbAUuA7Qv5xpI+\n81vz/Qq4Op/3M+B+YDfgdElrRsQxpeu87fcxl3El6Wf1PGAmsDfpfS9/Rv8HbEj6bJ8itUBNIP1M\nPYMZQET44UddPIADSF0N5ceSQp7hOW0RsGHp/J3ysb1L6bvl9H3y61H5/N+V8p2U8/20kHYisKhK\nXQ8i/VFYK79eBXgFOLuUb3ROP6eQdmk+9+hS3nuBOwqvd8n1OaWD92xVYCFwQin93Hzdd3Rwbnfe\nh/G5zh/Jr8fmPHt18pkuLJZTel+XAhe3c2xR4XXlM18MfLCQ/h5Sa8Blpff2kc7K7KRu5c92dH6f\n/ljK9/Wc77OFtKk57TOFtBGkgO/XnbxXY/J9nltKPz2X+dHSfb7chd+p4cA84MRC2sukgOm1ys8H\ncFS+x5Xz671zXY4slfc7UjC5bhd+HytlfL2QNowURC4B9s9p/6+czw8/qj3c5WT1JoBDgI8VHrtU\nyXdTRPyzlLYP6T/rP0tavfIA7iL98do559uV9B/x2aXzz+hFvT9OCmouK117CfC3wrWLflp6fRup\nRalib9If8RPbu2hEvAJcRfpWD6RuAODTpEDltQ7qPIGevw+v5OfdJL2jC/mrCeAn3cg/NSLeHBwe\nEU+SWgA+3sPrd9UupPep/L6cDywA9iilt0XE5MqLSGOP/sayn201u5Pek/L09NNIrTDl63QqIpYA\n00itekjaHGgCfggsT2o9gdRqc19EvJpf70YKUs4tFXk66b0ov+fVfh93A16n8HMeqSXrnHw/FQtI\nQdLOkpq6eYvWQBzQWD36W0TcXHxUyfNElbT3kb7tvVB6zAbeQWqRAFg3Py/zH3BEzCJ9m+2JDUn/\nSU8tXft54N8L1654NQcjRXOA1QqvNwCeiYjO6nQJsL6kcfn1x4HVSd/iO/Ke/Nzt9yEiHgXOBL4K\nvCTpWkmHSFqlk2uWPd6NvOU/mACPAKtIWq3Ksb5SeZ8eKSZGGvT6eOF4xdNVyih/tu1dZ3F+b4vX\neZb0eZSv01W3AVvlcTjbA09HxH2kmYOVbqePkn52i3V5JiIWlsqaUThe9ESV674HeLZKUD2z+CIf\nPxbYE3he0p8lHSmp/DtjDc5jaGyoKv9HCymAfw74PMt+A6x4Pj9XjnVlhkh7eYZXuXaQxu+8WCV/\neZDrknbKVTv/7si1+ZqfA/6Sn5+NiD93cl533oe3iYiWPMjzk6TWnnOAYySNy0FRV1T7HLuj/B51\n9fPqzTU605XPtrvHu1uHoqmkqfDbkFpiphbSt5f0AdIXgVt7cb1qn6Oo/nm8reyIOE3SFcCnSC2o\n3wO+LWnHYqucNTa30FgjeZQ0IPO2cgtPflT+Y3wiP29UPFnSGqRuo6I5wHBJK5bS16tybYDn27n2\nVLrvn8Da5Zk0ZRGxmDz4VNKqpIG5v+pC+U/k5668D+1d+8GI+H5E7AjsSGr9+koxS1fK6aL3VUnb\nCJgXEXPy6zmkcUVl61VJ62rdnsjPGxcTJY3I5T7ZxXK6cp3lJL23dJ21gJV7cZ2/kLoudyC1yFR+\nFm8FPkLqDg1SS06xLmtLKg8O3yQ/d6UulTLKXZIbV8lLRDwWEadHxK7AZqRByl2aHWaNwQGNNZLJ\npAGYx5UP5GmplcDgBtK36MNL2VqqlPko6RvlDoWyVia1AhVdC7wKfCePYSlf/11dvIei35FaWbuy\nCuylpGDufNIfgq4ENN1
5H5YhaaSk8v8vD5L+MK5QSJtP9QCjJ7bLY0AqdViP1E1xXSHPo8DqkjYp\n5Hs3Kcgr62rdKu/TEaX0r5Jmsl3VhTK64hrSz9p/l9K/SXpfr+5JobnbaDrpZ3ZNlm2hWQk4DJgZ\nEcWWxWtIv0tfKxXXQnovru3Cpa8h/Sx8tZKQfzcOY9kZc+/Ms+aKHiP9Pq1QyLeGpI2r/NxZg3CX\nk9WbHjetR8TNuQvkOEljgBtJ30w3Ig0YPoQ0U2W2pEnAkZL+SPrP+cOkAcgvl4q9FngWuFjSqTnt\nS8C/gDfXKYmINkmHkaaLT5d0Gakb6D2kwZx/opvfNiPiRkmtwDeU1oa5ntR1sj0wJSKKgy3vkjSD\nNBj4/q4003fzfYBlP5tdgEmSfgv8gzTA9ABS19rvC/nuBibk9Uv+BTwaEVXXzemCB4HrJZ1N+ly/\nlp//t5Dn16Sp6H/M+VYG/os0NXyLUnldqlt+n04GjpV0DSmA2SSXO43UOtZrETFd0q+Ar+UB5VOB\nbUldiJMjor2p2V0xFTgSeCkiZuTr/UvSo6Tfj5+V8l9BasE5WdKGvDVtew/gRxFRbZxQ2RWk1qFT\nc6tTZdp2ubVzU+A6SZOBh0gB0z6kcWCthXynkga/r03qWrZGU+tpVn740dUH6Q/iEmBMB3mG5zyn\ndZDny6RZJa+SuiDuAb4PjCrl+x9SsPIq6Vv4xqQBnT8t5RtD+sO1kPTN8VBKU3sLeXcitRjMyeXO\nBC4APlTIcynpD0u53icCr5fSRPpD9FC+/izSzJ7Nq5z/rVynb3Tzfa/2PjwFnF/IU562vUG+r3+Q\nWjqez+fuUCr7/cCfc9lLKu9tvtclpDVrOnwfip856Y/7I/m9uLNSn9L5E4AHSNOS/05aB6batO32\n6tbeZ3toLu+1/H6dCaxSyjMVuLtKnS4ltYJ09lkMz5/Ho/k6j5MCtuWqlPe2n6EOyt0r39MVpfSL\nKE09LxxbiTSr6Zlcl4eBI7rz+0gaCH0JaVbcS6Q1f7Zk2Wnb7yLNtHsImEsKpm8HPlXlnheXPxc/\nGueh/INgZl0g6Wng2oj4SqeZBxlJ3yStIbNuRPyr1vUxM+tL7ms0axxfIq0H4mDGzIYcj6ExG8KU\n9uj5BGncy/vxrBAzG6Ic0Jh1T3t7AQ1Wa5BmNL1M2v5gSo3rY2bWLzyGxszMzOqex9CYmZlZ3XNA\nY2ZmZnXPAY2Z9Yqk70kq70U10HUYLmmppPJO1L0pc3wu8xN9VWY3rv1LSf8Y6Oua1TMHNGb9SNIB\n+Y9i5bFQ0kxJZw+h3YLrbaB0d9TqvgJYWqNrm9Ulz3Iy639B2m/pCeAdpB2NDwF2k/TBiHithnWz\njvVmF+veOLCG1zarSw5ozAbGdRExPf/7Ikkvkzby+yTwm9pVq3OSVoyIBbWuRyOJiCW1uK4/a6tn\n7nIyq42bSd/A168kSFpf0m8lvSRpvqRpknYvniTphcImmCh5RdIbhd3CkXRMTluxkLaxpMtz+Qsl\n/U3SXqXyK11kO0g6T9Js0v5V3SLpIEk3SZqdr/WgpC+X8pwpaVYp7cf5+v9VSFsrp32pi9f+fO7W\nWyjpTkkfqZLn3ZIuljRL0muSHpB0QJXiAhgm6XhJz0haIOkGSeuXytsxf3ZP5fKelHRqcZdoSd+S\ntETSWuWL5LwLJa2SX79tDI2klSVNkvR0vsaMvHFmMc9783u1fym9Msbo2ELa93LaRpJ+I2kOaZNU\ns7rkgMasNjbMzy8B5PE000i7VJ8DHAusAFwp6ZOF824Hdii83hyoBDIfLaRvB0yvfNuW9AHSzsYb\nAz8krRj8KvB/pfIrziOtLPy/pP2fuusQ0kad3we+Sdqs8fxSUDMV+DdJG5XqvYS0Y3jFDqTAYmoX\nrjse+BHwC9ImjqOAKZI2rmSQtAZp48odgbOAI3Jdfy7pa6XyROou3AM4OT8+QtpQsegzp
M/rHOAw\n0kacR5A2d6y4LJf36Sr13ge4JiLm5dfLjEuSJOBq4HDSbt4tpI0/T1fa6bsnKuX/nrSJ5LdIm0Oa\n1ada747phx9D+cFbO4TvDKwOvBvYF3iBFFCsmfNNyvm2LZy7EmlX5UcLad8EFgEr5deHkf4YTwN+\nUMj3MnBq4fWNpF3Fy7sy3wY8XKrvUtIu0+riPVbbqXqFKvluAGYUXo/O1zoov14tvweXAU8V8p0D\nzOqkDsNzWYuBDxbS30PaCfqyQtrFpN3Cm0plTAZeBJbPr8fnMu8DhhfyteR6btTJ/X4n12fNQtpf\ngTtK+bbN1/lMIe1S4JHC671zniNL5/4OeIO04SjAe3O+/dt5f44tfW5LgYtr/Xvihx998XALjVn/\nE3ATKYh5Gvg1MBf4VLy1UeRuwJ0RMa1yUkTMB34KrCdp05w8lTT2rdKNsn1Om5r/jaTNgVVzGpJW\nIwVUvwWaJK1eeQDXA++TtGahvgH8LCJ6PMMnIl5/8+alkflatwAbSXpnzjMb+CdvtThtD7wOnAas\nLek9pXvsiqkR8WChHk8CVwIfz3UR8B/AH4DlqrwXqwEfKpV5YSw7pmUq6TPdoJ37XTGXd0fOVyzv\nN8A2ktYtpO0LLCC1vLRnN1Ige24p/XRSsPLxDs7tSAA/6eG5ZoOKAxqz/hekLpiPATsBm0bEeyPi\nxkKe9wAzq5w7o3AcYDrpj1+lS2Y73gpoPixpRD4WpNYXSN1bIn0jf6H0mJjzlKeQP1F8IWl5SaOL\nj45uWNL2km6W9CrwSr7WCflwUyHrbaV7uRO4C2gDtpfUBHyQrgc0/6yS9giwSg7s1gBWAb7G29+L\nn+b85feiPIZoTn5erZIg6T2SLpH0Eqnl7QVSEAvL3u/k/PyZQtrewFXR8WDc9wDPRMTCUnr556Mn\nHu/FuWaDhmc5mQ2Mv8Vbs5x6LCIWS/orsIOk9wJrAreS/oAuD2xDCgxmRMRL+bTKF5dTgfY2pywH\nAuU/nDuQuoyCFByFpHUi4rlyQZLel/M+SOqeeZrUuvAJ0hiQ4hepqcABktYhBTY3RkRIuj2/rgQP\nt7ZT764oTn+uXPsXwC/byX9f6XV7M44EacAtqUtvFeAHpMB0AbAuaQzNm/cbEc9ImkYKaE6VtD2p\nG/KybtxDR9prVRvewTnlz9qsLjmgMRscniQN2C3bpHC8YipwNGkA8QsR8QiApL+TAo/tSd0sFY/l\n5zci4uYe1u9uUgtT0Qvt5P0EKbjaI3crkeu3a5W8lZaXXYExwHfz61uBL5ICmnm8Pchoz/uqpG0E\nzIuIOZLmAvOBYb14L8o+RBq70hwRb07Bl9ReN9BlwJmSNiB1N80Dru3kGk8A20l6Z6mVpvzzUQkA\nVy2d35sWHLO64C4ns8HhGmBrSdtUEiStBHwFeDwiHirknUpaoO8I3upWIv/786RWmze7aCLiBdIg\n36/mGT7LkPSuzioXEa9ExM2lR3vbHVRaNN78/yV393yhSrn/BGaTBjsPI407qdzjxqTxLnd0YzzP\ndnkMUeW66wF7Atfl6y0BrgA+I2mT8slV3ouuXLfa/Yr0+VQ7/7fkgbuk7qY/FsfgtOMaYASpq6yo\nMkD5WoCImEPq4tuhlO+wdupSlaQmpWn+K3f1HLNacwuNWf/rSnfBSUAzcJ2ks0izlA4kfbP+z1Le\naaTZMxsB5xfSbyWN1ak2xfnQnPaApJ+RWm1Gk2bYvBvYspv17cgU0vTma/K1RgJfBv7F28enQArE\n9iFNM381p/2N1BWyIWlWUlc9CFwv6WzSe/S1/Py/hTxHk/7g35nrNwP4f8CHSa1bxaCvK+/F30nj\nUM7IA5lfzfczslrmiJgtaSpwFLAyXVtY8QrS53uypA2B+0kDhfcAfhQRxXE+FwBHSmojjbnaidSC\n1J3PdT/gx/l5cid5zQYFt9CY9b9OvxlHxPOk4OJ60
rfpH5CmG+8ZEX8s5V1AmoJdHPgLKWAJ0pTn\np0vnzCD9wb6KNDX7HOCrpG/3J7CsnsxuevOcfK19SP+/nAocDJxNWtummkq9i61Ki0lTnLu6/kyl\nDjcBR5LucSKp9WdCrlOl7FnAVqRxNP+Z6/Z1UgByTHv31V56bqnakxRkHAscRwpyvthBXX9DCmZe\nof1xTcVrBCl4OQvYizTNfyPgGxHxrdJ53yWN3fkMKbBcnOvX3T23hur+XDZEqRczM83MzMwGhUHR\nQpOX9D5D0hN5afHbJH24lOcESc8Vlh7fsHR8NUm/ktQmaY6kC/IYhGKezSXdmpcYf1LSUQNxf2Zm\nZta/BkVAQ1puezzwWdKaEzcAN1YW+5J0DKkZ/qvA1qRZClPymhsVvyaN+B9PaprdgcL4AqU9UqaQ\n+rrHkPqvJ0o6uF/vzMzMzPpdzbucJL2DNG1xr4i4rpB+F2lvk/+R9Bxp4NukfGwkqW/8gIiYnGcr\n/B0YGxH35Dy7kvY+WTsiZkk6hLSw2Bq5fx5JPwQ+GRGbYmZmZnVrMLTQLEda9Kk8bXEhaQrm+qRZ\nB5VVN4mIuaQBg9vmpHHAnEowk91IGtS2TSHPrZVgJpsCbJxXIzUzM7M6VfOAJk/TnAYcL2lNScMk\nfY4UrKxJCmaC1CJTNJu3pleuATxfKncJaeprMU+1MmDZaZpmZmZWZwbLOjSfI00zfJY0xXA6aUzM\nmA7OEZ1PK+wsT2Vdhqp58gZzu5JW6Xytk2uZmZnZW94BrAdMKWzF0m8GRUATEY8DO+ddeEfmhacu\nIw3gnUUKPEazbAvLKNJaHOQ8yyzYlfdXWS0fq+Qpb6hXOafcclOxK/Crbt+QmZmZVXyW1EjRrwZF\nQFOR9yhZmJdJ3xU4MiIelzSLNHvpfnhzUPA2wLn51GnAqpK2LIyjGU8KhO4s5PmepOG5OwpgAjAz\nItraqdITAL/85S/ZZJO3rZJed1paWpg0aVKtq9FnfD+D11C6F/D9DGZD6V5gaN3PjBkz+NznPgf5\nb2l/GxQBjaQJpOBjJmlzuVNIy5FfnLOcARwn6Z+kN+ZE4BngDwAR8bCkKcDP8mymEaTVP1vzqqCQ\nosP/AS6SdDKwGWl10CM6qNprAJtssgljxnTU+1UfmpqahsR9VPh+Bq+hdC/g+xnMhtK9wNC7n2xA\nhmwMioAGaAJ+SNpT5mXgcuC4SktKRJwiaUXSujKrkpZC3y0iFhXK2J+0nPuNpI3fLqcQrETE3DyV\n+xzgLuBFYGJEXNjP92ZmZmb9bFAENBHxW9IOtB3lmUjam6W946+QBhd3VMYDwI7dr6GZmZkNZjWf\ntm1mZmbWWw5oGkhzc3Otq9CnfD+D11C6F/D9DGZD6V5g6N3PQKr51geDmaQxwN133333UBykZWZm\n1m+mT5/O2LFjIW1LNL2/r+cWGjMzM6t7DmjMzMys7jmgMTMzs7rngMbMzMzqngMaMzMzq3sOaMzM\nzAyAu+6Ce++tdS16ZlCsFGxmZma1953vQFMTTJ5c65p0n1tozMzMrO45oDEzM7O654DGzMzMAKjn\nzQMc0JiZmVndc0BjZmZmb5JqXYOecUBjZmZmdc8BjZmZmdU9BzRmZmYGeFCwmZmZDREeQ2NmZmZW\nIw5ozMzMrO45oDEzMzPAY2jMzMzMaqrmAY2kYZJOlPSYpAWS/inpuCr5TpD0XM5zg6QNS8dXk/Qr\nSW2S5ki6QNJKpTybS7pV0kJJT0o6qr/vz8zMrJ54UHDPfQv4KvA14P3A0cDRkg6rZJB0DHBYzrc1\nMB+YImlEoZxfA5sA44E9gB2A8wtlrAJMAR4HxgBHARMlHdxvd2ZmZmYDYrlaVwDYFvhDRFyXXz8l\naX9S4FJxBHBiRFwJIOkLwGzgU8BkSZsAuwJjI+KenOdw4GpJR0bELOBzwPLAQRGxGJghaUvgG8AF\n/X6XZmZmg5zH0
PTOHcB4Se8DkLQF8FHgmvx6fWAN4KbKCRExF/grKRgCGAfMqQQz2Y1AANsU8tya\ng5mKKcDGkpr6+qbMzMxs4AyGFpqTgJHAw5KWkIKs70TEZfn4GqTAZHbpvNn5WCXP88WDEbFE0sul\nPI9VKaNyrK2X92FmZlb36nUMzWAIaPYF9gf2Ax4CPgScKem5iLi0g/NECnQ60lmeysdWx41sZmZm\nNhgCmlOAH0TEb/Prv0taD/g2cCkwixR4jGbZVppRQKWLaVZ+/SZJw4HV8rFKntGla1fOKbf+LKOl\npYWmpmV7pZqbm2lubu7oNDMzs7rS0zE0ra2ttLa2LpPW1jawHR+DIaBZkbe3kCwlj++JiMclzSLN\nXrofQNJI0tiYc3P+acCqkrYsjKMZTwqE7izk+Z6k4RGxJKdNAGZGRIfv+qRJkxgzZkxP78/MzGxI\nq/Ylf/r06YwdO3bA6jAYBgVfCXxH0u6S3iPpP4AW4PeFPGcAx0naS9JmwCXAM8AfACLiYdIA359J\n2krSR4GzgdY8wwnStO5FwEWSNpW0L/B14LQBuEczMzPrR4OhheYw4ERSa8so4DngxzkNgIg4RdKK\npHVlVgWmArtFxKJCOfsD55BmNy0FLidN966UMVfSrjnPXcCLwMSIuLD/bs3MzKy+eFBwD0XEfNJa\nMN/oJN9EYGIHx18hrTXTURkPADt2u5JmZmYNwOvQmJmZmdWQAxozMzOrew5ozMzM7E31OobGAY2Z\nmZnVPQc0ZmZmBnhQsJmZmVlNOaAxMzOzuueAxszMzN7kQcFmZmZW1zyGxszMzKyGHNCYmZlZ3XNA\nY2ZmZm/yGBozMzOrax5DY2ZmZlZDDmjMzMys7jmgMTMzs7rngMbMzMyANIbGg4LNzMzMasQBjZmZ\nmdU9BzRmZmZW9xzQmJmZGeAxNGZmZmY15YDGzMzM6p4DGjMzM6t7NQ9oJD0uaWmVx9n5+AqSzpX0\noqR5ki6XNKpUxjqSrpY0X9IsSadIGlbKs5OkuyW9JukRSQcM5H2amZlZ/6l5QAN8GFij8NgFCGBy\nPn4GsAewN7ADsBbwu8rJOXC5BlgOGAccABwInFDIsx5wFXATsAVwJnCBpF3666bMzMzqTT0PCl6u\n1hWIiJeKryXtBTwaEVMljQS+BOwXEbfk418EZkjaOiLuBHYF3g/sHBEvAg9IOh44SdLEiFgMHAI8\nFhFH58vMlLQd0ALcMBD3aWZmZv1nMLTQvEnS8sBngQtz0odJQddNlTwRMRN4Ctg2J40DHsjBTMUU\noAn4QCHPjaXLTSmUYWZmZnVsUAU0wH+QApFf5NejgUURMbeUbzape4r8PLvKcbqQZ6SkFXpbaTMz\nM6utmnc5lXwJuDYiZnWST6RxNp3pKI+6kAeAlpYWmpqalklrbm6mubm5C1UwMzOrD9GVv6xVtLa2\n0traukxaW1tbH9So6wZNQCNpXeBjwKcKybOAEZJGllppRvFWi8ssYKtScaMLxyrPo0t5RgFzI2JR\nZ3WbNGkSY8aM6fwmzMzM6lxPBgVX+5I/ffp0xo4d20e16txg6nL6EilIuaaQdjewGBhfSZC0EbAu\ncEdOmgZsJuldhfMmAG3AjEKe8SxrQk43MzOzOjcoWmgkiTTV+uKIWFpJj4i5ki4ETpc0B5gHnAXc\nHhF/y9muBx4CLpV0DLAmcCJwTkS8kfP8BDhM0snARaTgZh9g936/OTMzM+t3gyKgIXU1rQP8vMqx\nFmAJcDmwAnAdcGjlYEQslbQn8GNSq8184GLgu4U8T0jaAzgd+DrwDHBQRJRnPpmZmTWsno6hGQwG\nRUATETcAw9s59jpweH60d/7TwJ6dXOMWYOA688zMzOpMPS+sN5jG0JiZmVkNOaAxMzOzuueAxszM\nzOqeAxozMzMbEhzQmJmZWV1zC42ZmZnVPQc0ZmZmVvcc0JiZmVndc0BjZmZmdc8
BjZmZmdU9BzRm\nZmZW9xwx6zFGAAAgAElEQVTQmJmZWd1zQGNmZmZ1zwGNmZmZ1T0HNGZmZlb3HNCYmZlZ3XNAY2Zm\nZnXPAY2ZmZnVPQc0ZmZmVvcc0JiZmVndc0BjZmZmdc8BjZmZmdU9BzRmZmZW9yJgWJ1GBoOi2pLW\nknSppBclLZB0n6QxpTwnSHouH79B0oal46tJ+pWkNklzJF0gaaVSns0l3SppoaQnJR01EPdnZmZW\nD5YudQtNj0laFbgdeB3YFdgE+CYwp5DnGOAw4KvA1sB8YIqkEYWifp3PHQ/sAewAnF8oYxVgCvA4\nMAY4Cpgo6eD+ujczM7N6Us9dTsvVugLAt4CnIqIYWDxZynMEcGJEXAkg6QvAbOBTwGRJm5CCobER\ncU/OczhwtaQjI2IW8DlgeeCgiFgMzJC0JfAN4IL+uz0zM7P6UM8BTc1baIC9gLskTZY0W9L0YquJ\npPWBNYCbKmkRMRf4K7BtThoHzKkEM9mNQADbFPLcmoOZiinAxpKa+vqmzMzM6o0Dmt7ZADgEmAlM\nAH4CnCXpc/n4GqTAZHbpvNn5WCXP88WDEbEEeLmUp1oZFPKYmZk1rHoOaAZDl9Mw4M6IOD6/vk/S\nB0hBzi87OE+kQKcjneWpfGydlWNmZjbkOaDpnX8BM0ppM4D/zP+eRQo8RrNsC8so4J5CnlHFAiQN\nB1bLxyp5RpeuUzmn3HKzjJaWFpqalu2Vam5uprm5uaPTzMzM6kpPA5rW1lZaW1uXSWtra+ujWnXN\nYAhobgc2LqVtTB4YHBGPS5pFmr10P4CkkaSxMefm/NOAVSVtWRhHM54UCN1ZyPM9ScNzdxSkLq6Z\nEdHhuz5p0iTGjBnTURYzM7O619OAptqX/OnTpzN27Ng+qlnnBsMYmknAOEnflvReSfsDBwPnFPKc\nARwnaS9JmwGXAM8AfwCIiIdJA3x/JmkrSR8FzgZa8wwnSNO6FwEXSdpU0r7A14HTBuAezczMBj13\nOfVCRNwl6T+Ak4DjSevEHBERlxXynCJpRdK6MqsCU4HdImJRoaj9SUHQjcBS4HLSdO9KGXMl7Zrz\n3AW8CEyMiAv78/7MzMzqRT2vFFzzgAYgIq4Brukkz0RgYgfHXyGtNdNRGQ8AO3a/hmZmZkOfVwo2\nMzOzulfPXU4OaMzMzAxwQGNmZmZDQMMHNJKGS/qQpNX6ojwzMzMbeA0X0Eg6Q9JB+d/DgVuA6cDT\nknbqu+qZmZnZQGm4gAbYB7gv/3svYH3g/aQ1Zb7fB/UyMzOzAdaIAc27eGtLgd2B30bEI8BFwGZ9\nUTEzMzMbWI0Y0MwGNs3dTR8nLWYHsCKwpN2zzMzMbNCq54Cmpwvr/RyYTNpYMoAbcvo2wMN9UC8z\nMzMbYA23UnBETJT0ILAOqbvp9XxoCWkLAzMzM6sz9bxScI+3PoiIywEkvaOQ9ou+qJSZmZkNvHru\ncurptO3hko6X9CzwqqQNcvqJlencZmZmVl8aLqABvgMcCBwNFHe8fhA4uJd1MjMzsxpoxIDmC8BX\nIuJXLDur6T7SejRmZmZWZxoxoHk38M92ylu+59UxMzOzWmnEgOYhYPsq6fsA9/S8OmZmZlYr9RzQ\n9HSW0wnALyS9mxQU/aekjUldUXv2VeXMzMxs4NRzQNOjFpqI+AMpcPkYMJ8U4GwC7BURN3R0rpmZ\nmQ1O9RzQ9GYdmtuAXfqwLmZmZlZD9bxScE/XodlK0jZV0reR9OHeV8vMzMwGUkR6rtcWmp7GYeeS\ntj0oe3c+ZmZmZnWkUQOaTYHpVdLvycfMzMysjjRqQPM6MLpK+prA4p5Xx8zMzGqhUQOa64EfSmqq\nJEhaFfgB4FlOZmZmdaZRA5ojSWNonpT0J0l/Ah4H1gC+2Z2CJH1X0tLS46HC8RUknSvpRUnzJF0u\naVSpjHUkXS1pvqRZkk6RNKyUZydJd0t6TdI
jkg7o4b2bmZkNOQ0Z0ETEs8DmpM0pHwLuBo4ANouI\np3tQ5IOkLqw18mO7wrEzgD2AvYEdgLWA31UO5sDlGtIU9HHAAaSNM08o5FkPuAq4CdgCOBO4QJKn\nnZuZmVH/AU1v1qGZD/y0j+qxOCJeKCdKGgl8CdgvIm7JaV8EZkjaOiLuBHYlbYi5c0S8CDwg6Xjg\nJEkTI2IxcAjwWEQcnYueKWk7oAV3kZmZmTVuQCNpI2AnYBSllp6IOKHaOR14n6RngdeAacC3c0vP\n2FzHmwplz5T0FLAtcCepVeaBHMxUTAF+DHyAtAP4OODG0jWnAJO6WU8zM7MhqSEDGklfJgUMLwKz\ngCgcDgrdPV3wF1IX0UzSLKmJwK2SPkjqfloUEXNL58zOx8jPs6scrxy7r4M8IyWtEBGvd6O+ZmZm\nQ04loKnXlYJ72kJzHPCdiDi5txWIiCmFlw9KuhN4EvgMqcWmGrFsENVu8R0cUxfyANDS0kJTU9My\nac3NzTQ3N3ehCmZmZoPf0qXpuSctNK2trbS2ti6T1tbW1ge16rqeBjSrAb/ty4pURESbpEeADUnd\nRCMkjSy10ozirRaXWcBWpWJGF45Vnsvr5owC5kbEos7qNGnSJMaMGdONuzAzM6svvelyqvYlf/r0\n6YwdO7YPatY1PW1Y+i0woS8rUiFpZeC9wHOk2VOLgfGF4xsB6wJ35KRpwGaS3lUoZgLQBswo5BnP\nsibkdDMzs4bXkGNogH8CJ0oaBzwAvFE8GBFndbUgST8CriR1M70b+F9SEHNZRMyVdCFwuqQ5wDzg\nLOD2iPhbLuJ60tTxSyUdQxqHcyJwTkRU6vUT4DBJJwMXkYKbfYDdu33nZmZmQ1CjBjRfAV4FdsyP\noiAFHV21NvBrYHXgBeA2YFxEvJSPtwBLgMuBFYDrgEPfvFjEUkl7kgYp3wHMBy4GvlvI84SkPYDT\nga8DzwAHRUR55pOZmVlDasiAJiLW76sKRESHI2vzDKTD86O9PE8De3ZSzi2kaeBmZmZWUu8BTa8m\nZ0kaIWljST1ez8bMzMxqryEDGkkr5rEtC4C/kwbpIulsSd/qw/qZmZnZAGjIgAb4IWlPpJ1Ydq2Y\nG4F9e1knMzMzG2D1HtD0tKvoU8C+EfEXScWF6f5OmnJtZmZmdaTeA5qettD8G/B8lfSV6NoKvmZm\nZjaILFmSnocPr209eqqnAc1dwB6F15Ug5mC8WJ2ZmVndqQQ0y9XpNJ+eVvtY4FpJm+YyjpD0AdIO\n2OV1aczMzGyQW7w4PTdUC01E3EYaFLwcaaXgCaS9lbaNiLv7rnpmZmY2EBquhSavObM/MCUivtz3\nVTIzM7OB1nAtNBGxmLQ30jv6vjpmZmZWC/XeQtPTQcF3Alv2ZUXMzMysduq9haancdh5wGmS1gbu\nJm0I+aaIuL+3FTMzM7OBUwlo6rWFpqfVviw/F3fVDkD5uU7jOzMzs8ZU711OPa12n+22bWZmZrXX\nkF1OEfFkX1fEzMzMaqchW2gkfaGj4xFxSc+qY2ZmZrXQkC00wJml18sDKwKLgAWAAxozM7M60pAt\nNBGxWjlN0vuAHwM/6m2lzMzMbGDVewtNT9eheZuI+AfwLd7eemNmZmaDXKPutt2excBafVymmZmZ\n9bPXXkvP76jTfQB6Oij4E+UkYE3gMOD23lbKzMzMBtbChen5ne+sbT16qqdDf/6v9DqAF4CbgW/2\nqkZmZmY24CoBTUO10EREX3dVmZmZWQ0tXAgjRsCwOv0LP+iqLenbkpZKOr2QtoKkcyW9KGmepMsl\njSqdt46kqyXNlzRL0imShpXy7CTpbkmvSXpE0gEDdV9mZmaD2euvwwor1LoWPdejgCYHFN+qkn6U\npN/2tDKStgK+DNxXOnQGsAewN7ADaeDx7wrnDQOuIbU4jQMOAA4ETijkWQ+4CrgJ2II0G+sCSbv0\ntL5mZmZ
DxZIl9TvDCXreQrMjcHWV9OtIAUe3SVoZ+CVwMPBKIX0k8CWgJSJuiYh7gC8CH5W0dc62\nK/B+4LMR8UBETAGOBw6VVOlWOwR4LCKOjoiZEXEucDnQ0pP6mpmZDSVLlzZmQLMyaVXgsjeAkT0s\n81zgyoi4uZT+YVLLy02VhIiYCTwFbJuTxgEPRMSLhfOmAE3ABwp5biyVPaVQhpmZWcNaurR+x89A\nzwOaB4B9q6TvBzzU3cIk7Qd8CPh2lcOjgUURMbeUPhtYI/97jfy6fJwu5BkpqY57Dc3MzHpvyZL6\nDmh6Om37ROD3kt5LmqoNMB5oBj7dnYIkrU0aI7NLRLzRnVNJ08U701EedSGPmZnZkFfvXU49nbZ9\npaRPAccC+wALgfuBj0XELd0sbizwb8DdkioBxnBgB0mHAR8HVpA0stRKM4q3WlxmAVuVyh1dOFZ5\nHl3KMwqYGxHVus/e1NLSQlNT0zJpzc3NNDc3d3hjZmZm9aI3LTStra20trYuk9bW1tYHteq6Hu+p\nGRFXU31gcHfdCGxWSrsYmAGcBDxLGpszHrgCQNJGwLrAHTn/NOBYSe8qjKOZALTlcip5ditdZ0JO\n79CkSZMYM2ZM1+/IzMyszvRmDE21L/nTp09n7NixfVCzrunp1gdbAcMi4q+l9G2AJRFxV1fLioj5\nlMbdSJoPvBQRM/LrC4HTJc0B5gFnAbdHxN/yKdfnMi6VdAxpG4YTgXMK3Vg/AQ6TdDJwESlA2gfY\nvet3bmZmNjTVe5dTT4f/nAusUyX93flYb5XHtLSQ1pC5HPgz8BxpTZqUOWIpsCewhNRqcwmplee7\nhTxPkNay+Rhwby7zoIgoz3wyMzNrOI06KHhTYHqV9HvysV6JiH8vvX4dODw/2jvnaVJQ01G5t5DG\n7JiZmVlBo7bQvM7bB9hC6upZ3PPqmJmZWS006jo01wM/lPTm1B9JqwI/AG7oi4qZmZnZwGnULqcj\ngVuBJyXdk9M+RJpG/fm+qJiZmZkNnHrvcurpOjTPStoc+Cxpo8eFwM+B1m4ujmdmZmaDQL13OfVm\nHZr5wE/7sC5mZmZWIw3Z5STp06RtDjYiTbH+B/DriLi8D+tmZmZmA6Teu5y6FYtJGibpN8BvSNOz\n/wk8RtrRerKkywrbF5iZmVmdaLQWmiNIC9N9IiKuKh6Q9AnSOJojSJtNmpmZWZ2o9zE03a36F4Gj\nysEMQET8ETga+FJfVMzMzMwGTkN1OQHvI20m2Z4bcx4zMzOrI/Xe5dTdqi8EVu3g+EjgtZ5Xx8zM\nzGqh0VpopgGHdHD80JzHzMzM6ki9t9B0d1Dw94E/S1odOBV4GBCwCfBN4JPAzn1aQzMzM+t3r74K\nK69c61r0XLcCmoi4Q9K+pAX19i4dngM0R8TtfVU5MzMzGxhtbbD++rWuRc91e2G9iLhC0hRgAmlh\nPYBHgOsjYkFfVs7MzMwGRlsbjBxZ61r0XE/3clog6WPA/0TEy31cJzMzMxtgbW3Q1FTrWvRcd1cK\nXrvwcn9g5Zz+gKR1+rJiZmZmNnDmzq3vgKa7LTQPS3oJuB14B7AO8BSwHrB831bNzMzMBkJECmjq\nucupuxO0moBPA3fnc6+R9AiwArCrpDX6uH5mZmbWz159Na1DU88tNN0NaJaPiDsj4jTSIntbkrZD\nWELa8uBRSTP7uI5mZmbWj+bOTc/13ELT3S6nuZLuIXU5jQBWjIjbJS0G9gWeAbbu4zqamZlZP3r1\n1fS8yiq1rUdvdLeFZi3ge8DrpGDoLklTScHNGCAi4ra+raKZmZn1pwV50ZUVV6xtPXqjWwFNRLwY\nEVdGxLeBBcBWwNlAkFYOnivplr6vppmZmfWXhgtoqmiLiMnAG8C/A+sD53WnAEn/Jek+SW35cYek\njxeOryDpXEkvSpon6XJJo0plrCPpaknzJc2SdIqkYaU8O0m6W9Jrkh6Rd
EDPb9vMzGzoqAQ0K61U\n23r0Rm8Cms1JY2YAngTeiIhZEfGbbpbzNHAMMDY/bgb+IGmTfPwMYA/SVgs7kLq9flc5OQcu15C6\nwMYBBwAHAicU8qwHXAXcBGwBnAlcIGmXbtbVzMxsyJk/Pz3XcwtNj1YKBoiIpwv//mAvyrm6lHSc\npEOAcZKeJc2e2i8ibgGQ9EVghqStI+JOYFfg/cDOEfEi8ICk44GTJE2MiMWkHcIfi4ij8zVmStoO\naAFu6GndzczMhgJ3OfUxScMk7QesCEwjtdgsR2pZASAiZpIW89s2J40DHsjBTMUU0po5HyjkubF0\nuSmFMszMzBpWJaB55ztrW4/eGBQBjaQPSppHmj11HvAfEfEwsAawKCLmlk6ZnY+Rn2dXOU4X8oyU\ntEIf3IKZmVndWrAgBTPDBkVU0DM97nLqYw+TxrasShorc4mkHTrIL9LMqs50lEddyGNmZjbkzZ9f\n391NMEgCmjzO5bH8crqkrYEjgMnACEkjS600o3irxWUWafp40ejCscrz6FKeUcDciFjUWf1aWlpo\nKq0H3dzcTHNzc2enmpmZDXoLFvQuoGltbaW1tXWZtLa2tl7WqnsGRUBTxTDS/lB3A4uB8cAVAJI2\nAtYF7sh5pwHHSnpXYRzNBKANmFHIs1vpGhNyeqcmTZrEmDFjenYnZmZmg9yCBb2bsl3tS/706dMZ\nO3ZsL2vWdTUPaCR9H7iWNH17FeCzwI7AhIiYK+lC4HRJc4B5wFnA7RHxt1zE9cBDwKWSjgHWBE4E\nzomIN3KenwCHSToZuIgUIO0D7D4Q92hmZjaYucupb4wGLiEFIm3A/aRg5uZ8vIW0+eXlpFab64BD\nKydHxFJJewI/JrXazAcuBr5byPOEpD2A04Gvk9bPOSgiyjOfzMzMGk5vu5wGg5oHNBFxcCfHXwcO\nz4/28jwN7NlJObeQpoGbmZlZQW+7nAaDOp6gZWZmZn1hKLTQOKAxMzNrcHPmwMiRta5F7zigMTMz\na3D/+hestVata9E7DmjMzMwaWEQKaNZcs9Y16R0HNGZmZg1szhxYtMgBjZmZmdWxf/0rPTugMTMz\ns7rlgMbMzMzq3gsvpOdRo2pbj95yQGNmZtbAXnoJRozwwnpmZmZWx156CVZfHaRa16R3HNCYmZk1\nsEpAU+8c0JiZmTWwtjZoaqp1LXrPAY2ZmVkDGwobU4IDGjMzs4Y2FDamBAc0ZmZmDc0BjZmZmdW9\nhQvhne+sdS16zwGNmZlZA3v1VbfQmJmZWZ2bNQtGj651LXrPAY2ZmVmDWrgwrUOzzjq1rknvOaAx\nMzNrUJWNKd/97trWoy84oDEzM2tQzz6bntdaq7b16AsOaMzMzBrUc8+lZ7fQmJmZWd167rm0SvAq\nq9S6Jr3ngMbMzKxBPfxwGhBc7zttwyAIaCR9W9KdkuZKmi3pCkkblfKsIOlcSS9KmifpckmjSnnW\nkXS1pPmSZkk6RdKwUp6dJN0t6TVJj0g6YCDu0czMbDC6914YN67WtegbNQ9ogO2Bs4FtgI8BywPX\nSyquW3gGsAewN7ADsBbwu8rBHLhcAywHjAMOAA4ETijkWQ+4CrgJ2AI4E7hA0i79cldmZmaD3GOP\nwXvfW+ta9I3lal2BiNi9+FrSgcDzwFjgNkkjgS8B+0XELTnPF4EZkraOiDuBXYH3AztHxIvAA5KO\nB06SNDEiFgOHAI9FxNH5UjMlbQe0ADf0+42amZkNIvPmwYsvwgYb1LomfWMwtNCUrQoE8HJ+PZYU\neN1UyRARM4GngG1z0jjggRzMVEwBmoAPFPLcWLrWlEIZZmZmDePJJ9PzeuvVtBp9ZlAFNJJE6l66\nLSIeyslrAIsiYm4p++x8rJJndpXjdCHPSEkr9LbuZmZm9aSyqN5QWIMGBkGXU8l5wKbAdl3IK1JL\nTmc6yqMu5DEzMxty7r037bLtgKaPS
ToH2B3YPiKeKxyaBYyQNLLUSjOKt1pcZgFblYocXThWeS5v\nvzUKmBsRizqqW0tLC01NTcukNTc309zc3NFpZmZmg9b558MOO8CIEb0vq7W1ldbW1mXS2trael9w\nNwyKgCYHM58EdoyIp0qH7wYWA+OBK3L+jYB1gTtynmnAsZLeVRhHMwFoA2YU8uxWKntCTu/QpEmT\nGDNmTLfuyczMbLB65hl49FE48si+Ka/al/zp06czduzYvrlAF9Q8oJF0HtAMfAKYL6nSitIWEa9F\nxFxJFwKnS5oDzAPOAm6PiL/lvNcDDwGXSjoGWBM4ETgnIt7IeX4CHCbpZOAiUoC0D6lVyMzMrGHc\nfnt6njChtvXoS4NhUPB/ASOBPwPPFR6fKeRpIa0hc3kh396VgxGxFNgTWEJqtbkEuBj4biHPE6S1\nbD4G3JvLPCgiyjOfzMzMhrRnnknP665b23r0pZq30EREp0FVRLwOHJ4f7eV5mhTUdFTOLaRp4GZm\nZg3rqadg441huZpHAX1nMLTQmJmZ2QCaPh222KLWtehbDmjMzMwayMKFMG0abLNNrWvStxzQmJmZ\nNZCHH4YlS+BDH6p1TfqWAxozM7MG8pe/pGd3OZmZmVnduuOO1N20+uq1rknfckBjZmbWQO67b+i1\nzoADGjMzs4YxZw48+CCMG1frmvQ9BzRmZmYNorUVhg2DXXetdU36ngMaMzOzBvHLX8LOOw+dHbaL\nHNCYmZk1gNmz0/oz++1X65r0Dwc0ZmZmDeCqq0CCT3yi1jXpHw5ozMzMGsAFF8Auu8C//Vuta9I/\nhtC2VGZmZlbNLbekBfUmT651TfqPW2jMzMyGsDfegMMPT1sdfPKTta5N/3ELjZmZ2RB2wQXwwAPw\npz/BiBG1rk3/cQuNmZnZELVkCZx0EjQ3w0471bo2/csBjZmZ2RB13nnw9NNwxBG1rkn/c0BjZmY2\nBM2bBz/6Eey7b9qMcqhzQGNmZjbERMBRR8HLL8P3v1/r2gwMDwo2MzMbQiKgpQXOPx9+8APYYINa\n12hgOKAxMzMbQo4+Gs48E047LQU2jcIBjZmZ2RBx881w6qkpmPnGN2pdm4HlMTRmZmZDwKJFqXVm\ns80aq2WmYlAENJK2l/RHSc9KWirpbVtnSTpB0nOSFki6QdKGpeOrSfqVpDZJcyRdIGmlUp7NJd0q\naaGkJyUd1d/3ZmZmNhC+/W24/374+c/TJpSNZlAENMBKwL3AoUCUD0o6BjgM+CqwNTAfmCKpuObh\nr4FNgPHAHsAOwPmFMlYBpgCPA2OAo4CJkg7uh/sxMzMbMFdfDaefDiefDGPH1ro2tTEoxtBExHXA\ndQBS1bjyCODEiLgy5/kCMBv4FDBZ0ibArsDYiLgn5zkcuFrSkRExC/gcsDxwUEQsBmZI2hL4BnBB\nv96gmZlZP5k1Cw48EHbfHf77v2tdm9oZLC007ZK0PrAGcFMlLSLmAn8Fts1J44A5lWAmu5HU2rNN\nIc+tOZipmAJsLKmpn6pvZmbWb+bOhU9/GpYuhQsvbMyupopBH9CQgpkgtcgUzc7HKnmeLx6MiCXA\ny6U81cqgkMfMzKwuzJsH++0H994L//d/sEaD/yUbFF1OPSSqjLfpZp5KLNtZOWZmZoPG66/D+PHw\n8MMweTJsv32ta1R79RDQzCIFHqNZtoVlFHBPIc+o4kmShgOr5WOVPKNLZVfOKbfcLKOlpYWmpmV7\npZqbm2lubu7aHZiZmfWh006De+6BW26Bj3yk1rWB1tZWWltbl0lra2sb0DoM+oAmIh6XNIs0e+l+\nAEkjSWNjzs3ZpgGrStqyMI5mPCkQurOQ53uShufuKIAJwMyI6PBdnzRpEmPGjOmzezIzM+uJCPjx\nj+H44+HQQwdHMAPVv+RPnz6dsQM45WpQjKGRtJKkLSR9KCdtkF+vk1+fARwnaS9JmwGXAM8AfwCI\ni
IdJA3x/JmkrSR8FzgZa8wwnSNO6FwEXSdpU0r7A14HTBuQmzczMeuG222DzzVMg89Wvpina9pbB\n0kLzYeBPpLEswVtBxi+AL0XEKZJWJK0rsyowFdgtIhYVytgfOIc0u2kpcDlpujeQZkZJ2jXnuQt4\nEZgYERf2542ZmZn11l//CrvtllYBvvVWj5mpZlAENBFxC520FkXERGBiB8dfIa0101EZDwD/v717\nj6+jrPM4/vklbZImIb1Q0lBsKVLK/V4ELEgQChVEF9gtFwFX5CWgrmxVdFm5KAooIgsqrAi6iEAV\neCmCllel0lKhuJWWrShtubQUeqWFNm3SNM3l2T9+M5zJybVtknMm+b5fr3mdM888M/M8mZOZ3zzz\nzMxJO15CERGR3Pjtb+Hiiz2YefppKCvrep6BKC8uOYmIiEhrTU1wxx1w0UVQXQ2//72Cmc4ooBER\nEckzzz0HEyfCV74CU6fCww/D7rvnulT5TQGNiIhInnjlFTjnHO8jM3gwPP+8v2yyoiLXJct/CmhE\nRERy7O9/h898xu9iWrQI7r3XOwIfd1yuS5YeCmhERERy5J134Mtf9kBm1iy49VZvpbnsMijQEXqH\n5MVdTiIiIgPJxo1w551w223Q3AzXXAM33ABFRbkuWXopoBEREekjK1b4awvuv99fLvmlL8H116vD\nb09QQCMiItLLVq6EW26B++6DoUPhc5/zJ/7us0+uS9Z/KKARERHpBU1NMGMG/PSn/lle7u9guuoq\n2G23XJeu/1FAIyIi0kNCgIUL4cEH4bHHvGXmqKP8hZJTp8Lw4bkuYf+lgEZERGQXbdsGjzwC99wD\n8+bBiBH+PJkrr/SARnqfAhoREZGdsGwZzJwJf/yjv2Oprg5OOgmeeAKmTPEH40nfUUAjIiLShRD8\n+TCPPw5//jO89JI/Q6aw0F9RcO21cNZZcPDBuS7pwKWARkREpB2LF8Ps2fDXv/orCF57zV9BMGkS\nXHEFHHQQfPzjemFkvlBAIyIiA15Li7fAzJ8PzzzjrTBvveWXjQ47zN+tdNttcOqpUFqa69JKexTQ\niIjIgNLSAq+/DkuWeOvLvHn+3qS6OjCDww/3O5ImTYLTT4chQ3JdYukOBTQiItIvNTV5x90lS2DB\nAnj5ZW+FWbYMGhs9z8iRHrhcf73fjXTssXpGTFopoBERkVQLATZtguXLPXB57jl48UVYutTfkwRQ\nWWU5J6EAABFhSURBVAmHHOJ3H40fDxMmeB+YvfbyVhlJPwU0IiKS10LwlznGrSsrV8KqVd5J9+23\nfby21vMWFMCRR0J1NXzxix64TJgAH/iAApf+TgGNiIjkREsLbNgA774L69bB6tU+xN9XrvSOuevX\ne/+W2IgRMHq0t7RMmeKtLGPH+nDggf6uJBl4FNCIiEiPa2yENWu8JeXNNz1Ieecd//7GGx6wrF8P\nDQ2t5ysvh6oqD1hGj4bjj4dRozxoOeAA2G8/ddKV9imgERGRLjU2wubN3pqyZYsHI+vW+fi772Za\nVlat8u8bNrSev6TEA5W99vKHz02eDHvsAXvv7S0uVVWw557qkCs7TwGNiEg/1tLi7xmqrfWAZOtW\nHzZu9Ms4mzd7gFJX5x1ra2p82qZN8N57/rlxo39vT3m5v3Bx9GgPSI4/HsaM8e9VVf597Fh/IJ36\nsEhvUkAjIpIHWloywUZtrQcZW7f6Z22tByXbtnnAUV+fCVJqa9vOs2lTZr6amq7XXVrqw9ChMGyY\nDyNGwLhxHqxUVHjAUlEBu++e+Rw1ylteRPLBgAtozOwLwFeBKmAR8G8hhL/mtlR9Y/r06VxwwQW5\nLkaPUX3yV1rrEoI/u2TLFu/bsW2bBwi/+c10qqsvYPt22L7dp2/b5nkaGryVI57W0JAJTJJp8Tzx\nMuvq/DLO9u3+2dDgQU1Xyso8+Cgp8c+KCu9TUl7uAUZpqV+2qaj
wvMOHZ+YZNszzzpkznQsvvIDy\ncp8vzS9RTOtvrSP9rT59aUAFNGZ2HvAD4HPAfGAaMNPMJoQQNnQ6cz/Q3/5RVJ/8FAI8/PB0zj33\nApqa/GC9dSvvf29qygzxQT+Z1tW0xkZveYiX1diYCQjq6lqnNTV5AFFf3/464paOOH/8sLW2pgPt\nb5viYg8KSkr8e1GRBw1lZf69qMgDhjFjPL2kJBOADB6cmV5SkglOyso8+Cgt9e+77ZZZfmHhrm+j\nm26azte/nv7fGvSf/5tYf6tPXxpQAQ0ewNwTQngAwMyuAM4ELgVuzWXBJP+F4GfQLS3+sK74e3KI\n07du7V6+eKiv9ztAnnoqM625ufX3lhY/wG/b1jqtvc/m5kwQEY83N7cdr6tru57k0NKSaanInpa9\nnDhIiB9kVlzcM3/3ggIYNCgzFBZ6MFBU5OODB2eG8vJMWnGxBwOVlf6ZXEY8xMFFchmDB7cOIIYM\ngW98A37yk0zAUl6emU/9QkTyw4AJaMxsMHA0cHOcFkIIZjYLOH5Xlx8fLJIHvfj7jnw2NPhZYjy+\nM8uID3yNja3T16yBRx9tu9z2xrO/NzVlmsSz19XdtOZmPzDu7PzJg39LCyxcCKec0npadt7s+ZKB\nRGd5k9Pjs/u+cMYZXeeJD+oFBZ1/xgfcwsKOh9LSTACQPS1eTtzi0NVyBg/OlO3uu2HatEzgUFaW\nmZ4dUJSUtE6L88UBRUFB7//du1JR4bcMi0j+GjABDTASKATWZaWvA/bvbMbTTvOdavbZdfLgGAcz\n+W7q1B2fx8wPUiUl/ncw88/k9+58xgfZ7uTvaJqZHzzjA21lZSZ/e0NyeSUlmTPq7s5TUJA5K+9s\nnuxhyBA/KMfjcYCQHJJpxcVw5ZVw//2ZacmgIv4eH/Dz3ZNPwiWX5LoUIjKQDKSApiMGdBSKlABU\nVy9m1KjOD2CFhX5Qig+6yQNwPA5t07OnDRrkB8/uzNvZOgoL/cAXp5nBddfVcPPNCzstR9x8npye\nr6ZNq+Hqqxfmuhg7Lb5cA976tX17DevWpbc+STU1NSxc2D/qAqpPPutPdYH+VZ/FixfHX/vkXjgL\naWhW6AHRJaetwLkhhCcS6fcDQ0MIZ7czz4XAQ31WSBERkf7nUyGEh3t7JQOmhSaE0GhmC4BTgCcA\nzMyi8R92MNtM4FPAm8C2PiimiIhIf1ECjMOPpb1uwLTQAJjZVOAXwOVkbtv+Z+CAEML6XJZNRERE\ndt6AaaEBCCE8YmYjgRuBUcD/AacrmBEREUm3AdVCIyIiIv1THjzhQURERGTXKKDpgJl9wcyWm1m9\nmf3FzI7JgzKdaGZPmNkqM2sxs0+0k+dGM1ttZlvN7GkzG581fbiZPWRmNWa20czuM7OyrDyHmdnc\nqO4rzOzqXqjLNWY238w2m9k6M/utmU3IylNsZneZ2QYz22Jmj5lZZVaeMWb2BzOrM7O1ZnarmRVk\n5ak2swVmts3MXjWzT/dCfa4ws0XR37XGzOaZ2ZQ01qWdul0T/d5uT2t9zOyGqA7J4ZUU12e0mf0y\nKu/W6Ld3VFaetOwLlrezbVrM7EfR9NRsGzMrMLNvm9my6O/+upld206+VGybaD3lZnaHmb0Zlfc5\nM5uYl/UJIWjIGoDz8LuaLgEOAO4B3gNG5rhcU/D+P/8ENAOfyJr+9aicZwGHAI8DbwBFiTxPAQuB\nicCHgVeBBxPTdwPW4J2nDwSmAnXAZT1clxnAxdE6DgV+j99NNiSR57+jtJOAI4F5wJ8T0wuAl/Ee\n9IcCpwPvAN9J5BkH1OKvttgf+ALQCEzu4fqcGW2f8dHwHaABODBtdcmq1zHAMuAl4PY0bptoXTcA\nfwP2ACqjYUQa6wMMA5YD9+FPP98bOBXYJ6X7gt0T26QSv/O0GTgxhdvmP6N1TwHGAucAm4EvpnHb\nROv6dfT3nQR8EP9f2gTsmW/
16dGK95cB+AtwZ2LcgJXA13JdtkSZWmgb0KwGpiXGK4B6YGo0fmA0\n35GJPKcDTUBVNH4lsAEYlMhzC/BKL9dnZFS2ExJlbwDOTuTZP8rzoWj8Y9FOaWQiz+XAxrj8wPeA\nv2Wtazowow+20bvAZ9JaF6AcWAp8FJhNFNCksT74TnhhB9NSVR/gu8CzXeRJ877gDuDVlG6bJ4F7\ns9IeAx5I47bBb7tuBKZkpb8I3Jhv9dElpyyWeefTn+K04H/dHnnnU28xs32AKlqXezPwv2TKfRyw\nMYTwUmLWWfiTko9N5JkbQki+vWgmsL+ZDe2l4oOfdQY80gffBoNoXZ+lwFu0rs/LofWb0mcCQ4GD\nE3lmZa1rJr24LaNm5/OBUuAF0luXu4AnQwjPZKVPJJ312c/8cu0bZvagmY2J0tO2fc4CXjSzR8wv\n1y40s8viiWneF0T7308BP4uS0vZbmwecYmb7AZjZ4XjLxoxoPG3bZhD+yqCGrPR64IR8q48CmrY6\ne+dTVd8Xp9uq8B9IZ+WuwptD3xdCaMaDiGSe9pYBvVR/MzP8rOy5EELcr6EK2B79c2SXZUfK2lGe\nCjProfdBOzM7xMy24P/8d+NnlUtIZ13OB44Armln8ihSVh+81fVf8TPDK4B9gLnRdfy0bZ8P4me0\nS4HTgJ8APzSzixLlSOW+ADgbD0R+EY2n7bf2XfwSzRIz2w4sAO4IIfwqUY7UbJsQQi1+Unadme0Z\nnaxdhAcre5Jn9RlQz6HZRZ298ymfdafcXeWJ3+jUW/W/GzgIOKEbebu7HXJRnyXA4Xhr07nAA2b2\nkS7KkXd1MbMP4AHm5BBC447M2s1y9Pm2CSEkn1T6dzObD6zAr9V39BTwfK1PATA/hHBdNL7IzA7G\ng5wHuyhLvu8LLgWeCiGs7SJfvm6b84ALgfOBV/CTgjvNbHUI4ZddlCVft81FwM+BVfhlooXAw8BR\nncyTk/qohaatDXiHtFFZ6ZW0jSDzyVr8B9BZuddG4+8zs0JgeDQtztPeMqAX6m9mPwbOAKpDCKsT\nk9YCRWZW0U5ZkvXJLuuoxLSO8lQCm0MI23el7NlCCE0hhGUhhIUhhG8Ai4CrSF9djsY7zy4ws0Yz\na8Q7ZF4VnXWuA4pTVJ82Qgg1eMfE8aRv+6wBFmelLcY7ocblSOO+YCzeufneRHLats2twC0hhEdD\nCP8IITwE/BeZls7UbZsQwvIQwslAGTAmhHAcUIR3TM+r+iigyRKdkcbvfAJavfNpXq7K1ZUQQvzj\nSpa7Ar9GGZf7BWCYmR2ZmPUU/Ac5P5HnI9EPLnYasDQ6CPSYKJj5JHByCOGtrMkL8LOBZH0m4Dvt\nZH0ONX/6c7KsNWR2+C8kl5HI80JP1KELBUAx6avLLPxukSPwFqfD8U6ADya+N5Ke+rRhZuXAvniH\nxrRtn+fxjrFJ++MtTqncF0QuxQ9eMxJpads2pbRtUWghOtameNsQQqgPIawzs+H4pdvH864+Pdkj\nur8MeDN0Pa1v234X2CPH5SrDDyhH4P8k/x6Nj4mmfy0q51n4Aelx4DVa3z43Az8gHYN3VlsK/DIx\nvQLfyf8Cvwx0Hn6742d7uC5343chnIhH5vFQkpVnOVCNtxo8T9vbNRfhtwQehv+TrQO+ncgzLir/\n9/Cd/ueB7cCpPVyfm/BLZnvjty7egu+IP5q2unRQv/fvckpjfYDvAx+Jts+Hgaej8uyetvrgHWUb\n8LP+ffFLHFuA8xN5UrMviNZl+K3ZN7UzLU3b5n/wDstnRL+1s/H+IzeneNucFv1NxwGT8Uc4zAMK\n860+PVrx/jREP/g38cDmBWBiHpTpJDyQac4afp7I883oh7EV7yU+PmsZw/Az7Ro8oLgXKM3Kcyjw\nbLSMt4Cv9kJd2qtHM3BJIk8x8CP8MuAW4FGgMms5Y/Bn2NRGO7HvAQXt/N0WRNvyNeDiXqjPf
fjz\nWurxM5Y/EgUzaatLB/V7htYBTarqg9+iuzJaz1t4H4B9UlyfM/Dn6mwF/gFc2k6eb5KCfUG0nsnR\n///4dqalZtvgJ5234wFYXbSeb5G4HTmF2+ZfgNejv9sq4E5gt3ysj97lJCIiIqmnPjQiIiKSegpo\nREREJPUU0IiIiEjqKaARERGR1FNAIyIiIqmngEZERERSTwGNiIiIpJ4CGhEREUk9BTQiIiKSegpo\nRGTAMLPZZnZ7rsshIj1PAY2I9Akzu9zMNptZQSKtzMwazexPWXlPNrMWMxvX1+UUkXRSQCMifWU2\n/vK+iYm0E4E1wHFmVpRIPwlYEUJ4c0dXYmaDdqWQIpJOCmhEpE+EEF7Fg5fqRHI18Dj+duLjstJn\nA5jZGDP7nZltMbMaM/u1mVXGGc3sBjN7ycw+a2bLgG1ReqmZPRDNt8rMvpxdJjP7vJm9amb1ZrbW\nzB7p2VqLSF9RQCMifWkOcHJi/OQo7dk43cyKgWOBZ6I8vwOG4a05pwL7Ar/KWu544BzgbOCIKO22\naJ6zgNPwIOnoeAYzmwjcCVwLTABOB+buYv1EJEfUNCsifWkOcHvUj6YMDz7mAkXA5cC3gEnR+Bwz\nmwwcAowLIawGMLOLgX+Y2dEhhAXRcgcDF4cQ3ovylAGXAheGEOZEaZ8GVibKMgaoBf4QQqgD3gYW\n9VK9RaSXqYVGRPpS3I/mGOAE4NUQwga8hebYqB9NNfBGCGElcADwdhzMAIQQFgObgAMTy10RBzOR\nffEgZ35ivo3A0kSep4EVwPLo0tSFZjakx2oqIn1KAY2I9JkQwhvAKvzy0sl4IEMIYQ3eQjKJRP8Z\nwIDQzqKy0+vamU4H88ZlqQWOAs4HVuOtQ4vMrKLbFRKRvKGARkT62mw8mKnGL0HF5gIfAz5EJqB5\nBRhrZnvFmczsIGBoNK0jrwNNJDoam9lwvK/M+0IILSGEZ0II/wEcDowDProTdRKRHFMfGhHpa7OB\nu/D9z7OJ9LnAj/FLRXMAQgizzOxl4CEzmxZNuwuYHUJ4qaMVhBDqzOxnwPfN7D1gPfAdoDnOY2Zn\nAh+M1rsROBNv2Vnadokiku8U0IhIX5sNlACLQwjrE+nPAuXAkhDC2kT6J4EfRdNbgKeAL3VjPVfj\n/XWeALYAPwCSl5M24XdG3RCV5zXg/KiPjoikjIXQ4SVmERERkVRQHxoRERFJPQU0IiIiknoKaERE\nRCT1FNCIiIhI6imgERERkdRTQCMiIiKpp4BGREREUk8BjYiIiKSeAhoRERFJPQU0IiIiknoKaERE\nRCT1FNCIiIhI6v0/Nc3dzWma43MAAAAASUVORK5CYII=\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -358,7 +362,7 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 75, "metadata": { "collapsed": true }, @@ -372,7 +376,7 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 76, "metadata": { "collapsed": false }, @@ -381,9 +385,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 536\n", - "Number of unique tokens: 2245\n", - "Number of documents: 286\n" + "Number of authors: 2720\n", + "Number of unique tokens: 8640\n", + "Number of documents: 1740\n" ] } ], @@ -402,19 +406,23 @@ }, { 
"cell_type": "code", - "execution_count": 48, + "execution_count": 94, "metadata": { "collapsed": false }, "outputs": [], "source": [ + "reload(atmodel2)\n", + "AuthorTopicModel2 = atmodel2.AuthorTopicModel2\n", "reload(atmodel)\n", - "AuthorTopicModel = atmodel.AuthorTopicModel" + "AuthorTopicModel = atmodel.AuthorTopicModel\n", + "reload(ldamodel)\n", + "LdaModel = ldamodel.LdaModel" ] }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 98, "metadata": { "collapsed": false }, @@ -423,21 +431,24 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 18.1 s, sys: 8 ms, total: 18.2 s\n", - "Wall time: 18.2 s\n" + "CPU times: user 7min 31s, sys: 2min 6s, total: 9min 38s\n", + "Wall time: 7min 16s\n" ] } ], "source": [ - "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=0, random_state=1, var_lambda=None, chunksize=2000)" + "%time model = AuthorTopicModel2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", + " author2doc=author2doc, doc2author=doc2author, id2author=id2author, var_lambda=None, \\\n", + " distributed=False, chunksize=2000, passes=100, update_every=1, \\\n", + " alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, \\\n", + " eval_every=1, iterations=10, gamma_threshold=1e-10, \\\n", + " minimum_probability=0.01, random_state=1, ns_conf={}, \\\n", + " minimum_phi_value=0.01, per_word_topics=False)\n" ] }, { "cell_type": "code", - "execution_count": 104, + "execution_count": 99, "metadata": { "collapsed": false, "scrolled": false @@ -447,28 +458,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.014*\"response\" + 0.013*\"frequency\" + 0.011*\"cell\" + 0.011*\"phase\" + 0.008*\"synaptic\" + 0.008*\"oscillation\" + 0.007*\"control\" + 0.006*\"cortex\" + 
0.006*\"mode\" + 0.005*\"fig\"'),\n", + " '0.018*\"hidden\" + 0.011*\"layer\" + 0.010*\"recognition\" + 0.009*\"net\" + 0.009*\"speech\" + 0.009*\"word\" + 0.009*\"hidden_unit\" + 0.006*\"sequence\" + 0.006*\"architecture\" + 0.006*\"trained\"'),\n", " (1,\n", - " '0.009*\"vector\" + 0.007*\"fig\" + 0.007*\"matrix\" + 0.007*\"activity\" + 0.006*\"memory\" + 0.006*\"node\" + 0.005*\"element\" + 0.005*\"sequence\" + 0.004*\"dynamic\" + 0.004*\"threshold\"'),\n", + " '0.022*\"neuron\" + 0.015*\"cell\" + 0.010*\"response\" + 0.010*\"spike\" + 0.008*\"stimulus\" + 0.008*\"signal\" + 0.007*\"frequency\" + 0.007*\"firing\" + 0.007*\"synaptic\" + 0.007*\"activity\"'),\n", " (2,\n", - " '0.009*\"node\" + 0.008*\"activation\" + 0.008*\"memory\" + 0.007*\"processor\" + 0.007*\"speech\" + 0.005*\"current\" + 0.005*\"region\" + 0.005*\"recognition\" + 0.005*\"machine\" + 0.005*\"role\"'),\n", + " '0.014*\"circuit\" + 0.014*\"chip\" + 0.012*\"signal\" + 0.011*\"analog\" + 0.006*\"vlsi\" + 0.006*\"voltage\" + 0.006*\"motion\" + 0.005*\"code\" + 0.005*\"filter\" + 0.005*\"implementation\"'),\n", " (3,\n", - " '0.036*\"classifier\" + 0.020*\"memory\" + 0.019*\"vector\" + 0.016*\"capacity\" + 0.014*\"hopfield\" + 0.013*\"matrix\" + 0.013*\"classification\" + 0.012*\"code\" + 0.012*\"stored\" + 0.011*\"chip\"'),\n", + " '0.014*\"cell\" + 0.012*\"layer\" + 0.012*\"neuron\" + 0.009*\"map\" + 0.008*\"connection\" + 0.007*\"orientation\" + 0.006*\"cortical\" + 0.005*\"region\" + 0.005*\"net\" + 0.005*\"self\"'),\n", " (4,\n", - " '0.033*\"field\" + 0.016*\"delay\" + 0.015*\"tree\" + 0.011*\"receptive_field\" + 0.011*\"receptive\" + 0.009*\"region\" + 0.009*\"memory\" + 0.008*\"synaptic\" + 0.008*\"fixed_point\" + 0.008*\"stability\"'),\n", + " '0.008*\"gaussian\" + 0.007*\"mixture\" + 0.007*\"density\" + 0.006*\"matrix\" + 0.006*\"component\" + 0.006*\"estimate\" + 0.006*\"likelihood\" + 0.005*\"prior\" + 0.005*\"noise\" + 0.005*\"variance\"'),\n", " (5,\n", - " '0.047*\"cell\" + 
0.025*\"firing\" + 0.016*\"potential\" + 0.012*\"activity\" + 0.011*\"stimulus\" + 0.010*\"membrane\" + 0.009*\"fig\" + 0.008*\"inhibitory\" + 0.008*\"threshold\" + 0.008*\"response\"'),\n", + " '0.026*\"image\" + 0.013*\"object\" + 0.012*\"visual\" + 0.010*\"field\" + 0.007*\"direction\" + 0.006*\"map\" + 0.006*\"position\" + 0.005*\"motion\" + 0.005*\"spatial\" + 0.005*\"response\"'),\n", " (6,\n", - " '0.017*\"cell\" + 0.013*\"map\" + 0.011*\"circuit\" + 0.007*\"field\" + 0.007*\"cortical\" + 0.006*\"cortex\" + 0.006*\"region\" + 0.005*\"response\" + 0.005*\"constraint\" + 0.005*\"visual\"'),\n", + " '0.007*\"bound\" + 0.006*\"let\" + 0.005*\"solution\" + 0.005*\"class\" + 0.005*\"generalization\" + 0.005*\"theorem\" + 0.005*\"xi\" + 0.004*\"matrix\" + 0.004*\"optimal\" + 0.003*\"convergence\"'),\n", " (7,\n", - " '0.017*\"hidden\" + 0.011*\"hidden_unit\" + 0.009*\"propagation\" + 0.007*\"back_propagation\" + 0.006*\"gradient\" + 0.005*\"internal\" + 0.005*\"probability\" + 0.005*\"procedure\" + 0.004*\"target\" + 0.004*\"node\"'),\n", + " '0.014*\"classifier\" + 0.009*\"class\" + 0.008*\"classification\" + 0.007*\"rule\" + 0.006*\"control\" + 0.006*\"trajectory\" + 0.005*\"trained\" + 0.005*\"robot\" + 0.005*\"character\" + 0.004*\"decision\"'),\n", " (8,\n", - " '0.011*\"noise\" + 0.009*\"activation\" + 0.009*\"node\" + 0.009*\"chip\" + 0.008*\"threshold\" + 0.007*\"analog\" + 0.007*\"match\" + 0.007*\"cycle\" + 0.006*\"pulse\" + 0.006*\"distribution\"'),\n", + " '0.011*\"action\" + 0.009*\"policy\" + 0.007*\"optimal\" + 0.007*\"reinforcement\" + 0.007*\"control\" + 0.005*\"reinforcement_learning\" + 0.004*\"reward\" + 0.004*\"decision\" + 0.004*\"prediction\" + 0.004*\"search\"'),\n", " (9,\n", - " '0.022*\"image\" + 0.014*\"vector\" + 0.010*\"recognition\" + 0.008*\"hidden\" + 0.007*\"noise\" + 0.007*\"object\" + 0.007*\"speech\" + 0.006*\"visual\" + 0.006*\"pixel\" + 0.005*\"frame\"')]" + " '0.016*\"memory\" + 0.013*\"neuron\" + 0.009*\"node\" + 
0.006*\"dynamic\" + 0.006*\"control\" + 0.005*\"connection\" + 0.005*\"bit\" + 0.005*\"capacity\" + 0.004*\"net\" + 0.004*\"activation\"')]" ] }, - "execution_count": 104, + "execution_count": 99, "metadata": {}, "output_type": "execute_result" } @@ -479,29 +490,132 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 100, "metadata": { "collapsed": false }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Yaser S.Abu-Mostafa\n", + "Docs: [643, 1161]\n", + "[(4, 0.19559840654935753),\n", + " (6, 0.22443733229655313),\n", + " (7, 0.53773779059283966),\n", + " (8, 0.03154799178372595)]\n", + "\n", + "Geoffrey E. Hinton\n", + "Docs: [143, 284, 230, 197]\n", + "[(0, 0.84493527369383181), (2, 0.07126593370576316), (5, 0.08343994928990435)]\n", + "\n", + "Michael I. Jordan\n", + "Docs: [237]\n", + "[(0, 0.026658101414032433),\n", + " (1, 0.021215048465908881),\n", + " (4, 0.019387287171330789),\n", + " (7, 0.19687832498382354),\n", + " (8, 0.29243919850727573),\n", + " (9, 0.43454557999851595)]\n", + "\n", + "James M. Bower\n", + "Docs: [131, 101, 126, 127, 281, 208, 225]\n", + "[(1, 0.90286561460378767), (5, 0.096887985740307506)]\n" + ] + } + ], + "source": [ + "name = 'Yaser S.Abu-Mostafa'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Geoffrey E. Hinton'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'Michael I. Jordan'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))\n", + "\n", + "name = 'James M. 
Bower'\n", + "print('\\n%s' % name)\n", + "print('Docs:', author2doc[author2id[name]])\n", + "pprint(model.get_author_topics(author2id[name]))" + ] + }, + { + "cell_type": "code", + "execution_count": 95, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 50.8 s, sys: 11.9 s, total: 1min 2s\n", + "Wall time: 49.5 s\n" + ] + } + ], + "source": [ + "%time model2 = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=1, random_state=1, var_lambda=None, chunksize=2000)" + ] + }, + { + "cell_type": "code", + "execution_count": 96, + "metadata": { + "collapsed": false, + "scrolled": false + }, "outputs": [ { "data": { "text/plain": [ - "197" + "[(0,\n", + " '0.006*\"word\" + 0.005*\"tree\" + 0.004*\"recognition\" + 0.004*\"speech\" + 0.003*\"class\" + 0.003*\"node\" + 0.003*\"layer\" + 0.003*\"context\" + 0.003*\"hmm\" + 0.003*\"target\"'),\n", + " (1,\n", + " '0.013*\"cell\" + 0.009*\"neuron\" + 0.005*\"connection\" + 0.004*\"layer\" + 0.004*\"cortex\" + 0.004*\"signal\" + 0.004*\"response\" + 0.004*\"simulation\" + 0.004*\"map\" + 0.003*\"recognition\"'),\n", + " (2,\n", + " '0.005*\"gaussian\" + 0.004*\"matrix\" + 0.004*\"hidden\" + 0.004*\"approximation\" + 0.003*\"bound\" + 0.003*\"generalization\" + 0.003*\"noise\" + 0.003*\"class\" + 0.003*\"prior\" + 0.003*\"xi\"'),\n", + " (3,\n", + " '0.006*\"signal\" + 0.005*\"speech\" + 0.004*\"visual\" + 0.004*\"position\" + 0.004*\"image\" + 0.004*\"response\" + 0.003*\"stimulus\" + 0.003*\"recognition\" + 0.003*\"motion\" + 0.003*\"component\"'),\n", + " (4,\n", + " '0.016*\"neuron\" + 0.007*\"circuit\" + 0.007*\"cell\" + 0.006*\"signal\" + 0.006*\"spike\" + 0.005*\"response\" + 0.005*\"chip\" + 0.005*\"analog\" + 
0.005*\"voltage\" + 0.005*\"synaptic\"'),\n", + " (5,\n", + " '0.005*\"net\" + 0.004*\"threshold\" + 0.004*\"class\" + 0.004*\"node\" + 0.003*\"theorem\" + 0.003*\"layer\" + 0.003*\"image\" + 0.003*\"bound\" + 0.003*\"sample\" + 0.003*\"estimate\"'),\n", + " (6,\n", + " '0.008*\"image\" + 0.006*\"recognition\" + 0.005*\"rule\" + 0.004*\"classification\" + 0.004*\"class\" + 0.004*\"character\" + 0.003*\"classifier\" + 0.003*\"layer\" + 0.003*\"matrix\" + 0.003*\"distance\"'),\n", + " (7,\n", + " '0.008*\"action\" + 0.006*\"layer\" + 0.006*\"policy\" + 0.006*\"control\" + 0.004*\"neuron\" + 0.004*\"architecture\" + 0.004*\"net\" + 0.004*\"hidden\" + 0.003*\"cell\" + 0.003*\"reinforcement\"'),\n", + " (8,\n", + " '0.005*\"image\" + 0.004*\"object\" + 0.003*\"hidden\" + 0.003*\"ii\" + 0.003*\"visual\" + 0.003*\"component\" + 0.003*\"neuron\" + 0.003*\"activity\" + 0.003*\"cell\" + 0.003*\"sequence\"'),\n", + " (9,\n", + " '0.004*\"hidden\" + 0.004*\"rule\" + 0.004*\"classifier\" + 0.004*\"class\" + 0.004*\"optimal\" + 0.003*\"prediction\" + 0.003*\"control\" + 0.003*\"estimate\" + 0.003*\"noise\" + 0.003*\"generalization\"')]" ] }, - "execution_count": 33, + "execution_count": 96, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "author2id['James M. 
Bower']" + "model2.show_topics(num_topics=10)" ] }, { "cell_type": "code", - "execution_count": 105, + "execution_count": 97, "metadata": { "collapsed": false }, @@ -512,33 +626,44 @@ "text": [ "\n", "Yaser S.Abu-Mostafa\n", - "Docs: [62]\n", - "[(0, 0.019643887783549894),\n", - " (1, 0.03909003995731989),\n", - " (2, 0.16804942558366059),\n", - " (3, 0.10477718721148226),\n", - " (6, 0.021371962138910492),\n", - " (7, 0.22727551202952315),\n", - " (8, 0.31201895712462607),\n", - " (9, 0.1066712694188155)]\n", + "Docs: [643, 1161]\n", + "[(0, 0.015493433113318947),\n", + " (2, 0.02068848386807097),\n", + " (3, 0.014477887090769973),\n", + " (5, 0.76217743188283804),\n", + " (6, 0.056071929501884839),\n", + " (7, 0.034542230987795401),\n", + " (8, 0.034205908519518852),\n", + " (9, 0.051387058690322375)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [143, 284, 230, 197]\n", - "[(1, 0.027378250046950596),\n", - " (7, 0.28611123303779012),\n", - " (8, 0.01409793422740707),\n", - " (9, 0.67186598917760154)]\n", + "[(0, 0.010876412473090083),\n", + " (1, 0.025987546707861891),\n", + " (2, 0.16269958999503972),\n", + " (3, 0.040154749497748242),\n", + " (5, 0.038497018610811631),\n", + " (6, 0.32616150298201085),\n", + " (7, 0.17789752502782427),\n", + " (8, 0.10484007949328089),\n", + " (9, 0.10360838755269398)]\n", "\n", "Michael I. Jordan\n", "Docs: [237]\n", - "[(0, 0.032384767535828737),\n", - " (1, 0.41066501849642167),\n", - " (5, 0.028938355831066891),\n", - " (7, 0.52523422248412255)]\n", + "[(0, 0.065523264868966244),\n", + " (1, 0.057277736718350389),\n", + " (2, 0.049434602727837465),\n", + " (3, 0.057202259043067194),\n", + " (4, 0.048998371270245894),\n", + " (5, 0.054599275818053232),\n", + " (6, 0.055707885269878736),\n", + " (7, 0.33348117191683641),\n", + " (8, 0.15381463451838998),\n", + " (9, 0.12396079784837437)]\n", "\n", "James M. 
Bower\n", "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(2, 0.029310008934256015), (6, 0.97040282458105698)]\n" + "[(1, 0.9819387900380987), (4, 0.013399430252372287)]\n" ] } ], @@ -546,22 +671,22 @@ "name = 'Yaser S.Abu-Mostafa'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", + "pprint(model2.get_author_topics(author2id[name]))\n", "\n", "name = 'Geoffrey E. Hinton'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", + "pprint(model2.get_author_topics(author2id[name]))\n", "\n", "name = 'Michael I. Jordan'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", + "pprint(model2.get_author_topics(author2id[name]))\n", "\n", "name = 'James M. Bower'\n", "print('\\n%s' % name)\n", "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))" + "pprint(model2.get_author_topics(author2id[name]))" ] }, { @@ -768,19 +893,19 @@ }, { "cell_type": "code", - "execution_count": 182, + "execution_count": 89, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ - "reload(gensim.models.ldamodel)\n", - "LdaModel = gensim.models.ldamodel.LdaModel" + "reload(ldamodel)\n", + "LdaModel = ldamodel.LdaModel" ] }, { "cell_type": "code", - "execution_count": 200, + "execution_count": 90, "metadata": { "collapsed": false }, @@ -789,13 +914,74 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 2min 17s, sys: 57.5 s, total: 3min 14s\n", - "Wall time: 2min 9s\n" + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(670)update()\n", + "-> gammat = self.do_estep(chunk, other, chunk_no, rho=rho())\n", + "(Pdb) rho()\n", + "1.0\n", + "(Pdb) s\n", + "--Call--\n", + "> 
/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(642)rho()\n", + "-> def rho():\n", + "(Pdb) s\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(643)rho()\n", + "-> return pow(offset + pass_ + (self.num_updates / chunksize), -decay)\n", + "(Pdb) s\n", + "--Return--\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(643)rho()->1.0\n", + "-> return pow(offset + pass_ + (self.num_updates / chunksize), -decay)\n", + "(Pdb) s\n", + "--Call--\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(500)do_estep()\n", + "-> def do_estep(self, chunk, state=None, chunk_no=None, rho=None):\n", + "(Pdb) s\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(506)do_estep()\n", + "-> if state is None:\n", + "(Pdb) s\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(508)do_estep()\n", + "-> gamma, sstats = self.inference(chunk, collect_sstats=True, chunk_no=None, rho=rho)\n", + "(Pdb) s\n", + "--Call--\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(410)inference()\n", + "-> def inference(self, chunk, collect_sstats=False, chunk_no=None, rho=None):\n", + "(Pdb) s\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(428)inference()\n", + "-> try:\n", + "(Pdb) s\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(429)inference()\n", + "-> _ = len(chunk)\n", + "(Pdb) rho\n", + "1.0\n", + "(Pdb) exit\n", + "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(669)update()\n", + "-> from pdb import set_trace; set_trace()\n", + "(Pdb) exit\n" + ] + }, + { + "ename": "BdbQuit", + "evalue": "", + "output_type": "error", + "traceback": [ 
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mBdbQuit\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, iterations=1, alpha='symmetric', eta='symmetric', eval_every=0, chunksize=2000, random_state=0)\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' '\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m 
\u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 
188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, 
id2word, distributed, chunksize, passes, update_every, alpha, eta, decay, offset, eval_every, iterations, gamma_threshold, minimum_probability, random_state, ns_conf, minimum_phi_value, per_word_topics)\u001b[0m\n\u001b[1;32m 355\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 356\u001b[0m \u001b[0muse_numpy\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdispatcher\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 357\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunks_as_numpy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0muse_numpy\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 358\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 359\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0minit_dir_prior\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mprior\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py\u001b[0m in \u001b[0;36mupdate\u001b[0;34m(self, corpus, chunksize, decay, offset, passes, update_every, eval_every, iterations, gamma_threshold, chunks_as_numpy)\u001b[0m\n\u001b[1;32m 667\u001b[0m logger.info('PROGRESS: pass %i, at document #%i/%i',\n\u001b[1;32m 668\u001b[0m pass_, chunk_no * chunksize + len(chunk), lencorpus)\n\u001b[0;32m--> 669\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mpdb\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mset_trace\u001b[0m\u001b[0;34m;\u001b[0m 
\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 670\u001b[0m \u001b[0mgammat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdo_estep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchunk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mother\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunk_no\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrho\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrho\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 671\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py\u001b[0m in \u001b[0;36mupdate\u001b[0;34m(self, corpus, chunksize, decay, offset, passes, update_every, eval_every, iterations, gamma_threshold, chunks_as_numpy)\u001b[0m\n\u001b[1;32m 667\u001b[0m logger.info('PROGRESS: pass %i, at document #%i/%i',\n\u001b[1;32m 668\u001b[0m pass_, chunk_no * chunksize + len(chunk), lencorpus)\n\u001b[0;32m--> 669\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mpdb\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mset_trace\u001b[0m\u001b[0;34m;\u001b[0m \u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 670\u001b[0m \u001b[0mgammat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdo_estep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchunk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mother\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunk_no\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrho\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrho\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 671\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.5/bdb.py\u001b[0m in \u001b[0;36mtrace_dispatch\u001b[0;34m(self, frame, event, 
arg)\u001b[0m\n\u001b[1;32m 46\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;31m# None\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mevent\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'line'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 48\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdispatch_line\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 49\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mevent\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'call'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 50\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdispatch_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.5/bdb.py\u001b[0m in \u001b[0;36mdispatch_line\u001b[0;34m(self, frame)\u001b[0m\n\u001b[1;32m 65\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstop_here\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbreak_here\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 66\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0muser_line\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 67\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mquitting\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mBdbQuit\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 68\u001b[0m 
\u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrace_dispatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 69\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mBdbQuit\u001b[0m: " ] } ], "source": [ - "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=100, \\\n", + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", " iterations=1, alpha='symmetric', eta='symmetric', eval_every=0, chunksize=2000, random_state=0)" ] }, diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index 082c65ca3c..4a07165511 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -17,6 +17,7 @@ from .phrases import Phrases from .normmodel import NormModel from .atmodel import AuthorTopicModel +from .atmodel2 import AuthorTopicModel2 from .ldaseqmodel import LdaSeqModel from . import wrappers diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index aea8413534..7cedd6c3de 100644 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -129,9 +129,9 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, self.random_state = random_state self.chunksize = chunksize - # NOTE: this is not necessarily a good way to initialize the topics. self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) - self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + #self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + self.eta = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_terms)]) self.random_state = get_random_state(random_state) @@ -163,6 +163,8 @@ def inference(self, corpus=None, var_lambda=None): self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. + corpus_words = sum(cnt for document in corpus for _, cnt in document) + logger.info('Starting inference. 
Training on %d documents.', len(corpus)) # NOTE: as the numerically stable phi update (and bound evaluation) causes @@ -200,7 +202,8 @@ def inference(self, corpus=None, var_lambda=None): # Initialize dirichlet expectations. Elogtheta = dirichlet_expectation(var_gamma) - Elogbeta = dirichlet_expectation(var_lambda) + #Elogbeta = dirichlet_expectation(var_lambda) + Elogbeta = dirichlet_expectation(var_lambda + self.eta) if numstable_sm: maxElogtheta = Elogtheta.max() maxElogbeta = Elogbeta.max() @@ -215,7 +218,8 @@ def inference(self, corpus=None, var_lambda=None): theta_bound = self.theta_bound(Elogtheta) beta_bound = self.beta_bound(Elogbeta) bound = word_bound + theta_bound + beta_bound - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + perwordbound = bound / corpus_words + logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) for _pass in xrange(self.passes): converged = 0 # Number of documents converged for current pass over corpus. for chunk_no, chunk in enumerate(utils.grouper(corpus, self.chunksize, as_numpy=False)): @@ -316,7 +320,8 @@ def inference(self, corpus=None, var_lambda=None): theta_bound = self.theta_bound(Elogtheta) beta_bound = self.beta_bound(Elogbeta) bound = word_bound + theta_bound + beta_bound - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + perwordbound = bound / corpus_words + logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) # NOTE: bound can be computed as below. We compute each term for now because it can be useful for debugging. 
# bound = eval_bound(corpus, Elogtheta, Elogbeta, expElogtheta, expElogtheta, maxElogtheta=maxElogtheta, maxElogbeta=maxElogbeta): diff --git a/gensim/models/atmodel2.py b/gensim/models/atmodel2.py index e88d1634c8..9e486ed48e 100755 --- a/gensim/models/atmodel2.py +++ b/gensim/models/atmodel2.py @@ -17,12 +17,12 @@ from pprint import pprint import logging -import np # for arrays, array broadcasting etc. +import numpy as np # for arrays, array broadcasting etc. import numbers from gensim import interfaces, utils, matutils from gensim.models import LdaModel -from gensim.models.ldamodel import dirichlet_expectation, get_random_seed, LdaState +from gensim.models.ldamodel import dirichlet_expectation, get_random_state, LdaState from itertools import chain from scipy.special import gammaln, psi # gamma function utils from scipy.special import polygamma @@ -39,8 +39,22 @@ logger = logging.getLogger('gensim.models.atmodel') +class AuthorTopicState(LdaState): + """ + Encapsulate information for distributed computation of AuthorTopicModel objects. + + Objects of this class are sent over the network, so try to keep them lean to + reduce traffic. -class AuthorTopicModel(LdaModel): + """ + def __init__(self, eta, lambda_shape, gamma_shape): + self.eta = eta + self.sstats = np.zeros(lambda_shape) + self.gamma = np.zeros(gamma_shape) + self.numdocs = 0 + + +class AuthorTopicModel2(LdaModel): """ """ def __init__(self, corpus=None, num_topics=100, id2word=None, @@ -56,7 +70,6 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') - # NOTE: Why would id2word not be none, but have length 0? 
(From LDA code) if self.id2word is None: logger.warning("no word id mapping provided; initializing from corpus, assuming identity") self.id2word = utils.dict_from_corpus(corpus) @@ -161,10 +174,12 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.iterations = iterations self.gamma_threshold = gamma_threshold - # Initialize the variational distribution q(beta|lambda) - self.state = LdaState(self.eta, (self.num_topics, self.num_terms)) + # Initialize the variational distributions q(beta|lambda) and q(theta|gamma) + self.state = AuthorTopicState(self.eta, (self.num_topics, self.num_terms), (self.num_authors, self.num_topics)) self.state.sstats = self.random_state.gamma(100., 1. / 100., (self.num_topics, self.num_terms)) + self.state.gamma = self.random_state.gamma(100., 1. / 100., (self.num_authors, self.num_topics)) self.expElogbeta = np.exp(dirichlet_expectation(self.state.sstats)) + self.expElogtheta = np.exp(dirichlet_expectation(self.state.gamma)) # if a training corpus was provided, start estimating the model right away if corpus is not None: @@ -177,15 +192,15 @@ def __str__(self): def compute_phinorm(self, ids, authors_d, expElogthetad, expElogbetad): """Efficiently computes the normalizing factor in phi.""" - phinorm = numpy.zeros(len(ids)) - expElogtheta_sum = numpy.zeros(self.num_topics) + phinorm = np.zeros(len(ids)) + expElogtheta_sum = np.zeros(self.num_topics) for a in xrange(len(authors_d)): expElogtheta_sum += expElogthetad[a, :] phinorm = expElogtheta_sum.dot(expElogbetad) return phinorm - def inference(self, chunk, collect_sstats=False): + def inference(self, chunk, collect_sstats=False, chunk_no=None): """ Given a chunk of sparse document vectors, estimate gamma (parameters controlling the topic weights) for each document in the chunk. 
@@ -212,21 +227,17 @@ def inference(self, chunk, collect_sstats=False): logger.debug("performing inference on a chunk of %i documents", len(chunk)) # Initialize the variational distribution q(theta|gamma) for the chunk - # FIXME: - # num_authors_chunk = ??? - gamma = self.random_state.gamma(100., 1. / 100., (num_authors_chunk, self.num_topics)) - Elogtheta = dirichlet_expectation(gamma) - expElogtheta = np.exp(Elogtheta) if collect_sstats: sstats = np.zeros_like(self.expElogbeta) else: sstats = None converged = 0 + chunk_authors = set() + # Now, for each document d update that document's gamma and phi for d, doc in enumerate(chunk): - # FIXME: - # doc_no = ??? + doc_no = chunk_no + d # TODO: can it safely be assumed that this is the case? if doc and not isinstance(doc[0][0], six.integer_types): # make sure the term IDs are ints, otherwise np will get upset ids = [int(id) for id, _ in doc] @@ -235,66 +246,60 @@ def inference(self, chunk, collect_sstats=False): cts = np.array([cnt for _, cnt in doc]) authors_d = self.doc2author[doc_no] # List of author IDs for the current document. - gammad = state.get_gamma(authors_d) # FIXME: implement this method. - tilde_gammad = np.zeros(gammad.shape) + gammad = self.state.gamma[authors_d, :] + tilde_gamma = gammad.copy() - Elogthetad = dirichlet_expectation(tilde_gammad) - expElogthetad = numpy.exp(Elogthetad) - expElogbetad = expElogbeta[:, ids] + Elogthetad = dirichlet_expectation(tilde_gamma) + expElogthetad = np.exp(Elogthetad) + expElogbetad = self.expElogbeta[:, ids] - phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) + phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) # Iterate between gamma and phi until convergence - for iteration in xrange(self.iterations): - #logger.info('iteration %i', iteration) - - lastgamma = gammad - - # Update gamma. 
- for a in authors_d: - tilde_gammad[a, :] = self.alpha + len(self.author2doc[a]) * expElogthetad[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) - - # Update gamma and lambda. - # Interpolation between document d's "local" gamma (tilde_gamma), - # and "global" gamma (var_gamma). Same goes for lambda. - tilde_gamma = (1 - rhot) * gammad + rhot * tilde_gamma - - # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. - Elogthetad = dirichlet_expectation(tilde_gammad) - expElogthetad = numpy.exp(Elogtheta[authors_d, :]) - - phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) - - # Check for convergence. - # Criterion is mean change in "local" gamma and lambda. - if iteration > 0: - meanchange_gamma = numpy.mean(abs(tilde_gamma - lastgamma)) - gamma_condition = meanchange_gamma < self.threshold - # logger.info('Mean change in gamma: %.3e', meanchange_gamma) - if gamma_condition: - # logger.info('Converged after %d iterations.', iteration) - converged += 1 - break - # End of iterations loop. - - for _ in xrange(self.iterations): - lastgamma = gammad - # We represent phi implicitly to save memory and time. - # Substituting the value of the optimal phi back into - # the update for gamma gives this update. Cf. Lee&Seung 2001. - gammad = self.alpha + expElogthetad * np.dot(cts / phinorm, expElogbetad.T) - Elogthetad = dirichlet_expectation(gammad) + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) + + lastgamma = tilde_gamma.copy() + + # Update gamma. + for ai, a in enumerate(authors_d): + tilde_gamma[ai, :] = self.alpha + len(self.author2doc[a]) * expElogthetad[ai, :] * np.dot(cts / phinorm, expElogbetad.T) + + # Update gamma and lambda. + # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). + tilde_gamma = (1 - self.rho) * gammad + self.rho * tilde_gamma + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. 
+ Elogthetad = dirichlet_expectation(tilde_gamma) expElogthetad = np.exp(Elogthetad) - phinorm = np.dot(expElogthetad, expElogbetad) + 1e-100 - # If gamma hasn't changed much, we're done. - meanchange = np.mean(abs(gammad - lastgamma)) - if (meanchange < self.gamma_threshold): - converged += 1 - break + + phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. + # TODO: this if statement shouldn't be necessary. Isn't used in LDA. + if iteration > 0: + meanchange_gamma = np.mean(abs(tilde_gamma - lastgamma)) + gamma_condition = meanchange_gamma < self.gamma_threshold + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + if gamma_condition: + # logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + self.state.gamma[authors_d, :] = tilde_gamma + + # NOTE: this may be slow. Especially when there are many authors per document. + # It is imporant to find a faster way to handle this. + chunk_authors = chunk_authors.union(set(authors_d)) + if collect_sstats: # Contribution of document d to the expected sufficient # statistics for the M step. - sstats[:, ids] += np.outer(expElogthetad.T, cts / phinorm) + expElogtheta_sum_a = expElogthetad.sum(axis=0) + sstats[:, ids] += np.outer(expElogtheta_sum_a.T, cts/phinorm) if len(chunk) > 1: logger.debug("%i/%i documents converged within %i iterations", @@ -306,9 +311,10 @@ def inference(self, chunk, collect_sstats=False): # sstats[k, w] = \sum_d n_{dw} * phi_{dwk} # = \sum_d n_{dw} * exp{Elogtheta_{dk} + Elogbeta_{kw}} / phinorm_{dw}. 
sstats *= self.expElogbeta - return gamma, sstats + gamma_chunk = self.state.gamma[list(chunk_authors), :] + return gamma_chunk, sstats - def do_estep(self, chunk, state=None): + def do_estep(self, chunk, state=None, chunk_no=None): """ Perform inference on a chunk of documents, and accumulate the collected sufficient statistics in `state` (or `self.state` if None). @@ -316,48 +322,12 @@ def do_estep(self, chunk, state=None): """ if state is None: state = self.state - gamma, sstats = self.inference(chunk, collect_sstats=True) + gamma, sstats = self.inference(chunk, collect_sstats=True, chunk_no=chunk_no) state.sstats += sstats - state.numdocs += gamma.shape[0] # avoids calling len(chunk) on a generator - return gamma - - def inference(self, chunk, collect_sstats=False): - """ - """ - return gamma, sstats - - def do_estep(self, chunk, state=None): - """ - Perform inference on a chunk of documents, and accumulate the collected - sufficient statistics in `state` (or `self.state` if None). - - """ - if state is None: - state = self.state - gamma, sstats = self.inference(chunk, collect_sstats=True) - state.sstats += sstats - # NOTE: why not use chunksize here? state.numdocs += len(chunk) return gamma - # NOTE: this method can be used directly, but self.bound needs to be updated slightly. 
- # def log_perplexity(self, chunk, total_docs=None): - - def update(self, corpus, chunksize=None, decay=None, offset=None, - passes=None, update_every=None, eval_every=None, iterations=None, - gamma_threshold=None, chunks_as_numpy=False): - """ - """ - # TODO: this - pass - - def do_mstep(self, rho, other, extra_pass=False): - """ - """ - # TODO: this - pass - - def bound(self, corpus, gamma=None, subsample_ratio=1.0): + def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0): """ Estimate the variational bound of documents from `corpus`: E_q[log p(corpus)] - E_q[log q(corpus)] @@ -371,21 +341,27 @@ def bound(self, corpus, gamma=None, subsample_ratio=1.0): _lambda = self.state.get_lambda() Elogbeta = dirichlet_expectation(_lambda) + if gamma is None: + gamma = self.state.gamma + + Elogtheta = dirichlet_expectation(gamma) + word_score = 0.0 authors_set = set() # Used in computing theta bound. theta_score = 0.0 for d, doc in enumerate(corpus): # stream the input doc-by-doc, in case it's too large to fit in RAM - authors_d = self.doc2author[d] + doc_no = chunk_no + d + authors_d = self.doc2author[doc_no] ids = np.array([id for id, _ in doc]) # Word IDs in doc. cts = np.array([cnt for _, cnt in doc]) # Word counts. if d % self.chunksize == 0: logger.debug("bound: at document #%i", d) if gamma is None: - gammad, _ = self.inference([doc]) + gammad, _ = self.inference([doc], chunk_no=chunk_no) else: - gammad = gamma[d] - Elogthetad = dirichlet_expectation(gammad) # Shape (len(authors_d), self.num_topics). + gammad = gamma[authors_d, :] + Elogthetad = Elogtheta[authors_d, :] # Shape (len(authors_d), self.num_topics). # Computing the bound requires summing over expElogtheta[a, k] * expElogbeta[k, v], which # is the same computation as in normalizing phi. @@ -396,17 +372,17 @@ def bound(self, corpus, gamma=None, subsample_ratio=1.0): # The code blow ensure we compute the score of each author only once. 
for ai, a in enumerate(authors_d): if a not in authors_set: - theta_score += numpy.sum((self.alpha - gammad[ai, :]) * Elogthetad[ai]) - theta_score += numpy.sum(gammaln(gammad[ai, :]) - gammaln(self.alpha)) - theta_score += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(gammad[ai, :])) + theta_score += np.sum((self.alpha - gammad[ai, :]) * Elogthetad[ai]) + theta_score += np.sum(gammaln(gammad[ai, :]) - gammaln(self.alpha)) + theta_score += gammaln(np.sum(self.alpha)) - gammaln(np.sum(gammad[ai, :])) authors_set.add(a) - # compensate likelihood for when `corpus` above is only a sample of the whole corpus + # Compensate likelihood for when `corpus` above is only a sample of the whole corpus. This ensures + # that the likelihood is always rougly on the same scale. word_score *= subsample_ratio - # TODO: theta_score should probably be multiplied by subsample ratio as well. Maybe it - # has to be a different subsample ratio, for example something along the lines of: - # theta_score *= self.num_authors / len(authors_set) + # theta_score is rescaled in a similar fashion. + theta_score *= self.num_authors / len(authors_set) # E[log p(beta | eta) - log q (beta | lambda)] beta_score = 0.0 @@ -419,6 +395,23 @@ def bound(self, corpus, gamma=None, subsample_ratio=1.0): return total_score + def get_author_topics(self, author_id, minimum_probability=None): + """ + Return topic distribution the given author, as a list of + (topic_id, topic_probability) 2-tuples. + Ignore topics with very low probability (below `minimum_probability`). 
+ """ + if minimum_probability is None: + minimum_probability = self.minimum_probability + minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output + + topic_dist = self.state.gamma[author_id, :] / sum(self.state.gamma[author_id, :]) + + author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) + if topicvalue >= minimum_probability] + + return author_topics + # NOTE: method `top_topics` is used directly. There is no topic coherence measure for # the author-topic model. c_v topic coherence is a valid measure of topic quality in # the author-topic model, although it does not take authorship information into account. @@ -456,7 +449,6 @@ def save(self, fname, ignore=['state', 'dispatcher'], *args, **kwargs): Please refer to the wiki recipes section (https://github.com/piskvorky/gensim/wiki/Recipes-&-FAQ#q9-how-do-i-load-a-model-in-python-3-that-was-trained-and-saved-using-python-2) for an example on how to work around these issues. """ - # TODO: this if self.state is not None: self.state.save(utils.smart_extension(fname, '.state'), *args, **kwargs) @@ -484,7 +476,6 @@ def load(cls, fname, *args, **kwargs): >>> AuthorTopicModel.load(fname, mmap='r') """ - # TODO: this kwargs['mmap'] = kwargs.get('mmap', None) result = super(AuthorTopicModel, cls).load(fname, *args, **kwargs) state_fname = utils.smart_extension(fname, '.state') diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index 048e5d4c51..b915ce31bc 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -407,7 +407,7 @@ def clear(self): self.state = None self.Elogbeta = None - def inference(self, chunk, collect_sstats=False): + def inference(self, chunk, collect_sstats=False, chunk_no=None): """ Given a chunk of sparse document vectors, estimate gamma (parameters controlling the topic weights) for each document in the chunk. 
@@ -497,7 +497,7 @@ def inference(self, chunk, collect_sstats=False): sstats *= self.expElogbeta return gamma, sstats - def do_estep(self, chunk, state=None): + def do_estep(self, chunk, state=None, chunk_no=None): """ Perform inference on a chunk of documents, and accumulate the collected sufficient statistics in `state` (or `self.state` if None). @@ -505,7 +505,7 @@ def do_estep(self, chunk, state=None): """ if state is None: state = self.state - gamma, sstats = self.inference(chunk, collect_sstats=True) + gamma, sstats = self.inference(chunk, collect_sstats=True, chunk_no=None) state.sstats += sstats state.numdocs += gamma.shape[0] # avoids calling len(chunk) on a generator return gamma @@ -535,7 +535,7 @@ def update_eta(self, lambdat, rho): return self.eta - def log_perplexity(self, chunk, total_docs=None): + def log_perplexity(self, chunk, chunk_no=None, total_docs=None): """ Calculate and return per-word likelihood bound, using the `chunk` of documents as evaluation corpus. Also output the calculated statistics. incl. 
@@ -546,7 +546,7 @@ def log_perplexity(self, chunk, total_docs=None): total_docs = len(chunk) corpus_words = sum(cnt for document in chunk for _, cnt in document) subsample_ratio = 1.0 * total_docs / len(chunk) - perwordbound = self.bound(chunk, subsample_ratio=subsample_ratio) / (subsample_ratio * corpus_words) + perwordbound = self.bound(chunk, chunk_no, subsample_ratio=subsample_ratio) / (subsample_ratio * corpus_words) logger.info("%.3f per-word bound, %.1f perplexity estimate based on a held-out corpus of %i documents with %i words" % (perwordbound, np.exp2(-perwordbound), len(chunk), corpus_words)) return perwordbound @@ -652,10 +652,11 @@ def rho(): reallen = 0 for chunk_no, chunk in enumerate(utils.grouper(corpus, chunksize, as_numpy=chunks_as_numpy)): + self.rho = rho() reallen += len(chunk) # keep track of how many documents we've processed so far if eval_every and ((reallen == lencorpus) or ((chunk_no + 1) % (eval_every * self.numworkers) == 0)): - self.log_perplexity(chunk, total_docs=lencorpus) + self.log_perplexity(chunk, chunk_no, total_docs=lencorpus) if self.dispatcher: # add the chunk to dispatcher's job queue, so workers can munch on it @@ -666,7 +667,7 @@ def rho(): else: logger.info('PROGRESS: pass %i, at document #%i/%i', pass_, chunk_no * chunksize + len(chunk), lencorpus) - gammat = self.do_estep(chunk, other) + gammat = self.do_estep(chunk, other, chunk_no) if self.optimize_alpha: self.update_alpha(gammat, rho()) @@ -729,7 +730,7 @@ def do_mstep(self, rho, other, extra_pass=False): # only update if this isn't an additional pass self.num_updates += other.numdocs - def bound(self, corpus, gamma=None, subsample_ratio=1.0): + def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0): """ Estimate the variational bound of documents from `corpus`: E_q[log p(corpus)] - E_q[log q(corpus)] @@ -760,7 +761,8 @@ def bound(self, corpus, gamma=None, subsample_ratio=1.0): score += np.sum(gammaln(gammad) - gammaln(self.alpha)) score += 
gammaln(np.sum(self.alpha)) - gammaln(np.sum(gammad)) - # compensate likelihood for when `corpus` above is only a sample of the whole corpus + # Compensate likelihood for when `corpus` above is only a sample of the whole corpus. This ensures + # that the likelihood is always rougly on the same scale. score *= subsample_ratio # E[log p(beta | eta) - log q (beta | lambda)]; assumes eta is a scalar From e5e7722cebbd404ef9f3bf6e654d252a25346bb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Sun, 4 Dec 2016 16:41:46 +0100 Subject: [PATCH 059/100] Refactoring. Various docstring and commenting. Made methods for constructing author2doc and doc2author so that the user may do this at will. Assuming that input to bound is seen data, as the converse may be problematic. --- docs/notebooks/at_with_nips.ipynb | 292 ++++++++++++------------------ gensim/models/atmodel.py | 32 ++-- gensim/models/atmodel2.py | 143 ++++++++------- gensim/models/ldamodel.py | 1 + 4 files changed, 216 insertions(+), 252 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 518ae34244..7b31d913a2 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -108,7 +108,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": 62, "metadata": { "collapsed": false }, @@ -122,8 +122,8 @@ "#data_dir = '../../../nipstxt/' # On Hetzner.\n", "\n", "# Folders containin individual NIPS papers.\n", - "yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", - "#yrs = ['00', '01', '02']\n", + "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']\n", + "yrs = ['00', '01', '02']\n", "dirs = ['nips' + yr for yr in yrs]\n", "\n", "# Get all document texts and their corresponding IDs.\n", @@ -145,7 +145,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 63, "metadata": { "collapsed": false }, @@ -175,7 +175,7 
@@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": 64, "metadata": { "collapsed": false }, @@ -187,7 +187,7 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 65, "metadata": { "collapsed": false }, @@ -205,7 +205,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 66, "metadata": { "collapsed": false }, @@ -231,7 +231,7 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": 67, "metadata": { "collapsed": false }, @@ -254,7 +254,7 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": 68, "metadata": { "collapsed": false }, @@ -269,7 +269,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 69, "metadata": { "collapsed": false }, @@ -297,7 +297,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 70, "metadata": { "collapsed": true }, @@ -309,7 +309,7 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 71, "metadata": { "collapsed": false }, @@ -327,16 +327,16 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 72, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYXFWd//H3J4GgLGn44SSAgIAIggKSCARlnUiQTZ0B\nhcYFFNRBQKaVRRHGDLgAAmFXFBBBbYwoo6xhUwgQRQirhKDsiwlb6IQkEJJ8f3+cU3Bzqd6X6ur6\nvJ6nnkqde+6551Z1p791VkUEZmZmZvVsWK0rYGZmZtZbDmjMzMys7jmgMTMzs7rngMbMzMzqngMa\nMzMzq3sOaMzMzKzuOaAxMzOzuueAxszMzOqeAxozMzOrew5ozKzbJD0j6aeF1+MlLZX0kQG49vck\nvVF4PTxf+/T+vna+3sH5emsNxPV6StK3JD0mabGkO2tdn66S9N78/u5f67pYfXFAY3VD0gH5P7pq\njx/Uun4NptqeKd3eR0XSdyTt1YNrL+3utbqrg7oFPbjXgSRpd+AHwJ+AA4Hja1ohswGwXK0rYNZN\nQfrP+YlS+oMDXxWriIibJL0zIhZ189TjgEuBK7txzneBE7p5nZ5or24XAZf24F4H0s7AG8DB4Q37\nrEE4oLF6dF1ETO9qZkkCRkTE6/1Yp4bX33/gJa0YEQsiYikD0ELTnhwgDOZgBmA0MH8wBjP+fbT+\n4i4nG1KK4ykkfV7S34HXgPH5uCR9Q9LfJb0m6V+SzpM0slSOJP1PHivyqqQbJb1f0tOlsSPLjOco\npFcdZyFpD0lTc5ltkv4o6f2lPL+UNEfS2vn4PEnPSzqpynUkqUXS/ZIW5nzXSPpQPn67pLvaea8e\nldRhy0h770OVfG8bQyNpI0m/lzQr1+0pSb+StFLlcwJGAJX3amnlvc3v69Jcxm8kzSF1n7T7nudj\nn5c0M1/vzvKYnvze/qPKeW+W2YW6tffZHl74uXpW0llVfq5ukzRd0gck/UnSgvzefqOjz6Fw/nKS\nvps/u9eUxsicIGn5Ut0/CzTlei5RO+NR8s/OG5JWKqQdk887qZC2XP78TyikrSxpUv6deE3SDEn/\nXSq/s9/H1SRdIukVSS9LuhBY5j3L+daU9Iv8Xr0m6TlJV0hauyvvmzUGt9BYPWqStHoxISJeKuWZ\nAOwHnAu8DDyV0y8CmvPzGcAGwOHAFpK2z9/+IY0/OAb4IzAFGAtcD7yzdJ32xlO8LV3SgcCFwDXA\n0cBKwNeAqZK2jIhnCucul683Ffhmvp+jJP0jIi4sFHsJ6Y/XlcBPSX+EdwC2Ae7Nx8+TtFFEPFKo\ny7bA+sC3q9S9qKvvQ6XelfJXyPmGkd7n2cDawF7AyIiYL+lzwM+B2/L7AvDPUlm/Bx4GvlVIa+89\nHw/sD5xF6m45FJgi6cMRMbOTc99Mj4glXahb+bP9HnAscB3pZ24T0mc7tvRzFcC7gGuB3wKXAZ8B\nfiTpvoi4qUrdii7O93gZ6WdjHKlrbGNg30LdvwZsAXwFEHB7O+VNJX1GHyV9XgDbAUuA7Qv5xpI+\n81vz/Qq4Op/3M+B+YDfgdElrRsQxpeu87fcxl3El6Wf1PGAmsDfpfS9/Rv8HbEj6bJ8itUBNIP1M\nPYMZQET44UddPIADSF0N5ceSQp7hOW0RsGHp/J3ysb1L6bvl9H3y61H5/N+V8p2U8/20kHYisKhK\nXQ8i/VFYK79eBXgFOLuUb3ROP6eQdmk+9+hS3nuBOwqvd8n1OaWD92xVYCFwQin93Hzdd3Rwbnfe\nh/G5zh/Jr8fmPHt18pkuLJZTel+XAhe3c2xR4XXlM18MfLCQ/h5Sa8Blpff2kc7K7KRu5c92dH6f\n/ljK9/Wc77OFtKk57TOFtBGkgO/XnbxXY/J9nltKPz2X+dHSfb7chd+p4cA84MRC2sukgOm1ys8H\ncFS+x5Xz671zXY4slfc7UjC5bhd+HytlfL2QNowURC4B9s9p/
6+czw8/qj3c5WT1JoBDgI8VHrtU\nyXdTRPyzlLYP6T/rP0tavfIA7iL98do559uV9B/x2aXzz+hFvT9OCmouK117CfC3wrWLflp6fRup\nRalib9If8RPbu2hEvAJcRfpWD6RuAODTpEDltQ7qPIGevw+v5OfdJL2jC/mrCeAn3cg/NSLeHBwe\nEU+SWgA+3sPrd9UupPep/L6cDywA9iilt0XE5MqLSGOP/sayn201u5Pek/L09NNIrTDl63QqIpYA\n00itekjaHGgCfggsT2o9gdRqc19EvJpf70YKUs4tFXk66b0ov+fVfh93A16n8HMeqSXrnHw/FQtI\nQdLOkpq6eYvWQBzQWD36W0TcXHxUyfNElbT3kb7tvVB6zAbeQWqRAFg3Py/zH3BEzCJ9m+2JDUn/\nSU8tXft54N8L1654NQcjRXOA1QqvNwCeiYjO6nQJsL6kcfn1x4HVSd/iO/Ke/Nzt9yEiHgXOBL4K\nvCTpWkmHSFqlk2uWPd6NvOU/mACPAKtIWq3Ksb5SeZ8eKSZGGvT6eOF4xdNVyih/tu1dZ3F+b4vX\neZb0eZSv01W3AVvlcTjbA09HxH2kmYOVbqePkn52i3V5JiIWlsqaUThe9ESV674HeLZKUD2z+CIf\nPxbYE3he0p8lHSmp/DtjDc5jaGyoKv9HCymAfw74PMt+A6x4Pj9XjnVlhkh7eYZXuXaQxu+8WCV/\neZDrknbKVTv/7si1+ZqfA/6Sn5+NiD93cl533oe3iYiWPMjzk6TWnnOAYySNy0FRV1T7HLuj/B51\n9fPqzTU605XPtrvHu1uHoqmkqfDbkFpiphbSt5f0AdIXgVt7cb1qn6Oo/nm8reyIOE3SFcCnSC2o\n3wO+LWnHYqucNTa30FgjeZQ0IPO2cgtPflT+Y3wiP29UPFnSGqRuo6I5wHBJK5bS16tybYDn27n2\nVLrvn8Da5Zk0ZRGxmDz4VNKqpIG5v+pC+U/k5668D+1d+8GI+H5E7AjsSGr9+koxS1fK6aL3VUnb\nCJgXEXPy6zmkcUVl61VJ62rdnsjPGxcTJY3I5T7ZxXK6cp3lJL23dJ21gJV7cZ2/kLoudyC1yFR+\nFm8FPkLqDg1SS06xLmtLKg8O3yQ/d6UulTLKXZIbV8lLRDwWEadHxK7AZqRByl2aHWaNwQGNNZLJ\npAGYx5UP5GmplcDgBtK36MNL2VqqlPko6RvlDoWyVia1AhVdC7wKfCePYSlf/11dvIei35FaWbuy\nCuylpGDufNIfgq4ENN15H5YhaaSk8v8vD5L+MK5QSJtP9QCjJ7bLY0AqdViP1E1xXSHPo8DqkjYp\n5Hs3Kcgr62rdKu/TEaX0r5Jmsl3VhTK64hrSz9p/l9K/SXpfr+5JobnbaDrpZ3ZNlm2hWQk4DJgZ\nEcWWxWtIv0tfKxXXQnovru3Cpa8h/Sx8tZKQfzcOY9kZc+/Ms+aKHiP9Pq1QyLeGpI2r/NxZg3CX\nk9WbHjetR8TNuQvkOEljgBtJ30w3Ig0YPoQ0U2W2pEnAkZL+SPrP+cOkAcgvl4q9FngWuFjSqTnt\nS8C/gDfXKYmINkmHkaaLT5d0Gakb6D2kwZx/opvfNiPiRkmtwDeU1oa5ntR1sj0wJSKKgy3vkjSD\nNBj4/q4003fzfYBlP5tdgEmSfgv8gzTA9ABS19rvC/nuBibk9Uv+BTwaEVXXzemCB4HrJZ1N+ly/\nlp//t5Dn16Sp6H/M+VYG/os0NXyLUnldqlt+n04GjpV0DSmA2SSXO43UOtZrETFd0q+Ar+UB5VOB\nbUldiJMjor2p2V0xFTgSeCkiZuTr/UvSo6Tfj5+V8l9BasE5WdKGvDVtew/gRxFRbZxQ2RWk1qFT\nc6tTZdp2ubVzU+A6SZOBh0gB0z6kcWCthXynkga/r03qWrZGU+tpVn740dUH6Q/iEmBMB3mG5zyn\ndZDny6RZJa+SuiDuAb4Pj
Crl+x9SsPIq6Vv4xqQBnT8t5RtD+sO1kPTN8VBKU3sLeXcitRjMyeXO\nBC4APlTIcynpD0u53icCr5fSRPpD9FC+/izSzJ7Nq5z/rVynb3Tzfa/2PjwFnF/IU562vUG+r3+Q\nWjqez+fuUCr7/cCfc9lLKu9tvtclpDVrOnwfip856Y/7I/m9uLNSn9L5E4AHSNOS/05aB6batO32\n6tbeZ3toLu+1/H6dCaxSyjMVuLtKnS4ltYJ09lkMz5/Ho/k6j5MCtuWqlPe2n6EOyt0r39MVpfSL\nKE09LxxbiTSr6Zlcl4eBI7rz+0gaCH0JaVbcS6Q1f7Zk2Wnb7yLNtHsImEsKpm8HPlXlnheXPxc/\nGueh/INgZl0g6Wng2oj4SqeZBxlJ3yStIbNuRPyr1vUxM+tL7ms0axxfIq0H4mDGzIYcj6ExG8KU\n9uj5BGncy/vxrBAzG6Ic0Jh1T3t7AQ1Wa5BmNL1M2v5gSo3rY2bWLzyGxszMzOqex9CYmZlZ3XNA\nY2ZmZnXPAY2Z9Yqk70kq70U10HUYLmmppPJO1L0pc3wu8xN9VWY3rv1LSf8Y6Oua1TMHNGb9SNIB\n+Y9i5bFQ0kxJZw+h3YLrbaB0d9TqvgJYWqNrm9Ulz3Iy639B2m/pCeAdpB2NDwF2k/TBiHithnWz\njvVmF+veOLCG1zarSw5ozAbGdRExPf/7Ikkvkzby+yTwm9pVq3OSVoyIBbWuRyOJiCW1uK4/a6tn\n7nIyq42bSd/A168kSFpf0m8lvSRpvqRpknYvniTphcImmCh5RdIbhd3CkXRMTluxkLaxpMtz+Qsl\n/U3SXqXyK11kO0g6T9Js0v5V3SLpIEk3SZqdr/WgpC+X8pwpaVYp7cf5+v9VSFsrp32pi9f+fO7W\nWyjpTkkfqZLn3ZIuljRL0muSHpB0QJXiAhgm6XhJz0haIOkGSeuXytsxf3ZP5fKelHRqcZdoSd+S\ntETSWuWL5LwLJa2SX79tDI2klSVNkvR0vsaMvHFmMc9783u1fym9Msbo2ELa93LaRpJ+I2kOaZNU\ns7rkgMasNjbMzy8B5PE000i7VJ8DHAusAFwp6ZOF824Hdii83hyoBDIfLaRvB0yvfNuW9AHSzsYb\nAz8krRj8KvB/pfIrziOtLPy/pP2fuusQ0kad3we+Sdqs8fxSUDMV+DdJG5XqvYS0Y3jFDqTAYmoX\nrjse+BHwC9ImjqOAKZI2rmSQtAZp48odgbOAI3Jdfy7pa6XyROou3AM4OT8+QtpQsegzpM/rHOAw\n0kacR5A2d6y4LJf36Sr13ge4JiLm5dfLjEuSJOBq4HDSbt4tpI0/T1fa6bsnKuX/nrSJ5LdIm0Oa\n1ada747phx9D+cFbO4TvDKwOvBvYF3iBFFCsmfNNyvm2LZy7EmlX5UcLad8EFgEr5deHkf4YTwN+\nUMj3MnBq4fWNpF3Fy7sy3wY8XKrvUtIu0+riPVbbqXqFKvluAGYUXo/O1zoov14tvweXAU8V8p0D\nzOqkDsNzWYuBDxbS30PaCfqyQtrFpN3Cm0plTAZeBJbPr8fnMu8DhhfyteR6btTJ/X4n12fNQtpf\ngTtK+bbN1/lMIe1S4JHC671zniNL5/4OeIO04SjAe3O+/dt5f44tfW5LgYtr/Xvihx998XALjVn/\nE3ATKYh5Gvg1MBf4VLy1UeRuwJ0RMa1yUkTMB34KrCdp05w8lTT2rdKNsn1Om5r/jaTNgVVzGpJW\nIwVUvwWaJK1eeQDXA++TtGahvgH8LCJ6PMMnIl5/8+alkflatwAbSXpnzjMb+CdvtThtD7wOnAas\nLek9pXvsiqkR8WChHk8CVwIfz3UR8B/AH4DlqrwXqwEfKpV5YSw7pmUq6TPdoJ37XTGXd0fOVyzv\nN8A2ktYtpO0LLCC1vLRnN1Ige24p/XRSsPLxDs7tSAA/6eG5ZoOKAxqz/hekLpiPATsBm0b
EeyPi\nxkKe9wAzq5w7o3AcYDrpj1+lS2Y73gpoPixpRD4WpNYXSN1bIn0jf6H0mJjzlKeQP1F8IWl5SaOL\nj45uWNL2km6W9CrwSr7WCflwUyHrbaV7uRO4C2gDtpfUBHyQrgc0/6yS9giwSg7s1gBWAb7G29+L\nn+b85feiPIZoTn5erZIg6T2SLpH0Eqnl7QVSEAvL3u/k/PyZQtrewFXR8WDc9wDPRMTCUnr556Mn\nHu/FuWaDhmc5mQ2Mv8Vbs5x6LCIWS/orsIOk9wJrAreS/oAuD2xDCgxmRMRL+bTKF5dTgfY2pywH\nAuU/nDuQuoyCFByFpHUi4rlyQZLel/M+SOqeeZrUuvAJ0hiQ4hepqcABktYhBTY3RkRIuj2/rgQP\nt7ZT764oTn+uXPsXwC/byX9f6XV7M44EacAtqUtvFeAHpMB0AbAuaQzNm/cbEc9ImkYKaE6VtD2p\nG/KybtxDR9prVRvewTnlz9qsLjmgMRscniQN2C3bpHC8YipwNGkA8QsR8QiApL+TAo/tSd0sFY/l\n5zci4uYe1u9uUgtT0Qvt5P0EKbjaI3crkeu3a5W8lZaXXYExwHfz61uBL5ICmnm8Pchoz/uqpG0E\nzIuIOZLmAvOBYb14L8o+RBq70hwRb07Bl9ReN9BlwJmSNiB1N80Dru3kGk8A20l6Z6mVpvzzUQkA\nVy2d35sWHLO64C4ns8HhGmBrSdtUEiStBHwFeDwiHirknUpaoO8I3upWIv/786RWmze7aCLiBdIg\n36/mGT7LkPSuzioXEa9ExM2lR3vbHVRaNN78/yV393yhSrn/BGaTBjsPI407qdzjxqTxLnd0YzzP\ndnkMUeW66wF7Atfl6y0BrgA+I2mT8slV3ouuXLfa/Yr0+VQ7/7fkgbuk7qY/FsfgtOMaYASpq6yo\nMkD5WoCImEPq4tuhlO+wdupSlaQmpWn+K3f1HLNacwuNWf/rSnfBSUAzcJ2ks0izlA4kfbP+z1Le\naaTZMxsB5xfSbyWN1ak2xfnQnPaApJ+RWm1Gk2bYvBvYspv17cgU0vTma/K1RgJfBv7F28enQArE\n9iFNM381p/2N1BWyIWlWUlc9CFwv6WzSe/S1/Py/hTxHk/7g35nrNwP4f8CHSa1bxaCvK+/F30nj\nUM7IA5lfzfczslrmiJgtaSpwFLAyXVtY8QrS53uypA2B+0kDhfcAfhQRxXE+FwBHSmojjbnaidSC\n1J3PdT/gx/l5cid5zQYFt9CY9b9OvxlHxPOk4OJ60rfpH5CmG+8ZEX8s5V1AmoJdHPgLKWAJ0pTn\np0vnzCD9wb6KNDX7HOCrpG/3J7CsnsxuevOcfK19SP+/nAocDJxNWtummkq9i61Ki0lTnLu6/kyl\nDjcBR5LucSKp9WdCrlOl7FnAVqRxNP+Z6/Z1UgByTHv31V56bqnakxRkHAscRwpyvthBXX9DCmZe\nof1xTcVrBCl4OQvYizTNfyPgGxHxrdJ53yWN3fkMKbBcnOvX3T23hur+XDZEqRczM83MzMwGhUHR\nQpOX9D5D0hN5afHbJH24lOcESc8Vlh7fsHR8NUm/ktQmaY6kC/IYhGKezSXdmpcYf1LSUQNxf2Zm\nZta/BkVAQ1puezzwWdKaEzcAN1YW+5J0DKkZ/qvA1qRZClPymhsVvyaN+B9PaprdgcL4AqU9UqaQ\n+rrHkPqvJ0o6uF/vzMzMzPpdzbucJL2DNG1xr4i4rpB+F2lvk/+R9Bxp4NukfGwkqW/8gIiYnGcr\n/B0YGxH35Dy7kvY+WTsiZkk6hLSw2Bq5fx5JPwQ+GRGbYmZmZnVrMLTQLEda9Kk8bXEhaQrm+qRZ\nB5VVN4mIuaQBg9vmpHHAnEowk91IGtS2TSHPrZVgJpsCbJxXIzUzM7M6VfOAJk/TnAYcL2lNScMk\nfY4UrKxJCmaC1CJTNJu3pleuATxfKncJaeprMU+1MmD
ZaZpmZmZWZwbLOjSfI00zfJY0xXA6aUzM\nmA7OEZ1PK+wsT2Vdhqp58gZzu5JW6Xytk2uZmZnZW94BrAdMKWzF0m8GRUATEY8DO+ddeEfmhacu\nIw3gnUUKPEazbAvLKNJaHOQ8yyzYlfdXWS0fq+Qpb6hXOafcclOxK/Crbt+QmZmZVXyW1EjRrwZF\nQFOR9yhZmJdJ3xU4MiIelzSLNHvpfnhzUPA2wLn51GnAqpK2LIyjGU8KhO4s5PmepOG5OwpgAjAz\nItraqdITAL/85S/ZZJO3rZJed1paWpg0aVKtq9FnfD+D11C6F/D9DGZD6V5gaN3PjBkz+NznPgf5\nb2l/GxQBjaQJpOBjJmlzuVNIy5FfnLOcARwn6Z+kN+ZE4BngDwAR8bCkKcDP8mymEaTVP1vzqqCQ\nosP/AS6SdDKwGWl10CM6qNprAJtssgljxnTU+1UfmpqahsR9VPh+Bq+hdC/g+xnMhtK9wNC7n2xA\nhmwMioAGaAJ+SNpT5mXgcuC4SktKRJwiaUXSujKrkpZC3y0iFhXK2J+0nPuNpI3fLqcQrETE3DyV\n+xzgLuBFYGJEXNjP92ZmZmb9bFAENBHxW9IOtB3lmUjam6W946+QBhd3VMYDwI7dr6GZmZkNZjWf\ntm1mZmbWWw5oGkhzc3Otq9CnfD+D11C6F/D9DGZD6V5g6N3PQKr51geDmaQxwN133333UBykZWZm\n1m+mT5/O2LFjIW1LNL2/r+cWGjMzM6t7DmjMzMys7jmgMTMzs7rngMbMzMzqngMaMzMzq3sOaMzM\nzAyAu+6Ce++tdS16ZlCsFGxmZma1953vQFMTTJ5c65p0n1tozMzMrO45oDEzM7O654DGzMzMAKjn\nzQMc0JiZmVndc0BjZmZmb5JqXYOecUBjZmZmdc8BjZmZmdU9BzRmZmYGeFCwmZmZDREeQ2NmZmZW\nIw5ozMzMrO45oDEzMzPAY2jMzMzMaqrmAY2kYZJOlPSYpAWS/inpuCr5TpD0XM5zg6QNS8dXk/Qr\nSW2S5ki6QNJKpTybS7pV0kJJT0o6qr/vz8zMrJ54UHDPfQv4KvA14P3A0cDRkg6rZJB0DHBYzrc1\nMB+YImlEoZxfA5sA44E9gB2A8wtlrAJMAR4HxgBHARMlHdxvd2ZmZmYDYrlaVwDYFvhDRFyXXz8l\naX9S4FJxBHBiRFwJIOkLwGzgU8BkSZsAuwJjI+KenOdw4GpJR0bELOBzwPLAQRGxGJghaUvgG8AF\n/X6XZmZmg5zH0PTOHcB4Se8DkLQF8FHgmvx6fWAN4KbKCRExF/grKRgCGAfMqQQz2Y1AANsU8tya\ng5mKKcDGkpr6+qbMzMxs4AyGFpqTgJHAw5KWkIKs70TEZfn4GqTAZHbpvNn5WCXP88WDEbFE0sul\nPI9VKaNyrK2X92FmZlb36nUMzWAIaPYF9gf2Ax4CPgScKem5iLi0g/NECnQ60lmeysdWx41sZmZm\nNhgCmlOAH0TEb/Prv0taD/g2cCkwixR4jGbZVppRQKWLaVZ+/SZJw4HV8rFKntGla1fOKbf+LKOl\npYWmpmV7pZqbm2lubu7oNDMzs7rS0zE0ra2ttLa2LpPW1jawHR+DIaBZkbe3kCwlj++JiMclzSLN\nXrofQNJI0tiYc3P+acCqkrYsjKMZTwqE7izk+Z6k4RGxJKdNAGZGRIfv+qRJkxgzZkxP78/MzGxI\nq/Ylf/r06YwdO3bA6jAYBgVfCXxH0u6S3iPpP4AW4PeFPGcAx0naS9JmwCXAM8AfACLiYdIA359J\n2krSR4GzgdY8wwnStO5FwEWSNpW0L/B14LQBuEczMzPrR4OhheYw4ERSa8so4DngxzkNgIg4RdKK\npHVlVgWmArtFxKJCOfsD55BmNy0FLidN966UMVfSrjnPXcCLwMSIuLD/bs3MzKy+eFBwD0XEfNJa\nMN/oJN9EYGIHx18
hrTXTURkPADt2u5JmZmYNwOvQmJmZmdWQAxozMzOrew5ozMzM7E31OobGAY2Z\nmZnVPQc0ZmZmBnhQsJmZmVlNOaAxMzOzuueAxszMzN7kQcFmZmZW1zyGxszMzKyGHNCYmZlZ3XNA\nY2ZmZm/yGBozMzOrax5DY2ZmZlZDDmjMzMys7jmgMTMzs7rngMbMzMyANIbGg4LNzMzMasQBjZmZ\nmdU9BzRmZmZW9xzQmJmZGeAxNGZmZmY15YDGzMzM6p4DGjMzM6t7NQ9oJD0uaWmVx9n5+AqSzpX0\noqR5ki6XNKpUxjqSrpY0X9IsSadIGlbKs5OkuyW9JukRSQcM5H2amZlZ/6l5QAN8GFij8NgFCGBy\nPn4GsAewN7ADsBbwu8rJOXC5BlgOGAccABwInFDIsx5wFXATsAVwJnCBpF3666bMzMzqTT0PCl6u\n1hWIiJeKryXtBTwaEVMljQS+BOwXEbfk418EZkjaOiLuBHYF3g/sHBEvAg9IOh44SdLEiFgMHAI8\nFhFH58vMlLQd0ALcMBD3aWZmZv1nMLTQvEnS8sBngQtz0odJQddNlTwRMRN4Ctg2J40DHsjBTMUU\noAn4QCHPjaXLTSmUYWZmZnVsUAU0wH+QApFf5NejgUURMbeUbzape4r8PLvKcbqQZ6SkFXpbaTMz\nM6utmnc5lXwJuDYiZnWST6RxNp3pKI+6kAeAlpYWmpqalklrbm6mubm5C1UwMzOrD9GVv6xVtLa2\n0traukxaW1tbH9So6wZNQCNpXeBjwKcKybOAEZJGllppRvFWi8ssYKtScaMLxyrPo0t5RgFzI2JR\nZ3WbNGkSY8aM6fwmzMzM6lxPBgVX+5I/ffp0xo4d20e16txg6nL6EilIuaaQdjewGBhfSZC0EbAu\ncEdOmgZsJuldhfMmAG3AjEKe8SxrQk43MzOzOjcoWmgkiTTV+uKIWFpJj4i5ki4ETpc0B5gHnAXc\nHhF/y9muBx4CLpV0DLAmcCJwTkS8kfP8BDhM0snARaTgZh9g936/OTMzM+t3gyKgIXU1rQP8vMqx\nFmAJcDmwAnAdcGjlYEQslbQn8GNSq8184GLgu4U8T0jaAzgd+DrwDHBQRJRnPpmZmTWsno6hGQwG\nRUATETcAw9s59jpweH60d/7TwJ6dXOMWYOA688zMzOpMPS+sN5jG0JiZmVkNOaAxMzOzuueAxszM\nzOqeAxozMzMbEhzQmJmZWV1zC42ZmZnVPQc0ZmZmVvcc0JiZmVndc0BjZmZmdc8BjZmZmdU9BzRm\nZmZW9xwx6zFGAAAgAElEQVTQmJmZWd1zQGNmZmZ1zwGNmZmZ1T0HNGZmZlb3HNCYmZlZ3XNAY2Zm\nZnXPAY2ZmZnVPQc0ZmZmVvcc0JiZmVndc0BjZmZmdc8BjZmZmdU9BzRmZmZW9yJgWJ1GBoOi2pLW\nknSppBclLZB0n6QxpTwnSHouH79B0oal46tJ+pWkNklzJF0gaaVSns0l3SppoaQnJR01EPdnZmZW\nD5YudQtNj0laFbgdeB3YFdgE+CYwp5DnGOAw4KvA1sB8YIqkEYWifp3PHQ/sAewAnF8oYxVgCvA4\nMAY4Cpgo6eD+ujczM7N6Us9dTsvVugLAt4CnIqIYWDxZynMEcGJEXAkg6QvAbOBTwGRJm5CCobER\ncU/OczhwtaQjI2IW8DlgeeCgiFgMzJC0JfAN4IL+uz0zM7P6UM8BTc1baIC9gLskTZY0W9L0YquJ\npPWBNYCbKmkRMRf4K7BtThoHzKkEM9mNQADbFPLcmoOZiinAxpKa+vqmzMzM6o0Dmt7ZADgEmAlM\nAH4CnCXpc/n4GqTAZHbpvNn5WCXP88WDEbEEeLmUp1oZFPKYmZk1rHoOaAZDl9Mw4M6IOD6/vk/S\nB0hBzi87OE+kQKcjneWpfGydlWNmZjbkOaDpnX8BM0ppM4D/zP+eRQo8RrNsC8so4
J5CnlHFAiQN\nB1bLxyp5RpeuUzmn3HKzjJaWFpqalu2Vam5uprm5uaPTzMzM6kpPA5rW1lZaW1uXSWtra+ujWnXN\nYAhobgc2LqVtTB4YHBGPS5pFmr10P4CkkaSxMefm/NOAVSVtWRhHM54UCN1ZyPM9ScNzdxSkLq6Z\nEdHhuz5p0iTGjBnTURYzM7O619OAptqX/OnTpzN27Ng+qlnnBsMYmknAOEnflvReSfsDBwPnFPKc\nARwnaS9JmwGXAM8AfwCIiIdJA3x/JmkrSR8FzgZa8wwnSNO6FwEXSdpU0r7A14HTBuAezczMBj13\nOfVCRNwl6T+Ak4DjSevEHBERlxXynCJpRdK6MqsCU4HdImJRoaj9SUHQjcBS4HLSdO9KGXMl7Zrz\n3AW8CEyMiAv78/7MzMzqRT2vFFzzgAYgIq4Brukkz0RgYgfHXyGtNdNRGQ8AO3a/hmZmZkOfVwo2\nMzOzulfPXU4OaMzMzAxwQGNmZmZDQMMHNJKGS/qQpNX6ojwzMzMbeA0X0Eg6Q9JB+d/DgVuA6cDT\nknbqu+qZmZnZQGm4gAbYB7gv/3svYH3g/aQ1Zb7fB/UyMzOzAdaIAc27eGtLgd2B30bEI8BFwGZ9\nUTEzMzMbWI0Y0MwGNs3dTR8nLWYHsCKwpN2zzMzMbNCq54Cmpwvr/RyYTNpYMoAbcvo2wMN9UC8z\nMzMbYA23UnBETJT0ILAOqbvp9XxoCWkLAzMzM6sz9bxScI+3PoiIywEkvaOQ9ou+qJSZmZkNvHru\ncurptO3hko6X9CzwqqQNcvqJlencZmZmVl8aLqABvgMcCBwNFHe8fhA4uJd1MjMzsxpoxIDmC8BX\nIuJXLDur6T7SejRmZmZWZxoxoHk38M92ylu+59UxMzOzWmnEgOYhYPsq6fsA9/S8OmZmZlYr9RzQ\n9HSW0wnALyS9mxQU/aekjUldUXv2VeXMzMxs4NRzQNOjFpqI+AMpcPkYMJ8U4GwC7BURN3R0rpmZ\nmQ1O9RzQ9GYdmtuAXfqwLmZmZlZD9bxScE/XodlK0jZV0reR9OHeV8vMzMwGUkR6rtcWmp7GYeeS\ntj0oe3c+ZmZmZnWkUQOaTYHpVdLvycfMzMysjjRqQPM6MLpK+prA4p5Xx8zMzGqhUQOa64EfSmqq\nJEhaFfgB4FlOZmZmdaZRA5ojSWNonpT0J0l/Ah4H1gC+2Z2CJH1X0tLS46HC8RUknSvpRUnzJF0u\naVSpjHUkXS1pvqRZkk6RNKyUZydJd0t6TdIjkg7o4b2bmZkNOQ0Z0ETEs8DmpM0pHwLuBo4ANouI\np3tQ5IOkLqw18mO7wrEzgD2AvYEdgLWA31UO5sDlGtIU9HHAAaSNM08o5FkPuAq4CdgCOBO4QJKn\nnZuZmVH/AU1v1qGZD/y0j+qxOCJeKCdKGgl8CdgvIm7JaV8EZkjaOiLuBHYlbYi5c0S8CDwg6Xjg\nJEkTI2IxcAjwWEQcnYueKWk7oAV3kZmZmTVuQCNpI2AnYBSllp6IOKHaOR14n6RngdeAacC3c0vP\n2FzHmwplz5T0FLAtcCepVeaBHMxUTAF+DHyAtAP4OODG0jWnAJO6WU8zM7MhqSEDGklfJgUMLwKz\ngCgcDgrdPV3wF1IX0UzSLKmJwK2SPkjqfloUEXNL58zOx8jPs6scrxy7r4M8IyWtEBGvd6O+ZmZm\nQ04loKnXlYJ72kJzHPCdiDi5txWIiCmFlw9KuhN4EvgMqcWmGrFsENVu8R0cUxfyANDS0kJTU9My\nac3NzTQ3N3ehCmZmZoPf0qXpuSctNK2trbS2ti6T1tbW1ge16rqeBjSrAb/ty4pURESbpEeADUnd\nRCMkjSy10ozirRaXWcBWpWJGF45Vnsvr5owC5kbEos7qNGnSJMaMGdONuzAzM6svvelyqvYlf/r0\n6YwdO7YPatY1PW1Y+i0woS8rUiFpZeC9wHOk2
VOLgfGF4xsB6wJ35KRpwGaS3lUoZgLQBswo5BnP\nsibkdDMzs4bXkGNogH8CJ0oaBzwAvFE8GBFndbUgST8CriR1M70b+F9SEHNZRMyVdCFwuqQ5wDzg\nLOD2iPhbLuJ60tTxSyUdQxqHcyJwTkRU6vUT4DBJJwMXkYKbfYDdu33nZmZmQ1CjBjRfAV4FdsyP\noiAFHV21NvBrYHXgBeA2YFxEvJSPtwBLgMuBFYDrgEPfvFjEUkl7kgYp3wHMBy4GvlvI84SkPYDT\nga8DzwAHRUR55pOZmVlDasiAJiLW76sKRESHI2vzDKTD86O9PE8De3ZSzi2kaeBmZmZWUu8BTa8m\nZ0kaIWljST1ez8bMzMxqryEDGkkr5rEtC4C/kwbpIulsSd/qw/qZmZnZAGjIgAb4IWlPpJ1Ydq2Y\nG4F9e1knMzMzG2D1HtD0tKvoU8C+EfEXScWF6f5OmnJtZmZmdaTeA5qettD8G/B8lfSV6NoKvmZm\nZjaILFmSnocPr209eqqnAc1dwB6F15Ug5mC8WJ2ZmVndqQQ0y9XpNJ+eVvtY4FpJm+YyjpD0AdIO\n2OV1aczMzGyQW7w4PTdUC01E3EYaFLwcaaXgCaS9lbaNiLv7rnpmZmY2EBquhSavObM/MCUivtz3\nVTIzM7OB1nAtNBGxmLQ30jv6vjpmZmZWC/XeQtPTQcF3Alv2ZUXMzMysduq9haancdh5wGmS1gbu\nJm0I+aaIuL+3FTMzM7OBUwlo6rWFpqfVviw/F3fVDkD5uU7jOzMzs8ZU711OPa12n+22bWZmZrXX\nkF1OEfFkX1fEzMzMaqchW2gkfaGj4xFxSc+qY2ZmZrXQkC00wJml18sDKwKLgAWAAxozM7M60pAt\nNBGxWjlN0vuAHwM/6m2lzMzMbGDVewtNT9eheZuI+AfwLd7eemNmZmaDXKPutt2excBafVymmZmZ\n9bPXXkvP76jTfQB6Oij4E+UkYE3gMOD23lbKzMzMBtbChen5ne+sbT16qqdDf/6v9DqAF4CbgW/2\nqkZmZmY24CoBTUO10EREX3dVmZmZWQ0tXAgjRsCwOv0LP+iqLenbkpZKOr2QtoKkcyW9KGmepMsl\njSqdt46kqyXNlzRL0imShpXy7CTpbkmvSXpE0gEDdV9mZmaD2euvwwor1LoWPdejgCYHFN+qkn6U\npN/2tDKStgK+DNxXOnQGsAewN7ADaeDx7wrnDQOuIbU4jQMOAA4ETijkWQ+4CrgJ2II0G+sCSbv0\ntL5mZmZDxZIl9TvDCXreQrMjcHWV9OtIAUe3SVoZ+CVwMPBKIX0k8CWgJSJuiYh7gC8CH5W0dc62\nK/B+4LMR8UBETAGOBw6VVOlWOwR4LCKOjoiZEXEucDnQ0pP6mpmZDSVLlzZmQLMyaVXgsjeAkT0s\n81zgyoi4uZT+YVLLy02VhIiYCTwFbJuTxgEPRMSLhfOmAE3ABwp5biyVPaVQhpmZWcNaurR+x89A\nzwOaB4B9q6TvBzzU3cIk7Qd8CPh2lcOjgUURMbeUPhtYI/97jfy6fJwu5BkpqY57Dc3MzHpvyZL6\nDmh6Om37ROD3kt5LmqoNMB5oBj7dnYIkrU0aI7NLRLzRnVNJ08U701EedSGPmZnZkFfvXU49nbZ9\npaRPAccC+wALgfuBj0XELd0sbizwb8DdkioBxnBgB0mHAR8HVpA0stRKM4q3WlxmAVuVyh1dOFZ5\nHl3KMwqYGxHVus/e1NLSQlNT0zJpzc3NNDc3d3hjZmZm9aI3LTStra20trYuk9bW1tYHteq6Hu+p\nGRFXU31gcHfdCGxWSrsYmAGcBDxLGpszHrgCQNJGwLrAHTn/NOBYSe8qjKOZALTlcip5ditdZ0JO\n79CkSZMYM2ZM1+/IzMyszvRmDE21L/nTp09n7NixfVCzrunp1gdbAcMi4q+l9G2AJRFxV1fLioj5\nlMbdSJoPv
BQRM/LrC4HTJc0B5gFnAbdHxN/yKdfnMi6VdAxpG4YTgXMK3Vg/AQ6TdDJwESlA2gfY\nvet3bmZmNjTVe5dTT4f/nAusUyX93flYb5XHtLSQ1pC5HPgz8BxpTZqUOWIpsCewhNRqcwmplee7\nhTxPkNay+Rhwby7zoIgoz3wyMzNrOI06KHhTYHqV9HvysV6JiH8vvX4dODw/2jvnaVJQ01G5t5DG\n7JiZmVlBo7bQvM7bB9hC6upZ3PPqmJmZWS006jo01wM/lPTm1B9JqwI/AG7oi4qZmZnZwGnULqcj\ngVuBJyXdk9M+RJpG/fm+qJiZmZkNnHrvcurpOjTPStoc+Cxpo8eFwM+B1m4ujmdmZmaDQL13OfVm\nHZr5wE/7sC5mZmZWIw3Z5STp06RtDjYiTbH+B/DriLi8D+tmZmZmA6Teu5y6FYtJGibpN8BvSNOz\n/wk8RtrRerKkywrbF5iZmVmdaLQWmiNIC9N9IiKuKh6Q9AnSOJojSJtNmpmZWZ2o9zE03a36F4Gj\nysEMQET8ETga+FJfVMzMzMwGTkN1OQHvI20m2Z4bcx4zMzOrI/Xe5dTdqi8EVu3g+EjgtZ5Xx8zM\nzGqh0VpopgGHdHD80JzHzMzM6ki9t9B0d1Dw94E/S1odOBV4GBCwCfBN4JPAzn1aQzMzM+t3r74K\nK69c61r0XLcCmoi4Q9K+pAX19i4dngM0R8TtfVU5MzMzGxhtbbD++rWuRc91e2G9iLhC0hRgAmlh\nPYBHgOsjYkFfVs7MzMwGRlsbjBxZ61r0XE/3clog6WPA/0TEy31cJzMzMxtgbW3Q1FTrWvRcd1cK\nXrvwcn9g5Zz+gKR1+rJiZmZmNnDmzq3vgKa7LTQPS3oJuB14B7AO8BSwHrB831bNzMzMBkJECmjq\nucupuxO0moBPA3fnc6+R9AiwArCrpDX6uH5mZmbWz159Na1DU88tNN0NaJaPiDsj4jTSIntbkrZD\nWELa8uBRSTP7uI5mZmbWj+bOTc/13ELT3S6nuZLuIXU5jQBWjIjbJS0G9gWeAbbu4zqamZlZP3r1\n1fS8yiq1rUdvdLeFZi3ge8DrpGDoLklTScHNGCAi4ra+raKZmZn1pwV50ZUVV6xtPXqjWwFNRLwY\nEVdGxLeBBcBWwNlAkFYOnivplr6vppmZmfWXhgtoqmiLiMnAG8C/A+sD53WnAEn/Jek+SW35cYek\njxeOryDpXEkvSpon6XJJo0plrCPpaknzJc2SdIqkYaU8O0m6W9Jrkh6RdEDPb9vMzGzoqAQ0K61U\n23r0Rm8Cms1JY2YAngTeiIhZEfGbbpbzNHAMMDY/bgb+IGmTfPwMYA/SVgs7kLq9flc5OQcu15C6\nwMYBBwAHAicU8qwHXAXcBGwBnAlcIGmXbtbVzMxsyJk/Pz3XcwtNj1YKBoiIpwv//mAvyrm6lHSc\npEOAcZKeJc2e2i8ibgGQ9EVghqStI+JOYFfg/cDOEfEi8ICk44GTJE2MiMWkHcIfi4ij8zVmStoO\naAFu6GndzczMhgJ3OfUxScMk7QesCEwjtdgsR2pZASAiZpIW89s2J40DHsjBTMUU0po5HyjkubF0\nuSmFMszMzBpWJaB55ztrW4/eGBQBjaQPSppHmj11HvAfEfEwsAawKCLmlk6ZnY+Rn2dXOU4X8oyU\ntEIf3IKZmVndWrAgBTPDBkVU0DM97nLqYw+TxrasShorc4mkHTrIL9LMqs50lEddyGNmZjbkzZ9f\n391NMEgCmjzO5bH8crqkrYEjgMnACEkjS600o3irxWUWafp40ejCscrz6FKeUcDciFjUWf1aWlpo\nKq0H3dzcTHNzc2enmpmZDXoLFvQuoGltbaW1tXWZtLa2tl7WqnsGRUBTxTDS/lB3A4uB8cAVAJI2\nAtYF7sh5pwHHSnpXYRzNBKANmFHIs1vpGhNyeqcmTZrEmDFjenYnZmZmg9y
CBb2bsl3tS/706dMZ\nO3ZsL2vWdTUPaCR9H7iWNH17FeCzwI7AhIiYK+lC4HRJc4B5wFnA7RHxt1zE9cBDwKWSjgHWBE4E\nzomIN3KenwCHSToZuIgUIO0D7D4Q92hmZjaYucupb4wGLiEFIm3A/aRg5uZ8vIW0+eXlpFab64BD\nKydHxFJJewI/JrXazAcuBr5byPOEpD2A04Gvk9bPOSgiyjOfzMzMGk5vu5wGg5oHNBFxcCfHXwcO\nz4/28jwN7NlJObeQpoGbmZlZQW+7nAaDOp6gZWZmZn1hKLTQOKAxMzNrcHPmwMiRta5F7zigMTMz\na3D/+hestVata9E7DmjMzMwaWEQKaNZcs9Y16R0HNGZmZg1szhxYtMgBjZmZmdWxf/0rPTugMTMz\ns7rlgMbMzMzq3gsvpOdRo2pbj95yQGNmZtbAXnoJRozwwnpmZmZWx156CVZfHaRa16R3HNCYmZk1\nsEpAU+8c0JiZmTWwtjZoaqp1LXrPAY2ZmVkDGwobU4IDGjMzs4Y2FDamBAc0ZmZmDc0BjZmZmdW9\nhQvhne+sdS16zwGNmZlZA3v1VbfQmJmZWZ2bNQtGj651LXrPAY2ZmVmDWrgwrUOzzjq1rknvOaAx\nMzNrUJWNKd/97trWoy84oDEzM2tQzz6bntdaq7b16AsOaMzMzBrUc8+lZ7fQmJmZWd167rm0SvAq\nq9S6Jr3ngMbMzKxBPfxwGhBc7zttwyAIaCR9W9KdkuZKmi3pCkkblfKsIOlcSS9KmifpckmjSnnW\nkXS1pPmSZkk6RdKwUp6dJN0t6TVJj0g6YCDu0czMbDC6914YN67WtegbNQ9ogO2Bs4FtgI8BywPX\nSyquW3gGsAewN7ADsBbwu8rBHLhcAywHjAMOAA4ETijkWQ+4CrgJ2AI4E7hA0i79cldmZmaD3GOP\nwXvfW+ta9I3lal2BiNi9+FrSgcDzwFjgNkkjgS8B+0XELTnPF4EZkraOiDuBXYH3AztHxIvAA5KO\nB06SNDEiFgOHAI9FxNH5UjMlbQe0ADf0+42amZkNIvPmwYsvwgYb1LomfWMwtNCUrQoE8HJ+PZYU\neN1UyRARM4GngG1z0jjggRzMVEwBmoAPFPLcWLrWlEIZZmZmDePJJ9PzeuvVtBp9ZlAFNJJE6l66\nLSIeyslrAIsiYm4p++x8rJJndpXjdCHPSEkr9LbuZmZm9aSyqN5QWIMGBkGXU8l5wKbAdl3IK1JL\nTmc6yqMu5DEzMxty7r037bLtgKaPSToH2B3YPiKeKxyaBYyQNLLUSjOKt1pcZgFblYocXThWeS5v\nvzUKmBsRizqqW0tLC01NTcukNTc309zc3NFpZmZmg9b558MOO8CIEb0vq7W1ldbW1mXS2trael9w\nNwyKgCYHM58EdoyIp0qH7wYWA+OBK3L+jYB1gTtynmnAsZLeVRhHMwFoA2YU8uxWKntCTu/QpEmT\nGDNmTLfuyczMbLB65hl49FE48si+Ka/al/zp06czduzYvrlAF9Q8oJF0HtAMfAKYL6nSitIWEa9F\nxFxJFwKnS5oDzAPOAm6PiL/lvNcDDwGXSjoGWBM4ETgnIt7IeX4CHCbpZOAiUoC0D6lVyMzMrGHc\nfnt6njChtvXoS4NhUPB/ASOBPwPPFR6fKeRpIa0hc3kh396VgxGxFNgTWEJqtbkEuBj4biHPE6S1\nbD4G3JvLPCgiyjOfzMzMhrRnnknP665b23r0pZq30EREp0FVRLwOHJ4f7eV5mhTUdFTOLaRp4GZm\nZg3rqadg441huZpHAX1nMLTQmJmZ2QCaPh222KLWtehbDmjMzMwayMKFMG0abLNNrWvStxzQmJmZ\nNZCHH4YlS+BDH6p1TfqWAxozM7MG8pe/pGd3OZmZmVnduuOO1N20+uq1rknfckBjZmbWQO67b+i1\nzoADGjMzs4YxZw48+CCMG1frmvQ9BzR
mZmYNorUVhg2DXXetdU36ngMaMzOzBvHLX8LOOw+dHbaL\nHNCYmZk1gNmz0/oz++1X65r0Dwc0ZmZmDeCqq0CCT3yi1jXpHw5ozMzMGsAFF8Auu8C//Vuta9I/\nhtC2VGZmZlbNLbekBfUmT651TfqPW2jMzMyGsDfegMMPT1sdfPKTta5N/3ELjZmZ2RB2wQXwwAPw\npz/BiBG1rk3/cQuNmZnZELVkCZx0EjQ3w0471bo2/csBjZmZ2RB13nnw9NNwxBG1rkn/c0BjZmY2\nBM2bBz/6Eey7b9qMcqhzQGNmZjbERMBRR8HLL8P3v1/r2gwMDwo2MzMbQiKgpQXOPx9+8APYYINa\n12hgOKAxMzMbQo4+Gs48E047LQU2jcIBjZmZ2RBx881w6qkpmPnGN2pdm4HlMTRmZmZDwKJFqXVm\ns80aq2WmYlAENJK2l/RHSc9KWirpbVtnSTpB0nOSFki6QdKGpeOrSfqVpDZJcyRdIGmlUp7NJd0q\naaGkJyUd1d/3ZmZmNhC+/W24/374+c/TJpSNZlAENMBKwL3AoUCUD0o6BjgM+CqwNTAfmCKpuObh\nr4FNgPHAHsAOwPmFMlYBpgCPA2OAo4CJkg7uh/sxMzMbMFdfDaefDiefDGPH1ro2tTEoxtBExHXA\ndQBS1bjyCODEiLgy5/kCMBv4FDBZ0ibArsDYiLgn5zkcuFrSkRExC/gcsDxwUEQsBmZI2hL4BnBB\nv96gmZlZP5k1Cw48EHbfHf77v2tdm9oZLC007ZK0PrAGcFMlLSLmAn8Fts1J44A5lWAmu5HU2rNN\nIc+tOZipmAJsLKmpn6pvZmbWb+bOhU9/GpYuhQsvbMyupopBH9CQgpkgtcgUzc7HKnmeLx6MiCXA\ny6U81cqgkMfMzKwuzJsH++0H994L//d/sEaD/yUbFF1OPSSqjLfpZp5KLNtZOWZmZoPG66/D+PHw\n8MMweTJsv32ta1R79RDQzCIFHqNZtoVlFHBPIc+o4kmShgOr5WOVPKNLZVfOKbfcLKOlpYWmpmV7\npZqbm2lubu7aHZiZmfWh006De+6BW26Bj3yk1rWB1tZWWltbl0lra2sb0DoM+oAmIh6XNIs0e+l+\nAEkjSWNjzs3ZpgGrStqyMI5mPCkQurOQ53uShufuKIAJwMyI6PBdnzRpEmPGjOmzezIzM+uJCPjx\nj+H44+HQQwdHMAPVv+RPnz6dsQM45WpQjKGRtJKkLSR9KCdtkF+vk1+fARwnaS9JmwGXAM8AfwCI\niIdJA3x/JmkrSR8FzgZa8wwnSNO6FwEXSdpU0r7A14HTBuQmzczMeuG222DzzVMg89Wvpina9pbB\n0kLzYeBPpLEswVtBxi+AL0XEKZJWJK0rsyowFdgtIhYVytgfOIc0u2kpcDlpujeQZkZJ2jXnuQt4\nEZgYERf2542ZmZn11l//CrvtllYBvvVWj5mpZlAENBFxC520FkXERGBiB8dfIa0101EZDwD/v717\nj6+jrPM4/vklbZImIb1Q0lBsKVLK/V4ELEgQChVEF9gtFwFX5CWgrmxVdFm5KAooIgsqrAi6iEAV\neCmCllel0lKhuJWWrShtubQUeqWFNm3SNM3l2T9+M5zJybVtknMm+b5fr3mdM888M/M8mZOZ3zzz\nzMxJO15CERGR3Pjtb+Hiiz2YefppKCvrep6BKC8uOYmIiEhrTU1wxx1w0UVQXQ2//72Cmc4ooBER\nEckzzz0HEyfCV74CU6fCww/D7rvnulT5TQGNiIhInnjlFTjnHO8jM3gwPP+8v2yyoiLXJct/CmhE\nRERy7O9/h898xu9iWrQI7r3XOwIfd1yuS5YeCmhERERy5J134Mtf9kBm1iy49VZvpbnsMijQEXqH\n5MVdTiIiIgPJxo1w551w223Q3AzXXAM33ABFRbkuWXopoBEREekjK1b4awvuv99fLvmlL8H116vD\nb09
QQCMiItLLVq6EW26B++6DoUPhc5/zJ/7us0+uS9Z/KKARERHpBU1NMGMG/PSn/lle7u9guuoq\n2G23XJeu/1FAIyIi0kNCgIUL4cEH4bHHvGXmqKP8hZJTp8Lw4bkuYf+lgEZERGQXbdsGjzwC99wD\n8+bBiBH+PJkrr/SARnqfAhoREZGdsGwZzJwJf/yjv2Oprg5OOgmeeAKmTPEH40nfUUAjIiLShRD8\n+TCPPw5//jO89JI/Q6aw0F9RcO21cNZZcPDBuS7pwKWARkREpB2LF8Ps2fDXv/orCF57zV9BMGkS\nXHEFHHQQfPzjemFkvlBAIyIiA15Li7fAzJ8PzzzjrTBvveWXjQ47zN+tdNttcOqpUFqa69JKexTQ\niIjIgNLSAq+/DkuWeOvLvHn+3qS6OjCDww/3O5ImTYLTT4chQ3JdYukOBTQiItIvNTV5x90lS2DB\nAnj5ZW+FWbYMGhs9z8iRHrhcf73fjXTssXpGTFopoBERkVQLATZtguXLPXB57jl48UVYutTfkwRQ\nWWU5J6EAABFhSURBVAmHHOJ3H40fDxMmeB+YvfbyVhlJPwU0IiKS10LwlznGrSsrV8KqVd5J9+23\nfby21vMWFMCRR0J1NXzxix64TJgAH/iAApf+TgGNiIjkREsLbNgA774L69bB6tU+xN9XrvSOuevX\ne/+W2IgRMHq0t7RMmeKtLGPH+nDggf6uJBl4FNCIiEiPa2yENWu8JeXNNz1Ieecd//7GGx6wrF8P\nDQ2t5ysvh6oqD1hGj4bjj4dRozxoOeAA2G8/ddKV9imgERGRLjU2wubN3pqyZYsHI+vW+fi772Za\nVlat8u8bNrSev6TEA5W99vKHz02eDHvsAXvv7S0uVVWw557qkCs7TwGNiEg/1tLi7xmqrfWAZOtW\nHzZu9Ms4mzd7gFJX5x1ra2p82qZN8N57/rlxo39vT3m5v3Bx9GgPSI4/HsaM8e9VVf597Fh/IJ36\nsEhvUkAjIpIHWloywUZtrQcZW7f6Z22tByXbtnnAUV+fCVJqa9vOs2lTZr6amq7XXVrqw9ChMGyY\nDyNGwLhxHqxUVHjAUlEBu++e+Rw1ylteRPLBgAtozOwLwFeBKmAR8G8hhL/mtlR9Y/r06VxwwQW5\nLkaPUX3yV1rrEoI/u2TLFu/bsW2bBwi/+c10qqsvYPt22L7dp2/b5nkaGryVI57W0JAJTJJp8Tzx\nMuvq/DLO9u3+2dDgQU1Xyso8+Cgp8c+KCu9TUl7uAUZpqV+2qajwvMOHZ+YZNszzzpkznQsvvIDy\ncp8vzS9RTOtvrSP9rT59aUAFNGZ2HvAD4HPAfGAaMNPMJoQQNnQ6cz/Q3/5RVJ/8FAI8/PB0zj33\nApqa/GC9dSvvf29qygzxQT+Z1tW0xkZveYiX1diYCQjq6lqnNTV5AFFf3/464paOOH/8sLW2pgPt\nb5viYg8KSkr8e1GRBw1lZf69qMgDhjFjPL2kJBOADB6cmV5SkglOyso8+Cgt9e+77ZZZfmHhrm+j\nm26azte/nv7fGvSf/5tYf6tPXxpQAQ0ewNwTQngAwMyuAM4ELgVuzWXBJP+F4GfQLS3+sK74e3KI\n07du7V6+eKiv9ztAnnoqM625ufX3lhY/wG/b1jqtvc/m5kwQEY83N7cdr6tru57k0NKSaanInpa9\nnDhIiB9kVlzcM3/3ggIYNCgzFBZ6MFBU5OODB2eG8vJMWnGxBwOVlf6ZXEY8xMFFchmDB7cOIIYM\ngW98A37yk0zAUl6emU/9QkTyw4AJaMxsMHA0cHOcFkIIZjYLOH5Xlx8fLJIHvfj7jnw2NPhZYjy+\nM8uID3yNja3T16yBRx9tu9z2xrO/NzVlmsSz19XdtOZmPzDu7PzJg39LCyxcCKec0npadt7s+ZKB\nRGd5k9Pjs/u+cMYZXeeJD+oFBZ1/xgfcwsKOh9LSTACQPS1eTtzi0
NVyBg/OlO3uu2HatEzgUFaW\nmZ4dUJSUtE6L88UBRUFB7//du1JR4bcMi0j+GjABDTASKATWZaWvA/bvbMbTTvOdavbZdfLgGAcz\n+W7q1B2fx8wPUiUl/ncw88/k9+58xgfZ7uTvaJqZHzzjA21lZSZ/e0NyeSUlmTPq7s5TUJA5K+9s\nnuxhyBA/KMfjcYCQHJJpxcVw5ZVw//2ZacmgIv4eH/Dz3ZNPwiWX5LoUIjKQDKSApiMGdBSKlABU\nVy9m1KjOD2CFhX5Qig+6yQNwPA5t07OnDRrkB8/uzNvZOgoL/cAXp5nBddfVcPPNCzstR9x8npye\nr6ZNq+Hqqxfmuhg7Lb5cA976tX17DevWpbc+STU1NSxc2D/qAqpPPutPdYH+VZ/FixfHX/vkXjgL\naWhW6AHRJaetwLkhhCcS6fcDQ0MIZ7czz4XAQ31WSBERkf7nUyGEh3t7JQOmhSaE0GhmC4BTgCcA\nzMyi8R92MNtM4FPAm8C2PiimiIhIf1ECjMOPpb1uwLTQAJjZVOAXwOVkbtv+Z+CAEML6XJZNRERE\ndt6AaaEBCCE8YmYjgRuBUcD/AacrmBEREUm3AdVCIyIiIv1THjzhQURERGTXKKDpgJl9wcyWm1m9\nmf3FzI7JgzKdaGZPmNkqM2sxs0+0k+dGM1ttZlvN7GkzG581fbiZPWRmNWa20czuM7OyrDyHmdnc\nqO4rzOzqXqjLNWY238w2m9k6M/utmU3IylNsZneZ2QYz22Jmj5lZZVaeMWb2BzOrM7O1ZnarmRVk\n5ak2swVmts3MXjWzT/dCfa4ws0XR37XGzOaZ2ZQ01qWdul0T/d5uT2t9zOyGqA7J4ZUU12e0mf0y\nKu/W6Ld3VFaetOwLlrezbVrM7EfR9NRsGzMrMLNvm9my6O/+upld206+VGybaD3lZnaHmb0Zlfc5\nM5uYl/UJIWjIGoDz8LuaLgEOAO4B3gNG5rhcU/D+P/8ENAOfyJr+9aicZwGHAI8DbwBFiTxPAQuB\nicCHgVeBBxPTdwPW4J2nDwSmAnXAZT1clxnAxdE6DgV+j99NNiSR57+jtJOAI4F5wJ8T0wuAl/Ee\n9IcCpwPvAN9J5BkH1OKvttgf+ALQCEzu4fqcGW2f8dHwHaABODBtdcmq1zHAMuAl4PY0bptoXTcA\nfwP2ACqjYUQa6wMMA5YD9+FPP98bOBXYJ6X7gt0T26QSv/O0GTgxhdvmP6N1TwHGAucAm4EvpnHb\nROv6dfT3nQR8EP9f2gTsmW/16dGK95cB+AtwZ2LcgJXA13JdtkSZWmgb0KwGpiXGK4B6YGo0fmA0\n35GJPKcDTUBVNH4lsAEYlMhzC/BKL9dnZFS2ExJlbwDOTuTZP8rzoWj8Y9FOaWQiz+XAxrj8wPeA\nv2Wtazowow+20bvAZ9JaF6AcWAp8FJhNFNCksT74TnhhB9NSVR/gu8CzXeRJ877gDuDVlG6bJ4F7\ns9IeAx5I47bBb7tuBKZkpb8I3Jhv9dElpyyWeefTn+K04H/dHnnnU28xs32AKlqXezPwv2TKfRyw\nMYTwUmLWWfiTko9N5JkbQki+vWgmsL+ZDe2l4oOfdQY80gffBoNoXZ+lwFu0rs/LofWb0mcCQ4GD\nE3lmZa1rJr24LaNm5/OBUuAF0luXu4AnQwjPZKVPJJ312c/8cu0bZvagmY2J0tO2fc4CXjSzR8wv\n1y40s8viiWneF0T7308BP4uS0vZbmwecYmb7AZjZ4XjLxoxoPG3bZhD+yqCGrPR64IR8q48CmrY6\ne+dTVd8Xp9uq8B9IZ+WuwptD3xdCaMaDiGSe9pYBvVR/MzP8rOy5EELcr6EK2B79c2SXZUfK2lGe\nCjProfdBOzM7xMy24P/8d+NnlUtIZ13OB44Armln8ihSVh+81fVf8TPDK4B9gLnRdfy0bZ8P4me0\nS4HTgJ8APzSzixLlSOW+ADgbD
0R+EY2n7bf2XfwSzRIz2w4sAO4IIfwqUY7UbJsQQi1+Unadme0Z\nnaxdhAcre5Jn9RlQz6HZRZ298ymfdafcXeWJ3+jUW/W/GzgIOKEbebu7HXJRnyXA4Xhr07nAA2b2\nkS7KkXd1MbMP4AHm5BBC447M2s1y9Pm2CSEkn1T6dzObD6zAr9V39BTwfK1PATA/hHBdNL7IzA7G\ng5wHuyhLvu8LLgWeCiGs7SJfvm6b84ALgfOBV/CTgjvNbHUI4ZddlCVft81FwM+BVfhlooXAw8BR\nncyTk/qohaatDXiHtFFZ6ZW0jSDzyVr8B9BZuddG4+8zs0JgeDQtztPeMqAX6m9mPwbOAKpDCKsT\nk9YCRWZW0U5ZkvXJLuuoxLSO8lQCm0MI23el7NlCCE0hhGUhhIUhhG8Ai4CrSF9djsY7zy4ws0Yz\na8Q7ZF4VnXWuA4pTVJ82Qgg1eMfE8aRv+6wBFmelLcY7ocblSOO+YCzeufneRHLats2twC0hhEdD\nCP8IITwE/BeZls7UbZsQwvIQwslAGTAmhHAcUIR3TM+r+iigyRKdkcbvfAJavfNpXq7K1ZUQQvzj\nSpa7Ar9GGZf7BWCYmR2ZmPUU/Ac5P5HnI9EPLnYasDQ6CPSYKJj5JHByCOGtrMkL8LOBZH0m4Dvt\nZH0ONX/6c7KsNWR2+C8kl5HI80JP1KELBUAx6avLLPxukSPwFqfD8U6ADya+N5Ke+rRhZuXAvniH\nxrRtn+fxjrFJ++MtTqncF0QuxQ9eMxJpads2pbRtUWghOtameNsQQqgPIawzs+H4pdvH864+Pdkj\nur8MeDN0Pa1v234X2CPH5SrDDyhH4P8k/x6Nj4mmfy0q51n4Aelx4DVa3z43Az8gHYN3VlsK/DIx\nvQLfyf8Cvwx0Hn6742d7uC5343chnIhH5vFQkpVnOVCNtxo8T9vbNRfhtwQehv+TrQO+ncgzLir/\n9/Cd/ueB7cCpPVyfm/BLZnvjty7egu+IP5q2unRQv/fvckpjfYDvAx+Jts+Hgaej8uyetvrgHWUb\n8LP+ffFLHFuA8xN5UrMviNZl+K3ZN7UzLU3b5n/wDstnRL+1s/H+IzeneNucFv1NxwGT8Uc4zAMK\n860+PVrx/jREP/g38cDmBWBiHpTpJDyQac4afp7I883oh7EV7yU+PmsZw/Az7Ro8oLgXKM3Kcyjw\nbLSMt4Cv9kJd2qtHM3BJIk8x8CP8MuAW4FGgMms5Y/Bn2NRGO7HvAQXt/N0WRNvyNeDiXqjPffjz\nWurxM5Y/EgUzaatLB/V7htYBTarqg9+iuzJaz1t4H4B9UlyfM/Dn6mwF/gFc2k6eb5KCfUG0nsnR\n///4dqalZtvgJ5234wFYXbSeb5G4HTmF2+ZfgNejv9sq4E5gt3ysj97lJCIiIqmnPjQiIiKSegpo\nREREJPUU0IiIiEjqKaARERGR1FNAIyIiIqmngEZERERSTwGNiIiIpJ4CGhEREUk9BTQiIiKSegpo\nRGTAMLPZZnZ7rsshIj1PAY2I9Akzu9zMNptZQSKtzMwazexPWXlPNrMWMxvX1+UUkXRSQCMifWU2\n/vK+iYm0E4E1wHFmVpRIPwlYEUJ4c0dXYmaDdqWQIpJOCmhEpE+EEF7Fg5fqRHI18Dj+duLjstJn\nA5jZGDP7nZltMbMaM/u1mVXGGc3sBjN7ycw+a2bLgG1ReqmZPRDNt8rMvpxdJjP7vJm9amb1ZrbW\nzB7p2VqLSF9RQCMifWkOcHJi/OQo7dk43cyKgWOBZ6I8vwOG4a05pwL7Ar/KWu544BzgbOCIKO22\naJ6zgNPwIOnoeAYzmwjcCVwLTABOB+buYv1EJEfUNCsifWkOcHvUj6YMDz7mAkXA5cC3gEnR+Bwz\nmwwcAowLIawGMLOLgX+Y2dEhhAXRcgcDF4cQ3ovylAGXAheGEOZEaZ8GVibKMgaoBf4QQqgD3gY
W\n9VK9RaSXqYVGRPpS3I/mGOAE4NUQwga8hebYqB9NNfBGCGElcADwdhzMAIQQFgObgAMTy10RBzOR\nffEgZ35ivo3A0kSep4EVwPLo0tSFZjakx2oqIn1KAY2I9JkQwhvAKvzy0sl4IEMIYQ3eQjKJRP8Z\nwIDQzqKy0+vamU4H88ZlqQWOAs4HVuOtQ4vMrKLbFRKRvKGARkT62mw8mKnGL0HF5gIfAz5EJqB5\nBRhrZnvFmczsIGBoNK0jrwNNJDoam9lwvK/M+0IILSGEZ0II/wEcDowDProTdRKRHFMfGhHpa7OB\nu/D9z7OJ9LnAj/FLRXMAQgizzOxl4CEzmxZNuwuYHUJ4qaMVhBDqzOxnwPfN7D1gPfAdoDnOY2Zn\nAh+M1rsROBNv2Vnadokiku8U0IhIX5sNlACLQwjrE+nPAuXAkhDC2kT6J4EfRdNbgKeAL3VjPVfj\n/XWeALYAPwCSl5M24XdG3RCV5zXg/KiPjoikjIXQ4SVmERERkVRQHxoRERFJPQU0IiIiknoKaERE\nRCT1FNCIiIhI6imgERERkdRTQCMiIiKpp4BGREREUk8BjYiIiKSeAhoRERFJPQU0IiIiknoKaERE\nRCT1FNCIiIhI6v0/Nc3dzWma43MAAAAASUVORK5CYII=\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7
ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkchlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4k
DZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukSxB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY
9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGooTGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEf
E0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo
+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKwBOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys
\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZgtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAg
igslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLStsDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1l
hYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCK
kfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZ
I+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIknwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQ
WgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSIlKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk
2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrzzGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHr
KcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/GDir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYO
keQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -362,7 +362,7 @@ }, { "cell_type": "code", - "execution_count": 75, + "execution_count": 73, "metadata": { "collapsed": true }, @@ -376,7 +376,7 @@ }, { "cell_type": "code", - "execution_count": 76, + "execution_count": 74, "metadata": { "collapsed": false }, @@ -385,9 +385,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "Number of authors: 2720\n", - "Number of unique tokens: 8640\n", - "Number of documents: 1740\n" + "Number of authors: 536\n", + "Number of unique tokens: 2245\n", + "Number of documents: 286\n" ] } ], @@ -406,7 +406,7 @@ }, { "cell_type": "code", - "execution_count": 94, + "execution_count": 130, "metadata": { "collapsed": false }, @@ -422,7 +422,7 @@ }, { "cell_type": "code", - "execution_count": 98, + "execution_count": 131, "metadata": { "collapsed": false }, @@ -431,16 +431,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 7min 31s, sys: 2min 6s, total: 9min 38s\n", - "Wall time: 7min 16s\n" + "CPU times: user 6.08 s, sys: 0 ns, total: 6.08 s\n", + "Wall time: 6.07 s\n" ] } ], "source": [ "%time model = AuthorTopicModel2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", " author2doc=author2doc, doc2author=doc2author, id2author=id2author, var_lambda=None, \\\n", - " distributed=False, chunksize=2000, passes=100, update_every=1, \\\n", - " 
alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, \\\n", + " chunksize=2000, passes=10, update_every=1, \\\n", + " alpha='auto', eta='symmetric', decay=0.5, offset=1.0, \\\n", " eval_every=1, iterations=10, gamma_threshold=1e-10, \\\n", " minimum_probability=0.01, random_state=1, ns_conf={}, \\\n", " minimum_phi_value=0.01, per_word_topics=False)\n" @@ -448,38 +448,37 @@ }, { "cell_type": "code", - "execution_count": 99, + "execution_count": 121, "metadata": { - "collapsed": false, - "scrolled": false + "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "[(0,\n", - " '0.018*\"hidden\" + 0.011*\"layer\" + 0.010*\"recognition\" + 0.009*\"net\" + 0.009*\"speech\" + 0.009*\"word\" + 0.009*\"hidden_unit\" + 0.006*\"sequence\" + 0.006*\"architecture\" + 0.006*\"trained\"'),\n", + " '0.013*\"hidden\" + 0.009*\"vector\" + 0.008*\"classifier\" + 0.006*\"hidden_unit\" + 0.005*\"procedure\" + 0.005*\"propagation\" + 0.005*\"back_propagation\" + 0.005*\"decision\" + 0.004*\"generalization\" + 0.004*\"test\"'),\n", " (1,\n", - " '0.022*\"neuron\" + 0.015*\"cell\" + 0.010*\"response\" + 0.010*\"spike\" + 0.008*\"stimulus\" + 0.008*\"signal\" + 0.007*\"frequency\" + 0.007*\"firing\" + 0.007*\"synaptic\" + 0.007*\"activity\"'),\n", + " '0.015*\"cell\" + 0.013*\"field\" + 0.010*\"visual\" + 0.007*\"map\" + 0.006*\"cortical\" + 0.006*\"activity\" + 0.006*\"synaptic\" + 0.005*\"synapsis\" + 0.005*\"cortex\" + 0.005*\"fig\"'),\n", " (2,\n", - " '0.014*\"circuit\" + 0.014*\"chip\" + 0.012*\"signal\" + 0.011*\"analog\" + 0.006*\"vlsi\" + 0.006*\"voltage\" + 0.006*\"motion\" + 0.005*\"code\" + 0.005*\"filter\" + 0.005*\"implementation\"'),\n", + " '0.009*\"circuit\" + 0.006*\"current\" + 0.006*\"dynamic\" + 0.006*\"node\" + 0.005*\"processor\" + 0.005*\"hidden\" + 0.005*\"classifier\" + 0.005*\"vector\" + 0.004*\"pulse\" + 0.004*\"machine\"'),\n", " (3,\n", - " '0.014*\"cell\" + 0.012*\"layer\" + 0.012*\"neuron\" + 0.009*\"map\" + 0.008*\"connection\" + 
0.007*\"orientation\" + 0.006*\"cortical\" + 0.005*\"region\" + 0.005*\"net\" + 0.005*\"self\"'),\n", + " '0.013*\"node\" + 0.006*\"activation\" + 0.006*\"memory\" + 0.005*\"classifier\" + 0.005*\"vector\" + 0.004*\"bit\" + 0.004*\"adaptive\" + 0.004*\"tree\" + 0.004*\"element\" + 0.004*\"neural_net\"'),\n", " (4,\n", - " '0.008*\"gaussian\" + 0.007*\"mixture\" + 0.007*\"density\" + 0.006*\"matrix\" + 0.006*\"component\" + 0.006*\"estimate\" + 0.006*\"likelihood\" + 0.005*\"prior\" + 0.005*\"noise\" + 0.005*\"variance\"'),\n", + " '0.022*\"cell\" + 0.012*\"firing\" + 0.010*\"stimulus\" + 0.010*\"response\" + 0.008*\"potential\" + 0.007*\"current\" + 0.007*\"spike\" + 0.007*\"cortex\" + 0.007*\"activity\" + 0.006*\"synaptic\"'),\n", " (5,\n", - " '0.026*\"image\" + 0.013*\"object\" + 0.012*\"visual\" + 0.010*\"field\" + 0.007*\"direction\" + 0.006*\"map\" + 0.006*\"position\" + 0.005*\"motion\" + 0.005*\"spatial\" + 0.005*\"response\"'),\n", + " '0.007*\"memory\" + 0.006*\"fig\" + 0.006*\"circuit\" + 0.005*\"analog\" + 0.005*\"chip\" + 0.005*\"matrix\" + 0.005*\"threshold\" + 0.005*\"cell\" + 0.004*\"response\" + 0.004*\"hopfield\"'),\n", " (6,\n", - " '0.007*\"bound\" + 0.006*\"let\" + 0.005*\"solution\" + 0.005*\"class\" + 0.005*\"generalization\" + 0.005*\"theorem\" + 0.005*\"xi\" + 0.004*\"matrix\" + 0.004*\"optimal\" + 0.003*\"convergence\"'),\n", + " '0.008*\"node\" + 0.008*\"vector\" + 0.006*\"direction\" + 0.006*\"memory\" + 0.006*\"noise\" + 0.005*\"activation\" + 0.005*\"cell\" + 0.004*\"fig\" + 0.004*\"associative\" + 0.004*\"matrix\"'),\n", " (7,\n", - " '0.014*\"classifier\" + 0.009*\"class\" + 0.008*\"classification\" + 0.007*\"rule\" + 0.006*\"control\" + 0.006*\"trajectory\" + 0.005*\"trained\" + 0.005*\"robot\" + 0.005*\"character\" + 0.004*\"decision\"'),\n", + " '0.015*\"memory\" + 0.009*\"vector\" + 0.007*\"probability\" + 0.006*\"distribution\" + 0.005*\"hidden\" + 0.005*\"class\" + 0.005*\"energy\" + 0.004*\"node\" + 0.004*\"capacity\" + 
0.004*\"hidden_unit\"'),\n", " (8,\n", - " '0.011*\"action\" + 0.009*\"policy\" + 0.007*\"optimal\" + 0.007*\"reinforcement\" + 0.007*\"control\" + 0.005*\"reinforcement_learning\" + 0.004*\"reward\" + 0.004*\"decision\" + 0.004*\"prediction\" + 0.004*\"search\"'),\n", + " '0.016*\"image\" + 0.013*\"recognition\" + 0.011*\"hidden\" + 0.011*\"speech\" + 0.009*\"object\" + 0.005*\"trained\" + 0.005*\"propagation\" + 0.005*\"hidden_layer\" + 0.004*\"frame\" + 0.004*\"hidden_unit\"'),\n", " (9,\n", - " '0.016*\"memory\" + 0.013*\"neuron\" + 0.009*\"node\" + 0.006*\"dynamic\" + 0.006*\"control\" + 0.005*\"connection\" + 0.005*\"bit\" + 0.005*\"capacity\" + 0.004*\"net\" + 0.004*\"activation\"')]" + " '0.008*\"cell\" + 0.008*\"map\" + 0.005*\"vector\" + 0.005*\"fig\" + 0.005*\"activity\" + 0.004*\"matrix\" + 0.004*\"region\" + 0.004*\"field\" + 0.004*\"memory\" + 0.004*\"eye\"')]" ] }, - "execution_count": 99, + "execution_count": 121, "metadata": {}, "output_type": "execute_result" } @@ -490,7 +489,7 @@ }, { "cell_type": "code", - "execution_count": 100, + "execution_count": 96, "metadata": { "collapsed": false }, @@ -501,28 +500,29 @@ "text": [ "\n", "Yaser S.Abu-Mostafa\n", - "Docs: [643, 1161]\n", - "[(4, 0.19559840654935753),\n", - " (6, 0.22443733229655313),\n", - " (7, 0.53773779059283966),\n", - " (8, 0.03154799178372595)]\n", + "Docs: [62]\n", + "[(1, 0.017412267312877025),\n", + " (2, 0.016179010242631686),\n", + " (3, 0.036699986691635919),\n", + " (5, 0.070031308676836254),\n", + " (6, 0.033023933328145814),\n", + " (7, 0.77953147445231008),\n", + " (9, 0.038343348900076533)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [143, 284, 230, 197]\n", - "[(0, 0.84493527369383181), (2, 0.07126593370576316), (5, 0.08343994928990435)]\n", + "[(8, 0.99937742034674781)]\n", "\n", "Michael I. 
Jordan\n", "Docs: [237]\n", - "[(0, 0.026658101414032433),\n", - " (1, 0.021215048465908881),\n", - " (4, 0.019387287171330789),\n", - " (7, 0.19687832498382354),\n", - " (8, 0.29243919850727573),\n", - " (9, 0.43454557999851595)]\n", + "[(0, 0.23028565955735972),\n", + " (6, 0.28588276328170809),\n", + " (8, 0.015124405175832004),\n", + " (9, 0.45898140870100651)]\n", "\n", "James M. Bower\n", "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(1, 0.90286561460378767), (5, 0.096887985740307506)]\n" + "[(1, 0.30674043606317025), (4, 0.69291925103790253)]\n" ] } ], @@ -550,7 +550,7 @@ }, { "cell_type": "code", - "execution_count": 95, + "execution_count": 29, "metadata": { "collapsed": false }, @@ -559,8 +559,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 50.8 s, sys: 11.9 s, total: 1min 2s\n", - "Wall time: 49.5 s\n" + "CPU times: user 7.32 s, sys: 4 ms, total: 7.32 s\n", + "Wall time: 7.33 s\n" ] } ], @@ -573,7 +573,7 @@ }, { "cell_type": "code", - "execution_count": 96, + "execution_count": 24, "metadata": { "collapsed": false, "scrolled": false @@ -583,28 +583,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.006*\"word\" + 0.005*\"tree\" + 0.004*\"recognition\" + 0.004*\"speech\" + 0.003*\"class\" + 0.003*\"node\" + 0.003*\"layer\" + 0.003*\"context\" + 0.003*\"hmm\" + 0.003*\"target\"'),\n", + " '0.016*\"vector\" + 0.012*\"memory\" + 0.007*\"associative\" + 0.005*\"control\" + 0.005*\"constraint\" + 0.005*\"recognition\" + 0.005*\"chip\" + 0.005*\"image\" + 0.004*\"hidden\" + 0.004*\"machine\"'),\n", " (1,\n", - " '0.013*\"cell\" + 0.009*\"neuron\" + 0.005*\"connection\" + 0.004*\"layer\" + 0.004*\"cortex\" + 0.004*\"signal\" + 0.004*\"response\" + 0.004*\"simulation\" + 0.004*\"map\" + 0.003*\"recognition\"'),\n", + " '0.014*\"memory\" + 0.007*\"probability\" + 0.007*\"vector\" + 0.005*\"chip\" + 0.005*\"pulse\" + 0.005*\"fig\" + 0.004*\"node\" + 0.004*\"cell\" + 0.004*\"capacity\" + 0.004*\"matrix\"'),\n", " (2,\n", - " 
'0.005*\"gaussian\" + 0.004*\"matrix\" + 0.004*\"hidden\" + 0.004*\"approximation\" + 0.003*\"bound\" + 0.003*\"generalization\" + 0.003*\"noise\" + 0.003*\"class\" + 0.003*\"prior\" + 0.003*\"xi\"'),\n", + " '0.017*\"classifier\" + 0.015*\"circuit\" + 0.006*\"noise\" + 0.006*\"current\" + 0.006*\"fig\" + 0.006*\"node\" + 0.005*\"gaussian\" + 0.005*\"speech\" + 0.005*\"propagation\" + 0.005*\"decision\"'),\n", " (3,\n", - " '0.006*\"signal\" + 0.005*\"speech\" + 0.004*\"visual\" + 0.004*\"position\" + 0.004*\"image\" + 0.004*\"response\" + 0.003*\"stimulus\" + 0.003*\"recognition\" + 0.003*\"motion\" + 0.003*\"component\"'),\n", + " '0.008*\"cell\" + 0.008*\"fig\" + 0.006*\"vector\" + 0.006*\"hidden\" + 0.006*\"velocity\" + 0.005*\"operator\" + 0.005*\"image\" + 0.004*\"activation\" + 0.004*\"receptor\" + 0.004*\"delay\"'),\n", " (4,\n", - " '0.016*\"neuron\" + 0.007*\"circuit\" + 0.007*\"cell\" + 0.006*\"signal\" + 0.006*\"spike\" + 0.005*\"response\" + 0.005*\"chip\" + 0.005*\"analog\" + 0.005*\"voltage\" + 0.005*\"synaptic\"'),\n", + " '0.009*\"image\" + 0.009*\"match\" + 0.008*\"processor\" + 0.007*\"classifier\" + 0.007*\"node\" + 0.007*\"element\" + 0.006*\"activation\" + 0.006*\"link\" + 0.005*\"nat\" + 0.005*\"fig\"'),\n", " (5,\n", - " '0.005*\"net\" + 0.004*\"threshold\" + 0.004*\"class\" + 0.004*\"node\" + 0.003*\"theorem\" + 0.003*\"layer\" + 0.003*\"image\" + 0.003*\"bound\" + 0.003*\"sample\" + 0.003*\"estimate\"'),\n", + " '0.011*\"field\" + 0.010*\"cell\" + 0.007*\"synaptic\" + 0.005*\"cortical\" + 0.005*\"visual\" + 0.005*\"activity\" + 0.005*\"eye\" + 0.005*\"synapsis\" + 0.004*\"map\" + 0.004*\"phase\"'),\n", " (6,\n", - " '0.008*\"image\" + 0.006*\"recognition\" + 0.005*\"rule\" + 0.004*\"classification\" + 0.004*\"class\" + 0.004*\"character\" + 0.003*\"classifier\" + 0.003*\"layer\" + 0.003*\"matrix\" + 0.003*\"distance\"'),\n", + " '0.006*\"hidden\" + 0.006*\"recognition\" + 0.005*\"map\" + 0.005*\"vector\" + 0.005*\"node\" + 0.004*\"object\" 
+ 0.004*\"speech\" + 0.004*\"matrix\" + 0.003*\"class\" + 0.003*\"sequence\"'),\n", " (7,\n", - " '0.008*\"action\" + 0.006*\"layer\" + 0.006*\"policy\" + 0.006*\"control\" + 0.004*\"neuron\" + 0.004*\"architecture\" + 0.004*\"net\" + 0.004*\"hidden\" + 0.003*\"cell\" + 0.003*\"reinforcement\"'),\n", + " '0.013*\"role\" + 0.009*\"motion\" + 0.008*\"source\" + 0.007*\"regular\" + 0.007*\"visual\" + 0.006*\"markov\" + 0.006*\"threshold\" + 0.006*\"node\" + 0.005*\"code\" + 0.005*\"depth\"'),\n", " (8,\n", - " '0.005*\"image\" + 0.004*\"object\" + 0.003*\"hidden\" + 0.003*\"ii\" + 0.003*\"visual\" + 0.003*\"component\" + 0.003*\"neuron\" + 0.003*\"activity\" + 0.003*\"cell\" + 0.003*\"sequence\"'),\n", + " '0.028*\"cell\" + 0.013*\"response\" + 0.013*\"stimulus\" + 0.010*\"spike\" + 0.009*\"firing\" + 0.009*\"current\" + 0.009*\"image\" + 0.009*\"potential\" + 0.006*\"activity\" + 0.006*\"membrane\"'),\n", " (9,\n", - " '0.004*\"hidden\" + 0.004*\"rule\" + 0.004*\"classifier\" + 0.004*\"class\" + 0.004*\"optimal\" + 0.003*\"prediction\" + 0.003*\"control\" + 0.003*\"estimate\" + 0.003*\"noise\" + 0.003*\"generalization\"')]" + " '0.014*\"hidden\" + 0.009*\"hidden_unit\" + 0.008*\"distribution\" + 0.008*\"node\" + 0.007*\"image\" + 0.006*\"activation\" + 0.006*\"propagation\" + 0.006*\"back_propagation\" + 0.005*\"speech\" + 0.005*\"sample\"')]" ] }, - "execution_count": 96, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } @@ -615,7 +615,7 @@ }, { "cell_type": "code", - "execution_count": 97, + "execution_count": 25, "metadata": { "collapsed": false }, @@ -626,44 +626,41 @@ "text": [ "\n", "Yaser S.Abu-Mostafa\n", - "Docs: [643, 1161]\n", - "[(0, 0.015493433113318947),\n", - " (2, 0.02068848386807097),\n", - " (3, 0.014477887090769973),\n", - " (5, 0.76217743188283804),\n", - " (6, 0.056071929501884839),\n", - " (7, 0.034542230987795401),\n", - " (8, 0.034205908519518852),\n", - " (9, 0.051387058690322375)]\n", + "Docs: [62]\n", + "[(0, 
0.12257888012385142),\n", + " (1, 0.18839815551960026),\n", + " (2, 0.036637297625550132),\n", + " (3, 0.015498644507138377),\n", + " (4, 0.072386020997623229),\n", + " (5, 0.075906889662321148),\n", + " (6, 0.34904030995007596),\n", + " (7, 0.022928611918427422),\n", + " (8, 0.068558327925279966),\n", + " (9, 0.048066861770131898)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [143, 284, 230, 197]\n", - "[(0, 0.010876412473090083),\n", - " (1, 0.025987546707861891),\n", - " (2, 0.16269958999503972),\n", - " (3, 0.040154749497748242),\n", - " (5, 0.038497018610811631),\n", - " (6, 0.32616150298201085),\n", - " (7, 0.17789752502782427),\n", - " (8, 0.10484007949328089),\n", - " (9, 0.10360838755269398)]\n", + "[(0, 0.017941678995404806),\n", + " (1, 0.037633561393485344),\n", + " (2, 0.040229587442296211),\n", + " (6, 0.86860470412607893)]\n", "\n", "Michael I. Jordan\n", "Docs: [237]\n", - "[(0, 0.065523264868966244),\n", - " (1, 0.057277736718350389),\n", - " (2, 0.049434602727837465),\n", - " (3, 0.057202259043067194),\n", - " (4, 0.048998371270245894),\n", - " (5, 0.054599275818053232),\n", - " (6, 0.055707885269878736),\n", - " (7, 0.33348117191683641),\n", - " (8, 0.15381463451838998),\n", - " (9, 0.12396079784837437)]\n", + "[(0, 0.23458706049871778),\n", + " (1, 0.028074129427662773),\n", + " (2, 0.092872627256054469),\n", + " (3, 0.060285180922721039),\n", + " (4, 0.05771159235103844),\n", + " (5, 0.27973835933458052),\n", + " (6, 0.13822500759562903),\n", + " (7, 0.015239077050084931),\n", + " (8, 0.052855346935884104),\n", + " (9, 0.040411618627626808)]\n", "\n", "James M. 
Bower\n", "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(1, 0.9819387900380987), (4, 0.013399430252372287)]\n" + "[(5, 0.11028614207196175), (6, 0.41931814978983267), (8, 0.4701441896634681)]\n" ] } ], @@ -893,7 +890,7 @@ }, { "cell_type": "code", - "execution_count": 89, + "execution_count": 79, "metadata": { "collapsed": false }, @@ -905,7 +902,7 @@ }, { "cell_type": "code", - "execution_count": 90, + "execution_count": 88, "metadata": { "collapsed": false }, @@ -914,75 +911,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(670)update()\n", - "-> gammat = self.do_estep(chunk, other, chunk_no, rho=rho())\n", - "(Pdb) rho()\n", - "1.0\n", - "(Pdb) s\n", - "--Call--\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(642)rho()\n", - "-> def rho():\n", - "(Pdb) s\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(643)rho()\n", - "-> return pow(offset + pass_ + (self.num_updates / chunksize), -decay)\n", - "(Pdb) s\n", - "--Return--\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(643)rho()->1.0\n", - "-> return pow(offset + pass_ + (self.num_updates / chunksize), -decay)\n", - "(Pdb) s\n", - "--Call--\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(500)do_estep()\n", - "-> def do_estep(self, chunk, state=None, chunk_no=None, rho=None):\n", - "(Pdb) s\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(506)do_estep()\n", - "-> if state is None:\n", - "(Pdb) s\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(508)do_estep()\n", - "-> gamma, sstats = self.inference(chunk, collect_sstats=True, chunk_no=None, rho=rho)\n", - "(Pdb) s\n", - "--Call--\n", - "> 
/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(410)inference()\n", - "-> def inference(self, chunk, collect_sstats=False, chunk_no=None, rho=None):\n", - "(Pdb) s\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(428)inference()\n", - "-> try:\n", - "(Pdb) s\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(429)inference()\n", - "-> _ = len(chunk)\n", - "(Pdb) rho\n", - "1.0\n", - "(Pdb) exit\n", - "> /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py(669)update()\n", - "-> from pdb import set_trace; set_trace()\n", - "(Pdb) exit\n" - ] - }, - { - "ename": "BdbQuit", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mBdbQuit\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mget_ipython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmagic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, iterations=1, alpha='symmetric', eta='symmetric', eval_every=0, chunksize=2000, random_state=0)\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mmagic\u001b[0;34m(self, arg_s)\u001b[0m\n\u001b[1;32m 2156\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0marg_s\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpartition\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m' 
'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2157\u001b[0m \u001b[0mmagic_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmagic_name\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprefilter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mESC_MAGIC\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2158\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_line_magic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmagic_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmagic_arg_s\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2160\u001b[0m \u001b[0;31m#-------------------------------------------------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/interactiveshell.py\u001b[0m in \u001b[0;36mrun_line_magic\u001b[0;34m(self, magic_name, line)\u001b[0m\n\u001b[1;32m 2077\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'local_ns'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_getframe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstack_depth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf_locals\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2078\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuiltin_trap\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2079\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2080\u001b[0m 
\u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2081\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magic.py\u001b[0m in \u001b[0;36m\u001b[0;34m(f, *a, **k)\u001b[0m\n\u001b[1;32m 186\u001b[0m \u001b[0;31m# but it's overkill for just that one bit of state.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mmagic_deco\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 188\u001b[0;31m \u001b[0mcall\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 189\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcallable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/.virtualenvs/thesis/lib/python3.5/site-packages/IPython/core/magics/execution.py\u001b[0m in \u001b[0;36mtime\u001b[0;34m(self, line, cell, local_ns)\u001b[0m\n\u001b[1;32m 1178\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m \u001b[0mst\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1180\u001b[0;31m 
\u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocal_ns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclock2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, corpus, num_topics, id2word, distributed, chunksize, passes, update_every, alpha, eta, decay, offset, eval_every, iterations, gamma_threshold, minimum_probability, random_state, ns_conf, minimum_phi_value, per_word_topics)\u001b[0m\n\u001b[1;32m 355\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcorpus\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 356\u001b[0m \u001b[0muse_numpy\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdispatcher\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 357\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcorpus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunks_as_numpy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0muse_numpy\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 358\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 359\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0minit_dir_prior\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mprior\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py\u001b[0m in \u001b[0;36mupdate\u001b[0;34m(self, corpus, chunksize, decay, offset, passes, update_every, eval_every, iterations, gamma_threshold, chunks_as_numpy)\u001b[0m\n\u001b[1;32m 667\u001b[0m logger.info('PROGRESS: pass %i, at document #%i/%i',\n\u001b[1;32m 668\u001b[0m pass_, chunk_no * chunksize + len(chunk), lencorpus)\n\u001b[0;32m--> 669\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mpdb\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mset_trace\u001b[0m\u001b[0;34m;\u001b[0m \u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 670\u001b[0m \u001b[0mgammat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdo_estep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchunk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mother\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunk_no\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrho\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrho\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 671\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/ldamodel.py\u001b[0m in \u001b[0;36mupdate\u001b[0;34m(self, corpus, chunksize, decay, offset, passes, update_every, eval_every, iterations, gamma_threshold, chunks_as_numpy)\u001b[0m\n\u001b[1;32m 667\u001b[0m logger.info('PROGRESS: pass %i, at document #%i/%i',\n\u001b[1;32m 668\u001b[0m pass_, chunk_no * chunksize + len(chunk), lencorpus)\n\u001b[0;32m--> 669\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mpdb\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mset_trace\u001b[0m\u001b[0;34m;\u001b[0m 
\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 670\u001b[0m \u001b[0mgammat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdo_estep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mchunk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mother\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mchunk_no\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrho\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrho\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 671\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/lib/python3.5/bdb.py\u001b[0m in \u001b[0;36mtrace_dispatch\u001b[0;34m(self, frame, event, arg)\u001b[0m\n\u001b[1;32m 46\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;31m# None\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mevent\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'line'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 48\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdispatch_line\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 49\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mevent\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'call'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 50\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdispatch_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0marg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/lib/python3.5/bdb.py\u001b[0m in \u001b[0;36mdispatch_line\u001b[0;34m(self, frame)\u001b[0m\n\u001b[1;32m 65\u001b[0m \u001b[0;32mif\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstop_here\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbreak_here\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 66\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0muser_line\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mframe\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 67\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mquitting\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mBdbQuit\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 68\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrace_dispatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 69\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mBdbQuit\u001b[0m: " + "CPU times: user 21.7 s, sys: 7.6 s, total: 29.3 s\n", + "Wall time: 20.8 s\n" ] } ], "source": [ - "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", - " iterations=1, alpha='symmetric', eta='symmetric', eval_every=0, chunksize=2000, random_state=0)" + "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, gamma_threshold=1e-10, \\\n", + " iterations=10, alpha='symmetric', eta='symmetric', eval_every=0, chunksize=2000, random_state=1)" ] }, { @@ -1031,7 +967,7 @@ }, { "cell_type": "code", - "execution_count": 155, + "execution_count": 89, "metadata": { "collapsed": false }, @@ -1040,28 +976,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.004*neuron + 0.003*image + 0.003*layer + 0.003*field + 0.003*class + 0.003*cell + 0.003*signal + 0.003*noise + 0.003*hidden + 0.002*node'),\n", + " '0.013*\"cell\" + 0.011*\"neuron\" + 0.008*\"visual\" + 
0.008*\"response\" + 0.007*\"stimulus\" + 0.006*\"activity\" + 0.006*\"field\" + 0.004*\"motion\" + 0.004*\"cortex\" + 0.004*\"layer\"'),\n", " (1,\n", - " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*class + 0.003*signal + 0.003*matrix + 0.003*layer + 0.003*noise + 0.002*hidden + 0.002*recognition'),\n", + " '0.006*\"bound\" + 0.005*\"class\" + 0.005*\"node\" + 0.004*\"generalization\" + 0.004*\"sample\" + 0.004*\"let\" + 0.004*\"estimate\" + 0.004*\"tree\" + 0.004*\"approximation\" + 0.004*\"theorem\"'),\n", " (2,\n", - " '0.004*cell + 0.003*neuron + 0.003*matrix + 0.003*signal + 0.003*image + 0.003*hidden + 0.002*rule + 0.002*response + 0.002*field + 0.002*dynamic'),\n", + " '0.009*\"class\" + 0.007*\"recognition\" + 0.007*\"classifier\" + 0.005*\"classification\" + 0.005*\"word\" + 0.005*\"distance\" + 0.005*\"image\" + 0.005*\"hidden\" + 0.004*\"character\" + 0.004*\"trained\"'),\n", " (3,\n", - " '0.005*neuron + 0.003*layer + 0.003*image + 0.003*cell + 0.002*class + 0.002*net + 0.002*hidden + 0.002*control + 0.002*sequence + 0.002*response'),\n", + " '0.021*\"image\" + 0.006*\"gaussian\" + 0.005*\"face\" + 0.005*\"component\" + 0.004*\"matrix\" + 0.003*\"prior\" + 0.003*\"density\" + 0.003*\"noise\" + 0.003*\"hidden\" + 0.003*\"object\"'),\n", " (4,\n", - " '0.004*layer + 0.003*image + 0.003*neuron + 0.003*cell + 0.003*hidden + 0.003*signal + 0.003*component + 0.002*recognition + 0.002*net + 0.002*node'),\n", + " '0.009*\"control\" + 0.006*\"action\" + 0.006*\"policy\" + 0.005*\"optimal\" + 0.005*\"dynamic\" + 0.005*\"reinforcement\" + 0.005*\"signal\" + 0.004*\"controller\" + 0.004*\"noise\" + 0.003*\"trajectory\"'),\n", " (5,\n", - " '0.005*image + 0.004*neuron + 0.004*layer + 0.003*hidden + 0.003*cell + 0.002*control + 0.002*class + 0.002*net + 0.002*noise + 0.002*signal'),\n", + " '0.009*\"memory\" + 0.004*\"rule\" + 0.004*\"net\" + 0.004*\"bit\" + 0.004*\"layer\" + 0.004*\"architecture\" + 0.004*\"recognition\" + 0.003*\"matrix\" + 
0.003*\"processor\" + 0.003*\"machine\"'),\n", " (6,\n", - " '0.005*neuron + 0.005*layer + 0.004*hidden + 0.003*image + 0.003*cell + 0.003*class + 0.003*rule + 0.002*noise + 0.002*net + 0.002*matrix'),\n", + " '0.007*\"hidden\" + 0.007*\"layer\" + 0.007*\"speech\" + 0.006*\"node\" + 0.006*\"net\" + 0.005*\"word\" + 0.004*\"sequence\" + 0.004*\"activation\" + 0.004*\"context\" + 0.004*\"language\"'),\n", " (7,\n", - " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*hidden + 0.003*recognition + 0.003*field + 0.003*layer + 0.002*noise + 0.002*node + 0.002*component'),\n", + " '0.010*\"neuron\" + 0.008*\"circuit\" + 0.008*\"signal\" + 0.006*\"voltage\" + 0.006*\"channel\" + 0.006*\"chip\" + 0.005*\"analog\" + 0.004*\"frequency\" + 0.004*\"cell\" + 0.004*\"spike\"'),\n", " (8,\n", - " '0.004*neuron + 0.003*image + 0.003*signal + 0.003*recognition + 0.003*cell + 0.003*layer + 0.003*noise + 0.003*rule + 0.002*class + 0.002*hidden'),\n", + " '0.008*\"object\" + 0.005*\"mixture\" + 0.004*\"hidden\" + 0.004*\"likelihood\" + 0.004*\"recognition\" + 0.004*\"em\" + 0.003*\"gaussian\" + 0.003*\"matrix\" + 0.003*\"view\" + 0.003*\"component\"'),\n", " (9,\n", - " '0.005*neuron + 0.004*class + 0.003*layer + 0.003*image + 0.003*cell + 0.002*hidden + 0.002*signal + 0.002*control + 0.002*field + 0.002*net')]" + " '0.011*\"neuron\" + 0.006*\"dynamic\" + 0.005*\"matrix\" + 0.004*\"noise\" + 0.004*\"solution\" + 0.003*\"field\" + 0.003*\"condition\" + 0.003*\"gradient\" + 0.003*\"convergence\" + 0.003*\"limit\"')]" ] }, - "execution_count": 155, + "execution_count": 89, "metadata": {}, "output_type": "execute_result" } diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index 7cedd6c3de..1a222fd14b 100644 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -197,13 +197,16 @@ def inference(self, corpus=None, var_lambda=None): self.norm_lambda = var_lambda.copy() for k in xrange(self.num_topics): self.norm_lambda[k, :] = var_lambda[k, :] / 
var_lambda.sum(axis=1)[k] + + #var_lambda += self.eta + sstats_global = var_lambda.copy() + self.var_lambda = var_lambda # Initialize dirichlet expectations. Elogtheta = dirichlet_expectation(var_gamma) - #Elogbeta = dirichlet_expectation(var_lambda) - Elogbeta = dirichlet_expectation(var_lambda + self.eta) + Elogbeta = dirichlet_expectation(var_lambda) if numstable_sm: maxElogtheta = Elogtheta.max() maxElogbeta = Elogbeta.max() @@ -220,6 +223,9 @@ def inference(self, corpus=None, var_lambda=None): bound = word_bound + theta_bound + beta_bound perwordbound = bound / corpus_words logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) + #var_lambda -= self.eta + #Elogbeta = dirichlet_expectation(var_lambda) + #expElogbeta = numpy.exp(Elogbeta) for _pass in xrange(self.passes): converged = 0 # Number of documents converged for current pass over corpus. for chunk_no, chunk in enumerate(utils.grouper(corpus, self.chunksize, as_numpy=False)): @@ -266,14 +272,13 @@ def inference(self, corpus=None, var_lambda=None): # Check for convergence. # Criterion is mean change in "local" gamma and lambda. - if iteration > 0: - meanchange_gamma = numpy.mean(abs(tilde_gamma[authors_d, :] - lastgamma)) - gamma_condition = meanchange_gamma < self.threshold - # logger.info('Mean change in gamma: %.3e', meanchange_gamma) - if gamma_condition: - # logger.info('Converged after %d iterations.', iteration) - converged += 1 - break + meanchange_gamma = numpy.mean(abs(tilde_gamma[authors_d, :] - lastgamma)) + gamma_condition = meanchange_gamma < self.threshold + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + if gamma_condition: + # logger.info('Converged after %d iterations.', iteration) + converged += 1 + break # End of iterations loop. 
var_gamma = tilde_gamma.copy() @@ -284,6 +289,11 @@ def inference(self, corpus=None, var_lambda=None): if self.optimize_lambda: # Update lambda. + #sstats *= expElogbeta + #sstats_global = (1 - rhot) * sstats_global + rhot * sstats + #var_lambda = sstats + self.eta + #Elogbeta = dirichlet_expectation(var_lambda) + #expElogbeta = numpy.exp(Elogbeta) sstats *= expElogbeta # Find the ids of the words that are to be updated per this chunk, and update @@ -305,7 +315,7 @@ def inference(self, corpus=None, var_lambda=None): expElogbeta = numpy.exp(Elogbeta - maxElogbeta) else: expElogbeta = numpy.exp(Elogbeta) - var_lambda = var_lambda.copy() + #var_lambda = var_lambda.copy() # Print topics: # pprint(self.show_topics()) diff --git a/gensim/models/atmodel2.py b/gensim/models/atmodel2.py index 9e486ed48e..20d275c370 100755 --- a/gensim/models/atmodel2.py +++ b/gensim/models/atmodel2.py @@ -20,24 +20,15 @@ import numpy as np # for arrays, array broadcasting etc. import numbers -from gensim import interfaces, utils, matutils +from gensim import utils from gensim.models import LdaModel from gensim.models.ldamodel import dirichlet_expectation, get_random_state, LdaState from itertools import chain -from scipy.special import gammaln, psi # gamma function utils -from scipy.special import polygamma +from scipy.special import gammaln # gamma function utils from six.moves import xrange import six -# log(sum(exp(x))) that tries to avoid overflow -try: - # try importing from here if older scipy is installed - from scipy.maxentropy import logsumexp -except ImportError: - # maxentropy has been removed in recent releases, logsumexp now in misc - from scipy.misc import logsumexp - -logger = logging.getLogger('gensim.models.atmodel') +logger = logging.getLogger('gensim.models.atmodel2') class AuthorTopicState(LdaState): """ @@ -53,19 +44,50 @@ def __init__(self, eta, lambda_shape, gamma_shape): self.gamma = np.zeros(gamma_shape) self.numdocs = 0 +def construct_doc2author(corpus, 
author2doc): + """Make a mapping from document IDs to author IDs.""" + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + return doc2author + +def construct_author2doc(corpus, doc2author): + """Make a mapping from author IDs to document IDs.""" + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. + author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + return author2doc class AuthorTopicModel2(LdaModel): """ """ def __init__(self, corpus=None, num_topics=100, id2word=None, author2doc=None, doc2author=None, id2author=None, var_lambda=None, - distributed=False, chunksize=2000, passes=1, update_every=1, + chunksize=2000, passes=1, update_every=1, alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, eval_every=10, iterations=50, gamma_threshold=0.001, minimum_probability=0.01, random_state=None, ns_conf={}, minimum_phi_value=0.01, per_word_topics=False): """ """ + + distributed = False # TODO: implement distributed version. + self.id2word = id2word if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') @@ -87,33 +109,13 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, if doc2author is None and author2doc is None: raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') - # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). # If either doc2author or author2doc is missing, construct them from the other. 
+ # FIXME: make the code below into methods, so the user can construct either doc2author or author2doc *once* and then not worry about it. + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). if doc2author is None: - # Make a mapping from document IDs to author IDs. - doc2author = {} - for d, _ in enumerate(corpus): - author_ids = [] - for a, a_doc_ids in author2doc.items(): - if d in a_doc_ids: - author_ids.append(a) - doc2author[d] = author_ids + doc2author = construct_doc2author(corpus, author2doc) elif author2doc is None: - # Make a mapping from author IDs to document IDs. - - # First get a set of all authors. - authors_ids = set() - for d, a_doc_ids in doc2author.items(): - for a in a_doc_ids: - authors_ids.add(a) - - # Now construct the dictionary. - author2doc = {} - for a in range(len(authors_ids)): - author2doc[a] = [] - for d, a_ids in doc2author.items(): - if a in a_ids: - author2doc[a].append(d) + author2doc = construct_author2doc(corpus, doc2author) self.author2doc = author2doc self.doc2author = doc2author @@ -130,7 +132,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, # Make the reverse mapping, from author names to author IDs. self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) - self.distributed = False # NOTE: distributed not yet implemented. + self.distributed = distributed self.num_topics = num_topics self.chunksize = chunksize self.decay = decay @@ -212,7 +214,8 @@ def inference(self, chunk, collect_sstats=False, chunk_no=None): If `collect_sstats` is True, also collect sufficient statistics needed to update the model's topic-word distributions, and return a 2-tuple `(gamma, sstats)`. Otherwise, return `(gamma, None)`. `gamma` is of shape - `len(chunk) x self.num_topics`. + `len(chunk_authors) x self.num_topics`, where `chunk_authors` is the number + of authors in the documents in the current chunk. 
Avoids computing the `phi` variational parameter directly using the optimization presented in **Lee, Seung: Algorithms for non-negative matrix factorization, NIPS 2001**. @@ -278,21 +281,19 @@ def inference(self, chunk, collect_sstats=False, chunk_no=None): # Check for convergence. # Criterion is mean change in "local" gamma and lambda. - # TODO: this if statement shouldn't be necessary. Isn't used in LDA. - if iteration > 0: - meanchange_gamma = np.mean(abs(tilde_gamma - lastgamma)) - gamma_condition = meanchange_gamma < self.gamma_threshold - # logger.info('Mean change in gamma: %.3e', meanchange_gamma) - if gamma_condition: - # logger.info('Converged after %d iterations.', iteration) - converged += 1 - break + meanchange_gamma = np.mean(abs(tilde_gamma - lastgamma)) + gamma_condition = meanchange_gamma < self.gamma_threshold + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + if gamma_condition: + # logger.info('Converged after %d iterations.', iteration) + converged += 1 + break # End of iterations loop. self.state.gamma[authors_d, :] = tilde_gamma - # NOTE: this may be slow. Especially when there are many authors per document. - # It is imporant to find a faster way to handle this. + # NOTE: this may be slow. Especially when there are many authors per document. It is + # imporant to find a faster way to handle this. chunk_authors = chunk_authors.union(set(authors_d)) if collect_sstats: @@ -327,7 +328,7 @@ def do_estep(self, chunk, state=None, chunk_no=None): state.numdocs += len(chunk) return gamma - def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0): + def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2author=None, ): """ Estimate the variational bound of documents from `corpus`: E_q[log p(corpus)] - E_q[log q(corpus)] @@ -336,15 +337,31 @@ def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0): document (=2d matrix=what comes out of `inference()`). 
If not supplied, will be inferred from the model. + Computing the bound of unseen data is not recommended, unless one knows what one is doing. + In this case, gamma must be inferred in advance, and doc2author for this new data must be + provided. + """ _lambda = self.state.get_lambda() Elogbeta = dirichlet_expectation(_lambda) + expElogbeta = np.exp(dirichlet_expectation(_lambda)) - if gamma is None: + if gamma is not None: + logger.warning('bound() assumes gamma to be None and uses the gamma provided is self.state.') + # NOTE: alternatively: + #assert gamma is None, 'bound() assumes gamma to be None and uses the gamma provided is self.state.' + else: gamma = self.state.gamma + if chunk_no is None: + logger.warning('No chunk_no provided to bound().') + # NOTE: alternatively: + #assert chunk_no is not None, 'chunk_no must be provided to bound().' + chunk_no = 0 + Elogtheta = dirichlet_expectation(gamma) + expElogtheta = np.exp(dirichlet_expectation(gamma)) word_score = 0.0 authors_set = set() # Used in computing theta bound. @@ -357,24 +374,19 @@ def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0): if d % self.chunksize == 0: logger.debug("bound: at document #%i", d) - if gamma is None: - gammad, _ = self.inference([doc], chunk_no=chunk_no) - else: - gammad = gamma[authors_d, :] - Elogthetad = Elogtheta[authors_d, :] # Shape (len(authors_d), self.num_topics). # Computing the bound requires summing over expElogtheta[a, k] * expElogbeta[k, v], which # is the same computation as in normalizing phi. - phinorm = self.compute_phinorm(ids, authors_d, np.exp(Elogthetad), np.exp(Elogbeta[:, ids])) + phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) word_score += np.log(1.0 / len(authors_d)) + cts.dot(np.log(phinorm)) # E[log p(theta | alpha) - log q(theta | gamma)] # The code blow ensure we compute the score of each author only once. 
- for ai, a in enumerate(authors_d): + for a in authors_d: if a not in authors_set: - theta_score += np.sum((self.alpha - gammad[ai, :]) * Elogthetad[ai]) - theta_score += np.sum(gammaln(gammad[ai, :]) - gammaln(self.alpha)) - theta_score += gammaln(np.sum(self.alpha)) - gammaln(np.sum(gammad[ai, :])) + theta_score += np.sum((self.alpha - gamma[a, :]) * Elogtheta[a, :]) + theta_score += np.sum(gammaln(gamma[a, :]) - gammaln(self.alpha)) + theta_score += gammaln(np.sum(self.alpha)) - gammaln(np.sum(gamma[a, :])) authors_set.add(a) # Compensate likelihood for when `corpus` above is only a sample of the whole corpus. This ensures @@ -393,6 +405,8 @@ def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0): total_score = word_score + theta_score + beta_score + #print("%.3e\t%.3e\t%.3e\t%.3e" %(total_score, word_score, theta_score, beta_score)) + return total_score def get_author_topics(self, author_id, minimum_probability=None): @@ -419,7 +433,10 @@ def get_author_topics(self, author_id, minimum_probability=None): def __getitem__(self, bow, eps=None): """ """ - # TODO: this + # TODO: this. + # E.g. assume bow is a list of documents for this particular author, and that the author + # is not in the corpus beforehand. Then add an author to doc2author and author2doc, + # and call self.update to infer the new author's topic distribution. pass def save(self, fname, ignore=['state', 'dispatcher'], *args, **kwargs): diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index b915ce31bc..8605220876 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -652,6 +652,7 @@ def rho(): reallen = 0 for chunk_no, chunk in enumerate(utils.grouper(corpus, chunksize, as_numpy=chunks_as_numpy)): + # FIXME: replace rho() in e.g. self.do_estep by self.rho? Needed for AuthorTopicModel. 
self.rho = rho() reallen += len(chunk) # keep track of how many documents we've processed so far From 861e81a10740806aeb6d21b692da577332e4df16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Mon, 5 Dec 2016 12:09:50 +0100 Subject: [PATCH 060/100] New refactored code now in atmodel.py. Old code is in atmodelold.py, until I'm confident I don't need it anymore. Working on a new version in atmodel2.py, where I will be looping over authors rather than documents in the update. --- docs/notebooks/at_with_nips.ipynb | 204 +++++--- gensim/models/__init__.py | 2 +- gensim/models/atmodel.py | 740 +++++++++++++++--------------- gensim/models/atmodel2.py | 160 ++++++- gensim/models/atmodelold.py | 495 ++++++++++++++++++++ gensim/models/ldamodel.py | 2 +- 6 files changed, 1182 insertions(+), 421 deletions(-) mode change 100644 => 100755 gensim/models/atmodel.py create mode 100644 gensim/models/atmodelold.py diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 7b31d913a2..f60445f4e8 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 17, "metadata": { "collapsed": false }, @@ -68,8 +68,8 @@ "\n", "from gensim.models import AuthorTopicModel\n", "from gensim.models import atmodel\n", - "from gensim.models import AuthorTopicModel2\n", - "from gensim.models import atmodel2\n", + "from gensim.models import AuthorTopicModelOld\n", + "from gensim.models import atmodelold\n", "from gensim.models import LdaModel\n", "from gensim.models import ldamodel\n", "\n", @@ -80,7 +80,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": { "collapsed": false }, @@ -108,7 +108,7 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -145,7 +145,7 @@ }, { "cell_type": "code", - "execution_count": 63, + 
"execution_count": 4, "metadata": { "collapsed": false }, @@ -175,7 +175,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": 5, "metadata": { "collapsed": false }, @@ -187,7 +187,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 6, "metadata": { "collapsed": false }, @@ -205,7 +205,7 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": 7, "metadata": { "collapsed": false }, @@ -231,7 +231,7 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 8, "metadata": { "collapsed": false }, @@ -254,7 +254,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -269,7 +269,7 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": 10, "metadata": { "collapsed": false }, @@ -297,7 +297,7 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": 11, "metadata": { "collapsed": true }, @@ -309,7 +309,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 12, "metadata": { "collapsed": false }, @@ -327,7 +327,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 13, "metadata": { "collapsed": false }, @@ -336,7 +336,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkc
hlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukS
xB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGoo
TGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH
5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKw
BOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZ
gtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLSt
sDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016
htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul
/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIk
nwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSI
lKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrz
zGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/G
Dir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -362,7 +362,7 @@ }, { "cell_type": "code", - 
"execution_count": 73, + "execution_count": 14, "metadata": { "collapsed": true }, @@ -376,7 +376,7 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 15, "metadata": { "collapsed": false }, @@ -406,23 +406,23 @@ }, { "cell_type": "code", - "execution_count": 130, + "execution_count": 30, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "reload(atmodel2)\n", - "AuthorTopicModel2 = atmodel2.AuthorTopicModel2\n", "reload(atmodel)\n", "AuthorTopicModel = atmodel.AuthorTopicModel\n", + "reload(atmodelold)\n", + "AuthorTopicModelOld = atmodelold.AuthorTopicModelOld\n", "reload(ldamodel)\n", "LdaModel = ldamodel.LdaModel" ] }, { "cell_type": "code", - "execution_count": 131, + "execution_count": 31, "metadata": { "collapsed": false }, @@ -431,16 +431,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 6.08 s, sys: 0 ns, total: 6.08 s\n", - "Wall time: 6.07 s\n" + "CPU times: user 5.77 s, sys: 0 ns, total: 5.77 s\n", + "Wall time: 5.77 s\n" ] } ], "source": [ - "%time model = AuthorTopicModel2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", + "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", " author2doc=author2doc, doc2author=doc2author, id2author=id2author, var_lambda=None, \\\n", " chunksize=2000, passes=10, update_every=1, \\\n", - " alpha='auto', eta='symmetric', decay=0.5, offset=1.0, \\\n", + " alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, \\\n", " eval_every=1, iterations=10, gamma_threshold=1e-10, \\\n", " minimum_probability=0.01, random_state=1, ns_conf={}, \\\n", " minimum_phi_value=0.01, per_word_topics=False)\n" @@ -448,7 +448,86 @@ }, { "cell_type": "code", - "execution_count": 121, + "execution_count": 59, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "ename": "ImportError", + "evalue": "cannot import name 'from_iterable'", + "output_type": "error", + "traceback": [ + 
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mitertools\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mfrom_iterable\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mImportError\u001b[0m: cannot import name 'from_iterable'" + ] + } + ], + "source": [ + "from itertools import chain" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "14" + ] + }, + "execution_count": 60, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "generator_chain = chain.from_iterable([range(10), range(10, 20)])\n", + "next(islice(generator_chain, 14, 15))" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from itertools import islice, count" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "next(islice(count(), 20))" + ] + }, + { + "cell_type": "code", + "execution_count": 26, "metadata": { "collapsed": false }, @@ -457,28 +536,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.013*\"hidden\" + 0.009*\"vector\" + 0.008*\"classifier\" + 0.006*\"hidden_unit\" + 0.005*\"procedure\" + 0.005*\"propagation\" + 0.005*\"back_propagation\" + 0.005*\"decision\" + 0.004*\"generalization\" + 0.004*\"test\"'),\n", + " '0.017*\"classifier\" + 0.012*\"node\" + 0.008*\"vector\" + 0.007*\"recognition\" + 0.006*\"decision\" + 0.006*\"classification\" + 0.005*\"sequence\" + 0.005*\"class\" + 0.005*\"sample\" + 
0.005*\"probability\"'),\n", " (1,\n", - " '0.015*\"cell\" + 0.013*\"field\" + 0.010*\"visual\" + 0.007*\"map\" + 0.006*\"cortical\" + 0.006*\"activity\" + 0.006*\"synaptic\" + 0.005*\"synapsis\" + 0.005*\"cortex\" + 0.005*\"fig\"'),\n", + " '0.010*\"cell\" + 0.009*\"activation\" + 0.008*\"hidden\" + 0.007*\"node\" + 0.005*\"propagation\" + 0.005*\"response\" + 0.005*\"hidden_unit\" + 0.005*\"energy\" + 0.004*\"back_propagation\" + 0.004*\"matrix\"'),\n", " (2,\n", - " '0.009*\"circuit\" + 0.006*\"current\" + 0.006*\"dynamic\" + 0.006*\"node\" + 0.005*\"processor\" + 0.005*\"hidden\" + 0.005*\"classifier\" + 0.005*\"vector\" + 0.004*\"pulse\" + 0.004*\"machine\"'),\n", + " '0.009*\"vector\" + 0.008*\"image\" + 0.007*\"hidden\" + 0.005*\"fig\" + 0.005*\"dynamic\" + 0.005*\"noise\" + 0.005*\"object\" + 0.004*\"energy\" + 0.004*\"memory\" + 0.004*\"matrix\"'),\n", " (3,\n", - " '0.013*\"node\" + 0.006*\"activation\" + 0.006*\"memory\" + 0.005*\"classifier\" + 0.005*\"vector\" + 0.004*\"bit\" + 0.004*\"adaptive\" + 0.004*\"tree\" + 0.004*\"element\" + 0.004*\"neural_net\"'),\n", + " '0.013*\"vector\" + 0.011*\"hidden\" + 0.010*\"memory\" + 0.009*\"field\" + 0.005*\"hidden_unit\" + 0.004*\"threshold\" + 0.004*\"internal\" + 0.004*\"associative\" + 0.004*\"bit\" + 0.003*\"fig\"'),\n", " (4,\n", - " '0.022*\"cell\" + 0.012*\"firing\" + 0.010*\"stimulus\" + 0.010*\"response\" + 0.008*\"potential\" + 0.007*\"current\" + 0.007*\"spike\" + 0.007*\"cortex\" + 0.007*\"activity\" + 0.006*\"synaptic\"'),\n", + " '0.020*\"cell\" + 0.009*\"firing\" + 0.008*\"stimulus\" + 0.007*\"synaptic\" + 0.007*\"activity\" + 0.006*\"image\" + 0.006*\"response\" + 0.006*\"spike\" + 0.005*\"potential\" + 0.004*\"current\"'),\n", " (5,\n", - " '0.007*\"memory\" + 0.006*\"fig\" + 0.006*\"circuit\" + 0.005*\"analog\" + 0.005*\"chip\" + 0.005*\"matrix\" + 0.005*\"threshold\" + 0.005*\"cell\" + 0.004*\"response\" + 0.004*\"hopfield\"'),\n", + " '0.007*\"hidden\" + 0.006*\"node\" + 0.005*\"image\" + 
0.005*\"matrix\" + 0.004*\"class\" + 0.004*\"fig\" + 0.004*\"noise\" + 0.004*\"propagation\" + 0.003*\"recognition\" + 0.003*\"vector\"'),\n", " (6,\n", - " '0.008*\"node\" + 0.008*\"vector\" + 0.006*\"direction\" + 0.006*\"memory\" + 0.006*\"noise\" + 0.005*\"activation\" + 0.005*\"cell\" + 0.004*\"fig\" + 0.004*\"associative\" + 0.004*\"matrix\"'),\n", + " '0.009*\"speech\" + 0.009*\"region\" + 0.008*\"recognition\" + 0.006*\"chain\" + 0.006*\"probability\" + 0.005*\"class\" + 0.005*\"cell\" + 0.005*\"hidden\" + 0.004*\"domain\" + 0.004*\"distribution\"'),\n", " (7,\n", - " '0.015*\"memory\" + 0.009*\"vector\" + 0.007*\"probability\" + 0.006*\"distribution\" + 0.005*\"hidden\" + 0.005*\"class\" + 0.005*\"energy\" + 0.004*\"node\" + 0.004*\"capacity\" + 0.004*\"hidden_unit\"'),\n", + " '0.011*\"cell\" + 0.007*\"memory\" + 0.006*\"response\" + 0.006*\"fig\" + 0.006*\"circuit\" + 0.006*\"current\" + 0.005*\"hopfield\" + 0.005*\"analog\" + 0.005*\"synapse\" + 0.005*\"activity\"'),\n", " (8,\n", - " '0.016*\"image\" + 0.013*\"recognition\" + 0.011*\"hidden\" + 0.011*\"speech\" + 0.009*\"object\" + 0.005*\"trained\" + 0.005*\"propagation\" + 0.005*\"hidden_layer\" + 0.004*\"frame\" + 0.004*\"hidden_unit\"'),\n", + " '0.013*\"circuit\" + 0.006*\"memory\" + 0.005*\"control\" + 0.005*\"cell\" + 0.005*\"threshold\" + 0.005*\"fig\" + 0.004*\"voltage\" + 0.004*\"transistor\" + 0.004*\"current\" + 0.004*\"response\"'),\n", " (9,\n", - " '0.008*\"cell\" + 0.008*\"map\" + 0.005*\"vector\" + 0.005*\"fig\" + 0.005*\"activity\" + 0.004*\"matrix\" + 0.004*\"region\" + 0.004*\"field\" + 0.004*\"memory\" + 0.004*\"eye\"')]" + " '0.008*\"memory\" + 0.008*\"field\" + 0.008*\"cell\" + 0.007*\"map\" + 0.007*\"delay\" + 0.006*\"cortex\" + 0.006*\"image\" + 0.006*\"chip\" + 0.005*\"current\" + 0.005*\"synaptic\"')]" ] }, - "execution_count": 121, + "execution_count": 26, "metadata": {}, "output_type": "execute_result" } @@ -489,7 +568,7 @@ }, { "cell_type": "code", - "execution_count": 96, 
+ "execution_count": 22, "metadata": { "collapsed": false }, @@ -501,28 +580,49 @@ "\n", "Yaser S.Abu-Mostafa\n", "Docs: [62]\n", - "[(1, 0.017412267312877025),\n", - " (2, 0.016179010242631686),\n", - " (3, 0.036699986691635919),\n", - " (5, 0.070031308676836254),\n", - " (6, 0.033023933328145814),\n", - " (7, 0.77953147445231008),\n", - " (9, 0.038343348900076533)]\n", + "[(0, 0.17428370582074723),\n", + " (1, 0.10229569024379424),\n", + " (2, 0.062556106292013122),\n", + " (3, 0.078817840485611065),\n", + " (4, 0.068487942942868585),\n", + " (5, 0.14869390057914703),\n", + " (6, 0.17212355568609788),\n", + " (7, 0.074170089610964149),\n", + " (8, 0.047702469618850774),\n", + " (9, 0.070868698719905782)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [143, 284, 230, 197]\n", - "[(8, 0.99937742034674781)]\n", + "[(0, 0.1963942573033424),\n", + " (1, 0.12792966363823302),\n", + " (2, 0.23505329159063704),\n", + " (3, 0.060305386421733033),\n", + " (4, 0.04267590384413758),\n", + " (5, 0.060980284135135593),\n", + " (6, 0.2451247159367281),\n", + " (8, 0.01574985863695311),\n", + " (9, 0.01023414671028579)]\n", "\n", "Michael I. Jordan\n", "Docs: [237]\n", - "[(0, 0.23028565955735972),\n", - " (6, 0.28588276328170809),\n", - " (8, 0.015124405175832004),\n", - " (9, 0.45898140870100651)]\n", + "[(0, 0.085651762012953936),\n", + " (1, 0.065665448104732405),\n", + " (2, 0.07777125401127058),\n", + " (3, 0.050480420361483674),\n", + " (4, 0.065721037891177864),\n", + " (5, 0.086499723758504746),\n", + " (6, 0.38914428858057321),\n", + " (7, 0.039550645237331414),\n", + " (8, 0.10733538353868659),\n", + " (9, 0.032180036503285388)]\n", "\n", "James M. 
Bower\n", "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(1, 0.30674043606317025), (4, 0.69291925103790253)]\n" + "[(1, 0.013903403611000136),\n", + " (4, 0.098066607370058775),\n", + " (7, 0.11242612291693072),\n", + " (8, 0.016186021191484681),\n", + " (9, 0.74612850820488463)]\n" ] } ], diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index 4a07165511..5149253372 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -17,7 +17,7 @@ from .phrases import Phrases from .normmodel import NormModel from .atmodel import AuthorTopicModel -from .atmodel2 import AuthorTopicModel2 +from .atmodelold import AuthorTopicModelOld from .ldaseqmodel import LdaSeqModel from . import wrappers diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py old mode 100644 new mode 100755 index 1a222fd14b..c7dff4dba4 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -5,57 +5,94 @@ # Copyright (C) 2016 Olavur Mortensen # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + """ -Author-topic model. +Author-topic model in Python. + """ +# TODO: write proper docstrings. + import pdb from pdb import set_trace as st +from pprint import pprint import logging -import numpy +import numpy as np # for arrays, array broadcasting etc. import numbers -from gensim import utils, matutils -from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim import utils from gensim.models import LdaModel +from gensim.models.ldamodel import dirichlet_expectation, get_random_state, LdaState +from itertools import chain +from scipy.special import gammaln # gamma function utils from six.moves import xrange -from scipy.special import gammaln - -from pprint import pprint +import six -# log(sum(exp(x))) that tries to avoid overflow. NOTE: not used at the moment. 
-try: - # try importing from here if older scipy is installed - from scipy.maxentropy import logsumexp -except ImportError: - # maxentropy has been removed in recent releases, logsumexp now in misc - from scipy.misc import logsumexp +logger = logging.getLogger('gensim.models.atmodel') -logger = logging.getLogger(__name__) +class AuthorTopicState(LdaState): + """ + Encapsulate information for distributed computation of AuthorTopicModel objects. -class AuthorTopicState: - def __init__(self, atmodel): - self.atmodel = atmodel + Objects of this class are sent over the network, so try to keep them lean to + reduce traffic. - def get_lambda(self): - return self.atmodel.var_lambda + """ + def __init__(self, eta, lambda_shape, gamma_shape): + self.eta = eta + self.sstats = np.zeros(lambda_shape) + self.gamma = np.zeros(gamma_shape) + self.numdocs = 0 + +def construct_doc2author(corpus, author2doc): + """Make a mapping from document IDs to author IDs.""" + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + return doc2author + +def construct_author2doc(corpus, doc2author): + """Make a mapping from author IDs to document IDs.""" + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. + author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + return author2doc class AuthorTopicModel(LdaModel): """ - Train the author-topic model using online variational Bayes. 
""" + def __init__(self, corpus=None, num_topics=100, id2word=None, + author2doc=None, doc2author=None, id2author=None, var_lambda=None, + chunksize=2000, passes=1, update_every=1, + alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, + eval_every=10, iterations=50, gamma_threshold=0.001, + minimum_probability=0.01, random_state=None, ns_conf={}, + minimum_phi_value=0.01, per_word_topics=False): + """ + """ - def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, - author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, - iterations=10, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, - eval_every=1, random_state=None, var_lambda=None, chunksize=1): + distributed = False # TODO: implement distributed version. self.id2word = id2word if corpus is None and self.id2word is None: raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') - # NOTE: Why would id2word not be none, but have length 0? (From LDA code) if self.id2word is None: logger.warning("no word id mapping provided; initializing from corpus, assuming identity") self.id2word = utils.dict_from_corpus(corpus) @@ -73,33 +110,13 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, if doc2author is None and author2doc is None: raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') - # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). # If either doc2author or author2doc is missing, construct them from the other. + # FIXME: make the code below into methods, so the user can construct either doc2author or author2doc *once* and then not worry about it. + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). if doc2author is None: - # Make a mapping from document IDs to author IDs. 
- doc2author = {} - for d, _ in enumerate(corpus): - author_ids = [] - for a, a_doc_ids in author2doc.items(): - if d in a_doc_ids: - author_ids.append(a) - doc2author[d] = author_ids + doc2author = construct_doc2author(corpus, author2doc) elif author2doc is None: - # Make a mapping from author IDs to document IDs. - - # First get a set of all authors. - authors_ids = set() - for d, a_doc_ids in doc2author.items(): - for a in a_doc_ids: - authors_ids.add(a) - - # Now construct the dictionary. - author2doc = {} - for a in range(len(authors_ids)): - author2doc[a] = [] - for d, a_ids in doc2author.items(): - if a in a_ids: - author2doc[a].append(d) + author2doc = construct_author2doc(corpus, doc2author) self.author2doc = author2doc self.doc2author = doc2author @@ -116,363 +133,282 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, # Make the reverse mapping, from author names to author IDs. self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) - self.corpus = corpus - self.iterations = iterations - self.passes = passes + self.distributed = distributed self.num_topics = num_topics - self.threshold = threshold - self.minimum_probability = minimum_probability + self.chunksize = chunksize self.decay = decay self.offset = offset - self.num_authors = len(author2doc) + self.minimum_probability = minimum_probability + self.num_updates = 0 + + self.passes = passes + self.update_every = update_every self.eval_every = eval_every - self.random_state = random_state - self.chunksize = chunksize + self.minimum_phi_value = minimum_phi_value + self.per_word_topics = per_word_topics + + self.corpus = corpus + self.num_authors = len(author2doc) + + self.alpha, self.optimize_alpha = self.init_dir_prior(alpha, 'alpha') + + assert self.alpha.shape == (self.num_topics,), "Invalid alpha shape. 
Got shape %s, but expected (%d, )" % (str(self.alpha.shape), self.num_topics) - self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) - #self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) - self.eta = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_terms)]) + if isinstance(eta, six.string_types): + if eta == 'asymmetric': + raise ValueError("The 'asymmetric' option cannot be used for eta") + + self.eta, self.optimize_eta = self.init_dir_prior(eta, 'eta') self.random_state = get_random_state(random_state) - self.state = AuthorTopicState(self) + assert (self.eta.shape == (self.num_terms,) or self.eta.shape == (self.num_topics, self.num_terms)), ( + "Invalid eta shape. Got shape %s, but expected (%d, 1) or (%d, %d)" % + (str(self.eta.shape), self.num_terms, self.num_topics, self.num_terms)) + if not distributed: + self.dispatcher = None + self.numworkers = 1 + else: + # TODO: implement distributed version. + pass + + # VB constants + self.iterations = iterations + self.gamma_threshold = gamma_threshold + + # Initialize the variational distributions q(beta|lambda) and q(theta|gamma) + self.state = AuthorTopicState(self.eta, (self.num_topics, self.num_terms), (self.num_authors, self.num_topics)) + self.state.sstats = self.random_state.gamma(100., 1. / 100., (self.num_topics, self.num_terms)) + self.state.gamma = self.random_state.gamma(100., 1. 
/ 100., (self.num_authors, self.num_topics)) + self.expElogbeta = np.exp(dirichlet_expectation(self.state.sstats)) + self.expElogtheta = np.exp(dirichlet_expectation(self.state.gamma)) + + # if a training corpus was provided, start estimating the model right away if corpus is not None: - self.inference(corpus, var_lambda=var_lambda) + use_numpy = self.dispatcher is not None + self.update(corpus, chunks_as_numpy=use_numpy) def __str__(self): - return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s)" % \ - (self.num_terms, self.num_topics, self.num_authors, self.decay) - - def rho(self, t): - return pow(self.offset + t, -self.decay) + return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s, chunksize=%s)" % \ + (self.num_terms, self.num_topics, self.num_authors, self.decay, self.chunksize) def compute_phinorm(self, ids, authors_d, expElogthetad, expElogbetad): - phinorm = numpy.zeros(len(ids)) - expElogtheta_sum = numpy.zeros(self.num_topics) + """Efficiently computes the normalizing factor in phi.""" + phinorm = np.zeros(len(ids)) + expElogtheta_sum = np.zeros(self.num_topics) for a in xrange(len(authors_d)): expElogtheta_sum += expElogthetad[a, :] phinorm = expElogtheta_sum.dot(expElogbetad) return phinorm - def inference(self, corpus=None, var_lambda=None): - if corpus is None: - # TODO: is copy necessary here? - corpus = self.corpus.copy() - - self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. - - corpus_words = sum(cnt for document in corpus for _, cnt in document) + def inference(self, chunk, collect_sstats=False, chunk_no=None): + """ + Given a chunk of sparse document vectors, estimate gamma (parameters + controlling the topic weights) for each document in the chunk. - logger.info('Starting inference. Training on %d documents.', len(corpus)) + This function does not modify the model (=is read-only aka const). 
The + whole input chunk of document is assumed to fit in RAM; chunking of a + large corpus must be done earlier in the pipeline. - # NOTE: as the numerically stable phi update (and bound evaluation) causes - # the bound to converge a bit differently (faster, actually), it is not used - # for now until it is fully understood. - numstable_sm = False + If `collect_sstats` is True, also collect sufficient statistics needed + to update the model's topic-word distributions, and return a 2-tuple + `(gamma, sstats)`. Otherwise, return `(gamma, None)`. `gamma` is of shape + `len(chunk_authors) x self.num_topics`, where `chunk_authors` is the number + of authors in the documents in the current chunk. - if not numstable_sm: - maxElogbeta = None - maxElogtheta = None + Avoids computing the `phi` variational parameter directly using the + optimization presented in **Lee, Seung: Algorithms for non-negative matrix factorization, NIPS 2001**. - if var_lambda is None: - self.optimize_lambda = True - else: - # We have topics from LDA, thus we do not train the topics. - self.optimize_lambda = False - - # Initial values of gamma and lambda. - # Parameters of gamma distribution same as in `ldamodel`. - var_gamma = self.random_state.gamma(100., 1. / 100., - (self.num_authors, self.num_topics)) - tilde_gamma = var_gamma.copy() - self.var_gamma = var_gamma - - if var_lambda is None: - var_lambda = self.random_state.gamma(100., 1. / 100., - (self.num_topics, self.num_terms)) - tilde_lambda = var_lambda.copy() - else: - self.norm_lambda = var_lambda.copy() - for k in xrange(self.num_topics): - self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] - - #var_lambda += self.eta - - sstats_global = var_lambda.copy() - - self.var_lambda = var_lambda - - # Initialize dirichlet expectations. 
- Elogtheta = dirichlet_expectation(var_gamma) - Elogbeta = dirichlet_expectation(var_lambda) - if numstable_sm: - maxElogtheta = Elogtheta.max() - maxElogbeta = Elogbeta.max() - expElogtheta = numpy.exp(Elogtheta - maxElogtheta) - expElogbeta = numpy.exp(Elogbeta - maxElogbeta) + """ + try: + _ = len(chunk) + except: + # convert iterators/generators to plain list, so we have len() etc. + chunk = list(chunk) + if len(chunk) > 1: + logger.debug("performing inference on a chunk of %i documents", len(chunk)) + + # Initialize the variational distribution q(theta|gamma) for the chunk + if collect_sstats: + sstats = np.zeros_like(self.expElogbeta) else: - expElogtheta = numpy.exp(Elogtheta) - expElogbeta = numpy.exp(Elogbeta) - - if self.eval_every > 0: - word_bound = self.word_bound(corpus, expElogtheta, expElogbeta, maxElogtheta, maxElogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - perwordbound = bound / corpus_words - logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) - #var_lambda -= self.eta - #Elogbeta = dirichlet_expectation(var_lambda) - #expElogbeta = numpy.exp(Elogbeta) - for _pass in xrange(self.passes): - converged = 0 # Number of documents converged for current pass over corpus. - for chunk_no, chunk in enumerate(utils.grouper(corpus, self.chunksize, as_numpy=False)): - # TODO: a smarter of computing rho may be necessary. In ldamodel, - # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). - rhot = self.rho(chunk_no + _pass) - sstats = numpy.zeros(var_lambda.shape) - for d, doc in enumerate(chunk): - doc_no = chunk_no + d - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - authors_d = self.doc2author[doc_no] # List of author IDs for the current document. 
- - phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) - - # TODO: if not used, get rid of these. - expElogthetad = expElogtheta[authors_d, :] - expElogbetad = expElogbeta[:, ids] - - for iteration in xrange(self.iterations): - #logger.info('iteration %i', iteration) - - lastgamma = tilde_gamma[authors_d, :] - - # Update gamma. - for a in authors_d: - tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) - - # Update gamma and lambda. - # Interpolation between document d's "local" gamma (tilde_gamma), - # and "global" gamma (var_gamma). Same goes for lambda. - tilde_gamma[authors_d, :] = (1 - rhot) * var_gamma[authors_d, :] + rhot * tilde_gamma[authors_d, :] - - # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. - Elogtheta[authors_d, :] = dirichlet_expectation(tilde_gamma[authors_d, :]) - if numstable_sm: - temp_max = Elogtheta[authors_d, :].max() - maxElogtheta = temp_max if temp_max > maxElogtheta else maxElogtheta - expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :] - maxElogtheta) - else: - expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :]) - - phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) - - # Check for convergence. - # Criterion is mean change in "local" gamma and lambda. - meanchange_gamma = numpy.mean(abs(tilde_gamma[authors_d, :] - lastgamma)) - gamma_condition = meanchange_gamma < self.threshold - # logger.info('Mean change in gamma: %.3e', meanchange_gamma) - if gamma_condition: - # logger.info('Converged after %d iterations.', iteration) - converged += 1 - break - # End of iterations loop. - - var_gamma = tilde_gamma.copy() - - expElogtheta_sum_a = expElogtheta[authors_d, :].sum(axis=0) - sstats[:, ids] += numpy.outer(expElogtheta_sum_a.T, cts/phinorm) - # End of chunk loop. - - if self.optimize_lambda: - # Update lambda. 
- #sstats *= expElogbeta - #sstats_global = (1 - rhot) * sstats_global + rhot * sstats - #var_lambda = sstats + self.eta - #Elogbeta = dirichlet_expectation(var_lambda) - #expElogbeta = numpy.exp(Elogbeta) - - sstats *= expElogbeta - # Find the ids of the words that are to be updated per this chunk, and update - # only those terms. - # NOTE: this is not necessarily more efficient than just updating all terms, but - # doing that may cause problems. - # NOTE: this assumes that if a single value in a row of sstats is zero, then the - # entire column is zero. This *should* be the case (if not, something else has gone - # wrong). - chunk_ids = sstats[0, :].nonzero() - tilde_lambda[:, chunk_ids] = self.eta[chunk_ids] + self.num_docs * sstats[:, chunk_ids] / self.chunksize - - var_lambda[:, chunk_ids] = (1 - rhot) * var_lambda[:, chunk_ids] + rhot * tilde_lambda[:, chunk_ids] - Elogbeta = dirichlet_expectation(var_lambda) - if numstable_sm: - # NOTE: can it be assumed that only Elogbeta[:, ids] have changed? - temp_max = Elogbeta.max() - maxElogbeta = temp_max if temp_max > maxElogbeta else maxElogbeta - expElogbeta = numpy.exp(Elogbeta - maxElogbeta) - else: - expElogbeta = numpy.exp(Elogbeta) - #var_lambda = var_lambda.copy() - - # Print topics: - # pprint(self.show_topics()) - # End of corpus loop. - - - if self.eval_every > 0 and (_pass + 1) % self.eval_every == 0: - self.var_gamma = var_gamma - self.var_lambda = var_lambda - prev_bound = bound - word_bound = self.word_bound(corpus, expElogtheta, expElogbeta, maxElogtheta, maxElogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - perwordbound = bound / corpus_words - logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) - # NOTE: bound can be computed as below. 
We compute each term for now because it can be useful for debugging. - # bound = eval_bound(corpus, Elogtheta, Elogbeta, expElogtheta, expElogtheta, maxElogtheta=maxElogtheta, maxElogbeta=maxElogbeta): - - #logger.info('Converged documents: %d/%d', converged, self.num_docs) - - # TODO: consider whether to include bound convergence criterion, something like this: - #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold: - # break - # End of pass over corpus loop. - - # Ensure that the bound (or log probabilities) is computed at the very last pass. - if self.eval_every > 0 and not (_pass + 1) % self.eval_every == 0: - # If the bound should be computed, and it wasn't computed at the last pass, - # then compute the bound. - self.var_gamma = var_gamma - self.var_lambda = var_lambda - prev_bound = bound - word_bound = self.word_bound(corpus, expElogtheta, expElogbeta, maxElogtheta, maxElogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) - - - self.var_lambda = var_lambda - self.var_gamma = var_gamma - - return var_gamma, var_lambda - - def eval_bound(self, corpus, Elogtheta, Elogbeta, expElogtheta, expElogbeta, maxElogtheta=None, maxElogbeta=None): - word_bound = self.word_bound(corpus, expElogtheta, expElogbeta, maxElogtheta=maxElogtheta, maxElogbeta=maxElogbeta) - theta_bound = self.theta_bound(Elogtheta) - beta_bound = self.beta_bound(Elogbeta) - bound = word_bound + theta_bound + beta_bound - return bound - - def word_bound(self, docs, expElogtheta, expElogbeta, maxElogtheta=None, maxElogbeta=None): + sstats = None + converged = 0 + + chunk_authors = set() + + # Now, for each document d update that document's gamma and phi + for d, doc in enumerate(chunk): + doc_no = chunk_no + d # TODO: can it safely be assumed that this is the case? + if doc and not isinstance(doc[0][0], six.integer_types): + # make sure the term IDs are ints, otherwise np will get upset + ids = [int(id) for id, _ in doc] + else: + ids = [id for id, _ in doc] + cts = np.array([cnt for _, cnt in doc]) + authors_d = self.doc2author[doc_no] # List of author IDs for the current document. + + gammad = self.state.gamma[authors_d, :] + tilde_gamma = gammad.copy() + + Elogthetad = dirichlet_expectation(tilde_gamma) + expElogthetad = np.exp(Elogthetad) + expElogbetad = self.expElogbeta[:, ids] + + phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) + + # Iterate between gamma and phi until convergence + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) + + lastgamma = tilde_gamma.copy() + + # Update gamma. + for ai, a in enumerate(authors_d): + tilde_gamma[ai, :] = self.alpha + len(self.author2doc[a]) * expElogthetad[ai, :] * np.dot(cts / phinorm, expElogbetad.T) + + # Update gamma and lambda. + # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). 
+ tilde_gamma = (1 - self.rho) * gammad + self.rho * tilde_gamma + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + Elogthetad = dirichlet_expectation(tilde_gamma) + expElogthetad = np.exp(Elogthetad) + + phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. + meanchange_gamma = np.mean(abs(tilde_gamma - lastgamma)) + gamma_condition = meanchange_gamma < self.gamma_threshold + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + if gamma_condition: + # logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + self.state.gamma[authors_d, :] = tilde_gamma + + # NOTE: this may be slow. Especially when there are many authors per document. It is + # imporant to find a faster way to handle this. + chunk_authors = chunk_authors.union(set(authors_d)) + + if collect_sstats: + # Contribution of document d to the expected sufficient + # statistics for the M step. + expElogtheta_sum_a = expElogthetad.sum(axis=0) + sstats[:, ids] += np.outer(expElogtheta_sum_a.T, cts/phinorm) + + if len(chunk) > 1: + logger.debug("%i/%i documents converged within %i iterations", + converged, len(chunk), self.iterations) + + if collect_sstats: + # This step finishes computing the sufficient statistics for the + # M step, so that + # sstats[k, w] = \sum_d n_{dw} * phi_{dwk} + # = \sum_d n_{dw} * exp{Elogtheta_{dk} + Elogbeta_{kw}} / phinorm_{dw}. + sstats *= self.expElogbeta + gamma_chunk = self.state.gamma[list(chunk_authors), :] + return gamma_chunk, sstats + + def do_estep(self, chunk, state=None, chunk_no=None): + """ + Perform inference on a chunk of documents, and accumulate the collected + sufficient statistics in `state` (or `self.state` if None). 
+ + """ + if state is None: + state = self.state + gamma, sstats = self.inference(chunk, collect_sstats=True, chunk_no=chunk_no) + state.sstats += sstats + state.numdocs += len(chunk) + return gamma + + def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2author=None, ): """ - Compute the expectation of the log conditional likelihood of the data, + Estimate the variational bound of documents from `corpus`: + E_q[log p(corpus)] - E_q[log q(corpus)] - E_q[log p(w_d | theta, beta, A_d)], + `gamma` are the variational parameters on topic weights for each `corpus` + document (=2d matrix=what comes out of `inference()`). + If not supplied, will be inferred from the model. + + Computing the bound of unseen data is not recommended, unless one knows what one is doing. + In this case, gamma must be inferred in advance, and doc2author for this new data must be + provided. - where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. """ - # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. - # NOTE: computing bound is very very computationally intensive. We could, for example, - # only use a portion of the data to do that (even a held-out set). - - # TODO: same optimized computation as in phinorm can be used. - bound= 0.0 - for d, doc in enumerate(docs): - authors_d = self.doc2author[d] - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - bound_d = 0.0 - # Computing the bound requires summing over expElogtheta[a, k] * expElogbeta[k, v], which - # is the same computation as in normalizing phi. 
- phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) - bound += numpy.log(1.0 / len(authors_d)) + cts.dot(numpy.log(phinorm)) + _lambda = self.state.get_lambda() + Elogbeta = dirichlet_expectation(_lambda) + expElogbeta = np.exp(dirichlet_expectation(_lambda)) - # TODO: consider using per-word bound, i.e. - # bound *= 1 /sum(len(doc) for doc in docs) + if gamma is not None: + logger.warning('bound() assumes gamma to be None and uses the gamma provided is self.state.') + # NOTE: alternatively: + #assert gamma is None, 'bound() assumes gamma to be None and uses the gamma provided is self.state.' + else: + gamma = self.state.gamma - return bound + if chunk_no is None: + logger.warning('No chunk_no provided to bound().') + # NOTE: alternatively: + #assert chunk_no is not None, 'chunk_no must be provided to bound().' + chunk_no = 0 - def theta_bound(self, Elogtheta): - bound = 0.0 - for a in xrange(self.num_authors): - var_gamma_a = self.var_gamma[a, :] - Elogtheta_a = Elogtheta[a, :] - bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) - bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) - bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) + Elogtheta = dirichlet_expectation(gamma) + expElogtheta = np.exp(dirichlet_expectation(gamma)) - return bound + word_score = 0.0 + authors_set = set() # Used in computing theta bound. + theta_score = 0.0 + for d, doc in enumerate(corpus): # stream the input doc-by-doc, in case it's too large to fit in RAM + doc_no = chunk_no + d + authors_d = self.doc2author[doc_no] + ids = np.array([id for id, _ in doc]) # Word IDs in doc. + cts = np.array([cnt for _, cnt in doc]) # Word counts. 
- def beta_bound(self, Elogbeta): - bound = 0.0 - bound += numpy.sum((self.eta - self.var_lambda) * Elogbeta) - bound += numpy.sum(gammaln(self.var_lambda) - gammaln(self.eta)) - bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(self.var_lambda, 1))) + if d % self.chunksize == 0: + logger.debug("bound: at document #%i", d) - return bound + # Computing the bound requires summing over expElogtheta[a, k] * expElogbeta[k, v], which + # is the same computation as in normalizing phi. + phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) + word_score += np.log(1.0 / len(authors_d)) + cts.dot(np.log(phinorm)) - def eval_logprob(self, doc_ids=None): - """ - Compute the liklihood of the corpus under the model, by first - computing the conditional probabilities of the words in a - document d, + # E[log p(theta | alpha) - log q(theta | gamma)] + # The code blow ensure we compute the score of each author only once. + for a in authors_d: + if a not in authors_set: + theta_score += np.sum((self.alpha - gamma[a, :]) * Elogtheta[a, :]) + theta_score += np.sum(gammaln(gamma[a, :]) - gammaln(self.alpha)) + theta_score += gammaln(np.sum(self.alpha)) - gammaln(np.sum(gamma[a, :])) + authors_set.add(a) - p(w_d | theta, beta, A_d), + # Compensate likelihood for when `corpus` above is only a sample of the whole corpus. This ensures + # that the likelihood is always rougly on the same scale. + word_score *= subsample_ratio - summing over all documents, and dividing by the number of documents. - """ + # theta_score is rescaled in a similar fashion. 
+ theta_score *= self.num_authors / len(authors_set) - norm_gamma = self.var_gamma.copy() - for a in xrange(self.num_authors): - norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] + # E[log p(beta | eta) - log q (beta | lambda)] + beta_score = 0.0 + beta_score += np.sum((self.eta - _lambda) * Elogbeta) + beta_score += np.sum(gammaln(_lambda) - gammaln(self.eta)) + sum_eta = np.sum(self.eta) + beta_score += np.sum(gammaln(sum_eta) - gammaln(np.sum(_lambda, 1))) - if self.optimize_lambda: - norm_lambda = self.var_lambda.copy() - for k in xrange(self.num_topics): - norm_lambda[k, :] = self.var_lambda[k, :] / self.var_lambda.sum(axis=1)[k] - else: - norm_lambda = self.norm_lambda + total_score = word_score + theta_score + beta_score - if doc_ids is None: - docs = self.corpus - else: - docs = [self.corpus[d] for d in doc_ids] - - logprob = 0.0 - for d, doc in enumerate(docs): - ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. - cts = numpy.array([cnt for _, cnt in doc]) # Word counts. - authors_d = self.doc2author[d] - logprob_d = 0.0 - #phinorm = self.compute_phinorm(ids, authors_d, expElogtheta, expElogbeta) - for vi, v in enumerate(ids): - logprob_v = 0.0 - for k in xrange(self.num_topics): - for a in authors_d: - logprob_v += norm_gamma[a, k] * norm_lambda[k, v] - logprob_d += cts[vi] * numpy.log(logprob_v) - logprob += numpy.log(1.0 / len(authors_d)) + logprob_d - - return logprob - - # Overriding LdaModel.get_topic_terms. - def get_topic_terms(self, topicid, topn=10): - """ - Return a list of `(word_id, probability)` 2-tuples for the most - probable words in topic `topicid`. - Only return 2-tuples for the topn most probable words (ignore the rest). 
- """ - topic = self.var_lambda[topicid, :] - topic = topic / topic.sum() # normalize to probability distribution - bestn = matutils.argsort(topic, topn, reverse=True) - return [(id, topic[id]) for id in bestn] + #print("%.3e\t%.3e\t%.3e\t%.3e" %(total_score, word_score, theta_score, beta_score)) + return total_score def get_author_topics(self, author_id, minimum_probability=None): """ @@ -484,12 +420,86 @@ def get_author_topics(self, author_id, minimum_probability=None): minimum_probability = self.minimum_probability minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output - topic_dist = self.var_gamma[author_id, :] / sum(self.var_gamma[author_id, :]) + topic_dist = self.state.gamma[author_id, :] / sum(self.state.gamma[author_id, :]) author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) if topicvalue >= minimum_probability] return author_topics + # NOTE: method `top_topics` is used directly. There is no topic coherence measure for + # the author-topic model. c_v topic coherence is a valid measure of topic quality in + # the author-topic model, although it does not take authorship information into account. + def __getitem__(self, bow, eps=None): + """ + """ + # TODO: this. + # E.g. assume bow is a list of documents for this particular author, and that the author + # is not in the corpus beforehand. Then add an author to doc2author and author2doc, + # and call self.update to infer the new author's topic distribution. + pass + + def save(self, fname, ignore=['state', 'dispatcher'], *args, **kwargs): + """ + Save the model to file. + + Large internal arrays may be stored into separate files, with `fname` as prefix. + + `separately` can be used to define which arrays should be stored in separate files. + + `ignore` parameter can be used to define which variables should be ignored, i.e. left + out from the pickled author-topic model. 
By default the internal `state` is ignored as it uses + its own serialisation not the one provided by `AuthorTopicModel`. The `state` and `dispatcher` + will be added to any ignore parameter defined. + + + Note: do not save as a compressed file if you intend to load the file back with `mmap`. + Note: If you intend to use models across Python 2/3 versions there are a few things to + keep in mind: + + 1. The pickled Python dictionaries will not work across Python versions + 2. The `save` method does not automatically save all NumPy arrays using NumPy, only + those ones that exceed `sep_limit` set in `gensim.utils.SaveLoad.save`. The main + concern here is the `alpha` array if for instance using `alpha='auto'`. + + Please refer to the wiki recipes section (https://github.com/piskvorky/gensim/wiki/Recipes-&-FAQ#q9-how-do-i-load-a-model-in-python-3-that-was-trained-and-saved-using-python-2) + for an example on how to work around these issues. + """ + if self.state is not None: + self.state.save(utils.smart_extension(fname, '.state'), *args, **kwargs) + + # make sure 'state' and 'dispatcher' are ignored from the pickled object, even if + # someone sets the ignore list themselves + if ignore is not None and ignore: + if isinstance(ignore, six.string_types): + ignore = [ignore] + ignore = [e for e in ignore if e] # make sure None and '' are not in the list + ignore = list(set(['state', 'dispatcher']) | set(ignore)) + else: + ignore = ['state', 'dispatcher'] + # TODO: the only difference between this save method and LdaModel's is the use of + # "AuthorTopicModel" below. This should be an easy refactor. + # Same goes for load method below. + super(AuthorTopicModel, self).save(fname, *args, ignore=ignore, **kwargs) + + @classmethod + def load(cls, fname, *args, **kwargs): + """ + Load a previously saved object from file (also see `save`). 
+ + Large arrays can be memmap'ed back as read-only (shared memory) by setting `mmap='r'`: + + >>> AuthorTopicModel.load(fname, mmap='r') + + """ + kwargs['mmap'] = kwargs.get('mmap', None) + result = super(AuthorTopicModel, cls).load(fname, *args, **kwargs) + state_fname = utils.smart_extension(fname, '.state') + try: + result.state = super(LdaModel, cls).load(state_fname, *args, **kwargs) + except Exception as e: + logging.warning("failed to load state from %s: %s", state_fname, e) + return result +# endclass LdaModel diff --git a/gensim/models/atmodel2.py b/gensim/models/atmodel2.py index 20d275c370..b50be701f0 100755 --- a/gensim/models/atmodel2.py +++ b/gensim/models/atmodel2.py @@ -7,7 +7,8 @@ """ -Author-topic model. +Author-topic model in Python. + """ # TODO: write proper docstrings. @@ -28,7 +29,7 @@ from six.moves import xrange import six -logger = logging.getLogger('gensim.models.atmodel2') +logger = logging.getLogger('gensim.models.atmodel') class AuthorTopicState(LdaState): """ @@ -328,6 +329,161 @@ def do_estep(self, chunk, state=None, chunk_no=None): state.numdocs += len(chunk) return gamma + def update(self, corpus, chunksize=None, decay=None, offset=None, + passes=None, update_every=None, eval_every=None, iterations=None, + gamma_threshold=None, chunks_as_numpy=False): + """ + Train the model with new documents, by EM-iterating over `corpus` until + the topics converge (or until the maximum number of allowed iterations + is reached). `corpus` must be an iterable (repeatable stream of documents), + + In distributed mode, the E step is distributed over a cluster of machines. + + This update also supports updating an already trained model (`self`) + with new documents from `corpus`; the two models are then merged in + proportion to the number of old vs. new documents. This feature is still + experimental for non-stationary input streams. 
+ + For stationary input (no topic drift in new documents), on the other hand, + this equals the online update of Hoffman et al. and is guaranteed to + converge for any `decay` in (0.5, 1.0>. Additionally, for smaller + `corpus` sizes, an increasing `offset` may be beneficial (see + Table 1 in Hoffman et al.) + + Args: + corpus (gensim corpus): The corpus with which the LDA model should be updated. + + chunks_as_numpy (bool): Whether each chunk passed to `.inference` should be a np + array of not. np can in some settings turn the term IDs + into floats, these will be converted back into integers in + inference, which incurs a performance hit. For distributed + computing it may be desirable to keep the chunks as np + arrays. + + For other parameter settings, see :class:`LdaModel` constructor. + + """ + # use parameters given in constructor, unless user explicitly overrode them + if decay is None: + decay = self.decay + if offset is None: + offset = self.offset + if passes is None: + passes = self.passes + if update_every is None: + update_every = self.update_every + if eval_every is None: + eval_every = self.eval_every + if iterations is None: + iterations = self.iterations + if gamma_threshold is None: + gamma_threshold = self.gamma_threshold + + try: + lencorpus = len(corpus) + except: + logger.warning("input corpus stream has no len(); counting documents") + lencorpus = sum(1 for _ in corpus) + if lencorpus == 0: + logger.warning("LdaModel.update() called with an empty corpus") + return + + if chunksize is None: + chunksize = min(lencorpus, self.chunksize) + + self.state.numdocs += lencorpus + + if update_every: + updatetype = "online" + updateafter = min(lencorpus, update_every * self.numworkers * chunksize) + else: + updatetype = "batch" + updateafter = lencorpus + evalafter = min(lencorpus, (eval_every or 0) * self.numworkers * chunksize) + + updates_per_pass = max(1, lencorpus / updateafter) + logger.info("running %s LDA training, %s topics, %i passes over " 
+ "the supplied corpus of %i documents, updating model once " + "every %i documents, evaluating perplexity every %i documents, " + "iterating %ix with a convergence threshold of %f", + updatetype, self.num_topics, passes, lencorpus, + updateafter, evalafter, iterations, + gamma_threshold) + + if updates_per_pass * passes < 10: + logger.warning("too few updates, training might not converge; consider " + "increasing the number of passes or iterations to improve accuracy") + + # rho is the "speed" of updating; TODO try other fncs + # pass_ + num_updates handles increasing the starting t for each pass, + # while allowing it to "reset" on the first pass of each update + def rho(): + return pow(offset + pass_ + (self.num_updates / chunksize), -decay) + + for pass_ in xrange(passes): + if self.dispatcher: + logger.info('initializing %s workers' % self.numworkers) + self.dispatcher.reset(self.state) + else: + other = LdaState(self.eta, self.state.sstats.shape) + dirty = False + + reallen = 0 + for chunk_no, chunk in enumerate(utils.grouper(corpus, chunksize, as_numpy=chunks_as_numpy)): + # FIXME: replace rho() in e.g. self.do_estep by self.rho? self.rho is needed for AuthorTopicModel. 
+ self.rho = rho() + reallen += len(chunk) # keep track of how many documents we've processed so far + + if eval_every and ((reallen == lencorpus) or ((chunk_no + 1) % (eval_every * self.numworkers) == 0)): + self.log_perplexity(chunk, chunk_no, total_docs=lencorpus) + + if self.dispatcher: + # add the chunk to dispatcher's job queue, so workers can munch on it + logger.info('PROGRESS: pass %i, dispatching documents up to #%i/%i', + pass_, chunk_no * chunksize + len(chunk), lencorpus) + # this will eventually block until some jobs finish, because the queue has a small finite length + self.dispatcher.putjob(chunk) + else: + logger.info('PROGRESS: pass %i, at document #%i/%i', + pass_, chunk_no * chunksize + len(chunk), lencorpus) + gammat = self.do_estep(chunk, other, chunk_no) + + if self.optimize_alpha: + self.update_alpha(gammat, rho()) + + dirty = True + del chunk + + # perform an M step. determine when based on update_every, don't do this after every chunk + if update_every and (chunk_no + 1) % (update_every * self.numworkers) == 0: + if self.dispatcher: + # distributed mode: wait for all workers to finish + logger.info("reached the end of input; now waiting for all remaining jobs to finish") + other = self.dispatcher.getstate() + self.do_mstep(rho(), other, pass_ > 0) + del other # frees up memory + + if self.dispatcher: + logger.info('initializing workers') + self.dispatcher.reset(self.state) + else: + other = LdaState(self.eta, self.state.sstats.shape) + dirty = False + # endfor single corpus iteration + if reallen != lencorpus: + raise RuntimeError("input corpus size changed during training (don't use generators as input)") + + if dirty: + # finish any remaining updates + if self.dispatcher: + # distributed mode: wait for all workers to finish + logger.info("reached the end of input; now waiting for all remaining jobs to finish") + other = self.dispatcher.getstate() + self.do_mstep(rho(), other, pass_ > 0) + del other + dirty = False + # endfor entire 
corpus update + def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2author=None, ): """ Estimate the variational bound of documents from `corpus`: diff --git a/gensim/models/atmodelold.py b/gensim/models/atmodelold.py new file mode 100644 index 0000000000..0925ffa46f --- /dev/null +++ b/gensim/models/atmodelold.py @@ -0,0 +1,495 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Author-topic model. +""" + +import pdb +from pdb import set_trace as st + +import logging +import numpy +import numbers + +from gensim import utils, matutils +from gensim.models.ldamodel import dirichlet_expectation, get_random_state +from gensim.models import LdaModel +from six.moves import xrange +from scipy.special import gammaln + +from pprint import pprint + +# log(sum(exp(x))) that tries to avoid overflow. NOTE: not used at the moment. +try: + # try importing from here if older scipy is installed + from scipy.maxentropy import logsumexp +except ImportError: + # maxentropy has been removed in recent releases, logsumexp now in misc + from scipy.misc import logsumexp + +logger = logging.getLogger(__name__) + +class AuthorTopicState: + def __init__(self, atmodel): + self.atmodel = atmodel + + def get_lambda(self): + return self.atmodel.var_lambda + +class AuthorTopicModelOld(LdaModel): + """ + Train the author-topic model using online variational Bayes. 
+ """ + + def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, + author2doc=None, doc2author=None, threshold=0.001, minimum_probability=0.01, + iterations=10, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, + eval_every=1, random_state=None, var_lambda=None, chunksize=1): + + self.id2word = id2word + if corpus is None and self.id2word is None: + raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') + + # NOTE: Why would id2word not be none, but have length 0? (From LDA code) + if self.id2word is None: + logger.warning("no word id mapping provided; initializing from corpus, assuming identity") + self.id2word = utils.dict_from_corpus(corpus) + self.num_terms = len(self.id2word) + elif len(self.id2word) > 0: + self.num_terms = 1 + max(self.id2word.keys()) + else: + self.num_terms = 0 + + if self.num_terms == 0: + raise ValueError("cannot compute LDA over an empty collection (no terms)") + + logger.info('Vocabulary consists of %d words.', self.num_terms) + + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). + # If either doc2author or author2doc is missing, construct them from the other. + if doc2author is None: + # Make a mapping from document IDs to author IDs. + doc2author = {} + for d, _ in enumerate(corpus): + author_ids = [] + for a, a_doc_ids in author2doc.items(): + if d in a_doc_ids: + author_ids.append(a) + doc2author[d] = author_ids + elif author2doc is None: + # Make a mapping from author IDs to document IDs. + + # First get a set of all authors. + authors_ids = set() + for d, a_doc_ids in doc2author.items(): + for a in a_doc_ids: + authors_ids.add(a) + + # Now construct the dictionary. 
+ author2doc = {} + for a in range(len(authors_ids)): + author2doc[a] = [] + for d, a_ids in doc2author.items(): + if a in a_ids: + author2doc[a].append(d) + + self.author2doc = author2doc + self.doc2author = doc2author + + self.num_authors = len(self.author2doc) + logger.info('Number of authors: %d.', self.num_authors) + + self.id2author = id2author + if self.id2author is None: + logger.warning("no author id mapping provided; initializing from corpus, assuming identity") + author_integer_ids = [str(i) for i in range(len(author2doc))] + self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) + + # Make the reverse mapping, from author names to author IDs. + self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + + self.corpus = corpus + self.iterations = iterations + self.passes = passes + self.num_topics = num_topics + self.threshold = threshold + self.minimum_probability = minimum_probability + self.decay = decay + self.offset = offset + self.num_authors = len(author2doc) + self.eval_every = eval_every + self.random_state = random_state + self.chunksize = chunksize + + self.alpha = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_topics)]) + #self.eta = numpy.asarray([1.0 / self.num_terms for i in xrange(self.num_terms)]) + self.eta = numpy.asarray([1.0 / self.num_topics for i in xrange(self.num_terms)]) + + self.random_state = get_random_state(random_state) + + self.state = AuthorTopicState(self) + + if corpus is not None: + self.inference(corpus, var_lambda=var_lambda) + + def __str__(self): + return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s)" % \ + (self.num_terms, self.num_topics, self.num_authors, self.decay) + + def rho(self, t): + return pow(self.offset + t, -self.decay) + + def compute_phinorm(self, ids, authors_d, expElogthetad, expElogbetad): + phinorm = numpy.zeros(len(ids)) + expElogtheta_sum = numpy.zeros(self.num_topics) + for a in xrange(len(authors_d)): + 
expElogtheta_sum += expElogthetad[a, :] + phinorm = expElogtheta_sum.dot(expElogbetad) + + return phinorm + + def inference(self, corpus=None, var_lambda=None): + if corpus is None: + # TODO: is copy necessary here? + corpus = self.corpus.copy() + + self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online. + + corpus_words = sum(cnt for document in corpus for _, cnt in document) + + logger.info('Starting inference. Training on %d documents.', len(corpus)) + + # NOTE: as the numerically stable phi update (and bound evaluation) causes + # the bound to converge a bit differently (faster, actually), it is not used + # for now until it is fully understood. + numstable_sm = False + + if not numstable_sm: + maxElogbeta = None + maxElogtheta = None + + if var_lambda is None: + self.optimize_lambda = True + else: + # We have topics from LDA, thus we do not train the topics. + self.optimize_lambda = False + + # Initial values of gamma and lambda. + # Parameters of gamma distribution same as in `ldamodel`. + var_gamma = self.random_state.gamma(100., 1. / 100., + (self.num_authors, self.num_topics)) + tilde_gamma = var_gamma.copy() + self.var_gamma = var_gamma + + if var_lambda is None: + var_lambda = self.random_state.gamma(100., 1. / 100., + (self.num_topics, self.num_terms)) + tilde_lambda = var_lambda.copy() + else: + self.norm_lambda = var_lambda.copy() + for k in xrange(self.num_topics): + self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k] + + #var_lambda += self.eta + + sstats_global = var_lambda.copy() + + self.var_lambda = var_lambda + + # Initialize dirichlet expectations. 
+ Elogtheta = dirichlet_expectation(var_gamma) + Elogbeta = dirichlet_expectation(var_lambda) + if numstable_sm: + maxElogtheta = Elogtheta.max() + maxElogbeta = Elogbeta.max() + expElogtheta = numpy.exp(Elogtheta - maxElogtheta) + expElogbeta = numpy.exp(Elogbeta - maxElogbeta) + else: + expElogtheta = numpy.exp(Elogtheta) + expElogbeta = numpy.exp(Elogbeta) + + if self.eval_every > 0: + word_bound = self.word_bound(corpus, expElogtheta, expElogbeta, maxElogtheta, maxElogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + perwordbound = bound / corpus_words + logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) + #var_lambda -= self.eta + #Elogbeta = dirichlet_expectation(var_lambda) + #expElogbeta = numpy.exp(Elogbeta) + for _pass in xrange(self.passes): + converged = 0 # Number of documents converged for current pass over corpus. + for chunk_no, chunk in enumerate(utils.grouper(corpus, self.chunksize, as_numpy=False)): + # TODO: a smarter of computing rho may be necessary. In ldamodel, + # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay). + rhot = self.rho(chunk_no + _pass) + sstats = numpy.zeros(var_lambda.shape) + for d, doc in enumerate(chunk): + doc_no = chunk_no + d + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[doc_no] # List of author IDs for the current document. + + phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) + + # TODO: if not used, get rid of these. 
+ expElogthetad = expElogtheta[authors_d, :] + expElogbetad = expElogbeta[:, ids] + + for iteration in xrange(self.iterations): + #logger.info('iteration %i', iteration) + + lastgamma = tilde_gamma[authors_d, :] + + # Update gamma. + for a in authors_d: + tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) + + # Update gamma and lambda. + # Interpolation between document d's "local" gamma (tilde_gamma), + # and "global" gamma (var_gamma). Same goes for lambda. + tilde_gamma[authors_d, :] = (1 - rhot) * var_gamma[authors_d, :] + rhot * tilde_gamma[authors_d, :] + + # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. + Elogtheta[authors_d, :] = dirichlet_expectation(tilde_gamma[authors_d, :]) + if numstable_sm: + temp_max = Elogtheta[authors_d, :].max() + maxElogtheta = temp_max if temp_max > maxElogtheta else maxElogtheta + expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :] - maxElogtheta) + else: + expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :]) + + phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) + + # Check for convergence. + # Criterion is mean change in "local" gamma and lambda. + meanchange_gamma = numpy.mean(abs(tilde_gamma[authors_d, :] - lastgamma)) + gamma_condition = meanchange_gamma < self.threshold + # logger.info('Mean change in gamma: %.3e', meanchange_gamma) + if gamma_condition: + # logger.info('Converged after %d iterations.', iteration) + converged += 1 + break + # End of iterations loop. + + var_gamma = tilde_gamma.copy() + + expElogtheta_sum_a = expElogtheta[authors_d, :].sum(axis=0) + sstats[:, ids] += numpy.outer(expElogtheta_sum_a.T, cts/phinorm) + # End of chunk loop. + + if self.optimize_lambda: + # Update lambda. 
+ #sstats *= expElogbeta + #sstats_global = (1 - rhot) * sstats_global + rhot * sstats + #var_lambda = sstats + self.eta + #Elogbeta = dirichlet_expectation(var_lambda) + #expElogbeta = numpy.exp(Elogbeta) + + sstats *= expElogbeta + # Find the ids of the words that are to be updated per this chunk, and update + # only those terms. + # NOTE: this is not necessarily more efficient than just updating all terms, but + # doing that may cause problems. + # NOTE: this assumes that if a single value in a row of sstats is zero, then the + # entire column is zero. This *should* be the case (if not, something else has gone + # wrong). + chunk_ids = sstats[0, :].nonzero() + tilde_lambda[:, chunk_ids] = self.eta[chunk_ids] + self.num_docs * sstats[:, chunk_ids] / self.chunksize + + var_lambda[:, chunk_ids] = (1 - rhot) * var_lambda[:, chunk_ids] + rhot * tilde_lambda[:, chunk_ids] + Elogbeta = dirichlet_expectation(var_lambda) + if numstable_sm: + # NOTE: can it be assumed that only Elogbeta[:, ids] have changed? + temp_max = Elogbeta.max() + maxElogbeta = temp_max if temp_max > maxElogbeta else maxElogbeta + expElogbeta = numpy.exp(Elogbeta - maxElogbeta) + else: + expElogbeta = numpy.exp(Elogbeta) + #var_lambda = var_lambda.copy() + + # Print topics: + # pprint(self.show_topics()) + # End of corpus loop. + + + if self.eval_every > 0 and (_pass + 1) % self.eval_every == 0: + self.var_gamma = var_gamma + self.var_lambda = var_lambda + prev_bound = bound + word_bound = self.word_bound(corpus, expElogtheta, expElogbeta, maxElogtheta, maxElogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + perwordbound = bound / corpus_words + logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) + # NOTE: bound can be computed as below. 
We compute each term for now because it can be useful for debugging. + # bound = eval_bound(corpus, Elogtheta, Elogbeta, expElogtheta, expElogtheta, maxElogtheta=maxElogtheta, maxElogbeta=maxElogbeta): + + #logger.info('Converged documents: %d/%d', converged, self.num_docs) + + # TODO: consider whether to include bound convergence criterion, something like this: + #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold: + # break + # End of pass over corpus loop. + + # Ensure that the bound (or log probabilities) is computed at the very last pass. + if self.eval_every > 0 and not (_pass + 1) % self.eval_every == 0: + # If the bound should be computed, and it wasn't computed at the last pass, + # then compute the bound. + self.var_gamma = var_gamma + self.var_lambda = var_lambda + prev_bound = bound + word_bound = self.word_bound(corpus, expElogtheta, expElogbeta, maxElogtheta, maxElogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) + + + self.var_lambda = var_lambda + self.var_gamma = var_gamma + + return var_gamma, var_lambda + + def eval_bound(self, corpus, Elogtheta, Elogbeta, expElogtheta, expElogbeta, maxElogtheta=None, maxElogbeta=None): + word_bound = self.word_bound(corpus, expElogtheta, expElogbeta, maxElogtheta=maxElogtheta, maxElogbeta=maxElogbeta) + theta_bound = self.theta_bound(Elogtheta) + beta_bound = self.beta_bound(Elogbeta) + bound = word_bound + theta_bound + beta_bound + return bound + + def word_bound(self, docs, expElogtheta, expElogbeta, maxElogtheta=None, maxElogbeta=None): + """ + Compute the expectation of the log conditional likelihood of the data, + + E_q[log p(w_d | theta, beta, A_d)], + + where p(w_d | theta, beta, A_d) is the log conditional likelihood of the data. 
+ """ + + # TODO: allow for evaluating test corpus. This will require inferring on unseen documents. + # NOTE: computing bound is very very computationally intensive. We could, for example, + # only use a portion of the data to do that (even a held-out set). + + # TODO: same optimized computation as in phinorm can be used. + bound= 0.0 + for d, doc in enumerate(docs): + authors_d = self.doc2author[d] + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + bound_d = 0.0 + # Computing the bound requires summing over expElogtheta[a, k] * expElogbeta[k, v], which + # is the same computation as in normalizing phi. + phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) + bound += numpy.log(1.0 / len(authors_d)) + cts.dot(numpy.log(phinorm)) + + # TODO: consider using per-word bound, i.e. + # bound *= 1 /sum(len(doc) for doc in docs) + + return bound + + def theta_bound(self, Elogtheta): + bound = 0.0 + for a in xrange(self.num_authors): + var_gamma_a = self.var_gamma[a, :] + Elogtheta_a = Elogtheta[a, :] + bound += numpy.sum((self.alpha - var_gamma_a) * Elogtheta_a) + bound += numpy.sum(gammaln(var_gamma_a) - gammaln(self.alpha)) + bound += gammaln(numpy.sum(self.alpha)) - gammaln(numpy.sum(var_gamma_a)) + + return bound + + def beta_bound(self, Elogbeta): + bound = 0.0 + bound += numpy.sum((self.eta - self.var_lambda) * Elogbeta) + bound += numpy.sum(gammaln(self.var_lambda) - gammaln(self.eta)) + bound += numpy.sum(gammaln(numpy.sum(self.eta)) - gammaln(numpy.sum(self.var_lambda, 1))) + + return bound + + def eval_logprob(self, doc_ids=None): + """ + Compute the liklihood of the corpus under the model, by first + computing the conditional probabilities of the words in a + document d, + + p(w_d | theta, beta, A_d), + + summing over all documents, and dividing by the number of documents. 
+ """ + + norm_gamma = self.var_gamma.copy() + for a in xrange(self.num_authors): + norm_gamma[a, :] = self.var_gamma[a, :] / self.var_gamma.sum(axis=1)[a] + + if self.optimize_lambda: + norm_lambda = self.var_lambda.copy() + for k in xrange(self.num_topics): + norm_lambda[k, :] = self.var_lambda[k, :] / self.var_lambda.sum(axis=1)[k] + else: + norm_lambda = self.norm_lambda + + if doc_ids is None: + docs = self.corpus + else: + docs = [self.corpus[d] for d in doc_ids] + + logprob = 0.0 + for d, doc in enumerate(docs): + ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. + cts = numpy.array([cnt for _, cnt in doc]) # Word counts. + authors_d = self.doc2author[d] + logprob_d = 0.0 + #phinorm = self.compute_phinorm(ids, authors_d, expElogtheta, expElogbeta) + for vi, v in enumerate(ids): + logprob_v = 0.0 + for k in xrange(self.num_topics): + for a in authors_d: + logprob_v += norm_gamma[a, k] * norm_lambda[k, v] + logprob_d += cts[vi] * numpy.log(logprob_v) + logprob += numpy.log(1.0 / len(authors_d)) + logprob_d + + return logprob + + # Overriding LdaModel.get_topic_terms. + def get_topic_terms(self, topicid, topn=10): + """ + Return a list of `(word_id, probability)` 2-tuples for the most + probable words in topic `topicid`. + Only return 2-tuples for the topn most probable words (ignore the rest). + """ + topic = self.var_lambda[topicid, :] + topic = topic / topic.sum() # normalize to probability distribution + bestn = matutils.argsort(topic, topn, reverse=True) + return [(id, topic[id]) for id in bestn] + + + def get_author_topics(self, author_id, minimum_probability=None): + """ + Return topic distribution the given author, as a list of + (topic_id, topic_probability) 2-tuples. + Ignore topics with very low probability (below `minimum_probability`). 
+ """ + if minimum_probability is None: + minimum_probability = self.minimum_probability + minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output + + topic_dist = self.var_gamma[author_id, :] / sum(self.var_gamma[author_id, :]) + + author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) + if topicvalue >= minimum_probability] + + return author_topics + + + diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index 8605220876..eb702521b2 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -652,7 +652,7 @@ def rho(): reallen = 0 for chunk_no, chunk in enumerate(utils.grouper(corpus, chunksize, as_numpy=chunks_as_numpy)): - # FIXME: replace rho() in e.g. self.do_estep by self.rho? Needed for AuthorTopicModel. + # FIXME: replace rho() in e.g. self.do_estep by self.rho? self.rho is needed for AuthorTopicModel. self.rho = rho() reallen += len(chunk) # keep track of how many documents we've processed so far From e911aed007a8ad13683ad7957e04dfd5d4bd7fef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 7 Dec 2016 15:25:59 +0100 Subject: [PATCH 061/100] Implemented 'continued training' (call update multiple times) and __getitem__ in refactored code (atmodel.py). 
--- docs/notebooks/at_with_nips.ipynb | 451 ++++++++++---------- gensim/models/atmodel.py | 351 ++++++++++++++-- gensim/models/atmodel2.py | 660 ------------------------------ gensim/models/atmodelold.py | 4 +- 4 files changed, 544 insertions(+), 922 deletions(-) delete mode 100755 gensim/models/atmodel2.py diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index f60445f4e8..6e00eec295 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -56,7 +56,7 @@ "from nltk.stem.wordnet import WordNetLemmatizer\n", "import gensim\n", "from gensim.models import Phrases\n", - "from gensim.corpora import Dictionary\n", + "from gensim.corpora import Dictionary, MmCorpus\n", "from gensim.models import LdaModel\n", "from imp import reload\n", "from pprint import pprint\n", @@ -68,6 +68,8 @@ "\n", "from gensim.models import AuthorTopicModel\n", "from gensim.models import atmodel\n", + "from gensim.models import AuthorTopicModel2\n", + "from gensim.models import atmodel2\n", "from gensim.models import AuthorTopicModelOld\n", "from gensim.models import atmodelold\n", "from gensim.models import LdaModel\n", @@ -80,7 +82,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -108,7 +110,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 308, "metadata": { "collapsed": false }, @@ -145,7 +147,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 309, "metadata": { "collapsed": false }, @@ -163,31 +165,29 @@ " contents = re.split(',', line)\n", " author_name = (contents[1] + contents[0]).strip()\n", " ids = [c.strip() for c in contents[2:]]\n", - " if not author2id.get(author_name):\n", + " if not author2doc.get(author_name):\n", + " author2doc[author_name] = []\n", " 
author2id[author_name] = i\n", - " author2doc[i] = []\n", " i += 1\n", " \n", - " author_id = author2id[author_name]\n", - " author2doc[author_id].extend([yr + '_' + id for id in ids])\n", + " author2doc[author_name].extend([yr + '_' + id for id in ids])\n", " " ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 310, "metadata": { - "collapsed": false + "collapsed": true }, "outputs": [], "source": [ - "# Make a mapping from author ID to author name.\n", "id2author = dict(zip(author2id.values(), author2id.keys()))" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 311, "metadata": { "collapsed": false }, @@ -203,25 +203,6 @@ " author2doc[a][i] = doc_id_dict[doc_id]" ] }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Make a mapping from document IDs to author IDs.\n", - "# Same as in the atvb code.\n", - "doc2author = {}\n", - "for d, _ in enumerate(docs):\n", - " author_ids = []\n", - " for a, a_doc_ids in author2doc.items():\n", - " if d in a_doc_ids:\n", - " author_ids.append(a)\n", - " doc2author[d] = author_ids" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -231,7 +212,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 312, "metadata": { "collapsed": false }, @@ -254,7 +235,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 313, "metadata": { "collapsed": false }, @@ -269,7 +250,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 314, "metadata": { "collapsed": false }, @@ -297,7 +278,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 315, "metadata": { "collapsed": true }, @@ -309,7 +290,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 316, "metadata": { "collapsed": false }, @@ -327,7 +308,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 317, "metadata": { "collapsed": 
false }, @@ -336,7 +317,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Q
w\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkchlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl
9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukSxB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396
fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGooTGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFd
OnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9
zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKwBOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8
apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZgtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmb
WSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLStsDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1K
CciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0n
Dyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40Z
mZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIknwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M
681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSIlKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2
ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrzzGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjI
ppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/GDir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": 
{}, @@ -362,21 +343,36 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 318, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ "# Vectorize data.\n", "\n", "# Bag-of-words representation of the documents.\n", - "corpus = [dictionary.doc2bow(doc) for doc in docs]" + "corpus = [dictionary.doc2bow(doc) for doc in docs]\n", + "\n", + "# Serialize the corpus.\n", + "MmCorpus.serialize('/tmp/corpus.mm', corpus)\n", + "corpus = MmCorpus('/tmp/corpus.mm')" ] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 319, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "doc2author = atmodel.construct_doc2author(author2doc=author2doc, corpus=corpus)" + ] + }, + { + "cell_type": "code", + "execution_count": 320, "metadata": { "collapsed": false }, @@ -406,7 +402,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 321, "metadata": { "collapsed": false }, @@ -422,7 +418,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 322, "metadata": { "collapsed": false }, @@ -431,8 +427,18 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 5.77 s, sys: 0 ns, total: 5.77 s\n", - "Wall time: 5.77 s\n" + "-8.22604261303\n", + "-7.00250685225\n", + "-6.997669314\n", + "-6.99095801828\n", + "-6.98194710233\n", + "-6.97059497993\n", + "-6.9571757795\n", + "-6.94218337286\n", + "-6.92617606121\n", + "-6.90967324258\n", + "CPU times: user 6.35 s, sys: 0 ns, total: 6.35 s\n", + "Wall time: 6.34 s\n" ] } ], @@ -448,86 +454,87 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": 267, "metadata": { "collapsed": false }, "outputs": [ { - "ename": "ImportError", - "evalue": "cannot import name 'from_iterable'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mImportError\u001b[0m Traceback (most 
recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mitertools\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mfrom_iterable\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mImportError\u001b[0m: cannot import name 'from_iterable'" + "name": "stdout", + "output_type": "stream", + "text": [ + "-6.89310631328\n", + "-6.83421961412\n", + "-6.80373632624\n", + "-6.77968901432\n", + "-6.76018760288\n", + "-6.74405190792\n", + "-6.7304722216\n", + "-6.71887559126\n", + "-6.70884623307\n", + "-6.70007530703\n" ] } ], "source": [ - "from itertools import chain" + "model.update()" ] }, { "cell_type": "code", - "execution_count": 60, + "execution_count": 305, "metadata": { "collapsed": false }, "outputs": [ { - "data": { - "text/plain": [ - "14" - ] - }, - "execution_count": 60, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "generator_chain = chain.from_iterable([range(10), range(10, 20)])\n", - "next(islice(generator_chain, 14, 15))" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "from itertools import islice, count" - ] - }, - { - "cell_type": "code", - "execution_count": 45, - "metadata": { - "collapsed": false - }, - "outputs": [ + "name": "stdout", + "output_type": "stream", + "text": [ + "Is this a new author? 
True\n", + "Number of documents by author: 1\n", + "-6.0190157986\n", + "-5.93303699802\n", + "-5.89706978832\n", + "-5.87070002406\n", + "-5.85024357659\n", + "-5.83373120263\n", + "-5.82001191111\n", + "-5.80836592923\n", + "-5.79831886651\n", + "-5.78954605844\n" + ] + }, { "data": { "text/plain": [ - "0" + "[(0, 0.1701733360072834),\n", + " (1, 0.012369638562793309),\n", + " (2, 0.022435297354046722),\n", + " (3, 0.064683494549145251),\n", + " (4, 0.14229529414449704),\n", + " (5, 0.096406364483889423),\n", + " (7, 0.037669655666072922),\n", + " (8, 0.32989184827034412),\n", + " (9, 0.12324347211255265)]" ] }, - "execution_count": 45, + "execution_count": 305, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "next(islice(count(), 20))" + "author_name = 'Behnaam Aazhang'\n", + "print('Is this a new author?', author_name not in model.author2doc)\n", + "docs = corpus[author2doc[author_name]]\n", + "print('Number of documents by author:', len(docs))\n", + "model[[docs, author_name]]" ] }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 323, "metadata": { "collapsed": false }, @@ -536,28 +543,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.017*\"classifier\" + 0.012*\"node\" + 0.008*\"vector\" + 0.007*\"recognition\" + 0.006*\"decision\" + 0.006*\"classification\" + 0.005*\"sequence\" + 0.005*\"class\" + 0.005*\"sample\" + 0.005*\"probability\"'),\n", + " '0.012*\"hidden\" + 0.007*\"memory\" + 0.006*\"vector\" + 0.005*\"image\" + 0.005*\"speech\" + 0.005*\"hidden_unit\" + 0.004*\"circuit\" + 0.004*\"node\" + 0.004*\"propagation\" + 0.004*\"back_propagation\"'),\n", " (1,\n", - " '0.010*\"cell\" + 0.009*\"activation\" + 0.008*\"hidden\" + 0.007*\"node\" + 0.005*\"propagation\" + 0.005*\"response\" + 0.005*\"hidden_unit\" + 0.005*\"energy\" + 0.004*\"back_propagation\" + 0.004*\"matrix\"'),\n", + " '0.010*\"cell\" + 0.006*\"field\" + 0.005*\"activity\" + 0.005*\"vector\" + 0.005*\"cortex\" + 0.005*\"matrix\" + 
0.005*\"receptive\" + 0.004*\"noise\" + 0.004*\"cortical\" + 0.004*\"response\"'),\n", " (2,\n", - " '0.009*\"vector\" + 0.008*\"image\" + 0.007*\"hidden\" + 0.005*\"fig\" + 0.005*\"dynamic\" + 0.005*\"noise\" + 0.005*\"object\" + 0.004*\"energy\" + 0.004*\"memory\" + 0.004*\"matrix\"'),\n", + " '0.008*\"cell\" + 0.007*\"node\" + 0.007*\"region\" + 0.006*\"field\" + 0.005*\"class\" + 0.005*\"probability\" + 0.004*\"fig\" + 0.004*\"distribution\" + 0.004*\"element\" + 0.004*\"threshold\"'),\n", " (3,\n", - " '0.013*\"vector\" + 0.011*\"hidden\" + 0.010*\"memory\" + 0.009*\"field\" + 0.005*\"hidden_unit\" + 0.004*\"threshold\" + 0.004*\"internal\" + 0.004*\"associative\" + 0.004*\"bit\" + 0.003*\"fig\"'),\n", + " '0.014*\"cell\" + 0.007*\"response\" + 0.006*\"speech\" + 0.006*\"fig\" + 0.006*\"stimulus\" + 0.005*\"synaptic\" + 0.005*\"chain\" + 0.005*\"field\" + 0.004*\"synapse\" + 0.004*\"synapsis\"'),\n", " (4,\n", - " '0.020*\"cell\" + 0.009*\"firing\" + 0.008*\"stimulus\" + 0.007*\"synaptic\" + 0.007*\"activity\" + 0.006*\"image\" + 0.006*\"response\" + 0.006*\"spike\" + 0.005*\"potential\" + 0.004*\"current\"'),\n", + " '0.011*\"node\" + 0.007*\"role\" + 0.006*\"activation\" + 0.005*\"processor\" + 0.005*\"current\" + 0.005*\"cell\" + 0.004*\"fig\" + 0.004*\"noise\" + 0.004*\"line\" + 0.004*\"element\"'),\n", " (5,\n", - " '0.007*\"hidden\" + 0.006*\"node\" + 0.005*\"image\" + 0.005*\"matrix\" + 0.004*\"class\" + 0.004*\"fig\" + 0.004*\"noise\" + 0.004*\"propagation\" + 0.003*\"recognition\" + 0.003*\"vector\"'),\n", + " '0.013*\"vector\" + 0.011*\"hidden\" + 0.009*\"image\" + 0.006*\"hidden_unit\" + 0.006*\"recognition\" + 0.005*\"object\" + 0.005*\"matrix\" + 0.005*\"connectionist\" + 0.004*\"procedure\" + 0.004*\"sequence\"'),\n", " (6,\n", - " '0.009*\"speech\" + 0.009*\"region\" + 0.008*\"recognition\" + 0.006*\"chain\" + 0.006*\"probability\" + 0.005*\"class\" + 0.005*\"cell\" + 0.005*\"hidden\" + 0.004*\"domain\" + 0.004*\"distribution\"'),\n", + " 
'0.018*\"cell\" + 0.012*\"firing\" + 0.010*\"current\" + 0.009*\"circuit\" + 0.008*\"response\" + 0.008*\"activity\" + 0.007*\"synaptic\" + 0.006*\"spike\" + 0.005*\"potential\" + 0.005*\"membrane\"'),\n", " (7,\n", - " '0.011*\"cell\" + 0.007*\"memory\" + 0.006*\"response\" + 0.006*\"fig\" + 0.006*\"circuit\" + 0.006*\"current\" + 0.005*\"hopfield\" + 0.005*\"analog\" + 0.005*\"synapse\" + 0.005*\"activity\"'),\n", + " '0.007*\"map\" + 0.006*\"fig\" + 0.005*\"image\" + 0.005*\"memory\" + 0.005*\"activity\" + 0.004*\"object\" + 0.004*\"probability\" + 0.004*\"node\" + 0.004*\"field\" + 0.004*\"vector\"'),\n", " (8,\n", - " '0.013*\"circuit\" + 0.006*\"memory\" + 0.005*\"control\" + 0.005*\"cell\" + 0.005*\"threshold\" + 0.005*\"fig\" + 0.004*\"voltage\" + 0.004*\"transistor\" + 0.004*\"current\" + 0.004*\"response\"'),\n", + " '0.014*\"classifier\" + 0.013*\"memory\" + 0.010*\"hidden\" + 0.007*\"vector\" + 0.006*\"sample\" + 0.006*\"node\" + 0.006*\"recognition\" + 0.005*\"propagation\" + 0.005*\"bit\" + 0.005*\"hidden_unit\"'),\n", " (9,\n", - " '0.008*\"memory\" + 0.008*\"field\" + 0.008*\"cell\" + 0.007*\"map\" + 0.007*\"delay\" + 0.006*\"cortex\" + 0.006*\"image\" + 0.006*\"chip\" + 0.005*\"current\" + 0.005*\"synaptic\"')]" + " '0.008*\"cell\" + 0.005*\"speech\" + 0.005*\"region\" + 0.005*\"image\" + 0.004*\"map\" + 0.004*\"visual\" + 0.004*\"threshold\" + 0.004*\"field\" + 0.004*\"class\" + 0.003*\"activation\"')]" ] }, - "execution_count": 26, + "execution_count": 323, "metadata": {}, "output_type": "execute_result" } @@ -568,7 +575,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 307, "metadata": { "collapsed": false }, @@ -578,79 +585,88 @@ "output_type": "stream", "text": [ "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [62]\n", - "[(0, 0.17428370582074723),\n", - " (1, 0.10229569024379424),\n", - " (2, 0.062556106292013122),\n", - " (3, 0.078817840485611065),\n", - " (4, 0.068487942942868585),\n", - " (5, 0.14869390057914703),\n", 
- " (6, 0.17212355568609788),\n", - " (7, 0.074170089610964149),\n", - " (8, 0.047702469618850774),\n", - " (9, 0.070868698719905782)]\n", - "\n", - "Geoffrey E. Hinton\n", - "Docs: [143, 284, 230, 197]\n", - "[(0, 0.1963942573033424),\n", - " (1, 0.12792966363823302),\n", - " (2, 0.23505329159063704),\n", - " (3, 0.060305386421733033),\n", - " (4, 0.04267590384413758),\n", - " (5, 0.060980284135135593),\n", - " (6, 0.2451247159367281),\n", - " (8, 0.01574985863695311),\n", - " (9, 0.01023414671028579)]\n", - "\n", - "Michael I. Jordan\n", - "Docs: [237]\n", - "[(0, 0.085651762012953936),\n", - " (1, 0.065665448104732405),\n", - " (2, 0.07777125401127058),\n", - " (3, 0.050480420361483674),\n", - " (4, 0.065721037891177864),\n", - " (5, 0.086499723758504746),\n", - " (6, 0.38914428858057321),\n", - " (7, 0.039550645237331414),\n", - " (8, 0.10733538353868659),\n", - " (9, 0.032180036503285388)]\n", - "\n", - "James M. Bower\n", - "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(1, 0.013903403611000136),\n", - " (4, 0.098066607370058775),\n", - " (7, 0.11242612291693072),\n", - " (8, 0.016186021191484681),\n", - " (9, 0.74612850820488463)]\n" + "Yaser S.Abu-Mostafa\n" + ] + }, + { + "ename": "KeyError", + "evalue": "'Yaser S.Abu-Mostafa'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Yaser S.Abu-Mostafa'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m 
\u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyError\u001b[0m: 'Yaser S.Abu-Mostafa'" ] } ], "source": [ "name = 'Yaser S.Abu-Mostafa'\n", "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", + "print('Docs:', author2doc[name])\n", "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'Geoffrey E. Hinton'\n", "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", + "print('Docs:', author2doc[name])\n", "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'Michael I. Jordan'\n", "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", + "print('Docs:', author2doc[name])\n", "pprint(model.get_author_topics(author2id[name]))\n", "\n", "name = 'James M. 
Bower'\n", "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", + "print('Docs:', author2doc[name])\n", "pprint(model.get_author_topics(author2id[name]))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Serialized corpus stuff" + ] + }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 40, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "corpus = [[(0,1)]]\n", + "MmCorpus.serialize('/tmp/corpus.mm', corpus)\n", + "corpus = MmCorpus('/tmp/corpus.mm')" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 46, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "type(corpus).__name__ == 'MmCorpus'" + ] + }, + { + "cell_type": "code", + "execution_count": 328, "metadata": { "collapsed": false }, @@ -659,21 +675,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 7.32 s, sys: 4 ms, total: 7.32 s\n", - "Wall time: 7.33 s\n" + "CPU times: user 8.97 s, sys: 24 ms, total: 9 s\n", + "Wall time: 9 s\n" ] } ], "source": [ - "%time model2 = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + "%time model2 = AuthorTopicModelOld(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None, chunksize=2000)" + " eval_every=0, random_state=1, var_lambda=None, chunksize=2000)" ] }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 329, "metadata": { "collapsed": false, "scrolled": false @@ -683,28 +699,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.016*\"vector\" + 0.012*\"memory\" + 0.007*\"associative\" 
+ 0.005*\"control\" + 0.005*\"constraint\" + 0.005*\"recognition\" + 0.005*\"chip\" + 0.005*\"image\" + 0.004*\"hidden\" + 0.004*\"machine\"'),\n", + " '0.028*\"cell\" + 0.015*\"firing\" + 0.013*\"stimulus\" + 0.012*\"activity\" + 0.011*\"synaptic\" + 0.011*\"cortical\" + 0.010*\"response\" + 0.009*\"orientation\" + 0.008*\"spike\" + 0.008*\"field\"'),\n", " (1,\n", - " '0.014*\"memory\" + 0.007*\"probability\" + 0.007*\"vector\" + 0.005*\"chip\" + 0.005*\"pulse\" + 0.005*\"fig\" + 0.004*\"node\" + 0.004*\"cell\" + 0.004*\"capacity\" + 0.004*\"matrix\"'),\n", + " '0.014*\"memory\" + 0.006*\"matrix\" + 0.005*\"cell\" + 0.005*\"fig\" + 0.005*\"cortex\" + 0.004*\"vector\" + 0.004*\"associative\" + 0.004*\"hopfield\" + 0.004*\"associative_memory\" + 0.004*\"location\"'),\n", " (2,\n", - " '0.017*\"classifier\" + 0.015*\"circuit\" + 0.006*\"noise\" + 0.006*\"current\" + 0.006*\"fig\" + 0.006*\"node\" + 0.005*\"gaussian\" + 0.005*\"speech\" + 0.005*\"propagation\" + 0.005*\"decision\"'),\n", + " '0.016*\"classifier\" + 0.013*\"hidden\" + 0.007*\"hidden_unit\" + 0.006*\"internal\" + 0.006*\"back_propagation\" + 0.006*\"current\" + 0.005*\"propagation\" + 0.005*\"chip\" + 0.005*\"node\" + 0.005*\"table\"'),\n", " (3,\n", - " '0.008*\"cell\" + 0.008*\"fig\" + 0.006*\"vector\" + 0.006*\"hidden\" + 0.006*\"velocity\" + 0.005*\"operator\" + 0.005*\"image\" + 0.004*\"activation\" + 0.004*\"receptor\" + 0.004*\"delay\"'),\n", + " '0.011*\"hidden\" + 0.010*\"recognition\" + 0.010*\"image\" + 0.008*\"speech\" + 0.008*\"vector\" + 0.006*\"node\" + 0.005*\"propagation\" + 0.005*\"hidden_unit\" + 0.004*\"energy\" + 0.004*\"connectionist\"'),\n", " (4,\n", - " '0.009*\"image\" + 0.009*\"match\" + 0.008*\"processor\" + 0.007*\"classifier\" + 0.007*\"node\" + 0.007*\"element\" + 0.006*\"activation\" + 0.006*\"link\" + 0.005*\"nat\" + 0.005*\"fig\"'),\n", + " '0.011*\"map\" + 0.009*\"fig\" + 0.008*\"element\" + 0.007*\"contour\" + 0.007*\"xl\" + 0.007*\"brain\" + 0.005*\"threshold\" + 
0.005*\"vector\" + 0.005*\"position\" + 0.005*\"noise\"'),\n", " (5,\n", - " '0.011*\"field\" + 0.010*\"cell\" + 0.007*\"synaptic\" + 0.005*\"cortical\" + 0.005*\"visual\" + 0.005*\"activity\" + 0.005*\"eye\" + 0.005*\"synapsis\" + 0.004*\"map\" + 0.004*\"phase\"'),\n", + " '0.013*\"vector\" + 0.008*\"probability\" + 0.007*\"class\" + 0.007*\"matrix\" + 0.005*\"distribution\" + 0.005*\"theorem\" + 0.005*\"threshold\" + 0.005*\"let\" + 0.005*\"bound\" + 0.004*\"theory\"'),\n", " (6,\n", - " '0.006*\"hidden\" + 0.006*\"recognition\" + 0.005*\"map\" + 0.005*\"vector\" + 0.005*\"node\" + 0.004*\"object\" + 0.004*\"speech\" + 0.004*\"matrix\" + 0.003*\"class\" + 0.003*\"sequence\"'),\n", + " '0.007*\"activation\" + 0.006*\"analog\" + 0.006*\"node\" + 0.006*\"pulse\" + 0.005*\"processor\" + 0.005*\"temperature\" + 0.005*\"circuit\" + 0.005*\"field\" + 0.005*\"chip\" + 0.005*\"threshold\"'),\n", " (7,\n", - " '0.013*\"role\" + 0.009*\"motion\" + 0.008*\"source\" + 0.007*\"regular\" + 0.007*\"visual\" + 0.006*\"markov\" + 0.006*\"threshold\" + 0.006*\"node\" + 0.005*\"code\" + 0.005*\"depth\"'),\n", + " '0.011*\"role\" + 0.005*\"eye\" + 0.005*\"vector\" + 0.005*\"controller\" + 0.005*\"motor\" + 0.005*\"fig\" + 0.005*\"motion\" + 0.005*\"product\" + 0.005*\"control\" + 0.005*\"variable\"'),\n", " (8,\n", - " '0.028*\"cell\" + 0.013*\"response\" + 0.013*\"stimulus\" + 0.010*\"spike\" + 0.009*\"firing\" + 0.009*\"current\" + 0.009*\"image\" + 0.009*\"potential\" + 0.006*\"activity\" + 0.006*\"membrane\"'),\n", + " '0.020*\"cell\" + 0.010*\"map\" + 0.010*\"region\" + 0.009*\"response\" + 0.007*\"circuit\" + 0.006*\"chain\" + 0.006*\"brain\" + 0.005*\"human\" + 0.005*\"current\" + 0.005*\"fig\"'),\n", " (9,\n", - " '0.014*\"hidden\" + 0.009*\"hidden_unit\" + 0.008*\"distribution\" + 0.008*\"node\" + 0.007*\"image\" + 0.006*\"activation\" + 0.006*\"propagation\" + 0.006*\"back_propagation\" + 0.005*\"speech\" + 0.005*\"sample\"')]" + " '0.008*\"cell\" + 0.005*\"threshold\" + 
0.005*\"object\" + 0.005*\"associative\" + 0.005*\"node\" + 0.005*\"control\" + 0.005*\"activity\" + 0.005*\"stimulus\" + 0.005*\"direction\" + 0.005*\"phase\"')]" ] }, - "execution_count": 24, + "execution_count": 329, "metadata": {}, "output_type": "execute_result" } @@ -715,7 +731,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 331, "metadata": { "collapsed": false }, @@ -727,62 +743,59 @@ "\n", "Yaser S.Abu-Mostafa\n", "Docs: [62]\n", - "[(0, 0.12257888012385142),\n", - " (1, 0.18839815551960026),\n", - " (2, 0.036637297625550132),\n", - " (3, 0.015498644507138377),\n", - " (4, 0.072386020997623229),\n", - " (5, 0.075906889662321148),\n", - " (6, 0.34904030995007596),\n", - " (7, 0.022928611918427422),\n", - " (8, 0.068558327925279966),\n", - " (9, 0.048066861770131898)]\n", + "[(0, 0.058713846836094249),\n", + " (1, 0.15509352295391379),\n", + " (2, 0.041233589610359118),\n", + " (3, 0.13792786464219733),\n", + " (4, 0.048002886804540928),\n", + " (5, 0.25763034118791089),\n", + " (6, 0.17724144451099547),\n", + " (7, 0.01646106706106195),\n", + " (8, 0.053148727421393017),\n", + " (9, 0.054546708971533304)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [143, 284, 230, 197]\n", - "[(0, 0.017941678995404806),\n", - " (1, 0.037633561393485344),\n", - " (2, 0.040229587442296211),\n", - " (6, 0.86860470412607893)]\n", + "[(3, 0.97651618616058111), (7, 0.012150945089860542)]\n", "\n", "Michael I. 
Jordan\n", "Docs: [237]\n", - "[(0, 0.23458706049871778),\n", - " (1, 0.028074129427662773),\n", - " (2, 0.092872627256054469),\n", - " (3, 0.060285180922721039),\n", - " (4, 0.05771159235103844),\n", - " (5, 0.27973835933458052),\n", - " (6, 0.13822500759562903),\n", - " (7, 0.015239077050084931),\n", - " (8, 0.052855346935884104),\n", - " (9, 0.040411618627626808)]\n", + "[(0, 0.016941361324012749),\n", + " (1, 0.047718481863329137),\n", + " (2, 0.059733414724854321),\n", + " (3, 0.29707902020298604),\n", + " (4, 0.013791609529790545),\n", + " (5, 0.095397289926623455),\n", + " (6, 0.048931589429489268),\n", + " (7, 0.19814572323721011),\n", + " (8, 0.16098988099736566),\n", + " (9, 0.061271628764338795)]\n", "\n", "James M. Bower\n", "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(5, 0.11028614207196175), (6, 0.41931814978983267), (8, 0.4701441896634681)]\n" + "[(1, 0.44841235581967448), (8, 0.55011298037901579)]\n" ] } ], "source": [ "name = 'Yaser S.Abu-Mostafa'\n", "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", + "print('Docs:', author2doc[name])\n", "pprint(model2.get_author_topics(author2id[name]))\n", "\n", "name = 'Geoffrey E. Hinton'\n", "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", + "print('Docs:', author2doc[name])\n", "pprint(model2.get_author_topics(author2id[name]))\n", "\n", "name = 'Michael I. Jordan'\n", "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", + "print('Docs:', author2doc[name])\n", "pprint(model2.get_author_topics(author2id[name]))\n", "\n", "name = 'James M. 
Bower'\n", "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", + "print('Docs:', author2doc[name])\n", "pprint(model2.get_author_topics(author2id[name]))" ] }, @@ -990,7 +1003,7 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": 324, "metadata": { "collapsed": false }, @@ -1002,7 +1015,7 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": 325, "metadata": { "collapsed": false }, @@ -1011,8 +1024,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 21.7 s, sys: 7.6 s, total: 29.3 s\n", - "Wall time: 20.8 s\n" + "CPU times: user 5.94 s, sys: 44 ms, total: 5.99 s\n", + "Wall time: 5.99 s\n" ] } ], @@ -1067,7 +1080,7 @@ }, { "cell_type": "code", - "execution_count": 89, + "execution_count": 326, "metadata": { "collapsed": false }, @@ -1076,28 +1089,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.013*\"cell\" + 0.011*\"neuron\" + 0.008*\"visual\" + 0.008*\"response\" + 0.007*\"stimulus\" + 0.006*\"activity\" + 0.006*\"field\" + 0.004*\"motion\" + 0.004*\"cortex\" + 0.004*\"layer\"'),\n", + " '0.014*\"hidden\" + 0.014*\"speech\" + 0.011*\"recognition\" + 0.007*\"hidden_unit\" + 0.006*\"propagation\" + 0.006*\"trained\" + 0.006*\"back_propagation\" + 0.006*\"test\" + 0.005*\"image\" + 0.005*\"classification\"'),\n", " (1,\n", - " '0.006*\"bound\" + 0.005*\"class\" + 0.005*\"node\" + 0.004*\"generalization\" + 0.004*\"sample\" + 0.004*\"let\" + 0.004*\"estimate\" + 0.004*\"tree\" + 0.004*\"approximation\" + 0.004*\"theorem\"'),\n", + " '0.011*\"field\" + 0.008*\"receptive\" + 0.008*\"cell\" + 0.008*\"receptive_field\" + 0.007*\"direction\" + 0.007*\"energy\" + 0.007*\"motion\" + 0.006*\"visual\" + 0.006*\"noise\" + 0.005*\"activity\"'),\n", " (2,\n", - " '0.009*\"class\" + 0.007*\"recognition\" + 0.007*\"classifier\" + 0.005*\"classification\" + 0.005*\"word\" + 0.005*\"distance\" + 0.005*\"image\" + 0.005*\"hidden\" + 0.004*\"character\" + 0.004*\"trained\"'),\n", + " 
'0.011*\"region\" + 0.009*\"memory\" + 0.009*\"delay\" + 0.008*\"chain\" + 0.007*\"fig\" + 0.006*\"matrix\" + 0.006*\"cell\" + 0.006*\"hopfield\" + 0.005*\"field\" + 0.005*\"stability\"'),\n", " (3,\n", - " '0.021*\"image\" + 0.006*\"gaussian\" + 0.005*\"face\" + 0.005*\"component\" + 0.004*\"matrix\" + 0.003*\"prior\" + 0.003*\"density\" + 0.003*\"noise\" + 0.003*\"hidden\" + 0.003*\"object\"'),\n", + " '0.026*\"cell\" + 0.010*\"circuit\" + 0.010*\"response\" + 0.010*\"chip\" + 0.010*\"synaptic\" + 0.010*\"stimulus\" + 0.009*\"synapse\" + 0.009*\"cortex\" + 0.008*\"synapsis\" + 0.008*\"pulse\"'),\n", " (4,\n", - " '0.009*\"control\" + 0.006*\"action\" + 0.006*\"policy\" + 0.005*\"optimal\" + 0.005*\"dynamic\" + 0.005*\"reinforcement\" + 0.005*\"signal\" + 0.004*\"controller\" + 0.004*\"noise\" + 0.003*\"trajectory\"'),\n", + " '0.018*\"node\" + 0.012*\"activation\" + 0.010*\"processor\" + 0.009*\"object\" + 0.008*\"role\" + 0.006*\"connectionist\" + 0.005*\"current\" + 0.005*\"element\" + 0.005*\"machine\" + 0.005*\"update\"'),\n", " (5,\n", - " '0.009*\"memory\" + 0.004*\"rule\" + 0.004*\"net\" + 0.004*\"bit\" + 0.004*\"layer\" + 0.004*\"architecture\" + 0.004*\"recognition\" + 0.003*\"matrix\" + 0.003*\"processor\" + 0.003*\"machine\"'),\n", + " '0.017*\"vector\" + 0.012*\"hidden\" + 0.008*\"matrix\" + 0.006*\"hidden_unit\" + 0.005*\"probability\" + 0.005*\"gradient\" + 0.005*\"let\" + 0.004*\"convergence\" + 0.004*\"sequence\" + 0.004*\"propagation\"'),\n", " (6,\n", - " '0.007*\"hidden\" + 0.007*\"layer\" + 0.007*\"speech\" + 0.006*\"node\" + 0.006*\"net\" + 0.005*\"word\" + 0.004*\"sequence\" + 0.004*\"activation\" + 0.004*\"context\" + 0.004*\"language\"'),\n", + " '0.018*\"cell\" + 0.013*\"firing\" + 0.010*\"spike\" + 0.009*\"current\" + 0.008*\"activity\" + 0.008*\"response\" + 0.008*\"frequency\" + 0.007*\"circuit\" + 0.006*\"synaptic\" + 0.006*\"potential\"'),\n", " (7,\n", - " '0.010*\"neuron\" + 0.008*\"circuit\" + 0.008*\"signal\" + 0.006*\"voltage\" 
+ 0.006*\"channel\" + 0.006*\"chip\" + 0.005*\"analog\" + 0.004*\"frequency\" + 0.004*\"cell\" + 0.004*\"spike\"'),\n", + " '0.019*\"image\" + 0.009*\"map\" + 0.006*\"field\" + 0.006*\"object\" + 0.005*\"probability\" + 0.005*\"fig\" + 0.004*\"human\" + 0.004*\"visual\" + 0.004*\"strategy\" + 0.004*\"pixel\"'),\n", " (8,\n", - " '0.008*\"object\" + 0.005*\"mixture\" + 0.004*\"hidden\" + 0.004*\"likelihood\" + 0.004*\"recognition\" + 0.004*\"em\" + 0.003*\"gaussian\" + 0.003*\"matrix\" + 0.003*\"view\" + 0.003*\"component\"'),\n", + " '0.019*\"classifier\" + 0.019*\"memory\" + 0.009*\"bit\" + 0.008*\"node\" + 0.008*\"vector\" + 0.008*\"hidden\" + 0.006*\"neural_net\" + 0.006*\"sample\" + 0.006*\"decision\" + 0.006*\"recognition\"'),\n", " (9,\n", - " '0.011*\"neuron\" + 0.006*\"dynamic\" + 0.005*\"matrix\" + 0.004*\"noise\" + 0.004*\"solution\" + 0.003*\"field\" + 0.003*\"condition\" + 0.003*\"gradient\" + 0.003*\"convergence\" + 0.003*\"limit\"')]" + " '0.015*\"constraint\" + 0.008*\"orientation\" + 0.008*\"optimization\" + 0.006*\"constrained\" + 0.006*\"visual\" + 0.005*\"differential\" + 0.005*\"map\" + 0.005*\"joint\" + 0.005*\"speaker\" + 0.004*\"noise\"')]" ] }, - "execution_count": 89, + "execution_count": 326, "metadata": {}, "output_type": "execute_result" } diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index c7dff4dba4..b0e91de28b 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -107,39 +107,20 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, logger.info('Vocabulary consists of %d words.', self.num_terms) - if doc2author is None and author2doc is None: - raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') - - # If either doc2author or author2doc is missing, construct them from the other. - # FIXME: make the code below into methods, so the user can construct either doc2author or author2doc *once* and then not worry about it. 
- # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). - if doc2author is None: - doc2author = construct_doc2author(corpus, author2doc) - elif author2doc is None: - author2doc = construct_author2doc(corpus, doc2author) - - self.author2doc = author2doc - self.doc2author = doc2author - - self.num_authors = len(self.author2doc) - logger.info('Number of authors: %d.', self.num_authors) - self.id2author = id2author - if self.id2author is None: - logger.warning("no author id mapping provided; initializing from corpus, assuming identity") - author_integer_ids = [str(i) for i in range(len(author2doc))] - self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) - - # Make the reverse mapping, from author names to author IDs. - self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + self.author2doc = {} + self.doc2author = {} + self.corpus = [] # FIXME: should be either a list or an MmCorpus instance. self.distributed = distributed self.num_topics = num_topics + self.num_authors = 0 self.chunksize = chunksize self.decay = decay self.offset = offset self.minimum_probability = minimum_probability self.num_updates = 0 + self.total_docs = 0 self.passes = passes self.update_every = update_every @@ -147,9 +128,6 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.minimum_phi_value = minimum_phi_value self.per_word_topics = per_word_topics - self.corpus = corpus - self.num_authors = len(author2doc) - self.alpha, self.optimize_alpha = self.init_dir_prior(alpha, 'alpha') assert self.alpha.shape == (self.num_topics,), "Invalid alpha shape. 
Got shape %s, but expected (%d, )" % (str(self.alpha.shape), self.num_topics) @@ -180,14 +158,12 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, # Initialize the variational distributions q(beta|lambda) and q(theta|gamma) self.state = AuthorTopicState(self.eta, (self.num_topics, self.num_terms), (self.num_authors, self.num_topics)) self.state.sstats = self.random_state.gamma(100., 1. / 100., (self.num_topics, self.num_terms)) - self.state.gamma = self.random_state.gamma(100., 1. / 100., (self.num_authors, self.num_topics)) self.expElogbeta = np.exp(dirichlet_expectation(self.state.sstats)) - self.expElogtheta = np.exp(dirichlet_expectation(self.state.gamma)) # if a training corpus was provided, start estimating the model right away - if corpus is not None: + if corpus is not None and (author2doc is not None or doc2author is not None): use_numpy = self.dispatcher is not None - self.update(corpus, chunks_as_numpy=use_numpy) + self.update(corpus, author2doc, doc2author, chunks_as_numpy=use_numpy) def __str__(self): return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s, chunksize=%s)" % \ @@ -203,7 +179,7 @@ def compute_phinorm(self, ids, authors_d, expElogthetad, expElogbetad): return phinorm - def inference(self, chunk, collect_sstats=False, chunk_no=None): + def inference(self, chunk, rhot, collect_sstats=False, chunk_no=None): """ Given a chunk of sparse document vectors, estimate gamma (parameters controlling the topic weights) for each document in the chunk. @@ -249,6 +225,7 @@ def inference(self, chunk, collect_sstats=False, chunk_no=None): ids = [id for id, _ in doc] cts = np.array([cnt for _, cnt in doc]) authors_d = self.doc2author[doc_no] # List of author IDs for the current document. + authors_d = [self.author2id[a] for a in authors_d] gammad = self.state.gamma[authors_d, :] tilde_gamma = gammad.copy() @@ -267,12 +244,12 @@ def inference(self, chunk, collect_sstats=False, chunk_no=None): # Update gamma. 
for ai, a in enumerate(authors_d): - tilde_gamma[ai, :] = self.alpha + len(self.author2doc[a]) * expElogthetad[ai, :] * np.dot(cts / phinorm, expElogbetad.T) + tilde_gamma[ai, :] = self.alpha + len(self.author2doc[self.id2author[a]]) * expElogthetad[ai, :] * np.dot(cts / phinorm, expElogbetad.T) # Update gamma and lambda. # Interpolation between document d's "local" gamma (tilde_gamma), # and "global" gamma (var_gamma). - tilde_gamma = (1 - self.rho) * gammad + self.rho * tilde_gamma + tilde_gamma = (1 - rhot) * gammad + rhot * tilde_gamma # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. Elogthetad = dirichlet_expectation(tilde_gamma) @@ -316,7 +293,7 @@ def inference(self, chunk, collect_sstats=False, chunk_no=None): gamma_chunk = self.state.gamma[list(chunk_authors), :] return gamma_chunk, sstats - def do_estep(self, chunk, state=None, chunk_no=None): + def do_estep(self, chunk, rhot, state=None, chunk_no=None): """ Perform inference on a chunk of documents, and accumulate the collected sufficient statistics in `state` (or `self.state` if None). @@ -324,11 +301,283 @@ def do_estep(self, chunk, state=None, chunk_no=None): """ if state is None: state = self.state - gamma, sstats = self.inference(chunk, collect_sstats=True, chunk_no=chunk_no) + gamma, sstats = self.inference(chunk, rhot, collect_sstats=True, chunk_no=chunk_no) state.sstats += sstats state.numdocs += len(chunk) return gamma + def log_perplexity(self, chunk, chunk_no=None, total_docs=None): + """ + Calculate and return per-word likelihood bound, using the `chunk` of + documents as evaluation corpus. Also output the calculated statistics. incl. + perplexity=2^(-bound), to log at INFO level. 
+ + """ + if total_docs is None: + total_docs = len(chunk) + corpus_words = sum(cnt for document in chunk for _, cnt in document) + subsample_ratio = 1.0 * total_docs / len(chunk) + perwordbound = self.bound(chunk, chunk_no, subsample_ratio=subsample_ratio) / (subsample_ratio * corpus_words) + print(perwordbound) + logger.info("%.3f per-word bound, %.1f perplexity estimate based on a held-out corpus of %i documents with %i words" % + (perwordbound, np.exp2(-perwordbound), len(chunk), corpus_words)) + return perwordbound + + def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, decay=None, offset=None, + passes=None, update_every=None, eval_every=None, iterations=None, + gamma_threshold=None, chunks_as_numpy=False): + """ + Train the model with new documents, by EM-iterating over `corpus` until + the topics converge (or until the maximum number of allowed iterations + is reached). `corpus` must be an iterable (repeatable stream of documents), + + In distributed mode, the E step is distributed over a cluster of machines. + + This update also supports updating an already trained model (`self`) + with new documents from `corpus`; the two models are then merged in + proportion to the number of old vs. new documents. This feature is still + experimental for non-stationary input streams. + + For stationary input (no topic drift in new documents), on the other hand, + this equals the online update of Hoffman et al. and is guaranteed to + converge for any `decay` in (0.5, 1.0>. Additionally, for smaller + `corpus` sizes, an increasing `offset` may be beneficial (see + Table 1 in Hoffman et al.) + + Args: + corpus (gensim corpus): The corpus with which the LDA model should be updated. + + chunks_as_numpy (bool): Whether each chunk passed to `.inference` should be a np + array of not. np can in some settings turn the term IDs + into floats, these will be converted back into integers in + inference, which incurs a performance hit. 
For distributed + computing it may be desirable to keep the chunks as np + arrays. + + For other parameter settings, see :class:`LdaModel` constructor. + + """ + # FIXME update docstring. + + # use parameters given in constructor, unless user explicitly overrode them + if decay is None: + decay = self.decay + if offset is None: + offset = self.offset + if passes is None: + passes = self.passes + if update_every is None: + update_every = self.update_every + if eval_every is None: + eval_every = self.eval_every + if iterations is None: + iterations = self.iterations + if gamma_threshold is None: + gamma_threshold = self.gamma_threshold + + # NOTE: it is not possible to add new authors to an existing document (all input documents are treated + # as completely new documents). Perhaps this functionality could be implemented. + # If it's absolutely necessary, the user can delete the documents that have new authors, and call update + # on them with the new authors. + + if corpus is None: + # Just keep training on the already available data. + # Assumes self.update() has been called before with input documents and corresponding authors. + train_corpus_idx = [d for d in xrange(self.total_docs)] + else: + if doc2author is None and author2doc is None: + raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') + + # Avoid overwriting the user's dictionaries. + author2doc = deepcopy(author2doc) + doc2author = deepcopy(doc2author) + + # If either doc2author or author2doc is missing, construct them from the other. 
+ if doc2author is None: + doc2author = construct_doc2author(corpus, author2doc) + elif author2doc is None: + author2doc = construct_author2doc(corpus, doc2author) + + try: + len_input_corpus = len(corpus) + except: + logger.warning("input corpus stream has no len(); counting documents") + len_input_corpus = sum(1 for _ in corpus) + if len_input_corpus == 0: + logger.warning("AuthorTopicModel.update() called with an empty corpus") + return + + self.total_docs += len_input_corpus + + # FIXME: don't treat the corpus as a list. It's either a list or an MmCorpus instance. + # Perhaps if it is some sort of other iterable, it can be stored as an MmCorpus anyway. + self.corpus.extend(corpus) + + # Obtain a list of new authors. + new_authors = [] + for a in author2doc.keys(): + if not self.author2doc.get(a): + new_authors.append(a) + + num_new_authors = len(new_authors) + self.num_authors += num_new_authors + + # Initialize the variational distributions q(theta|gamma) + gamma_new = self.random_state.gamma(100., 1. / 100., (num_new_authors, self.num_topics)) + self.state.gamma = np.vstack([self.state.gamma, gamma_new]) + + # Combine author2doc with self.author2doc. + # First, increment the document IDs by the number of previously seen documents. + for a, doc_ids in author2doc.items(): + doc_ids = [d + self.total_docs - len_input_corpus for d in doc_ids] + + # For all authors in the input corpus, add the new documents. + for a, doc_ids in author2doc.items(): + if self.author2doc.get(a): + # This is not a new author, append new documents. + self.author2doc[a].extend(doc_ids) + else: + # This is a new author, create index. + self.author2doc[a] = doc_ids + + self.doc2author = construct_doc2author(self.corpus, self.author2doc) + + # Train on all documents of authors in input_corpus. + #train_corpus_idx = [i for i in xrange(len(self.corpus))] + train_corpus_idx = [] + for a in author2doc.keys(): # For all authors in input corpus. 
+ for doc_ids in self.author2doc.values(): # For all documents in total corpus. + train_corpus_idx.extend(doc_ids) + + # Make the list of training documents unique. + train_corpus_idx = list(set(train_corpus_idx)) + + self.author2id = dict(zip(self.author2doc.keys(), xrange(self.num_authors))) + self.id2author = dict(zip(xrange(self.num_authors), self.author2doc.keys())) + + # train_corpus_idx is only a list of indexes, so "len" is valid. + lencorpus = len(train_corpus_idx) + + if chunksize is None: + chunksize = min(lencorpus, self.chunksize) + + self.state.numdocs += lencorpus + + if update_every: + updatetype = "online" + updateafter = min(lencorpus, update_every * self.numworkers * chunksize) + else: + updatetype = "batch" + updateafter = lencorpus + evalafter = min(lencorpus, (eval_every or 0) * self.numworkers * chunksize) + + updates_per_pass = max(1, lencorpus / updateafter) + logger.info("running %s LDA training, %s topics, %i passes over " + "the supplied corpus of %i documents, updating model once " + "every %i documents, evaluating perplexity every %i documents, " + "iterating %ix with a convergence threshold of %f", + updatetype, self.num_topics, passes, lencorpus, + updateafter, evalafter, iterations, + gamma_threshold) + + if updates_per_pass * passes < 10: + logger.warning("too few updates, training might not converge; consider " + "increasing the number of passes or iterations to improve accuracy") + + # rho is the "speed" of updating; TODO try other fncs + # pass_ + num_updates handles increasing the starting t for each pass, + # while allowing it to "reset" on the first pass of each update + def rho(): + return pow(offset + pass_ + (self.num_updates / chunksize), -decay) + + for pass_ in xrange(passes): + if self.dispatcher: + logger.info('initializing %s workers' % self.numworkers) + self.dispatcher.reset(self.state) + else: + other = LdaState(self.eta, self.state.sstats.shape) + dirty = False + + reallen = 0 + for chunk_no, chunk_doc_idx in 
enumerate(utils.grouper(train_corpus_idx, chunksize, as_numpy=chunks_as_numpy)): + chunk = [self.corpus[d] for d in chunk_doc_idx] + reallen += len(chunk) # keep track of how many documents we've processed so far + + if eval_every and ((reallen == lencorpus) or ((chunk_no + 1) % (eval_every * self.numworkers) == 0)): + self.log_perplexity(chunk, chunk_no, total_docs=lencorpus) + + if self.dispatcher: + # add the chunk to dispatcher's job queue, so workers can munch on it + logger.info('PROGRESS: pass %i, dispatching documents up to #%i/%i', + pass_, chunk_no * chunksize + len(chunk), lencorpus) + # this will eventually block until some jobs finish, because the queue has a small finite length + self.dispatcher.putjob(chunk) + else: + logger.info('PROGRESS: pass %i, at document #%i/%i', + pass_, chunk_no * chunksize + len(chunk), lencorpus) + gammat = self.do_estep(chunk, rho(), other, chunk_no) + + if self.optimize_alpha: + self.update_alpha(gammat, rho()) + + dirty = True + del chunk + + # perform an M step. 
determine when based on update_every, don't do this after every chunk + if update_every and (chunk_no + 1) % (update_every * self.numworkers) == 0: + if self.dispatcher: + # distributed mode: wait for all workers to finish + logger.info("reached the end of input; now waiting for all remaining jobs to finish") + other = self.dispatcher.getstate() + self.do_mstep(rho(), other, pass_ > 0) + del other # frees up memory + + if self.dispatcher: + logger.info('initializing workers') + self.dispatcher.reset(self.state) + else: + other = LdaState(self.eta, self.state.sstats.shape) + dirty = False + # endfor single corpus iteration + if reallen != lencorpus: + raise RuntimeError("input corpus size changed during training (don't use generators as input)") + + if dirty: + # finish any remaining updates + if self.dispatcher: + # distributed mode: wait for all workers to finish + logger.info("reached the end of input; now waiting for all remaining jobs to finish") + other = self.dispatcher.getstate() + self.do_mstep(rho(), other, pass_ > 0) + del other + dirty = False + # endfor entire corpus update + + def do_mstep(self, rho, other, extra_pass=False): + """ + M step: use linear interpolation between the existing topics and + collected sufficient statistics in `other` to update the topics. 
+ + """ + logger.debug("updating topics") + # update self with the new blend; also keep track of how much did + # the topics change through this update, to assess convergence + diff = np.log(self.expElogbeta) + self.state.blend(rho, other) + diff -= self.state.get_Elogbeta() + self.sync_state() + + # print out some debug info at the end of each EM iteration + self.print_topics(5) + logger.info("topic diff=%f, rho=%f", np.mean(np.abs(diff)), rho) + + if self.optimize_eta: + self.update_eta(self.state.get_lambda(), rho) + + if not extra_pass: + # only update if this isn't an additional pass + self.num_updates += other.numdocs + def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2author=None, ): """ Estimate the variational bound of documents from `corpus`: @@ -370,6 +619,7 @@ def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2auth for d, doc in enumerate(corpus): # stream the input doc-by-doc, in case it's too large to fit in RAM doc_no = chunk_no + d authors_d = self.doc2author[doc_no] + authors_d = [self.author2id[a] for a in authors_d] ids = np.array([id for id, _ in doc]) # Word IDs in doc. cts = np.array([cnt for _, cnt in doc]) # Word counts. @@ -431,14 +681,31 @@ def get_author_topics(self, author_id, minimum_probability=None): # the author-topic model. c_v topic coherence is a valid measure of topic quality in # the author-topic model, although it does not take authorship information into account. - def __getitem__(self, bow, eps=None): + def __getitem__(self, data): """ + `data` must be a list consisting of two elements: `bow` and `author_name`, described below. + + `bow` is a list of documents in BOW representation. + + `author_name` is the name of the author of the documents in `bow`. + + If `author_name` + already exists in model (e.g. self.author2doc), the model will be updated w.r.t. all + the documents that the author is responsible. + """ - # TODO: this. - # E.g. 
assume bow is a list of documents for this particular author, and that the author - # is not in the corpus beforehand. Then add an author to doc2author and author2doc, - # and call self.update to infer the new author's topic distribution. - pass + + bow = data[0] + author_name = data[1] + + # TODO: perhaps this method should assume author_name if it is not provided. This is problematic + # if the author names are strings, though. + + author2doc = {author_name: list(xrange(len(bow)))} + + self.update(bow, author2doc) + + return self.get_author_topics(self.author2id[author_name]) def save(self, fname, ignore=['state', 'dispatcher'], *args, **kwargs): """ diff --git a/gensim/models/atmodel2.py b/gensim/models/atmodel2.py deleted file mode 100755 index b50be701f0..0000000000 --- a/gensim/models/atmodel2.py +++ /dev/null @@ -1,660 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (C) 2016 Radim Rehurek -# Copyright (C) 2016 Olavur Mortensen -# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html - - -""" -Author-topic model in Python. - -""" - -# TODO: write proper docstrings. - -import pdb -from pdb import set_trace as st -from pprint import pprint - -import logging -import numpy as np # for arrays, array broadcasting etc. -import numbers - -from gensim import utils -from gensim.models import LdaModel -from gensim.models.ldamodel import dirichlet_expectation, get_random_state, LdaState -from itertools import chain -from scipy.special import gammaln # gamma function utils -from six.moves import xrange -import six - -logger = logging.getLogger('gensim.models.atmodel') - -class AuthorTopicState(LdaState): - """ - Encapsulate information for distributed computation of AuthorTopicModel objects. - - Objects of this class are sent over the network, so try to keep them lean to - reduce traffic. 
- - """ - def __init__(self, eta, lambda_shape, gamma_shape): - self.eta = eta - self.sstats = np.zeros(lambda_shape) - self.gamma = np.zeros(gamma_shape) - self.numdocs = 0 - -def construct_doc2author(corpus, author2doc): - """Make a mapping from document IDs to author IDs.""" - doc2author = {} - for d, _ in enumerate(corpus): - author_ids = [] - for a, a_doc_ids in author2doc.items(): - if d in a_doc_ids: - author_ids.append(a) - doc2author[d] = author_ids - return doc2author - -def construct_author2doc(corpus, doc2author): - """Make a mapping from author IDs to document IDs.""" - - # First get a set of all authors. - authors_ids = set() - for d, a_doc_ids in doc2author.items(): - for a in a_doc_ids: - authors_ids.add(a) - - # Now construct the dictionary. - author2doc = {} - for a in range(len(authors_ids)): - author2doc[a] = [] - for d, a_ids in doc2author.items(): - if a in a_ids: - author2doc[a].append(d) - return author2doc - -class AuthorTopicModel2(LdaModel): - """ - """ - def __init__(self, corpus=None, num_topics=100, id2word=None, - author2doc=None, doc2author=None, id2author=None, var_lambda=None, - chunksize=2000, passes=1, update_every=1, - alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, - eval_every=10, iterations=50, gamma_threshold=0.001, - minimum_probability=0.01, random_state=None, ns_conf={}, - minimum_phi_value=0.01, per_word_topics=False): - """ - """ - - distributed = False # TODO: implement distributed version. 
- - self.id2word = id2word - if corpus is None and self.id2word is None: - raise ValueError('at least one of corpus/id2word must be specified, to establish input space dimensionality') - - if self.id2word is None: - logger.warning("no word id mapping provided; initializing from corpus, assuming identity") - self.id2word = utils.dict_from_corpus(corpus) - self.num_terms = len(self.id2word) - elif len(self.id2word) > 0: - self.num_terms = 1 + max(self.id2word.keys()) - else: - self.num_terms = 0 - - if self.num_terms == 0: - raise ValueError("cannot compute LDA over an empty collection (no terms)") - - logger.info('Vocabulary consists of %d words.', self.num_terms) - - if doc2author is None and author2doc is None: - raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') - - # If either doc2author or author2doc is missing, construct them from the other. - # FIXME: make the code below into methods, so the user can construct either doc2author or author2doc *once* and then not worry about it. - # TODO: consider whether there is a more elegant way of doing this (more importantly, a more efficient way). - if doc2author is None: - doc2author = construct_doc2author(corpus, author2doc) - elif author2doc is None: - author2doc = construct_author2doc(corpus, doc2author) - - self.author2doc = author2doc - self.doc2author = doc2author - - self.num_authors = len(self.author2doc) - logger.info('Number of authors: %d.', self.num_authors) - - self.id2author = id2author - if self.id2author is None: - logger.warning("no author id mapping provided; initializing from corpus, assuming identity") - author_integer_ids = [str(i) for i in range(len(author2doc))] - self.id2author = dict(zip(range(len(author2doc)), author_integer_ids)) - - # Make the reverse mapping, from author names to author IDs. 
- self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) - - self.distributed = distributed - self.num_topics = num_topics - self.chunksize = chunksize - self.decay = decay - self.offset = offset - self.minimum_probability = minimum_probability - self.num_updates = 0 - - self.passes = passes - self.update_every = update_every - self.eval_every = eval_every - self.minimum_phi_value = minimum_phi_value - self.per_word_topics = per_word_topics - - self.corpus = corpus - self.num_authors = len(author2doc) - - self.alpha, self.optimize_alpha = self.init_dir_prior(alpha, 'alpha') - - assert self.alpha.shape == (self.num_topics,), "Invalid alpha shape. Got shape %s, but expected (%d, )" % (str(self.alpha.shape), self.num_topics) - - if isinstance(eta, six.string_types): - if eta == 'asymmetric': - raise ValueError("The 'asymmetric' option cannot be used for eta") - - self.eta, self.optimize_eta = self.init_dir_prior(eta, 'eta') - - self.random_state = get_random_state(random_state) - - assert (self.eta.shape == (self.num_terms,) or self.eta.shape == (self.num_topics, self.num_terms)), ( - "Invalid eta shape. Got shape %s, but expected (%d, 1) or (%d, %d)" % - (str(self.eta.shape), self.num_terms, self.num_topics, self.num_terms)) - - if not distributed: - self.dispatcher = None - self.numworkers = 1 - else: - # TODO: implement distributed version. - pass - - # VB constants - self.iterations = iterations - self.gamma_threshold = gamma_threshold - - # Initialize the variational distributions q(beta|lambda) and q(theta|gamma) - self.state = AuthorTopicState(self.eta, (self.num_topics, self.num_terms), (self.num_authors, self.num_topics)) - self.state.sstats = self.random_state.gamma(100., 1. / 100., (self.num_topics, self.num_terms)) - self.state.gamma = self.random_state.gamma(100., 1. 
/ 100., (self.num_authors, self.num_topics)) - self.expElogbeta = np.exp(dirichlet_expectation(self.state.sstats)) - self.expElogtheta = np.exp(dirichlet_expectation(self.state.gamma)) - - # if a training corpus was provided, start estimating the model right away - if corpus is not None: - use_numpy = self.dispatcher is not None - self.update(corpus, chunks_as_numpy=use_numpy) - - def __str__(self): - return "AuthorTopicModel(num_terms=%s, num_topics=%s, num_authors=%s, decay=%s, chunksize=%s)" % \ - (self.num_terms, self.num_topics, self.num_authors, self.decay, self.chunksize) - - def compute_phinorm(self, ids, authors_d, expElogthetad, expElogbetad): - """Efficiently computes the normalizing factor in phi.""" - phinorm = np.zeros(len(ids)) - expElogtheta_sum = np.zeros(self.num_topics) - for a in xrange(len(authors_d)): - expElogtheta_sum += expElogthetad[a, :] - phinorm = expElogtheta_sum.dot(expElogbetad) - - return phinorm - - def inference(self, chunk, collect_sstats=False, chunk_no=None): - """ - Given a chunk of sparse document vectors, estimate gamma (parameters - controlling the topic weights) for each document in the chunk. - - This function does not modify the model (=is read-only aka const). The - whole input chunk of document is assumed to fit in RAM; chunking of a - large corpus must be done earlier in the pipeline. - - If `collect_sstats` is True, also collect sufficient statistics needed - to update the model's topic-word distributions, and return a 2-tuple - `(gamma, sstats)`. Otherwise, return `(gamma, None)`. `gamma` is of shape - `len(chunk_authors) x self.num_topics`, where `chunk_authors` is the number - of authors in the documents in the current chunk. - - Avoids computing the `phi` variational parameter directly using the - optimization presented in **Lee, Seung: Algorithms for non-negative matrix factorization, NIPS 2001**. - - """ - try: - _ = len(chunk) - except: - # convert iterators/generators to plain list, so we have len() etc. 
- chunk = list(chunk) - if len(chunk) > 1: - logger.debug("performing inference on a chunk of %i documents", len(chunk)) - - # Initialize the variational distribution q(theta|gamma) for the chunk - if collect_sstats: - sstats = np.zeros_like(self.expElogbeta) - else: - sstats = None - converged = 0 - - chunk_authors = set() - - # Now, for each document d update that document's gamma and phi - for d, doc in enumerate(chunk): - doc_no = chunk_no + d # TODO: can it safely be assumed that this is the case? - if doc and not isinstance(doc[0][0], six.integer_types): - # make sure the term IDs are ints, otherwise np will get upset - ids = [int(id) for id, _ in doc] - else: - ids = [id for id, _ in doc] - cts = np.array([cnt for _, cnt in doc]) - authors_d = self.doc2author[doc_no] # List of author IDs for the current document. - - gammad = self.state.gamma[authors_d, :] - tilde_gamma = gammad.copy() - - Elogthetad = dirichlet_expectation(tilde_gamma) - expElogthetad = np.exp(Elogthetad) - expElogbetad = self.expElogbeta[:, ids] - - phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) - - # Iterate between gamma and phi until convergence - for iteration in xrange(self.iterations): - #logger.info('iteration %i', iteration) - - lastgamma = tilde_gamma.copy() - - # Update gamma. - for ai, a in enumerate(authors_d): - tilde_gamma[ai, :] = self.alpha + len(self.author2doc[a]) * expElogthetad[ai, :] * np.dot(cts / phinorm, expElogbetad.T) - - # Update gamma and lambda. - # Interpolation between document d's "local" gamma (tilde_gamma), - # and "global" gamma (var_gamma). - tilde_gamma = (1 - self.rho) * gammad + self.rho * tilde_gamma - - # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. - Elogthetad = dirichlet_expectation(tilde_gamma) - expElogthetad = np.exp(Elogthetad) - - phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) - - # Check for convergence. 
- # Criterion is mean change in "local" gamma and lambda. - meanchange_gamma = np.mean(abs(tilde_gamma - lastgamma)) - gamma_condition = meanchange_gamma < self.gamma_threshold - # logger.info('Mean change in gamma: %.3e', meanchange_gamma) - if gamma_condition: - # logger.info('Converged after %d iterations.', iteration) - converged += 1 - break - # End of iterations loop. - - self.state.gamma[authors_d, :] = tilde_gamma - - # NOTE: this may be slow. Especially when there are many authors per document. It is - # imporant to find a faster way to handle this. - chunk_authors = chunk_authors.union(set(authors_d)) - - if collect_sstats: - # Contribution of document d to the expected sufficient - # statistics for the M step. - expElogtheta_sum_a = expElogthetad.sum(axis=0) - sstats[:, ids] += np.outer(expElogtheta_sum_a.T, cts/phinorm) - - if len(chunk) > 1: - logger.debug("%i/%i documents converged within %i iterations", - converged, len(chunk), self.iterations) - - if collect_sstats: - # This step finishes computing the sufficient statistics for the - # M step, so that - # sstats[k, w] = \sum_d n_{dw} * phi_{dwk} - # = \sum_d n_{dw} * exp{Elogtheta_{dk} + Elogbeta_{kw}} / phinorm_{dw}. - sstats *= self.expElogbeta - gamma_chunk = self.state.gamma[list(chunk_authors), :] - return gamma_chunk, sstats - - def do_estep(self, chunk, state=None, chunk_no=None): - """ - Perform inference on a chunk of documents, and accumulate the collected - sufficient statistics in `state` (or `self.state` if None). 
- - """ - if state is None: - state = self.state - gamma, sstats = self.inference(chunk, collect_sstats=True, chunk_no=chunk_no) - state.sstats += sstats - state.numdocs += len(chunk) - return gamma - - def update(self, corpus, chunksize=None, decay=None, offset=None, - passes=None, update_every=None, eval_every=None, iterations=None, - gamma_threshold=None, chunks_as_numpy=False): - """ - Train the model with new documents, by EM-iterating over `corpus` until - the topics converge (or until the maximum number of allowed iterations - is reached). `corpus` must be an iterable (repeatable stream of documents), - - In distributed mode, the E step is distributed over a cluster of machines. - - This update also supports updating an already trained model (`self`) - with new documents from `corpus`; the two models are then merged in - proportion to the number of old vs. new documents. This feature is still - experimental for non-stationary input streams. - - For stationary input (no topic drift in new documents), on the other hand, - this equals the online update of Hoffman et al. and is guaranteed to - converge for any `decay` in (0.5, 1.0>. Additionally, for smaller - `corpus` sizes, an increasing `offset` may be beneficial (see - Table 1 in Hoffman et al.) - - Args: - corpus (gensim corpus): The corpus with which the LDA model should be updated. - - chunks_as_numpy (bool): Whether each chunk passed to `.inference` should be a np - array of not. np can in some settings turn the term IDs - into floats, these will be converted back into integers in - inference, which incurs a performance hit. For distributed - computing it may be desirable to keep the chunks as np - arrays. - - For other parameter settings, see :class:`LdaModel` constructor. 
- - """ - # use parameters given in constructor, unless user explicitly overrode them - if decay is None: - decay = self.decay - if offset is None: - offset = self.offset - if passes is None: - passes = self.passes - if update_every is None: - update_every = self.update_every - if eval_every is None: - eval_every = self.eval_every - if iterations is None: - iterations = self.iterations - if gamma_threshold is None: - gamma_threshold = self.gamma_threshold - - try: - lencorpus = len(corpus) - except: - logger.warning("input corpus stream has no len(); counting documents") - lencorpus = sum(1 for _ in corpus) - if lencorpus == 0: - logger.warning("LdaModel.update() called with an empty corpus") - return - - if chunksize is None: - chunksize = min(lencorpus, self.chunksize) - - self.state.numdocs += lencorpus - - if update_every: - updatetype = "online" - updateafter = min(lencorpus, update_every * self.numworkers * chunksize) - else: - updatetype = "batch" - updateafter = lencorpus - evalafter = min(lencorpus, (eval_every or 0) * self.numworkers * chunksize) - - updates_per_pass = max(1, lencorpus / updateafter) - logger.info("running %s LDA training, %s topics, %i passes over " - "the supplied corpus of %i documents, updating model once " - "every %i documents, evaluating perplexity every %i documents, " - "iterating %ix with a convergence threshold of %f", - updatetype, self.num_topics, passes, lencorpus, - updateafter, evalafter, iterations, - gamma_threshold) - - if updates_per_pass * passes < 10: - logger.warning("too few updates, training might not converge; consider " - "increasing the number of passes or iterations to improve accuracy") - - # rho is the "speed" of updating; TODO try other fncs - # pass_ + num_updates handles increasing the starting t for each pass, - # while allowing it to "reset" on the first pass of each update - def rho(): - return pow(offset + pass_ + (self.num_updates / chunksize), -decay) - - for pass_ in xrange(passes): - if 
self.dispatcher: - logger.info('initializing %s workers' % self.numworkers) - self.dispatcher.reset(self.state) - else: - other = LdaState(self.eta, self.state.sstats.shape) - dirty = False - - reallen = 0 - for chunk_no, chunk in enumerate(utils.grouper(corpus, chunksize, as_numpy=chunks_as_numpy)): - # FIXME: replace rho() in e.g. self.do_estep by self.rho? self.rho is needed for AuthorTopicModel. - self.rho = rho() - reallen += len(chunk) # keep track of how many documents we've processed so far - - if eval_every and ((reallen == lencorpus) or ((chunk_no + 1) % (eval_every * self.numworkers) == 0)): - self.log_perplexity(chunk, chunk_no, total_docs=lencorpus) - - if self.dispatcher: - # add the chunk to dispatcher's job queue, so workers can munch on it - logger.info('PROGRESS: pass %i, dispatching documents up to #%i/%i', - pass_, chunk_no * chunksize + len(chunk), lencorpus) - # this will eventually block until some jobs finish, because the queue has a small finite length - self.dispatcher.putjob(chunk) - else: - logger.info('PROGRESS: pass %i, at document #%i/%i', - pass_, chunk_no * chunksize + len(chunk), lencorpus) - gammat = self.do_estep(chunk, other, chunk_no) - - if self.optimize_alpha: - self.update_alpha(gammat, rho()) - - dirty = True - del chunk - - # perform an M step. 
determine when based on update_every, don't do this after every chunk - if update_every and (chunk_no + 1) % (update_every * self.numworkers) == 0: - if self.dispatcher: - # distributed mode: wait for all workers to finish - logger.info("reached the end of input; now waiting for all remaining jobs to finish") - other = self.dispatcher.getstate() - self.do_mstep(rho(), other, pass_ > 0) - del other # frees up memory - - if self.dispatcher: - logger.info('initializing workers') - self.dispatcher.reset(self.state) - else: - other = LdaState(self.eta, self.state.sstats.shape) - dirty = False - # endfor single corpus iteration - if reallen != lencorpus: - raise RuntimeError("input corpus size changed during training (don't use generators as input)") - - if dirty: - # finish any remaining updates - if self.dispatcher: - # distributed mode: wait for all workers to finish - logger.info("reached the end of input; now waiting for all remaining jobs to finish") - other = self.dispatcher.getstate() - self.do_mstep(rho(), other, pass_ > 0) - del other - dirty = False - # endfor entire corpus update - - def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2author=None, ): - """ - Estimate the variational bound of documents from `corpus`: - E_q[log p(corpus)] - E_q[log q(corpus)] - - `gamma` are the variational parameters on topic weights for each `corpus` - document (=2d matrix=what comes out of `inference()`). - If not supplied, will be inferred from the model. - - Computing the bound of unseen data is not recommended, unless one knows what one is doing. - In this case, gamma must be inferred in advance, and doc2author for this new data must be - provided. 
- - """ - - _lambda = self.state.get_lambda() - Elogbeta = dirichlet_expectation(_lambda) - expElogbeta = np.exp(dirichlet_expectation(_lambda)) - - if gamma is not None: - logger.warning('bound() assumes gamma to be None and uses the gamma provided is self.state.') - # NOTE: alternatively: - #assert gamma is None, 'bound() assumes gamma to be None and uses the gamma provided is self.state.' - else: - gamma = self.state.gamma - - if chunk_no is None: - logger.warning('No chunk_no provided to bound().') - # NOTE: alternatively: - #assert chunk_no is not None, 'chunk_no must be provided to bound().' - chunk_no = 0 - - Elogtheta = dirichlet_expectation(gamma) - expElogtheta = np.exp(dirichlet_expectation(gamma)) - - word_score = 0.0 - authors_set = set() # Used in computing theta bound. - theta_score = 0.0 - for d, doc in enumerate(corpus): # stream the input doc-by-doc, in case it's too large to fit in RAM - doc_no = chunk_no + d - authors_d = self.doc2author[doc_no] - ids = np.array([id for id, _ in doc]) # Word IDs in doc. - cts = np.array([cnt for _, cnt in doc]) # Word counts. - - if d % self.chunksize == 0: - logger.debug("bound: at document #%i", d) - - # Computing the bound requires summing over expElogtheta[a, k] * expElogbeta[k, v], which - # is the same computation as in normalizing phi. - phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) - word_score += np.log(1.0 / len(authors_d)) + cts.dot(np.log(phinorm)) - - # E[log p(theta | alpha) - log q(theta | gamma)] - # The code blow ensure we compute the score of each author only once. - for a in authors_d: - if a not in authors_set: - theta_score += np.sum((self.alpha - gamma[a, :]) * Elogtheta[a, :]) - theta_score += np.sum(gammaln(gamma[a, :]) - gammaln(self.alpha)) - theta_score += gammaln(np.sum(self.alpha)) - gammaln(np.sum(gamma[a, :])) - authors_set.add(a) - - # Compensate likelihood for when `corpus` above is only a sample of the whole corpus. 
This ensures - # that the likelihood is always rougly on the same scale. - word_score *= subsample_ratio - - # theta_score is rescaled in a similar fashion. - theta_score *= self.num_authors / len(authors_set) - - # E[log p(beta | eta) - log q (beta | lambda)] - beta_score = 0.0 - beta_score += np.sum((self.eta - _lambda) * Elogbeta) - beta_score += np.sum(gammaln(_lambda) - gammaln(self.eta)) - sum_eta = np.sum(self.eta) - beta_score += np.sum(gammaln(sum_eta) - gammaln(np.sum(_lambda, 1))) - - total_score = word_score + theta_score + beta_score - - #print("%.3e\t%.3e\t%.3e\t%.3e" %(total_score, word_score, theta_score, beta_score)) - - return total_score - - def get_author_topics(self, author_id, minimum_probability=None): - """ - Return topic distribution the given author, as a list of - (topic_id, topic_probability) 2-tuples. - Ignore topics with very low probability (below `minimum_probability`). - """ - if minimum_probability is None: - minimum_probability = self.minimum_probability - minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output - - topic_dist = self.state.gamma[author_id, :] / sum(self.state.gamma[author_id, :]) - - author_topics = [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist) - if topicvalue >= minimum_probability] - - return author_topics - - # NOTE: method `top_topics` is used directly. There is no topic coherence measure for - # the author-topic model. c_v topic coherence is a valid measure of topic quality in - # the author-topic model, although it does not take authorship information into account. - - def __getitem__(self, bow, eps=None): - """ - """ - # TODO: this. - # E.g. assume bow is a list of documents for this particular author, and that the author - # is not in the corpus beforehand. Then add an author to doc2author and author2doc, - # and call self.update to infer the new author's topic distribution. 
- pass - - def save(self, fname, ignore=['state', 'dispatcher'], *args, **kwargs): - """ - Save the model to file. - - Large internal arrays may be stored into separate files, with `fname` as prefix. - - `separately` can be used to define which arrays should be stored in separate files. - - `ignore` parameter can be used to define which variables should be ignored, i.e. left - out from the pickled author-topic model. By default the internal `state` is ignored as it uses - its own serialisation not the one provided by `AuthorTopicModel`. The `state` and `dispatcher` - will be added to any ignore parameter defined. - - - Note: do not save as a compressed file if you intend to load the file back with `mmap`. - - Note: If you intend to use models across Python 2/3 versions there are a few things to - keep in mind: - - 1. The pickled Python dictionaries will not work across Python versions - 2. The `save` method does not automatically save all NumPy arrays using NumPy, only - those ones that exceed `sep_limit` set in `gensim.utils.SaveLoad.save`. The main - concern here is the `alpha` array if for instance using `alpha='auto'`. - - Please refer to the wiki recipes section (https://github.com/piskvorky/gensim/wiki/Recipes-&-FAQ#q9-how-do-i-load-a-model-in-python-3-that-was-trained-and-saved-using-python-2) - for an example on how to work around these issues. 
- """ - if self.state is not None: - self.state.save(utils.smart_extension(fname, '.state'), *args, **kwargs) - - # make sure 'state' and 'dispatcher' are ignored from the pickled object, even if - # someone sets the ignore list themselves - if ignore is not None and ignore: - if isinstance(ignore, six.string_types): - ignore = [ignore] - ignore = [e for e in ignore if e] # make sure None and '' are not in the list - ignore = list(set(['state', 'dispatcher']) | set(ignore)) - else: - ignore = ['state', 'dispatcher'] - # TODO: the only difference between this save method and LdaModel's is the use of - # "AuthorTopicModel" below. This should be an easy refactor. - # Same goes for load method below. - super(AuthorTopicModel, self).save(fname, *args, ignore=ignore, **kwargs) - - @classmethod - def load(cls, fname, *args, **kwargs): - """ - Load a previously saved object from file (also see `save`). - - Large arrays can be memmap'ed back as read-only (shared memory) by setting `mmap='r'`: - - >>> AuthorTopicModel.load(fname, mmap='r') - - """ - kwargs['mmap'] = kwargs.get('mmap', None) - result = super(AuthorTopicModel, cls).load(fname, *args, **kwargs) - state_fname = utils.smart_extension(fname, '.state') - try: - result.state = super(LdaModel, cls).load(state_fname, *args, **kwargs) - except Exception as e: - logging.warning("failed to load state from %s: %s", state_fname, e) - return result -# endclass LdaModel diff --git a/gensim/models/atmodelold.py b/gensim/models/atmodelold.py index 0925ffa46f..90cd875144 100644 --- a/gensim/models/atmodelold.py +++ b/gensim/models/atmodelold.py @@ -238,6 +238,7 @@ def inference(self, corpus=None, var_lambda=None): ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. authors_d = self.doc2author[doc_no] # List of author IDs for the current document. 
+ authors_d = [self.author2id[a] for a in authors_d] phinorm = self.compute_phinorm(ids, authors_d, expElogtheta[authors_d, :], expElogbeta[:, ids]) @@ -252,7 +253,7 @@ def inference(self, corpus=None, var_lambda=None): # Update gamma. for a in authors_d: - tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) + tilde_gamma[a, :] = self.alpha + len(self.author2doc[self.id2author[a]]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T) # Update gamma and lambda. # Interpolation between document d's "local" gamma (tilde_gamma), @@ -385,6 +386,7 @@ def word_bound(self, docs, expElogtheta, expElogbeta, maxElogtheta=None, maxElog bound= 0.0 for d, doc in enumerate(docs): authors_d = self.doc2author[d] + authors_d = [self.author2id[a] for a in authors_d] ids = numpy.array([id for id, _ in doc]) # Word IDs in doc. cts = numpy.array([cnt for _, cnt in doc]) # Word counts. bound_d = 0.0 From ff7f8e62c8f78785da22c2aaca0363e041539c8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Thu, 8 Dec 2016 17:23:46 +0100 Subject: [PATCH 062/100] A lot of changes. Most notably, added docstrings, and made it possible to evaluate test set (held-out data). 
--- docs/notebooks/at_with_nips.ipynb | 444 +++++++++--------- gensim/models/__init__.py | 1 - gensim/models/atmodel.py | 329 +++++++++---- .../atmodel_pre-refactor.py} | 7 + 4 files changed, 490 insertions(+), 291 deletions(-) rename gensim/models/{atmodelold.py => temp/atmodel_pre-refactor.py} (98%) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 6e00eec295..64c65f75b6 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -110,7 +110,7 @@ }, { "cell_type": "code", - "execution_count": 308, + "execution_count": 361, "metadata": { "collapsed": false }, @@ -147,7 +147,7 @@ }, { "cell_type": "code", - "execution_count": 309, + "execution_count": 362, "metadata": { "collapsed": false }, @@ -176,7 +176,7 @@ }, { "cell_type": "code", - "execution_count": 310, + "execution_count": 363, "metadata": { "collapsed": true }, @@ -187,7 +187,7 @@ }, { "cell_type": "code", - "execution_count": 311, + "execution_count": 364, "metadata": { "collapsed": false }, @@ -212,7 +212,7 @@ }, { "cell_type": "code", - "execution_count": 312, + "execution_count": 365, "metadata": { "collapsed": false }, @@ -235,7 +235,7 @@ }, { "cell_type": "code", - "execution_count": 313, + "execution_count": 366, "metadata": { "collapsed": false }, @@ -250,7 +250,7 @@ }, { "cell_type": "code", - "execution_count": 314, + "execution_count": 367, "metadata": { "collapsed": false }, @@ -278,7 +278,7 @@ }, { "cell_type": "code", - "execution_count": 315, + "execution_count": 368, "metadata": { "collapsed": true }, @@ -290,7 +290,7 @@ }, { "cell_type": "code", - "execution_count": 316, + "execution_count": 369, "metadata": { "collapsed": false }, @@ -308,7 +308,7 @@ }, { "cell_type": "code", - "execution_count": 317, + "execution_count": 370, "metadata": { "collapsed": false }, @@ -317,7 +317,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkc
hlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjVeZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukS
xB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvEjB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGoo
TGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZf2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH
5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZOnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKw
BOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZ
gtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2AeZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLSt
sDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016
htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xfw2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul
/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ023hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIk
nwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSI
lKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrz
zGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMeD/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/G
Dir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0FS0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -343,7 +343,7 @@ }, { "cell_type": "code", - 
"execution_count": 318, + "execution_count": 371, "metadata": { "collapsed": false }, @@ -361,7 +361,7 @@ }, { "cell_type": "code", - "execution_count": 319, + "execution_count": 372, "metadata": { "collapsed": false }, @@ -372,7 +372,7 @@ }, { "cell_type": "code", - "execution_count": 320, + "execution_count": 373, "metadata": { "collapsed": false }, @@ -402,7 +402,7 @@ }, { "cell_type": "code", - "execution_count": 321, + "execution_count": 358, "metadata": { "collapsed": false }, @@ -410,15 +410,13 @@ "source": [ "reload(atmodel)\n", "AuthorTopicModel = atmodel.AuthorTopicModel\n", - "reload(atmodelold)\n", - "AuthorTopicModelOld = atmodelold.AuthorTopicModelOld\n", "reload(ldamodel)\n", "LdaModel = ldamodel.LdaModel" ] }, { "cell_type": "code", - "execution_count": 322, + "execution_count": 374, "metadata": { "collapsed": false }, @@ -427,18 +425,18 @@ "name": "stdout", "output_type": "stream", "text": [ - "-8.22604261303\n", - "-7.00250685225\n", - "-6.997669314\n", - "-6.99095801828\n", - "-6.98194710233\n", - "-6.97059497993\n", - "-6.9571757795\n", - "-6.94218337286\n", - "-6.92617606121\n", - "-6.90967324258\n", - "CPU times: user 6.35 s, sys: 0 ns, total: 6.35 s\n", - "Wall time: 6.34 s\n" + "-8.22548993324\n", + "-7.00248242458\n", + "-6.99740973863\n", + "-6.98998990308\n", + "-6.97948100519\n", + "-6.9657412708\n", + "-6.94923575245\n", + "-6.93078544187\n", + "-6.91130176592\n", + "-6.89159127379\n", + "CPU times: user 6.37 s, sys: 12 ms, total: 6.38 s\n", + "Wall time: 6.38 s\n" ] } ], @@ -454,7 +452,7 @@ }, { "cell_type": "code", - "execution_count": 267, + "execution_count": 376, "metadata": { "collapsed": false }, @@ -463,16 +461,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "-6.89310631328\n", - "-6.83421961412\n", - "-6.80373632624\n", - "-6.77968901432\n", - "-6.76018760288\n", - "-6.74405190792\n", - "-6.7304722216\n", - "-6.71887559126\n", - "-6.70884623307\n", - "-6.70007530703\n" + "-6.86383519922\n", + "-6.80098424134\n", + 
"-6.77073905151\n", + "-6.74776922681\n", + "-6.72954797628\n", + "-6.71464583585\n", + "-6.70217200697\n", + "-6.6915373574\n", + "-6.68233766042\n", + "-6.67428444055\n" ] } ], @@ -482,59 +480,66 @@ }, { "cell_type": "code", - "execution_count": 305, + "execution_count": 375, "metadata": { "collapsed": false }, "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Is this a new author? True\n", - "Number of documents by author: 1\n", - "-6.0190157986\n", - "-5.93303699802\n", - "-5.89706978832\n", - "-5.87070002406\n", - "-5.85024357659\n", - "-5.83373120263\n", - "-5.82001191111\n", - "-5.80836592923\n", - "-5.79831886651\n", - "-5.78954605844\n" - ] - }, { "data": { "text/plain": [ - "[(0, 0.1701733360072834),\n", - " (1, 0.012369638562793309),\n", - " (2, 0.022435297354046722),\n", - " (3, 0.064683494549145251),\n", - " (4, 0.14229529414449704),\n", - " (5, 0.096406364483889423),\n", - " (7, 0.037669655666072922),\n", - " (8, 0.32989184827034412),\n", - " (9, 0.12324347211255265)]" + "-7.0166219933262406" ] }, - "execution_count": 305, + "execution_count": 375, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "author_name = 'Behnaam Aazhang'\n", - "print('Is this a new author?', author_name not in model.author2doc)\n", - "docs = corpus[author2doc[author_name]]\n", - "print('Number of documents by author:', len(docs))\n", - "model[[docs, author_name]]" + "corpus_words = sum(cnt for document in corpus for _, cnt in document)\n", + "model.bound(corpus, author2doc=author2doc, doc2author=doc2author) / corpus_words" ] }, { "cell_type": "code", - "execution_count": 323, + "execution_count": 316, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-8.22548993324\n", + "-7.00248242458\n", + "-6.99740973863\n", + "-6.98998990308\n", + "-6.97948100519\n", + "-6.9657412708\n", + "-6.94923575245\n", + "-6.93078544187\n", + "-6.91130176592\n", + 
"-6.89159127379\n", + "CPU times: user 6.19 s, sys: 8 ms, total: 6.2 s\n", + "Wall time: 6.2 s\n" + ] + } + ], + "source": [ + "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", + " author2doc=author2doc, doc2author=doc2author, id2author=id2author, var_lambda=None, \\\n", + " chunksize=2000, passes=10, update_every=1, \\\n", + " alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, \\\n", + " eval_every=1, iterations=10, gamma_threshold=1e-10, \\\n", + " minimum_probability=0.01, random_state=1, ns_conf={}, \\\n", + " minimum_phi_value=0.01, per_word_topics=False)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 377, "metadata": { "collapsed": false }, @@ -543,28 +548,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.012*\"hidden\" + 0.007*\"memory\" + 0.006*\"vector\" + 0.005*\"image\" + 0.005*\"speech\" + 0.005*\"hidden_unit\" + 0.004*\"circuit\" + 0.004*\"node\" + 0.004*\"propagation\" + 0.004*\"back_propagation\"'),\n", + " '0.033*\"memory\" + 0.011*\"capacity\" + 0.010*\"bit\" + 0.008*\"associative\" + 0.008*\"associative_memory\" + 0.008*\"stored\" + 0.008*\"circuit\" + 0.007*\"threshold\" + 0.006*\"vector\" + 0.006*\"address\"'),\n", " (1,\n", - " '0.010*\"cell\" + 0.006*\"field\" + 0.005*\"activity\" + 0.005*\"vector\" + 0.005*\"cortex\" + 0.005*\"matrix\" + 0.005*\"receptive\" + 0.004*\"noise\" + 0.004*\"cortical\" + 0.004*\"response\"'),\n", + " '0.025*\"vector\" + 0.012*\"probability\" + 0.008*\"group\" + 0.006*\"let\" + 0.006*\"distribution\" + 0.006*\"class\" + 0.005*\"response\" + 0.005*\"matrix\" + 0.004*\"position\" + 0.004*\"principle\"'),\n", " (2,\n", - " '0.008*\"cell\" + 0.007*\"node\" + 0.007*\"region\" + 0.006*\"field\" + 0.005*\"class\" + 0.005*\"probability\" + 0.004*\"fig\" + 0.004*\"distribution\" + 0.004*\"element\" + 0.004*\"threshold\"'),\n", + " '0.016*\"node\" + 0.012*\"matrix\" + 0.009*\"hopfield\" + 0.008*\"code\" + 0.007*\"optimization\" + 0.006*\"element\" + 
0.006*\"stable\" + 0.006*\"sequence\" + 0.005*\"constraint\" + 0.005*\"graph\"'),\n", " (3,\n", - " '0.014*\"cell\" + 0.007*\"response\" + 0.006*\"speech\" + 0.006*\"fig\" + 0.006*\"stimulus\" + 0.005*\"synaptic\" + 0.005*\"chain\" + 0.005*\"field\" + 0.004*\"synapse\" + 0.004*\"synapsis\"'),\n", + " '0.029*\"cell\" + 0.011*\"activity\" + 0.010*\"visual\" + 0.010*\"cortex\" + 0.008*\"stimulus\" + 0.008*\"frequency\" + 0.008*\"synaptic\" + 0.008*\"map\" + 0.008*\"response\" + 0.007*\"cortical\"'),\n", " (4,\n", - " '0.011*\"node\" + 0.007*\"role\" + 0.006*\"activation\" + 0.005*\"processor\" + 0.005*\"current\" + 0.005*\"cell\" + 0.004*\"fig\" + 0.004*\"noise\" + 0.004*\"line\" + 0.004*\"element\"'),\n", + " '0.032*\"image\" + 0.023*\"classifier\" + 0.015*\"node\" + 0.012*\"fig\" + 0.012*\"field\" + 0.011*\"processor\" + 0.008*\"map\" + 0.007*\"region\" + 0.007*\"edge\" + 0.007*\"pixel\"'),\n", " (5,\n", - " '0.013*\"vector\" + 0.011*\"hidden\" + 0.009*\"image\" + 0.006*\"hidden_unit\" + 0.006*\"recognition\" + 0.005*\"object\" + 0.005*\"matrix\" + 0.005*\"connectionist\" + 0.004*\"procedure\" + 0.004*\"sequence\"'),\n", + " '0.012*\"delay\" + 0.008*\"theory\" + 0.007*\"vector\" + 0.007*\"attractor\" + 0.007*\"matrix\" + 0.007*\"stability\" + 0.006*\"role\" + 0.006*\"symmetric\" + 0.005*\"oscillation\" + 0.005*\"decision\"'),\n", " (6,\n", - " '0.018*\"cell\" + 0.012*\"firing\" + 0.010*\"current\" + 0.009*\"circuit\" + 0.008*\"response\" + 0.008*\"activity\" + 0.007*\"synaptic\" + 0.006*\"spike\" + 0.005*\"potential\" + 0.005*\"membrane\"'),\n", + " '0.019*\"cell\" + 0.017*\"firing\" + 0.014*\"circuit\" + 0.014*\"response\" + 0.013*\"spike\" + 0.011*\"potential\" + 0.010*\"current\" + 0.008*\"stimulus\" + 0.008*\"fig\" + 0.008*\"synaptic\"'),\n", " (7,\n", - " '0.007*\"map\" + 0.006*\"fig\" + 0.005*\"image\" + 0.005*\"memory\" + 0.005*\"activity\" + 0.004*\"object\" + 0.004*\"probability\" + 0.004*\"node\" + 0.004*\"field\" + 0.004*\"vector\"'),\n", + " 
'0.013*\"chip\" + 0.012*\"synapse\" + 0.011*\"human\" + 0.011*\"region\" + 0.010*\"chain\" + 0.010*\"analog\" + 0.009*\"current\" + 0.008*\"voltage\" + 0.007*\"pulse\" + 0.007*\"gain\"'),\n", " (8,\n", - " '0.014*\"classifier\" + 0.013*\"memory\" + 0.010*\"hidden\" + 0.007*\"vector\" + 0.006*\"sample\" + 0.006*\"node\" + 0.006*\"recognition\" + 0.005*\"propagation\" + 0.005*\"bit\" + 0.005*\"hidden_unit\"'),\n", + " '0.021*\"recognition\" + 0.017*\"speech\" + 0.012*\"hidden\" + 0.010*\"trained\" + 0.007*\"word\" + 0.007*\"frame\" + 0.007*\"experiment\" + 0.007*\"test\" + 0.005*\"hidden_layer\" + 0.005*\"class\"'),\n", " (9,\n", - " '0.008*\"cell\" + 0.005*\"speech\" + 0.005*\"region\" + 0.005*\"image\" + 0.004*\"map\" + 0.004*\"visual\" + 0.004*\"threshold\" + 0.004*\"field\" + 0.004*\"class\" + 0.003*\"activation\"')]" + " '0.025*\"hidden\" + 0.015*\"propagation\" + 0.014*\"hidden_unit\" + 0.012*\"back_propagation\" + 0.010*\"noise\" + 0.010*\"vector\" + 0.007*\"activation\" + 0.007*\"gradient\" + 0.006*\"generalization\" + 0.005*\"hidden_layer\"')]" ] }, - "execution_count": 323, + "execution_count": 377, "metadata": {}, "output_type": "execute_result" } @@ -575,7 +580,7 @@ }, { "cell_type": "code", - "execution_count": 307, + "execution_count": 378, "metadata": { "collapsed": false }, @@ -585,18 +590,39 @@ "output_type": "stream", "text": [ "\n", - "Yaser S.Abu-Mostafa\n" - ] - }, - { - "ename": "KeyError", - "evalue": "'Yaser S.Abu-Mostafa'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Yaser S.Abu-Mostafa'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m 
\u001b[0;34m%\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mKeyError\u001b[0m: 'Yaser S.Abu-Mostafa'" + "Yaser S.Abu-Mostafa\n", + "Docs: [62]\n", + "[(1, 0.022912313824938635),\n", + " (3, 0.13023641564906427),\n", + " (5, 0.053619476428563552),\n", + " (6, 0.74281673337282872),\n", + " (7, 0.039357296238215982)]\n", + "\n", + "Geoffrey E. Hinton\n", + "Docs: [143, 284, 230, 197]\n", + "[(0, 0.18261149058601064),\n", + " (1, 0.13054478554688737),\n", + " (2, 0.047327566056840936),\n", + " (4, 0.23227760420733609),\n", + " (5, 0.034549256573029284),\n", + " (6, 0.01533364796903508),\n", + " (7, 0.018779595429739036),\n", + " (8, 0.018536703478209651),\n", + " (9, 0.31657298712825083)]\n", + "\n", + "Michael I. Jordan\n", + "Docs: [237]\n", + "[(0, 0.1538433724459583),\n", + " (2, 0.0152049788742559),\n", + " (3, 0.14170418712027841),\n", + " (4, 0.012409363037171063),\n", + " (6, 0.015302663997163653),\n", + " (7, 0.6227732950816125),\n", + " (8, 0.03443767276723967)]\n", + "\n", + "James M. 
Bower\n", + "Docs: [131, 101, 126, 127, 281, 208, 225]\n", + "[(3, 0.046716420148960519), (6, 0.92386961466365525), (7, 0.023818236102952976)]\n" ] } ], @@ -622,51 +648,90 @@ "pprint(model.get_author_topics(author2id[name]))" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Serialized corpus stuff" - ] - }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 305, "metadata": { - "collapsed": false + "collapsed": false, + "scrolled": true }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-6.87227188261\n", + "-6.8076409676\n", + "-6.77662522591\n", + "-6.75300412768\n", + "-6.73424728384\n", + "-6.71889665571\n", + "-6.70604600927\n", + "-6.69509178841\n", + "-6.68561753933\n", + "-6.67732515051\n" + ] + } + ], "source": [ - "corpus = [[(0,1)]]\n", - "MmCorpus.serialize('/tmp/corpus.mm', corpus)\n", - "corpus = MmCorpus('/tmp/corpus.mm')" + "model.update()" ] }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 305, "metadata": { "collapsed": false }, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Is this a new author? 
True\n", + "Number of documents by author: 1\n", + "-6.0190157986\n", + "-5.93303699802\n", + "-5.89706978832\n", + "-5.87070002406\n", + "-5.85024357659\n", + "-5.83373120263\n", + "-5.82001191111\n", + "-5.80836592923\n", + "-5.79831886651\n", + "-5.78954605844\n" + ] + }, { "data": { "text/plain": [ - "True" + "[(0, 0.1701733360072834),\n", + " (1, 0.012369638562793309),\n", + " (2, 0.022435297354046722),\n", + " (3, 0.064683494549145251),\n", + " (4, 0.14229529414449704),\n", + " (5, 0.096406364483889423),\n", + " (7, 0.037669655666072922),\n", + " (8, 0.32989184827034412),\n", + " (9, 0.12324347211255265)]" ] }, - "execution_count": 46, + "execution_count": 305, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "type(corpus).__name__ == 'MmCorpus'" + "author_name = 'Behnaam Aazhang'\n", + "print('Is this a new author?', author_name not in model.author2doc)\n", + "docs = corpus[author2doc[author_name]]\n", + "print('Number of documents by author:', len(docs))\n", + "model[[docs, author_name]]" ] }, { "cell_type": "code", - "execution_count": 328, + "execution_count": 66, "metadata": { "collapsed": false }, @@ -675,128 +740,85 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 8.97 s, sys: 24 ms, total: 9 s\n", - "Wall time: 9 s\n" + "-8.28158789368\n", + "-6.93399371312\n", + "CPU times: user 9.8 s, sys: 52 ms, total: 9.85 s\n", + "Wall time: 9.85 s\n" ] } ], "source": [ - "%time model2 = AuthorTopicModelOld(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + "%time model = AuthorTopicModel2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=0, random_state=1, var_lambda=None, chunksize=2000)" + " eval_every=100, random_state=0, var_lambda=None, chunksize=2000)" ] }, { "cell_type": 
"code", - "execution_count": 329, + "execution_count": 104, "metadata": { - "collapsed": false, - "scrolled": false + "collapsed": false }, "outputs": [ { - "data": { - "text/plain": [ - "[(0,\n", - " '0.028*\"cell\" + 0.015*\"firing\" + 0.013*\"stimulus\" + 0.012*\"activity\" + 0.011*\"synaptic\" + 0.011*\"cortical\" + 0.010*\"response\" + 0.009*\"orientation\" + 0.008*\"spike\" + 0.008*\"field\"'),\n", - " (1,\n", - " '0.014*\"memory\" + 0.006*\"matrix\" + 0.005*\"cell\" + 0.005*\"fig\" + 0.005*\"cortex\" + 0.004*\"vector\" + 0.004*\"associative\" + 0.004*\"hopfield\" + 0.004*\"associative_memory\" + 0.004*\"location\"'),\n", - " (2,\n", - " '0.016*\"classifier\" + 0.013*\"hidden\" + 0.007*\"hidden_unit\" + 0.006*\"internal\" + 0.006*\"back_propagation\" + 0.006*\"current\" + 0.005*\"propagation\" + 0.005*\"chip\" + 0.005*\"node\" + 0.005*\"table\"'),\n", - " (3,\n", - " '0.011*\"hidden\" + 0.010*\"recognition\" + 0.010*\"image\" + 0.008*\"speech\" + 0.008*\"vector\" + 0.006*\"node\" + 0.005*\"propagation\" + 0.005*\"hidden_unit\" + 0.004*\"energy\" + 0.004*\"connectionist\"'),\n", - " (4,\n", - " '0.011*\"map\" + 0.009*\"fig\" + 0.008*\"element\" + 0.007*\"contour\" + 0.007*\"xl\" + 0.007*\"brain\" + 0.005*\"threshold\" + 0.005*\"vector\" + 0.005*\"position\" + 0.005*\"noise\"'),\n", - " (5,\n", - " '0.013*\"vector\" + 0.008*\"probability\" + 0.007*\"class\" + 0.007*\"matrix\" + 0.005*\"distribution\" + 0.005*\"theorem\" + 0.005*\"threshold\" + 0.005*\"let\" + 0.005*\"bound\" + 0.004*\"theory\"'),\n", - " (6,\n", - " '0.007*\"activation\" + 0.006*\"analog\" + 0.006*\"node\" + 0.006*\"pulse\" + 0.005*\"processor\" + 0.005*\"temperature\" + 0.005*\"circuit\" + 0.005*\"field\" + 0.005*\"chip\" + 0.005*\"threshold\"'),\n", - " (7,\n", - " '0.011*\"role\" + 0.005*\"eye\" + 0.005*\"vector\" + 0.005*\"controller\" + 0.005*\"motor\" + 0.005*\"fig\" + 0.005*\"motion\" + 0.005*\"product\" + 0.005*\"control\" + 0.005*\"variable\"'),\n", - " (8,\n", - " '0.020*\"cell\" + 
0.010*\"map\" + 0.010*\"region\" + 0.009*\"response\" + 0.007*\"circuit\" + 0.006*\"chain\" + 0.006*\"brain\" + 0.005*\"human\" + 0.005*\"current\" + 0.005*\"fig\"'),\n", - " (9,\n", - " '0.008*\"cell\" + 0.005*\"threshold\" + 0.005*\"object\" + 0.005*\"associative\" + 0.005*\"node\" + 0.005*\"control\" + 0.005*\"activity\" + 0.005*\"stimulus\" + 0.005*\"direction\" + 0.005*\"phase\"')]" - ] - }, - "execution_count": 329, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "-8.28196869716\n", + "-6.91135061575\n", + "CPU times: user 9.83 s, sys: 4 ms, total: 9.83 s\n", + "Wall time: 9.83 s\n" + ] } ], "source": [ - "model2.show_topics(num_topics=10)" + "%time model = AuthorTopicModelOld(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", + " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", + " iterations=10, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", + " eval_every=100, random_state=4, var_lambda=None, chunksize=2000)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Serialized corpus stuff" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "corpus = [[(0,1)]]\n", + "MmCorpus.serialize('/tmp/corpus.mm', corpus)\n", + "corpus = MmCorpus('/tmp/corpus.mm')" ] }, { "cell_type": "code", - "execution_count": 331, + "execution_count": 46, "metadata": { "collapsed": false }, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [62]\n", - "[(0, 0.058713846836094249),\n", - " (1, 0.15509352295391379),\n", - " (2, 0.041233589610359118),\n", - " (3, 0.13792786464219733),\n", - " (4, 0.048002886804540928),\n", - " (5, 0.25763034118791089),\n", - " (6, 0.17724144451099547),\n", - " (7, 0.01646106706106195),\n", - " (8, 0.053148727421393017),\n", - " (9, 
0.054546708971533304)]\n", - "\n", - "Geoffrey E. Hinton\n", - "Docs: [143, 284, 230, 197]\n", - "[(3, 0.97651618616058111), (7, 0.012150945089860542)]\n", - "\n", - "Michael I. Jordan\n", - "Docs: [237]\n", - "[(0, 0.016941361324012749),\n", - " (1, 0.047718481863329137),\n", - " (2, 0.059733414724854321),\n", - " (3, 0.29707902020298604),\n", - " (4, 0.013791609529790545),\n", - " (5, 0.095397289926623455),\n", - " (6, 0.048931589429489268),\n", - " (7, 0.19814572323721011),\n", - " (8, 0.16098988099736566),\n", - " (9, 0.061271628764338795)]\n", - "\n", - "James M. Bower\n", - "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(1, 0.44841235581967448), (8, 0.55011298037901579)]\n" - ] + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 46, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[name])\n", - "pprint(model2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Geoffrey E. Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[name])\n", - "pprint(model2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[name])\n", - "pprint(model2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'James M. Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[name])\n", - "pprint(model2.get_author_topics(author2id[name]))" + "type(corpus).__name__ == 'MmCorpus'" ] }, { diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index 5149253372..082c65ca3c 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -17,7 +17,6 @@ from .phrases import Phrases from .normmodel import NormModel from .atmodel import AuthorTopicModel -from .atmodelold import AuthorTopicModelOld from .ldaseqmodel import LdaSeqModel from . 
import wrappers diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index b0e91de28b..29f6c52de9 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -9,9 +9,25 @@ """ Author-topic model in Python. +This module trains the author-topic model on documents and corresponding author-document +dictionaries. The training is online and is constant in memory w.r.t. the number of +documents. The model is *not* constant in memory w.r.t. the number of authors. + +The model can be updated with additional documents after training has been completed. It is +also possible to continue training on the existing data. + +The model is closely related to Latent Dirichlet Allocation. Usage of the AuthorTopicModel +class is likewise similar to the usage of the LdaModel class. + """ -# TODO: write proper docstrings. +# FIXME: at the moment the input corpus is treated as a list. It must be possible to treat +# it as an MmCorpus. The reason for this is that the corpus must be indexable, so that it +# is possible to find out what authors correspond to a particular document (variables +# author2doc and doc2author). If the input corpus is a list, just keep treating it as a list. +# If the input document is an MmCorpus, just keep treating it as an MmCorpus. If the input +# document is something else, for example some sort of iterable, it should be saved as an +# MmCorpus (and it should be checked that it is actually indexable, i.e. corpus[d] is possible). import pdb from pdb import set_trace as st @@ -20,6 +36,7 @@ import logging import numpy as np # for arrays, array broadcasting etc. import numbers +from copy import deepcopy from gensim import utils from gensim.models import LdaModel @@ -33,6 +50,9 @@ class AuthorTopicState(LdaState): """ + NOTE: distributed mode not available yet in the author-topic model. This AuthorTopicState + object is kept so that when the time comes to implement it, it will be easier. 
+ Encapsulate information for distributed computation of AuthorTopicModel objects. Objects of this class are sent over the network, so try to keep them lean to @@ -67,7 +87,7 @@ def construct_author2doc(corpus, doc2author): # Now construct the dictionary. author2doc = {} - for a in range(len(authors_ids)): + for a in authors_ids: author2doc[a] = [] for d, a_ids in doc2author.items(): if a in a_ids: @@ -76,7 +96,18 @@ def construct_author2doc(corpus, doc2author): class AuthorTopicModel(LdaModel): """ + The constructor estimates the author-topic model parameters based + on a training corpus: + + >>> model = AuthorTopicModel(corpus, num_topics=10, author2doc=author2doc) + + The model can be updated (trained) with new documents via + + >>> model.update(other_corpus, other_author2doc) + + Model persistency is achieved through its `load`/`save` methods. """ + def __init__(self, corpus=None, num_topics=100, id2word=None, author2doc=None, doc2author=None, id2author=None, var_lambda=None, chunksize=2000, passes=1, update_every=1, @@ -85,6 +116,60 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, minimum_probability=0.01, random_state=None, ns_conf={}, minimum_phi_value=0.01, per_word_topics=False): """ + If the iterable corpus and one of author2doc/doc2author dictionaries are given, + start training straight away. If not given, the model is left untrained + (presumably because you want to call `update()` manually). + + `num_topics` is the number of requested latent topics to be extracted from + the training corpus. + + `id2word` is a mapping from word ids (integers) to words (strings). It is + used to determine the vocabulary size, as well as for debugging and topic + printing. + + `author2doc` is a dictionary where the keys are the names of authors, and the + values are lists of documents that the author contributes to. + + `doc2author` is a dictionary where the keys are document IDs (indexes to corpus) + and the values are lists of author names. I.e. 
this is the reverse mapping of + `author2doc`. Only one of the two, `author2doc` and `doc2author` has to be + supplied. + + `alpha` and `eta` are hyperparameters that affect sparsity of the author-topic + (theta) and topic-word (lambda) distributions. Both default to a symmetric + 1.0/num_topics prior. + + `alpha` can be set to an explicit array = prior of your choice. It also + supports special values of 'asymmetric' and 'auto': the former uses a fixed + normalized asymmetric 1.0/topicno prior, the latter learns an asymmetric + prior directly from your data. + + `eta` can be a scalar for a symmetric prior over topic/word + distributions, or a vector of shape num_words, which can be used to + impose (user defined) asymmetric priors over the word distribution. + It also supports the special value 'auto', which learns an asymmetric + prior over words directly from your data. `eta` can also be a matrix + of shape num_topics x num_words, which can be used to impose + asymmetric priors over the word distribution on a per-topic basis + (can not be learned from data). + + Calculate and log perplexity estimate from the latest mini-batch every + `eval_every` model updates. Set to None to disable perplexity estimation. + + `decay` and `offset` parameters are the same as Kappa and Tau_0 in + Hoffman et al, respectively. + + `minimum_probability` controls filtering the topics returned for a document (bow). + + `random_state` can be a np.random.RandomState object or the seed for one + + Example: + + >>> model = AuthorTopicModel(corpus, num_topics=100, author2doc=author2doc) # train model + >>> model.update(corpus2) # update the author-topic model with additional documents + + >>> model = AuthorTopicModel(corpus, num_topics=50, author2doc=author2doc, alpha='auto', eval_every=5) # train asymmetric alpha from data + """ distributed = False # TODO: implement distributed version. 
@@ -103,7 +188,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.num_terms = 0 if self.num_terms == 0: - raise ValueError("cannot compute LDA over an empty collection (no terms)") + raise ValueError("cannot compute the author-topic model over an empty collection (no terms)") logger.info('Vocabulary consists of %d words.', self.num_terms) @@ -128,6 +213,9 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.minimum_phi_value = minimum_phi_value self.per_word_topics = per_word_topics + self.author2id = {} + self.id2author = {} + self.alpha, self.optimize_alpha = self.init_dir_prior(alpha, 'alpha') assert self.alpha.shape == (self.num_topics,), "Invalid alpha shape. Got shape %s, but expected (%d, )" % (str(self.alpha.shape), self.num_topics) @@ -148,7 +236,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, self.dispatcher = None self.numworkers = 1 else: - # TODO: implement distributed version. + # NOTE: distributed processing is not implemented for the author-topic model. pass # VB constants @@ -179,20 +267,21 @@ def compute_phinorm(self, ids, authors_d, expElogthetad, expElogbetad): return phinorm - def inference(self, chunk, rhot, collect_sstats=False, chunk_no=None): + def inference(self, chunk, author2doc, doc2author, rhot, collect_sstats=False, chunk_doc_idx=None): """ - Given a chunk of sparse document vectors, estimate gamma (parameters - controlling the topic weights) for each document in the chunk. + Given a chunk of sparse document vectors, update gamma (parameters + controlling the topic weights) for each author corresponding to the + documents in the chunk. - This function does not modify the model (=is read-only aka const). The - whole input chunk of document is assumed to fit in RAM; chunking of a - large corpus must be done earlier in the pipeline. + The whole input chunk of document is assumed to fit in RAM; chunking of + a large corpus must be done earlier in the pipeline. 
If `collect_sstats` is True, also collect sufficient statistics needed to update the model's topic-word distributions, and return a 2-tuple - `(gamma, sstats)`. Otherwise, return `(gamma, None)`. `gamma` is of shape - `len(chunk_authors) x self.num_topics`, where `chunk_authors` is the number - of authors in the documents in the current chunk. + `(gamma_chunk, sstats)`. Otherwise, return `(gamma_chunk, None)`. + `gamma_chunk` is of shape `len(chunk_authors) x self.num_topics`, where + `chunk_authors` is the number of authors in the documents in the + current chunk. Avoids computing the `phi` variational parameter directly using the optimization presented in **Lee, Seung: Algorithms for non-negative matrix factorization, NIPS 2001**. @@ -213,66 +302,74 @@ sstats = None converged = 0 - chunk_authors = set() + # Stack all the computed gammas into this output array. + gamma_chunk = np.zeros((0, self.num_topics)) - # Now, for each document d update that document's gamma and phi + # Now, for each document d update gamma and phi w.r.t. all authors in those documents. for d, doc in enumerate(chunk): - doc_no = chunk_no + d # TODO: can it safely be assumed that this is the case? + if chunk_doc_idx is not None: + doc_no = chunk_doc_idx[d] + else: + doc_no = d + # Get the IDs and counts of all the words in the current document. if doc and not isinstance(doc[0][0], six.integer_types): # make sure the term IDs are ints, otherwise np will get upset ids = [int(id) for id, _ in doc] else: ids = [id for id, _ in doc] cts = np.array([cnt for _, cnt in doc]) - authors_d = self.doc2author[doc_no] # List of author IDs for the current document. - authors_d = [self.author2id[a] for a in authors_d] - gammad = self.state.gamma[authors_d, :] - tilde_gamma = gammad.copy() + # Get all the authors in the current document. + authors_d = self.doc2author[doc_no] # List of author names. 
+ authors_d = [self.author2id[a] for a in authors_d] # Convert names to integer IDs. + gammad = self.state.gamma[authors_d, :] # gamma of document d before update. + tilde_gamma = gammad.copy() # gamma that will be updated. + + # Compute the expectation of the log of the Dirichlet parameters theta and beta. Elogthetad = dirichlet_expectation(tilde_gamma) expElogthetad = np.exp(Elogthetad) expElogbetad = self.expElogbeta[:, ids] + # Compute the normalizing constant of phi for the current document. phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) # Iterate between gamma and phi until convergence for iteration in xrange(self.iterations): - #logger.info('iteration %i', iteration) lastgamma = tilde_gamma.copy() # Update gamma. + # phi is computed implicitly below, for ai, a in enumerate(authors_d): tilde_gamma[ai, :] = self.alpha + len(self.author2doc[self.id2author[a]]) * expElogthetad[ai, :] * np.dot(cts / phinorm, expElogbetad.T) - # Update gamma and lambda. + # Update gamma. # Interpolation between document d's "local" gamma (tilde_gamma), - # and "global" gamma (var_gamma). + # and "global" gamma (gammad). tilde_gamma = (1 - rhot) * gammad + rhot * tilde_gamma # Update Elogtheta and Elogbeta, since gamma and lambda have been updated. Elogthetad = dirichlet_expectation(tilde_gamma) expElogthetad = np.exp(Elogthetad) + # Update the normalizing constant in phi. phinorm = self.compute_phinorm(ids, authors_d, expElogthetad, expElogbetad) # Check for convergence. - # Criterion is mean change in "local" gamma and lambda. + # Criterion is mean change in "local" gamma. meanchange_gamma = np.mean(abs(tilde_gamma - lastgamma)) gamma_condition = meanchange_gamma < self.gamma_threshold - # logger.info('Mean change in gamma: %.3e', meanchange_gamma) if gamma_condition: - # logger.info('Converged after %d iterations.', iteration) converged += 1 break # End of iterations loop. + # Store the updated gammas in the model state. 
self.state.gamma[authors_d, :] = tilde_gamma - # NOTE: this may be slow. Especially when there are many authors per document. It is - # imporant to find a faster way to handle this. - chunk_authors = chunk_authors.union(set(authors_d)) + # Stack the new gammas into the output array. + gamma_chunk = np.vstack([gamma_chunk, tilde_gamma]) if collect_sstats: # Contribution of document d to the expected sufficient @@ -287,13 +384,12 @@ def inference(self, chunk, rhot, collect_sstats=False, chunk_no=None): if collect_sstats: # This step finishes computing the sufficient statistics for the # M step, so that - # sstats[k, w] = \sum_d n_{dw} * phi_{dwk} - # = \sum_d n_{dw} * exp{Elogtheta_{dk} + Elogbeta_{kw}} / phinorm_{dw}. + # sstats[k, w] = \sum_d n_{dw} * \sum_a phi_{dwak} + # = \sum_d n_{dw} * exp{Elogtheta_{ak} + Elogbeta_{kw}} / phinorm_{dw}. sstats *= self.expElogbeta - gamma_chunk = self.state.gamma[list(chunk_authors), :] return gamma_chunk, sstats - def do_estep(self, chunk, rhot, state=None, chunk_no=None): + def do_estep(self, chunk, author2doc, doc2author, rhot, state=None, chunk_doc_idx=None): """ Perform inference on a chunk of documents, and accumulate the collected sufficient statistics in `state` (or `self.state` if None). @@ -301,12 +397,12 @@ def do_estep(self, chunk, rhot, state=None, chunk_no=None): """ if state is None: state = self.state - gamma, sstats = self.inference(chunk, rhot, collect_sstats=True, chunk_no=chunk_no) + gamma, sstats = self.inference(chunk, author2doc, doc2author, rhot, collect_sstats=True, chunk_doc_idx=chunk_doc_idx) state.sstats += sstats state.numdocs += len(chunk) return gamma - def log_perplexity(self, chunk, chunk_no=None, total_docs=None): + def log_perplexity(self, chunk, chunk_doc_idx=None, total_docs=None): """ Calculate and return per-word likelihood bound, using the `chunk` of documents as evaluation corpus. Also output the calculated statistics. incl. 
@@ -317,7 +413,7 @@ def log_perplexity(self, chunk, chunk_no=None, total_docs=None): total_docs = len(chunk) corpus_words = sum(cnt for document in chunk for _, cnt in document) subsample_ratio = 1.0 * total_docs / len(chunk) - perwordbound = self.bound(chunk, chunk_no, subsample_ratio=subsample_ratio) / (subsample_ratio * corpus_words) + perwordbound = self.bound(chunk, chunk_doc_idx, subsample_ratio=subsample_ratio) / (subsample_ratio * corpus_words) print(perwordbound) logger.info("%.3f per-word bound, %.1f perplexity estimate based on a held-out corpus of %i documents with %i words" % (perwordbound, np.exp2(-perwordbound), len(chunk), corpus_words)) @@ -331,8 +427,6 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, the topics converge (or until the maximum number of allowed iterations is reached). `corpus` must be an iterable (repeatable stream of documents), - In distributed mode, the E step is distributed over a cluster of machines. - This update also supports updating an already trained model (`self`) with new documents from `corpus`; the two models are then merged in proportion to the number of old vs. new documents. This feature is still @@ -344,8 +438,29 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, `corpus` sizes, an increasing `offset` may be beneficial (see Table 1 in Hoffman et al.) + If update is called with authors that already exist in the model, it will + resume training on not only new documents for that author, but also the + previously seen documents. This is necessary for those authors' topic + distributions to converge. + + Every time `update(corpus, author2doc)` is called, the new documents are + to appended to all the previously seen documents, and author2doc is + combined with the previously seen authors. + + To resume training on all the data seen by the model, simply call + `update()`. 
+ + It is not possible to add new authors to existing documents, as all + documents in `corpus` are assumed to be new documents. + Args: - corpus (gensim corpus): The corpus with which the LDA model should be updated. + corpus (gensim corpus): The corpus with which the author-topic model should be updated. + + author2doc (dictionary): author to document mapping corresponding to indexes in input + corpus. + + doc2author (dictionary): document to author mapping corresponding to indexes in input + corpus. chunks_as_numpy (bool): Whether each chunk passed to `.inference` should be a np array of not. np can in some settings turn the term IDs @@ -354,10 +469,9 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, computing it may be desirable to keep the chunks as np arrays. - For other parameter settings, see :class:`LdaModel` constructor. + For other parameter settings, see :class:`AuthorTopicModel` constructor. """ - # FIXME update docstring. # use parameters given in constructor, unless user explicitly overrode them if decay is None: @@ -378,12 +492,14 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, # NOTE: it is not possible to add new authors to an existing document (all input documents are treated # as completely new documents). Perhaps this functionality could be implemented. # If it's absolutely necessary, the user can delete the documents that have new authors, and call update - # on them with the new authors. + # on them with the new and old authors. if corpus is None: # Just keep training on the already available data. # Assumes self.update() has been called before with input documents and corresponding authors. + assert self.total_docs > 0, 'update() was called with no documents to train on.' 
train_corpus_idx = [d for d in xrange(self.total_docs)] + num_input_authors = len(self.author2doc) else: if doc2author is None and author2doc is None: raise ValueError('at least one of author2doc/doc2author must be specified, to establish input space dimensionality') @@ -398,6 +514,9 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, elif author2doc is None: author2doc = construct_author2doc(corpus, doc2author) + # Number of authors that need to be updated. + num_input_authors = len(author2doc) + try: len_input_corpus = len(corpus) except: @@ -420,6 +539,13 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, new_authors.append(a) num_new_authors = len(new_authors) + + # Add new authors do author2id/id2author dictionaries. + for a_id, a_name in enumerate(new_authors): + self.author2id[a_name] = a_id + self.num_authors + self.id2author[a_id] = a_name + + # Increment the number of total authors seen. self.num_authors += num_new_authors # Initialize the variational distributions q(theta|gamma) @@ -440,10 +566,11 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, # This is a new author, create index. self.author2doc[a] = doc_ids - self.doc2author = construct_doc2author(self.corpus, self.author2doc) + # Add all new documents to self.doc2author. + for d, a_list in doc2author.items(): + self.doc2author[d] = a_list # Train on all documents of authors in input_corpus. - #train_corpus_idx = [i for i in xrange(len(self.corpus))] train_corpus_idx = [] for a in author2doc.keys(): # For all authors in input corpus. for doc_ids in self.author2doc.values(): # For all documents in total corpus. @@ -452,8 +579,6 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, # Make the list of training documents unique. 
train_corpus_idx = list(set(train_corpus_idx)) - self.author2id = dict(zip(self.author2doc.keys(), xrange(self.num_authors))) - self.id2author = dict(zip(xrange(self.num_authors), self.author2doc.keys())) # train_corpus_idx is only a list of indexes, so "len" is valid. lencorpus = len(train_corpus_idx) @@ -472,11 +597,11 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, evalafter = min(lencorpus, (eval_every or 0) * self.numworkers * chunksize) updates_per_pass = max(1, lencorpus / updateafter) - logger.info("running %s LDA training, %s topics, %i passes over " + logger.info("running %s author-topic training, %s topics, %s authors, %i passes over " "the supplied corpus of %i documents, updating model once " "every %i documents, evaluating perplexity every %i documents, " "iterating %ix with a convergence threshold of %f", - updatetype, self.num_topics, passes, lencorpus, + updatetype, self.num_topics, num_input_authors, passes, lencorpus, updateafter, evalafter, iterations, gamma_threshold) @@ -495,7 +620,8 @@ def rho(): logger.info('initializing %s workers' % self.numworkers) self.dispatcher.reset(self.state) else: - other = LdaState(self.eta, self.state.sstats.shape) + # gamma is not needed in "other", thus its shape is (0, 0). + other = AuthorTopicState(self.eta, self.state.sstats.shape, (0, 0)) dirty = False reallen = 0 @@ -504,7 +630,9 @@ def rho(): reallen += len(chunk) # keep track of how many documents we've processed so far if eval_every and ((reallen == lencorpus) or ((chunk_no + 1) % (eval_every * self.numworkers) == 0)): - self.log_perplexity(chunk, chunk_no, total_docs=lencorpus) + # log_perplexity requires the indexes of the documents being evaluated, to know what authors + # correspond to the documents. 
+ self.log_perplexity(chunk, chunk_doc_idx, total_docs=lencorpus) if self.dispatcher: # add the chunk to dispatcher's job queue, so workers can munch on it @@ -515,7 +643,9 @@ def rho(): else: logger.info('PROGRESS: pass %i, at document #%i/%i', pass_, chunk_no * chunksize + len(chunk), lencorpus) - gammat = self.do_estep(chunk, rho(), other, chunk_no) + # do_estep requires the indexes of the documents being trained on, to know what authors + # correspond to the documents. + gammat = self.do_estep(chunk, self.author2doc, self.doc2author, rho(), other, chunk_doc_idx) if self.optimize_alpha: self.update_alpha(gammat, rho()) @@ -536,7 +666,7 @@ def rho(): logger.info('initializing workers') self.dispatcher.reset(self.state) else: - other = LdaState(self.eta, self.state.sstats.shape) + other = AuthorTopicState(self.eta, self.state.sstats.shape, (0, 0)) dirty = False # endfor single corpus iteration if reallen != lencorpus: @@ -578,37 +708,70 @@ def do_mstep(self, rho, other, extra_pass=False): # only update if this isn't an additional pass self.num_updates += other.numdocs - def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2author=None, ): + def bound(self, chunk, chunk_doc_idx=None, subsample_ratio=1.0, author2doc=None, doc2author=None): """ Estimate the variational bound of documents from `corpus`: E_q[log p(corpus)] - E_q[log q(corpus)] - `gamma` are the variational parameters on topic weights for each `corpus` - document (=2d matrix=what comes out of `inference()`). - If not supplied, will be inferred from the model. + `gamma` are the variational parameters on topic weights for each author + document (=2d matrix=what comes out of `inference()`). + + There are basically two use cases of this method: + 1. `chunk` is a subset of the training corpus, and `chunk_doc_idx` is provided, + indicating the indexes of the documents in the training corpus. + 2. 
`chunk` is a test set (held-out data), `chunk_doc_idx` is not needed, but + author2doc and doc2author corresponding to this test set are provided. It is + not recommended to call this method with data that has authors the model has + not seen; if this is the case, those documents will simply be discarded. - Computing the bound of unseen data is not recommended, unless one knows what one is doing. - In this case, gamma must be inferred in advance, and doc2author for this new data must be - provided. + To obtain the per-word bound, compute: + >>> corpus_words = sum(cnt for document in corpus for _, cnt in document) + >>> model.bound(corpus, author2doc=author2doc, doc2author=doc2author) / corpus_words """ + # NOTE: it may be possible to enable evaluation of documents with new authors. To + # do this, self.inference() has to be altered so that it uses gamma = self.state.gamma[a, :] + # if author a is already trained on, but initializes gamma randomly if author a is + # not already in the model. + _lambda = self.state.get_lambda() Elogbeta = dirichlet_expectation(_lambda) expElogbeta = np.exp(dirichlet_expectation(_lambda)) - if gamma is not None: - logger.warning('bound() assumes gamma to be None and uses the gamma provided is self.state.') - # NOTE: alternatively: - #assert gamma is None, 'bound() assumes gamma to be None and uses the gamma provided is self.state.' - else: + if author2doc is None and doc2author is None: gamma = self.state.gamma + chunk_idx = [d for d in xrange(len(chunk))] + author2doc = self.author2doc + doc2author = self.doc2author + else: + # Infer gamma based on input corpus. + + # Will be needed in self.inference(). 
+ def rho(): + return pow(self.offset + self.passes + (self.num_updates / self.chunksize), -self.decay) + + # sstats are not collected, thus lambda is not updated + gamma, _ = self.inference(chunk, author2doc, doc2author, rho()) + + # Bound of held-out (test) data can only be computed with authors + # that are already existing in the data. + # Documents that contain new authors are discarded. + num_docs_new_authors = 0 + chunk_idx = [] + for d in xrange(len(chunk)): + authors_d = doc2author[d] + doc_new_authors = False + for a in authors_d: + if not self.author2doc.get(a): + doc_new_authors = True + if not doc_new_authors: + chunk_idx.append(d) + else: + num_docs_new_authors += 1 + if num_docs_new_authors > 0: + logger.warning('bound() called with held-out data with new authors; discarding %d documents.' % (num_docs_new_authors)) - if chunk_no is None: - logger.warning('No chunk_no provided to bound().') - # NOTE: alternatively: - #assert chunk_no is not None, 'chunk_no must be provided to bound().' - chunk_no = 0 Elogtheta = dirichlet_expectation(gamma) expElogtheta = np.exp(dirichlet_expectation(gamma)) @@ -616,9 +779,13 @@ def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2auth word_score = 0.0 authors_set = set() # Used in computing theta bound. theta_score = 0.0 - for d, doc in enumerate(corpus): # stream the input doc-by-doc, in case it's too large to fit in RAM - doc_no = chunk_no + d - authors_d = self.doc2author[doc_no] + for d in chunk_idx: + if author2doc is None: + doc_no = chunk_doc_idx[d] + else: + doc_no = d + doc = chunk[d] + authors_d = doc2author[doc_no] authors_d = [self.author2id[a] for a in authors_d] ids = np.array([id for id, _ in doc]) # Word IDs in doc. cts = np.array([cnt for _, cnt in doc]) # Word counts. 
@@ -640,11 +807,12 @@ def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2auth theta_score += gammaln(np.sum(self.alpha)) - gammaln(np.sum(gamma[a, :])) authors_set.add(a) - # Compensate likelihood for when `corpus` above is only a sample of the whole corpus. This ensures + # Compensate likelihood for when `chunk` above is only a sample of the whole corpus. This ensures # that the likelihood is always rougly on the same scale. word_score *= subsample_ratio # theta_score is rescaled in a similar fashion. + # TODO: treat this in a more general way, similar to how it is done with word_score. theta_score *= self.num_authors / len(authors_set) # E[log p(beta | eta) - log q (beta | lambda)] @@ -656,8 +824,6 @@ def bound(self, corpus, chunk_no=None, gamma=None, subsample_ratio=1.0, doc2auth total_score = word_score + theta_score + beta_score - #print("%.3e\t%.3e\t%.3e\t%.3e" %(total_score, word_score, theta_score, beta_score)) - return total_score def get_author_topics(self, author_id, minimum_probability=None): @@ -677,10 +843,6 @@ def get_author_topics(self, author_id, minimum_probability=None): return author_topics - # NOTE: method `top_topics` is used directly. There is no topic coherence measure for - # the author-topic model. c_v topic coherence is a valid measure of topic quality in - # the author-topic model, although it does not take authorship information into account. - def __getitem__(self, data): """ `data` must be a list consisting of two elements: `bow` and `author_name`, described below. @@ -695,12 +857,21 @@ def __getitem__(self, data): """ + assert False, '__getitem__ (model[data]) is not ready for use.' + + # FIXME: it is not clear at all what a __getitem__ method should accomplish in the author-topic + # model. In the attempt below, it assumed that multiple documents corresponding to a single + # author is passed to this method, and then update is called on that data. Then, get_author_topics + # is called on the author. 
+ bow = data[0] author_name = data[1] # TODO: perhaps this method should assume author_name if it is not provided. This is problematic # if the author names are strings, though. + assert author_name not in self.author2doc, '__getitem__ (model[data]) called on an existing author.' + author2doc = {author_name: list(xrange(len(bow)))} self.update(bow, author2doc) diff --git a/gensim/models/atmodelold.py b/gensim/models/temp/atmodel_pre-refactor.py similarity index 98% rename from gensim/models/atmodelold.py rename to gensim/models/temp/atmodel_pre-refactor.py index 90cd875144..3161d7867f 100644 --- a/gensim/models/atmodelold.py +++ b/gensim/models/temp/atmodel_pre-refactor.py @@ -115,6 +115,9 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, id2author=None, # Make the reverse mapping, from author names to author IDs. self.author2id = dict(zip(self.id2author.values(), self.id2author.keys())) + #self.author2id = dict(zip(self.author2doc.keys(), xrange(self.num_authors))) + #self.id2author = dict(zip(xrange(self.num_authors), self.author2doc.keys())) + self.corpus = corpus self.iterations = iterations @@ -222,6 +225,7 @@ def inference(self, corpus=None, var_lambda=None): beta_bound = self.beta_bound(Elogbeta) bound = word_bound + theta_bound + beta_bound perwordbound = bound / corpus_words + print(perwordbound) logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) #var_lambda -= self.eta #Elogbeta = dirichlet_expectation(var_lambda) @@ -332,6 +336,7 @@ def inference(self, corpus=None, var_lambda=None): beta_bound = self.beta_bound(Elogbeta) bound = word_bound + theta_bound + beta_bound perwordbound = bound / corpus_words + print(perwordbound) logger.info('Total bound: %.3e. Per-word total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, perwordbound, word_bound, theta_bound, beta_bound) # NOTE: bound can be computed as below. We compute each term for now because it can be useful for debugging. # bound = eval_bound(corpus, Elogtheta, Elogbeta, expElogtheta, expElogtheta, maxElogtheta=maxElogtheta, maxElogbeta=maxElogbeta): @@ -354,6 +359,8 @@ def inference(self, corpus=None, var_lambda=None): theta_bound = self.theta_bound(Elogtheta) beta_bound = self.beta_bound(Elogbeta) bound = word_bound + theta_bound + beta_bound + perwordbound = bound / corpus_words + print(perwordbound) logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound) From bdac93a23eefda11a7e382e0cfe1ea0e9d3c8441 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 9 Dec 2016 15:37:47 +0100 Subject: [PATCH 063/100] Added unit tests. Basically a retrofit of LDA test; some new tests, some altered tests, some tests removed, others unchanged. --- gensim/test/test_atmodel.py | 426 ++++++++++++++++++++++++++++++++++++ 1 file changed, 426 insertions(+) create mode 100644 gensim/test/test_atmodel.py diff --git a/gensim/test/test_atmodel.py b/gensim/test/test_atmodel.py new file mode 100644 index 0000000000..7b7e8d1cc1 --- /dev/null +++ b/gensim/test/test_atmodel.py @@ -0,0 +1,426 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Radim Rehurek +# Copyright (C) 2016 Olavur Mortensen +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Automated tests for checking transformation algorithms (the models package). 
+""" + + +import logging +import unittest +import os +import os.path +import tempfile +import numbers + +import six +import numpy as np +import scipy.linalg + +from gensim.corpora import mmcorpus, Dictionary +from gensim.models import atmodel +from gensim import matutils +from gensim.test import basetests + +# TODO: +# Test that computing the bound on new unseen documents works as expected (this is somewhat different +# in the author-topic model than in LDA). +# Test that calling model.update() after the model already has been trained works. +# Test that calling model.update(corpus, author2doc) (i.e. new documents) works. +# Perhaps test that the bound increases, in general (i.e. in several of the tests below where it makes +# sense. + +module_path = os.path.dirname(__file__) # needed because sample data files are located in the same folder +datapath = lambda fname: os.path.join(module_path, 'test_data', fname) + +# set up vars used in testing ("Deerwester" from the web tutorial) +texts = [['human', 'interface', 'computer'], + ['survey', 'user', 'computer', 'system', 'response', 'time'], + ['eps', 'user', 'interface', 'system'], + ['system', 'human', 'system', 'eps'], + ['user', 'response', 'time'], + ['trees'], + ['graph', 'trees'], + ['graph', 'minors', 'trees'], + ['graph', 'minors', 'survey']] +dictionary = Dictionary(texts) +corpus = [dictionary.doc2bow(text) for text in texts] + +# Assign some authors randomly to the documents above. +author2doc = {'john': [0, 1, 2, 3, 4, 5, 6], 'jane': [2, 3, 4, 5, 6, 7, 8], 'jack': [0, 2, 4, 6, 8], 'jill': [1, 3, 5, 7]} +doc2author = {0: ['john', 'jack'], 1: ['john', 'jill'], 2: ['john', 'jane', 'jack'], 3: ['john', 'jane', 'jill'], + 4: ['john', 'jane', 'jack'], 5: ['john', 'jane', 'jill'], 6: ['john', 'jane', 'jack'], 7: ['jane', 'jill'], + 8: ['jane', 'jack']} + +# Make mappings from author names to integer IDs and vice versa. 
+# Note that changing these may change everything, as it influences +# the random intialization (basically reordering gamma). +id2author = dict(zip(range(4), ['john', 'jane', 'jack', 'jill'])) +author2id = dict(zip(['john', 'jane', 'jack', 'jill'], range(4))) + +def testfile(): + # temporary data will be stored to this file + return os.path.join(tempfile.gettempdir(), 'gensim_models.tst') + + +class TestAuthorTopicModel(unittest.TestCase, basetests.TestBaseTopicModel): + def setUp(self): + self.corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm')) + self.class_ = atmodel.AuthorTopicModel + self.model = self.class_(corpus, id2word=dictionary, author2doc=author2doc, num_topics=2, passes=100) + + def testTransform(self): + passed = False + # sometimes, training gets stuck at a local minimum + # in that case try re-training the model from scratch, hoping for a + # better random initialization + for i in range(25): # restart at most 5 times + # create the transformation model + # NOTE: LdaModel tests do not use set random_state. Is it necessary? + model = self.class_(id2word=dictionary, num_topics=2, passes=100, random_state=0) + model.update(self.corpus, author2doc) + + jill_topics = model.get_author_topics(author2id['jill']) + + # NOTE: this test may easily fail if the author-topic model is altered in any way. The model's + # output is sensitive to a lot of things, like the scheduling of the updates, or like the + # author2id (because the random initialization changes when author2id changes). If it does + # fail, simply be aware of whether we broke something, or if it just naturally changed the + # output of the model slightly. 
+ vec = matutils.sparse2full(jill_topics, 2) # convert to dense vector, for easier equality tests + expected = [0.91, 0.08] + passed = np.allclose(sorted(vec), sorted(expected), atol=1e-1) # must contain the same values, up to re-ordering + if passed: + break + logging.warning("Author-topic model failed to converge on attempt %i (got %s, expected %s)" % + (i, sorted(vec), sorted(expected))) + self.assertTrue(passed) + + def testAuthor2docMissing(self): + # Check that the results are the same if author2doc is constructed automatically from doc2author. + model = self.class_(corpus, author2doc=author2doc, doc2author=doc2author, id2word=dictionary, alpha='symmetric', passes=10, random_state=0) + model2 = self.class_(corpus, doc2author=doc2author, id2word=dictionary, alpha='symmetric', passes=10, random_state=0) + + # Compare Jill's topics before after save/load. + jill_topics = model.get_author_topics(author2id['jill']) + jill_topics2 = model2.get_author_topics(author2id['jill']) + jill_topics = matutils.sparse2full(jill_topics, model.num_topics) + jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) + self.assertTrue(np.allclose(jill_topics, jill_topics2)) + + def testDoc2authorMissing(self): + # Check that the results are the same if doc2author is constructed automatically from author2doc. + model = self.class_(corpus, author2doc=author2doc, doc2author=doc2author, id2word=dictionary, alpha='symmetric', passes=10, random_state=0) + model2 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, alpha='symmetric', passes=10, random_state=0) + + # Compare Jill's topics before after save/load. 
+ jill_topics = model.get_author_topics(author2id['jill']) + jill_topics2 = model2.get_author_topics(author2id['jill']) + jill_topics = matutils.sparse2full(jill_topics, model.num_topics) + jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) + self.assertTrue(np.allclose(jill_topics, jill_topics2)) + + def testAlphaAuto(self): + model1 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, alpha='symmetric', passes=10) + modelauto = self.class_(corpus, author2doc=author2doc, id2word=dictionary, alpha='auto', passes=10) + + # did we learn something? + self.assertFalse(all(np.equal(model1.alpha, modelauto.alpha))) + + # NOTE: it could test that the bound is higher in modelauto. Same in testEtaAuto. + + def testAlpha(self): + kwargs = dict( + author2doc=author2doc, + id2word=dictionary, + num_topics=2, + alpha=None + ) + expected_shape = (2,) + + # should not raise anything + self.class_(**kwargs) + + kwargs['alpha'] = 'symmetric' + model = self.class_(**kwargs) + self.assertEqual(model.alpha.shape, expected_shape) + self.assertTrue(all(model.alpha == np.array([0.5, 0.5]))) + + kwargs['alpha'] = 'asymmetric' + model = self.class_(**kwargs) + self.assertEqual(model.alpha.shape, expected_shape) + self.assertTrue(np.allclose(model.alpha, [0.630602, 0.369398])) + + kwargs['alpha'] = 0.3 + model = self.class_(**kwargs) + self.assertEqual(model.alpha.shape, expected_shape) + self.assertTrue(all(model.alpha == np.array([0.3, 0.3]))) + + kwargs['alpha'] = 3 + model = self.class_(**kwargs) + self.assertEqual(model.alpha.shape, expected_shape) + self.assertTrue(all(model.alpha == np.array([3, 3]))) + + kwargs['alpha'] = [0.3, 0.3] + model = self.class_(**kwargs) + self.assertEqual(model.alpha.shape, expected_shape) + self.assertTrue(all(model.alpha == np.array([0.3, 0.3]))) + + kwargs['alpha'] = np.array([0.3, 0.3]) + model = self.class_(**kwargs) + self.assertEqual(model.alpha.shape, expected_shape) + self.assertTrue(all(model.alpha == np.array([0.3, 
0.3]))) + + # all should raise an exception for being wrong shape + kwargs['alpha'] = [0.3, 0.3, 0.3] + self.assertRaises(AssertionError, self.class_, **kwargs) + + kwargs['alpha'] = [[0.3], [0.3]] + self.assertRaises(AssertionError, self.class_, **kwargs) + + kwargs['alpha'] = [0.3] + self.assertRaises(AssertionError, self.class_, **kwargs) + + kwargs['alpha'] = "gensim is cool" + self.assertRaises(ValueError, self.class_, **kwargs) + + + def testEtaAuto(self): + model1 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, eta='symmetric', passes=10) + modelauto = self.class_(corpus, author2doc=author2doc, id2word=dictionary, eta='auto', passes=10) + + # did we learn something? + self.assertFalse(all(np.equal(model1.eta, modelauto.eta))) + + def testEta(self): + kwargs = dict( + author2doc=author2doc, + id2word=dictionary, + num_topics=2, + eta=None + ) + num_terms = len(dictionary) + expected_shape = (num_terms,) + + # should not raise anything + model = self.class_(**kwargs) + self.assertEqual(model.eta.shape, expected_shape) + self.assertTrue(all(model.eta == np.array([0.5] * num_terms))) + + kwargs['eta'] = 'symmetric' + model = self.class_(**kwargs) + self.assertEqual(model.eta.shape, expected_shape) + self.assertTrue(all(model.eta == np.array([0.5] * num_terms))) + + kwargs['eta'] = 0.3 + model = self.class_(**kwargs) + self.assertEqual(model.eta.shape, expected_shape) + self.assertTrue(all(model.eta == np.array([0.3] * num_terms))) + + kwargs['eta'] = 3 + model = self.class_(**kwargs) + self.assertEqual(model.eta.shape, expected_shape) + self.assertTrue(all(model.eta == np.array([3] * num_terms))) + + kwargs['eta'] = [0.3] * num_terms + model = self.class_(**kwargs) + self.assertEqual(model.eta.shape, expected_shape) + self.assertTrue(all(model.eta == np.array([0.3] * num_terms))) + + kwargs['eta'] = np.array([0.3] * num_terms) + model = self.class_(**kwargs) + self.assertEqual(model.eta.shape, expected_shape) + self.assertTrue(all(model.eta == 
np.array([0.3] * num_terms))) + + # should be ok with num_topics x num_terms + testeta = np.array([[0.5] * len(dictionary)] * 2) + kwargs['eta'] = testeta + self.class_(**kwargs) + + # all should raise an exception for being wrong shape + kwargs['eta'] = testeta.reshape(tuple(reversed(testeta.shape))) + self.assertRaises(AssertionError, self.class_, **kwargs) + + kwargs['eta'] = [0.3] + self.assertRaises(AssertionError, self.class_, **kwargs) + + kwargs['eta'] = [0.3] * (num_terms + 1) + self.assertRaises(AssertionError, self.class_, **kwargs) + + kwargs['eta'] = "gensim is cool" + self.assertRaises(ValueError, self.class_, **kwargs) + + kwargs['eta'] = "asymmetric" + self.assertRaises(ValueError, self.class_, **kwargs) + + def testTopTopics(self): + top_topics = self.model.top_topics(self.corpus) + + for topic, score in top_topics: + self.assertTrue(isinstance(topic, list)) + self.assertTrue(isinstance(score, float)) + + for v, k in topic: + self.assertTrue(isinstance(k, six.string_types)) + self.assertTrue(isinstance(v, float)) + + def testGetTopicTerms(self): + topic_terms = self.model.get_topic_terms(1) + + for k, v in topic_terms: + self.assertTrue(isinstance(k, numbers.Integral)) + self.assertTrue(isinstance(v, float)) + + def testGetAuthorTopics(self): + + model = self.class_(self.corpus, author2doc=author2doc, id2word=dictionary, num_topics=2, passes= 100, random_state=np.random.seed(0)) + + author_topics = [] + for a in id2author.keys(): + author_topics.append(model.get_author_topics(a)) + + for topic in author_topics: + self.assertTrue(isinstance(topic, list)) + for k, v in topic: + self.assertTrue(isinstance(k, int)) + self.assertTrue(isinstance(v, float)) + + # FIXME: Not sure about the test below. In LDA it is: The number of document-topic distributions + # with length 0 is less than the number of documents? Why? Commented out code below is the + # author-topic equivalent of this test (without the minimum_phi_value tests). 
+ + # Test case to check the filtering effect of minimum_probability + #author_topic_count_na = 0 + + #all_topics = model.get_document_topics(self.corpus, minimum_probability=0.8) + # + #for topic in all_topics: + # self.assertTrue(isinstance(topic, tuple)) + # for k, v in topic: # list of doc_topics + # self.assertTrue(isinstance(k, int)) + # self.assertTrue(isinstance(v, float)) + # if len(topic) != 0: + # author_topic_count_na += 1 + + #self.assertTrue(model.num_authors > author_topic_count_na) + + def testTermTopics(self): + + model = self.class_(self.corpus, author2doc=author2doc, id2word=dictionary, num_topics=2, passes=100, random_state=np.random.seed(0)) + + # check with word_type + result = model.get_term_topics(2) + for topic_no, probability in result: + self.assertTrue(isinstance(topic_no, int)) + self.assertTrue(isinstance(probability, float)) + + # if user has entered word instead, check with word + result = model.get_term_topics(str(model.id2word[2])) + for topic_no, probability in result: + self.assertTrue(isinstance(topic_no, int)) + self.assertTrue(isinstance(probability, float)) + + def testPasses(self): + # long message includes the original error message with a custom one + self.longMessage = True + # construct what we expect when passes aren't involved + test_rhots = list() + model = self.class_(id2word=dictionary, chunksize=1, num_topics=2) + final_rhot = lambda: pow(model.offset + (1 * model.num_updates) / model.chunksize, -model.decay) + + # generate 5 updates to test rhot on + for x in range(5): + model.update(self.corpus, author2doc) + test_rhots.append(final_rhot()) + + for passes in [1, 5, 10, 50, 100]: + model = self.class_(id2word=dictionary, chunksize=1, num_topics=2, passes=passes) + self.assertEqual(final_rhot(), 1.0) + # make sure the rhot matches the test after each update + for test_rhot in test_rhots: + model.update(self.corpus, author2doc) + + msg = ", ".join(map(str, [passes, model.num_updates, model.state.numdocs])) + 
self.assertAlmostEqual(final_rhot(), test_rhot, msg=msg) + + self.assertEqual(model.state.numdocs, len(corpus) * len(test_rhots)) + self.assertEqual(model.num_updates, len(corpus) * len(test_rhots)) + + def testPersistence(self): + fname = testfile() + model = self.model + model.save(fname) + model2 = self.class_.load(fname) + self.assertEqual(model.num_topics, model2.num_topics) + self.assertTrue(np.allclose(model.expElogbeta, model2.expElogbeta)) + + # Compare Jill's topics before after save/load. + jill_topics = model.get_author_topics(author2id['jill']) + jill_topics2 = model2.get_author_topics(author2id['jill']) + jill_topics = matutils.sparse2full(jill_topics, model.num_topics) + jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) + self.assertTrue(np.allclose(jill_topics, jill_topics2)) + + def testPersistenceIgnore(self): + fname = testfile() + model = atmodel.AuthorTopicModel(self.corpus, author2doc=author2doc, num_topics=2) + model.save(fname, ignore='id2word') + model2 = atmodel.AuthorTopicModel.load(fname) + self.assertTrue(model2.id2word is None) + + model.save(fname, ignore=['id2word']) + model2 = atmodel.AuthorTopicModel.load(fname) + self.assertTrue(model2.id2word is None) + + def testPersistenceCompressed(self): + fname = testfile() + '.gz' + model = self.model + model.save(fname) + model2 = self.class_.load(fname, mmap=None) + self.assertEqual(model.num_topics, model2.num_topics) + self.assertTrue(np.allclose(model.expElogbeta, model2.expElogbeta)) + + # Compare Jill's topics before after save/load. 
+ jill_topics = model.get_author_topics(author2id['jill']) + jill_topics2 = model2.get_author_topics(author2id['jill']) + jill_topics = matutils.sparse2full(jill_topics, model.num_topics) + jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) + self.assertTrue(np.allclose(jill_topics, jill_topics2)) + + def testLargeMmap(self): + fname = testfile() + model = self.model + + # simulate storing large arrays separately + model.save(testfile(), sep_limit=0) + + # test loading the large model arrays with mmap + model2 = self.class_.load(testfile(), mmap='r') + self.assertEqual(model.num_topics, model2.num_topics) + self.assertTrue(isinstance(model2.expElogbeta, np.memmap)) + self.assertTrue(np.allclose(model.expElogbeta, model2.expElogbeta)) + + # Compare Jill's topics before after save/load. + jill_topics = model.get_author_topics(author2id['jill']) + jill_topics2 = model2.get_author_topics(author2id['jill']) + jill_topics = matutils.sparse2full(jill_topics, model.num_topics) + jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) + self.assertTrue(np.allclose(jill_topics, jill_topics2)) + + def testLargeMmapCompressed(self): + fname = testfile() + '.gz' + model = self.model + + # simulate storing large arrays separately + model.save(fname, sep_limit=0) + + # test loading the large model arrays with mmap + self.assertRaises(IOError, self.class_.load, fname, mmap='r') + +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG) + unittest.main() From 9429c0a0cc3c51d9f10ba76e8dd87017c32b77d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 9 Dec 2016 17:35:02 +0100 Subject: [PATCH 064/100] Updated unit tests. Fixed some mistakes. Added some tests; testing update fuctionality. 
--- gensim/test/test_atmodel.py | 116 ++++++++++++++++++++++++++---------- 1 file changed, 86 insertions(+), 30 deletions(-) diff --git a/gensim/test/test_atmodel.py b/gensim/test/test_atmodel.py index 7b7e8d1cc1..576988ed88 100644 --- a/gensim/test/test_atmodel.py +++ b/gensim/test/test_atmodel.py @@ -34,6 +34,9 @@ # Perhaps test that the bound increases, in general (i.e. in several of the tests below where it makes # sense. +logger = logging.getLogger('gensim') +logger.propagate = False + module_path = os.path.dirname(__file__) # needed because sample data files are located in the same folder datapath = lambda fname: os.path.join(module_path, 'test_data', fname) @@ -56,11 +59,13 @@ 4: ['john', 'jane', 'jack'], 5: ['john', 'jane', 'jill'], 6: ['john', 'jane', 'jack'], 7: ['jane', 'jill'], 8: ['jane', 'jack']} -# Make mappings from author names to integer IDs and vice versa. -# Note that changing these may change everything, as it influences -# the random intialization (basically reordering gamma). -id2author = dict(zip(range(4), ['john', 'jane', 'jack', 'jill'])) -author2id = dict(zip(['john', 'jane', 'jack', 'jill'], range(4))) +# More data with new and old authors (to test update method). +# Although the text is just a subset of the previous, the model +# just sees it as completely new data. +texts_new = texts[0:3] +author2doc_new = {'jill': [0], 'bob': [0, 1], 'sally': [1, 2]} +dictionary_new = Dictionary(texts_new) +corpus_new = [dictionary_new.doc2bow(text) for text in texts_new] def testfile(): # temporary data will be stored to this file @@ -84,7 +89,7 @@ def testTransform(self): model = self.class_(id2word=dictionary, num_topics=2, passes=100, random_state=0) model.update(self.corpus, author2doc) - jill_topics = model.get_author_topics(author2id['jill']) + jill_topics = model.get_author_topics(model.author2id['jill']) # NOTE: this test may easily fail if the author-topic model is altered in any way. 
The model's # output is sensitive to a lot of things, like the scheduling of the updates, or like the @@ -100,33 +105,84 @@ def testTransform(self): (i, sorted(vec), sorted(expected))) self.assertTrue(passed) + def testBasic(self): + # Check that training the model produces a positive topic vector for some author + # Otherwise, many of the other tests are invalid. + + model = self.class_(corpus, author2doc=author2doc, id2word=dictionary, num_topics=2) + + jill_topics = model.get_author_topics(model.author2id['jill']) + jill_topics = matutils.sparse2full(jill_topics, model.num_topics) + self.assertTrue(all(jill_topics > 0)) + def testAuthor2docMissing(self): # Check that the results are the same if author2doc is constructed automatically from doc2author. - model = self.class_(corpus, author2doc=author2doc, doc2author=doc2author, id2word=dictionary, alpha='symmetric', passes=10, random_state=0) - model2 = self.class_(corpus, doc2author=doc2author, id2word=dictionary, alpha='symmetric', passes=10, random_state=0) + model = self.class_(corpus, author2doc=author2doc, doc2author=doc2author, id2word=dictionary, num_topics=2, random_state=0) + model2 = self.class_(corpus, doc2author=doc2author, id2word=dictionary, num_topics=2, random_state=0) - # Compare Jill's topics before after save/load. - jill_topics = model.get_author_topics(author2id['jill']) - jill_topics2 = model2.get_author_topics(author2id['jill']) + # Compare Jill's topics before in both models. + jill_topics = model.get_author_topics(model.author2id['jill']) + jill_topics2 = model2.get_author_topics(model.author2id['jill']) jill_topics = matutils.sparse2full(jill_topics, model.num_topics) jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) self.assertTrue(np.allclose(jill_topics, jill_topics2)) def testDoc2authorMissing(self): # Check that the results are the same if doc2author is constructed automatically from author2doc. 
- model = self.class_(corpus, author2doc=author2doc, doc2author=doc2author, id2word=dictionary, alpha='symmetric', passes=10, random_state=0) - model2 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, alpha='symmetric', passes=10, random_state=0) + model = self.class_(corpus, author2doc=author2doc, doc2author=doc2author, id2word=dictionary, num_topics=2, random_state=0) + model2 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, num_topics=2, random_state=0) - # Compare Jill's topics before after save/load. - jill_topics = model.get_author_topics(author2id['jill']) - jill_topics2 = model2.get_author_topics(author2id['jill']) + # Compare Jill's topics before in both models. + jill_topics = model.get_author_topics(model.author2id['jill']) + jill_topics2 = model2.get_author_topics(model.author2id['jill']) jill_topics = matutils.sparse2full(jill_topics, model.num_topics) jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) self.assertTrue(np.allclose(jill_topics, jill_topics2)) + def testUpdate(self): + # Check that calling update after the model already has been trained works. + model = self.class_(corpus, author2doc=author2doc, id2word=dictionary, num_topics=2) + + jill_topics = model.get_author_topics(model.author2id['jill']) + jill_topics = matutils.sparse2full(jill_topics, model.num_topics) + + model.update() + jill_topics2 = model.get_author_topics(model.author2id['jill']) + jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) + + # Did we learn something? + self.assertFalse(all(np.equal(jill_topics, jill_topics2))) + + def testUpdateNewData(self): + # Check that calling update with new documents and/or authors after the model already has + # been trained works. 
+ model = self.class_(corpus, author2doc=author2doc, id2word=dictionary, num_topics=2) + + jill_topics = model.get_author_topics(model.author2id['jill']) + jill_topics = matutils.sparse2full(jill_topics, model.num_topics) + + model.update(corpus_new, author2doc_new) + jill_topics2 = model.get_author_topics(model.author2id['jill']) + jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) + + # Did we learn more about Jill? + self.assertFalse(all(np.equal(jill_topics, jill_topics2))) + + def testUpdateNewData(self): + # Check that calling update with new documents and/or authors after the model already has + # been trained works. + model = self.class_(corpus, author2doc=author2doc, id2word=dictionary, num_topics=2) + + model.update(corpus_new, author2doc_new) + + # Did we learn something about Sally? + sally_topics = model.get_author_topics(model.author2id['sally']) + sally_topics = matutils.sparse2full(sally_topics, model.num_topics) + self.assertTrue(all(sally_topics > 0)) + def testAlphaAuto(self): - model1 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, alpha='symmetric', passes=10) - modelauto = self.class_(corpus, author2doc=author2doc, id2word=dictionary, alpha='auto', passes=10) + model1 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, alpha='symmetric', passes=10, num_topics=2) + modelauto = self.class_(corpus, author2doc=author2doc, id2word=dictionary, alpha='auto', passes=10, num_topics=2) # did we learn something? 
self.assertFalse(all(np.equal(model1.alpha, modelauto.alpha))) @@ -190,8 +246,8 @@ def testAlpha(self): def testEtaAuto(self): - model1 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, eta='symmetric', passes=10) - modelauto = self.class_(corpus, author2doc=author2doc, id2word=dictionary, eta='auto', passes=10) + model1 = self.class_(corpus, author2doc=author2doc, id2word=dictionary, eta='symmetric', passes=10, num_topics=2) + modelauto = self.class_(corpus, author2doc=author2doc, id2word=dictionary, eta='auto', passes=10, num_topics=2) # did we learn something? self.assertFalse(all(np.equal(model1.eta, modelauto.eta))) @@ -280,7 +336,7 @@ def testGetAuthorTopics(self): model = self.class_(self.corpus, author2doc=author2doc, id2word=dictionary, num_topics=2, passes= 100, random_state=np.random.seed(0)) author_topics = [] - for a in id2author.keys(): + for a in model.id2author.keys(): author_topics.append(model.get_author_topics(a)) for topic in author_topics: @@ -358,9 +414,9 @@ def testPersistence(self): self.assertEqual(model.num_topics, model2.num_topics) self.assertTrue(np.allclose(model.expElogbeta, model2.expElogbeta)) - # Compare Jill's topics before after save/load. - jill_topics = model.get_author_topics(author2id['jill']) - jill_topics2 = model2.get_author_topics(author2id['jill']) + # Compare Jill's topics before after and save/load. + jill_topics = model.get_author_topics(model.author2id['jill']) + jill_topics2 = model2.get_author_topics(model.author2id['jill']) jill_topics = matutils.sparse2full(jill_topics, model.num_topics) jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) self.assertTrue(np.allclose(jill_topics, jill_topics2)) @@ -384,9 +440,9 @@ def testPersistenceCompressed(self): self.assertEqual(model.num_topics, model2.num_topics) self.assertTrue(np.allclose(model.expElogbeta, model2.expElogbeta)) - # Compare Jill's topics before after save/load. 
- jill_topics = model.get_author_topics(author2id['jill']) - jill_topics2 = model2.get_author_topics(author2id['jill']) + # Compare Jill's topics before and after save/load. + jill_topics = model.get_author_topics(model.author2id['jill']) + jill_topics2 = model2.get_author_topics(model.author2id['jill']) jill_topics = matutils.sparse2full(jill_topics, model.num_topics) jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) self.assertTrue(np.allclose(jill_topics, jill_topics2)) @@ -404,9 +460,9 @@ def testLargeMmap(self): self.assertTrue(isinstance(model2.expElogbeta, np.memmap)) self.assertTrue(np.allclose(model.expElogbeta, model2.expElogbeta)) - # Compare Jill's topics before after save/load. - jill_topics = model.get_author_topics(author2id['jill']) - jill_topics2 = model2.get_author_topics(author2id['jill']) + # Compare Jill's topics before and after save/load. + jill_topics = model.get_author_topics(model.author2id['jill']) + jill_topics2 = model2.get_author_topics(model.author2id['jill']) jill_topics = matutils.sparse2full(jill_topics, model.num_topics) jill_topics2 = matutils.sparse2full(jill_topics2, model.num_topics) self.assertTrue(np.allclose(jill_topics, jill_topics2)) From e0dc2d9036d9e9355fd956463937e0af6d889cf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Fri, 9 Dec 2016 18:01:39 +0100 Subject: [PATCH 065/100] Forgot to add num_docs to ids of new authors in id2author. Some comments. 
--- gensim/models/atmodel.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index 29f6c52de9..fff05437c7 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -414,7 +414,6 @@ def log_perplexity(self, chunk, chunk_doc_idx=None, total_docs=None): corpus_words = sum(cnt for document in chunk for _, cnt in document) subsample_ratio = 1.0 * total_docs / len(chunk) perwordbound = self.bound(chunk, chunk_doc_idx, subsample_ratio=subsample_ratio) / (subsample_ratio * corpus_words) - print(perwordbound) logger.info("%.3f per-word bound, %.1f perplexity estimate based on a held-out corpus of %i documents with %i words" % (perwordbound, np.exp2(-perwordbound), len(chunk), corpus_words)) return perwordbound @@ -528,8 +527,10 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, self.total_docs += len_input_corpus - # FIXME: don't treat the corpus as a list. It's either a list or an MmCorpus instance. - # Perhaps if it is some sort of other iterable, it can be stored as an MmCorpus anyway. + # FIXME: consider initializing self.corpus as an MmCorpus, and adding documents to it + # as they arrive (using itertools.chain and MmCorpus.serialize). It should also be an + # option that self.corpus is just a list, and input corpora to update simply extend + # self.corpus. self.corpus.extend(corpus) # Obtain a list of new authors. @@ -543,7 +544,7 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, # Add new authors do author2id/id2author dictionaries. for a_id, a_name in enumerate(new_authors): self.author2id[a_name] = a_id + self.num_authors - self.id2author[a_id] = a_name + self.id2author[a_id + self.num_authors] = a_name # Increment the number of total authors seen. 
self.num_authors += num_new_authors @@ -831,7 +832,14 @@ def get_author_topics(self, author_id, minimum_probability=None): Return topic distribution the given author, as a list of (topic_id, topic_probability) 2-tuples. Ignore topics with very low probability (below `minimum_probability`). + + Obtaining topic probabilities as in LDA (via `per_word_topics`) is not supported. + """ + + # FIXME: makes more sense to accept author name and then: + # author_id = self.author2id[author_name] + if minimum_probability is None: minimum_probability = self.minimum_probability minimum_probability = max(minimum_probability, 1e-8) # never allow zero values in sparse output From aabc0f4e5d35a5f3410c31ac94ab8bc01ce581e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Sun, 11 Dec 2016 16:26:23 +0100 Subject: [PATCH 066/100] Made it possible to use serialized corpora (MmCorpus), and made unit tests of that functionality. There are some caveats to the use of serialized corpora in the author-topic model. Updated docstring. 
--- docs/notebooks/at_with_nips.ipynb | 588 ++++++++++++++++++++---------- gensim/models/atmodel.py | 117 +++++- gensim/test/test_atmodel.py | 80 +++- 3 files changed, 572 insertions(+), 213 deletions(-) diff --git a/docs/notebooks/at_with_nips.ipynb b/docs/notebooks/at_with_nips.ipynb index 64c65f75b6..0e9a4c4f86 100644 --- a/docs/notebooks/at_with_nips.ipynb +++ b/docs/notebooks/at_with_nips.ipynb @@ -43,7 +43,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 168, "metadata": { "collapsed": false }, @@ -62,16 +62,13 @@ "from pprint import pprint\n", "from random import sample\n", "import bokeh\n", + "import os\n", "import line_profiler\n", "\n", "import logging\n", "\n", "from gensim.models import AuthorTopicModel\n", "from gensim.models import atmodel\n", - "from gensim.models import AuthorTopicModel2\n", - "from gensim.models import atmodel2\n", - "from gensim.models import AuthorTopicModelOld\n", - "from gensim.models import atmodelold\n", "from gensim.models import LdaModel\n", "from gensim.models import ldamodel\n", "\n", @@ -110,7 +107,7 @@ }, { "cell_type": "code", - "execution_count": 361, + "execution_count": 24, "metadata": { "collapsed": false }, @@ -147,7 +144,7 @@ }, { "cell_type": "code", - "execution_count": 362, + "execution_count": 25, "metadata": { "collapsed": false }, @@ -176,7 +173,7 @@ }, { "cell_type": "code", - "execution_count": 363, + "execution_count": 26, "metadata": { "collapsed": true }, @@ -187,7 +184,7 @@ }, { "cell_type": "code", - "execution_count": 364, + "execution_count": 27, "metadata": { "collapsed": false }, @@ -212,7 +209,7 @@ }, { "cell_type": "code", - "execution_count": 365, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -235,7 +232,7 @@ }, { "cell_type": "code", - "execution_count": 366, + "execution_count": 29, "metadata": { "collapsed": false }, @@ -250,7 +247,7 @@ }, { "cell_type": "code", - "execution_count": 367, + "execution_count": 30, "metadata": { "collapsed": false 
}, @@ -278,7 +275,7 @@ }, { "cell_type": "code", - "execution_count": 368, + "execution_count": 31, "metadata": { "collapsed": true }, @@ -290,7 +287,7 @@ }, { "cell_type": "code", - "execution_count": 369, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -308,7 +305,7 @@ }, { "cell_type": "code", - "execution_count": 370, + "execution_count": 33, "metadata": { "collapsed": false }, @@ -317,7 +314,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjQAAAGcCAYAAADOLDodAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XmYHFXZ/vHvTdgDCVHfEBBEEBMDypJBATUgRonI5isq\nDuKGKMoiRlEQRfmBC24EIYgoKIgwyCIvIJggqBAgsmSQNaDsCZCwJCQhYcny/P441aSm6JlM9Szd\nnbk/19VXT586VfVU9cz006fOOaWIwMzMzKyZrVbvAMzMzMx6ygmNmZmZNT0nNGZmZtb0nNCYmZlZ\n03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmVJmmWpN/kXo+TtFzSu/th\n3z+QtCT3elC275P7et/Z/g7O9rdxf+yvVpKOkfSwpKWSbq13PN0l6S3Z+T2g3rFYc3FCY01D0mez\nf3TVHj+qd3wDTLV7ppS+j4qk70jau4Z9Ly+7r7K6iC2o4Vj7k6QPAz8C/gF8DjiurgGZ9YPV6x2A\nWUlB+uf8aKH8nv4PxSoi4jpJ60TEKyVX/S5wHnBliXW+D5xQcj+16Cy23wHn1XCs/Wk3YAlwcPiG\nfTZAOKGxZjQ5Itq7W1mSgDUj4uU+jGnA6+sPeEnrRsTiiFhOP7TQdCZLEBo5mQHYEFjUiMmM/x6t\nr/iSk61S8v0pJH1a0r3AS8C4bLkkfV3SvZJekvSUpF9JGlLYjiR9L+sr8oKkayW9TdLMQt+RDv05\ncuVV+1lI2lPS1Gyb8yVdIelthTp/lDRP0ibZ8oWSnpZ0UpX9SNIESXdJejGrd7Wk7bLlN0m6vZNz\n9ZCkLltGOjsPVeq9pg+NpJGS/ixpdhbb45LOlzS48j4BawKVc7W8cm6z87o828afJM0jXT7p9Jxn\nyz4t6YFsf7cW+/Rk5/a/VdZ7dZvdiK2z9/aI3O/VE5JOrfJ7daOkdklbS/qHpMXZuf16V+9Dbv3V\nJX0/e+9eUuojc4KkNQqxfwoYmsW5TJ30R8l+d5ZIGpwrOzpb76Rc2erZ+39Crmw9SROzv4mXJM2Q\n9LXC9lf29zhM0h8kPS9prqSzgQ7nLKu3kaRzs3P1kqQnJV0maZPunDcbGNxCY81oqKTX5wsi4rlC\nnd2BTwKnA3OBx7Py3wGt2fMpwBbAEcC2ksZm3/4h9T84GrgCmAK0ANcA6xT201l/iteUS/occDZw\nNfAtYDBwKDBV0vYRMSu37urZ/qYC38iO55uS/hsRZ+c2+wfSh9eVwG9IH8K7ADsC/86W/0rSyIj4\nTy6WnYHNgW9XiT2vu+ehEndl+2tl9VYjnec5wCbA3sCQiFgk6UDg98CN2XkBeLCwrT8D9wPH5Mo6\nO+fjgAOAU0mXWw4DpkjaISIeWMm6r5ZHxLJuxFZ8b38AHAtMJv3OjSa9ty2F36sA3gD8FbgYuBD4\nBPAzSXdGxHVVYss7JzvGC0m/GzuRLo2NAvbPxX4osC3wJUDATZ1sbyrpPXoP6f0CeC+wDBibq
9dC\nes9vyI5XwFXZer8F7gL2AE6WtFFEHF3Yz2v+HrNtXEn6Xf0V8ACwH+m8F9+j/wO2JL23j5NaoHYn\n/U7NwgwgIvzwoykewGdJlxqKj2W5OoOysleALQvrvy9btl+hfI+s/GPZ6+HZ+pcW6p2U1ftNruxE\n4JUqsX6B9KGwcfZ6feB54LRCvQ2z8km5svOydb9VqPtv4Obc6w9m8fy0i3O2AfAicEKh/PRsv2t3\nsW6Z8zAui/nd2euWrM7eK3lPX8xvp3BelwPndLLsldzrynu+FHh7rnwzUmvAhYVz+5+VbXMlsRXf\n2w2z83RFod5Xs3qfypVNzco+kStbk5TwXbCSczUmO87TC+UnZ9t8T+E453bjb2oQsBA4MVc2l5Qw\nvVT5/QC+mR3jetnr/bJYjips71JSMvmmbvw9Vrbx1VzZaqQkchlwQFb2umI9P/yo9vAlJ2s2AXwF\n+EDu8cEq9a6LiAcLZR8j/bP+p6TXVx7A7aQPr92yeuNJ/4hPK6x/Sg/i/hApqbmwsO9lwG25fef9\npvD6RlKLUsV+pA/xEzvbaUQ8D/yF9K0eSJcBgI+TEpWXuoh5d2o/D89nz3tIWrsb9asJ4Ncl6k+N\niFc7h0fEY6QWgA/VuP/u+iDpPBXPy5nAYmDPQvn8iLio8iJS36Pb6PjeVvNh0jkpDk//BakVprif\nlYqIZcA0UqsekrYBhgI/BtYgtZ5AarW5MyJeyF7vQUpSTi9s8mTSuSie82p/j3sAL5P7PY/UkjUp\nO56KxaQkaTdJQ0seog0gTmisGd0WEX/PP6rUebRK2VtJ3/aeKTzmAGuTWiQA3pQ9d/gHHBGzSd9m\na7El6Z/01MK+nwben9t3xQtZMpI3DxiWe70FMCsiVhbTH4DNJe2Uvf4Q8HrSt/iubJY9lz4PEfEQ\n8EvgEOA5SX+V9BVJ669kn0WPlKhb/MAE+A+wvqRhVZb1lsp5+k++MFKn10dyyytmVtlG8b3tbD9L\ns3Ob388TpPejuJ/uuhF4Z9YPZywwMyLuJI0crFx2eg/pdzcfy6yIeLGwrRm55XmPVtnvZsATVZLq\nB/IvsuXHAnsBT0v6p6SjJBX/ZmyAcx8aW1UV/9FCSuCfBD5Nx2+AFU9nz5Vl3Rkh0lmdQVX2HaT+\nO89WqV/s5Lqsk+2qk5+78tdsnwcC/8qen4iIf65kvTLn4TUiYkLWyXNfUmvPJOBoSTtlSVF3VHsf\nyyieo+6+Xz3Zx8p0570tu7xsDHlTSUPhdyS1xEzNlY+VtDXpi8ANPdhftfdRVH8/XrPtiPiFpMuA\nj5BaUH8AfFvSrvlWORvY3EJjA8lDpA6ZNxZbeLJH5R/jo9nzyPzKkkaQLhvlzQMGSVq3UP7mKvsG\neLqTfU+lvAeBTYojaYoiYilZ51NJG5A65p7fje0/mj135zx0tu97IuKHEbErsCup9etL+Srd2U43\nvbVK2UhgYUTMy17PI/UrKnpzlbLuxvZo9jwqXyhpzWy7j3VzO93Zz+qS3lLYz8bAej3Yz79Ily53\nIbXIVH4XbwDeTbocGqSWnHwsm0gqdg4fnT13J5bKNoqXJEdVqUtEPBwRJ0fEeOAdpE7K3RodZgOD\nExobSC4idcD8bnFBNiy1khj8jfQt+ohCtQlVtvkQ6RvlLrltrUdqBcr7K/AC8J2sD0tx/2/o5jHk\nXUpqZe3OLLDnkZK5M0kfBN1JaMqchw4kDZFU/P9yD+mDca1c2SKqJxi1eG/WB6QSw5tJlykm5+o8\nBLxe0uhcvTeSkryi7sZWOU9HFsoPIY1k+0s3ttEdV5N+175WKP8G6bxeVctGs8tG7aTf2Y3o2EIz\nGDgceCAi8i2LV5P+lg4tbG4C6Vz8tRu7vpr0u3BIpSD72zicjiPm1slGzeU9TPp7WitXb4SkUVV+\n72yA8CUnazY1N61HxN+zSyDflTQGuJb0zXQkqcPwV0gjV
eZImggcJekK0j/nHUgdkOcWNvtX4Ang\nHEk/z8oOAp4CXp2nJCLmSzqcNFy8XdKFpMtAm5E6c/6Dkt82I+JaSW3A15XmhrmGdOlkLDAlIvKd\nLW+XNIPUGfiu7jTTlzwP0PG9+SAwUdLFwH9JHUw/S7q09udcvenA7tn8JU8BD0VE1XlzuuEe4BpJ\np5He10Oz5/+Xq3MBaSj6FVm99YAvk4aGb1vYXrdiy87TT4BjJV1NSmBGZ9udRmod67GIaJd0PnBo\n1qF8KrAz6RLiRRHR2dDs7pgKHAU8FxEzsv09Jekh0t/Hbwv1LyO14PxE0pasGLa9J/CziKjWT6jo\nMlLr0M+zVqfKsO1ia+dWwGRJFwH3kRKmj5H6gbXl6v2c1Pl9E9KlZRto6j3Myg8/uvsgfSAuA8Z0\nUWdQVucXXdT5ImlUyQukSxB3AD8EhhfqfY+UrLxA+hY+itSh8zeFemNIH1wvkr45HkZhaG+u7vtI\nLQbzsu0+AJwFbJercx7pg6UY94nAy4UykT6I7sv2P5s0smebKusfk8X09ZLnvdp5eBw4M1enOGx7\ni+y4/ktq6Xg6W3eXwrbfBvwz2/ayyrnNjnUZac6aLs9D/j0nfbj/JzsXt1biKay/O3A3aVjyvaR5\nYKoN2+4sts7e28Oy7b2Una9fAusX6kwFpleJ6TxSK8jK3otB2fvxULafR0gJ2+pVtvea36Eutrt3\ndkyXFcp/R2HoeW7ZYNKopllZLPcDR5b5eyR1hP4DaVTcc6Q5f7an47DtN5BG2t0HLCAl0zcBH6ly\nzEuL74sfA+eh7BfBzLpB0kzgrxHxpZVWbjCSvkGaQ+ZNEfFUveMxM+tNvtZoNnAcRJoPxMmMma1y\n3IfGbBWmdI+efUj9Xt6GR4WY2SrKCY1ZOZ3dC6hRjSCNaJpLuv3BlDrHY2bWJ9yHxszMzJqe+9CY\nmZlZ03NCY2ZmZk3PCY2Z9YikH0gq3ouqv2MYJGm5pOKdqHuyzXHZNvfprW2W2PcfJf23v/dr1syc\n0Jj1IUmfzT4UK48XJT0g6bRV6G7BzdZRuox6HVcAy+u0b7Om5FFOZn0vSPdbehRYm3RH468Ae0h6\ne0S8VMfYrGs9uYt1T3yujvs2a0pOaMz6x+SIaM9+/p2kuaQb+e0L/Kl+Ya2cpHUjYnG94xhIImJZ\nPfbr99qamS85mdXH30nfwDevFEjaXNLFkp6TtEjSNEkfzq8k6ZncTTBR8rykJbm7hSPp6Kxs3VzZ\nKEmXZNt/UdJtkvYubL9yiWwXSb+SNId0/6pSJH1B0nWS5mT7ukfSFwt1filpdqHsjGz/X86VbZyV\nHdTNfX86u6z3oqRbJb27Sp03SjpH0mxJL0m6W9Jnq2wugNUkHSdplqTFkv4mafPC9nbN3rvHs+09\nJunn+btESzpG0jJJGxd3ktV9UdL62evX9KGRtJ6kiZJmZvuYkd04M1/nLdm5OqBQXuljdGyu7AdZ\n2UhJf5I0j3STVLOm5ITGrD62zJ6fA8j600wj3aV6EnAssBZwpaR9c+vdBOySe70NUElk3pMrfy/Q\nXvm2LWlr0p2NRwE/Js0Y/ALwf4XtV/yKNLPw/yPd/6msr5Bu1PlD4BukmzWeWUhqpgL/I2lkIe5l\npDuGV+xCSiymdmO/44CfAeeSbuI4HJgiaVSlgqQRpBtX7gqcChyZxfp7SYcWtifS5cI9gZ9kj3eT\nbqiY9wnS+zUJOJx0I84jSTd3rLgw297Hq8T9MeDqiFiYve7QL0mSgKuAI0h3855AuvHnyUp3+q5F\nZft/Jt1E8hjSzSHNmlO9747phx+r8oMVdwjfDXg98EZgf+AZUkKxUVZvYlZv59y6g0l3VX4oV/YN\n4BVgcPb6cNKH8TTgR7l6c4Gf515fS7qrePGuzDcC9xfiXU66y7S6eYzV7lS9VpV6fwNm5F5vmO3r\nC9nrYdk5uBB4PFdvE
jB7JTEMyra1FHh7rnwz0p2gL8yVnUO6W/jQwjYuAp4F1shej8u2eScwKFdv\nQhbnyJUc73eyeDbKld0C3Fyot3O2n0/kys4D/pN7vV9W56jCupcCS0g3HAV4S1bvgE7Oz7GF9205\ncE69/0788KM3Hm6hMet7Aq4jJTEzgQuABcBHYsWNIvcAbo2IaZWVImIR8BvgzZK2yoqnkvq+VS6j\njM3KpmY/I2kbYIOsDEnDSAnVxcBQSa+vPIBrgLdK2igXbwC/jYiaR/hExMuvHrw0JNvX9cBISetk\ndeYAD7KixWks8DLwC2ATSZsVjrE7pkbEPbk4HgOuBD6UxSLgf4HLgdWrnIthwHaFbZ4dHfu0TCW9\np1t0crzrZtu7OauX396fgB0lvSlXtj+wmNTy0pk9SIns6YXyk0nJyoe6WLcrAfy6xnXNGooTGrO+\nF6RLMB8A3gdsFRFviYhrc3U2Ax6osu6M3HKAdtKHX+WSzHtZkdDsIGnNbFmQWl8gXd4S6Rv5M4XH\n8Vmd4hDyR/MvJK0hacP8o6sDljRW0t8lvQA8n+3rhGzx0FzVGwvHcitwOzAfGCtpKPB2up/QPFil\n7D/A+lliNwJYHziU156L32T1i+ei2IdoXvY8rFIgaTNJf5D0HKnl7RlSEgsdj/ei7PkTubL9gL9E\n151xNwNmRcSLhfLi70ctHunBumYNw6OczPrHbbFilFPNImKppFuAXSS9BdgIuIH0AboGsCMpMZgR\nEc9lq1W+uPwc6OzmlMVEoPjBuQvpklGQkqOQtGlEPFnckKS3ZnXvIV2emUlqXdiH1Ack/0VqKvBZ\nSZuSEptrIyIk3ZS9riQPN3QSd3fkhz9X9n0u8MdO6t9ZeN3ZiCNB6nBLuqS3PvAjUmK6GHgTqQ/N\nq8cbEbMkTSMlND+XNJZ0GfLCEsfQlc5a1QZ1sU7xvTZrSk5ozBrDY6QOu0Wjc8srpgLfInUgfiYi\n/gMg6V5S4jGWdJml4uHseUlE/L3G+KaTWpjynumk7j6k5GrP7LISWXzjq9SttLyMB8YA389e3wB8\nnpTQLOS1SUZn3lqlbCSwMCLmSVoALAJW68G5KNqO1HelNSJeHYIvqbPLQBcCv5S0Bely00LgryvZ\nx6PAeyWtU2ilKf5+VBLADQrr96QFx6wp+JKTWWO4GniXpB0rBZIGA18CHomI+3J1p5Im6DuSFZeV\nyH7+NKnV5tVLNBHxDKmT7yHZCJ8OJL1hZcFFxPMR8ffCo7PbHVRaNF79/5Jd7vlMle0+CMwhdXZe\njdTvpHKMo0j9XW4u0Z/nvVkfosp+3wzsBUzO9rcMuAz4hKTRxZWrnIvu7Lfa8Yr0/lRb/2Kyjruk\ny01X5PvgdOJqYE3SpbK8SgflvwJExDzSJb5dCvUO7ySWqiQNVRrmv1531zGrN7fQmPW97lwuOAlo\nBSZLOpU0SulzpG/WHy3UnUYaPTMSODNXfgOpr061Ic6HZWV3S/otqdVmQ9IImzcC25eMtytTSMOb\nr872NQT4IvAUr+2fAikR+xhpmPkLWdltpEshW5JGJXXXPcA1kk4jnaNDs+f/l6vzLdIH/q1ZfDOA\n1wE7kFq38klfd87FvaR+KKdkHZlfyI5nSLXKETFH0lTgm8B6dG9ixctI7+9PJG0J3EXqKLwn8LOI\nyPfzOQs4StJ8Up+r95FakMq8r58EzsieL1pJXbOG4BYas7630m/GEfE0Kbm4hvRt+kek4cZ7RcQV\nhbqLSUOw8x1/ISUsQRryPLOwzgzSB/ZfSEOzJwGHkL7dn0BHtYxuenWdbF8fI/1/+TlwMHAaaW6b\naipx51uVlpKGOHd3/plKDNcBR5GO8XhS68/uWUyVbc8G3knqR/PRLLavkhKQozs7rs7Ks5aqvUhJ\nxrHAd0lJzue7iPVPpGTmeTrv15TfR5CSl1OBvUnD/EcCX4+IYwrrfZ/Ud+cTpMRyaRZ
f2Xturar3\n57JVlHowMtPMzMysITREC002xPMKSU9kU3HvU1g+WNKkbMrvxZLulXRIoc5akk6X9KykhUpTvA8v\n1NlU0lVK08rPlvRTSQ1xDszMzKx2jfJhPhj4N+k6f7Umo4nA7qROdG8DTgEmSdorV+cUUpPsfqTr\n4xuTZtEEIEtcrib1G9qJ1CT9OV7b3G5mZmZNpuEuOUlaTppB9Ypc2d2kqct/mCu7nXTvk+8p3ZTv\nGeCTEXFZtnwUqbPfThFxq6Q9gCtI05A/m9U5hNQZ83+ya/ZmZmbWhBqlhWZlbgb2UXaXWkm7keab\nqHSmayG1vFRm5iQiHiDdr2XnrGgn4O5KMpOZQprFc+s+jd7MzMz6VLMkNEeQWltmSXqFdOnosIi4\nKVs+gnRzvAWF9eawYgjmiOx1cTl0HKZpZmZmTaZZ5qH5KmlK971IrS67AL+S9ORKZvsU3Rt6WLVO\ndoO58aRZOl8qE7CZmdkAtzbwZmBK7lYsfabhExpJawM/BPaNiMlZ8T2StifNN/F3YDawpqQhhVaa\n4axohanMPZFXucFeseWmYjxwfg8PwczMbCD7FHBBX++k4RMa0j1h1uC1rSjLWHHJbDpp8qhxpBk1\nkTSSdHO4ylTq04BjJb0h149md9JdffPTyuc9CvDHP/6R0aNfM0u69ZEJEyYwceLEeocxoPic9z+f\n8/7nc96/ZsyYwYEHHgjZZ2lfa4iEJrtnzZasmJp7C0nbAnMjYqak64GfSXqJdBO295HuC/M1gIhY\nIOls4GRJlZvZnQrcFBG3Zdu8hpS4nCfpaNL9bk4EJnVxT5qXAEaPHs2YMWN69Zitc0OHDvX57mc+\n5/3P57z/+ZzXTb902WiIhIY0Jfs/WDE19y+y8nOBg0h3pP0x8EfSPVceA74dEb/JbaNyk7ZLgLVI\nN6M7rLIwIpZn89acQWq1WUS6R8z3MTMzs6bWEAlNRFxPFyOusvvcfGEl23iZNBrqiC7qzCR1LDYz\nM7NVSLMM2zYzMzPrlBMaazitra31DmHA8Tnvfz7n/c/nfNXWcLc+aCSSxgDTp0+f7o5kZmZmJbS3\nt9PS0gLQEhHtfb0/t9CYmZlZ03NCY2ZmZk3PCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8J\njZmZmTU9JzRmZmbW9JzQmJmZWdNzQmNmZmZNzwmNmZmZNT0nNGZmZtb0nNCYmZlZ03NCY2ZmZk3P\nCY2ZmZk1PSc0ZmZm1vSc0JiZmVnTc0JjZmZmTc8JjZmZmTU9JzRmZmbW9BoioZE0VtIVkp6QtFzS\nPlXqjJZ0uaTnJb0g6RZJm+SWryXpdEnPSloo6RJJwwvb2FTSVZIWSZot6aeSGuIcmJmZWe0a5cN8\nMPBv4DAgigslvQWYCtwH7AK8AzgReClX7RRgT2C/rM7GwKW5bawGXA2sDuwEfBb4HHBCbx+MmZmZ\n9a/V6x0AQERMBiYDSFKVKj8AroqIb+fKHqn8IGkIcBDwyYi4Piv7PDBD0rsi4lZgPPA2YLeIeBa4\nW9JxwEmSjo+IpX1xbGZmZtb3GqWFplNZgrMn8F9JkyXNkfQvSfvmqrWQkrPrKgUR8QDwOLBzVrQT\ncHeWzFRMAYYCW/flMZiZmZX13HPwxBP1jqJ5NHxCAwwH1gOOJl0y+iBwGfBnSWOzOiOAVyJiQWHd\nOdmySp05VZaTq2NmZtYQTjwRxo+vdxTNoyEuOa1EJen6v4g4Nfv5LknvBr5M6lvTGVGlT04V3alj\nZmbWb8KfTKU0Q0LzLLAUmFEonwG8J/t5NrCmpCGFVprhrGiFmQ28s7CNDbPnYstNBxMmTGDo0KEd\nylpbW2ltbe3WAZiZmZUVAVV7lTagtrY22traOpTNnz+/X2No+IQmIpZIug0YVVg0Engs+3k6KekZ\nR7ochaSRwJuAm7M604BjJb0h149md2A+afRUpyZ
OnMiYMWN6eihmZmbd1kwJTbUv+e3t7bS0tPRb\nDA2R0EgaDGxJukQEsIWkbYG5ETET+BlwoaSpwD+APYC9gF0BImKBpLOBkyXNAxYCpwI3RcRt2Tav\nISUu50k6GtiINPR7UkQs6Y/jNDMz665mSmgaQUMkNMAOpEQlsscvsvJzgYMi4v8kfRk4Fvgl8ADw\n0YiYltvGBGAZcAmwFmkY+GGVhRGxXNJewBmkVptFwDnA9/vusMzMzGrjhKachkhosrljuhxxFRHn\nkBKQzpa/DByRPTqrM5PUsmNmZtbwnNB0XzMM2zYzMxtw3EJTjhMaMzOzBuRh2+U4oTEzM2tAbqEp\nxwmNmZlZA3JCU44TGjMzswbkhKYcJzRmZmYNyglN9zmhMTMza0BuoSnHCY2ZmVkD8iincpzQmJmZ\nNSC30JTjhMbMzKwBOaEpxwmNmZlZA3JCU44TGjMzswblhKb7nNCYmZk1ILfQlOOExszMrAE5oSnH\nCY2ZmVkD8rDtcpzQmJmZNSC30JTjhMbMzKwBOaEpxwmNmZlZg3JC031OaMzMzBqQW2jKcUJjZmbW\ngJzQlOOExszMrAF5lFM5TmjMzMwakFtoynFCY2Zm1qCc0HSfExozM7MG5BaachoioZE0VtIVkp6Q\ntFzSPl3UPTOr89VC+TBJ50uaL2mepLMkDS7U2UbSDZJelPSYpG/21TGZmZn1hBOachoioQEGA/8G\nDgM67QYl6SPAu4Anqiy+ABgNjAP2BHYBzsytuz4wBXgEGAN8Ezhe0sG9cwhmZma9xwlNOavXOwCA\niJgMTAaQqr99kt4InAqMB64uLHtbVt4SEXdkZUcAV0k6KiJmAwcCawBfiIilwAxJ2wNfB87qkwMz\nMzOrkROachqlhaZLWZLzB+CnETGjSpWdgXmVZCZzLam1Z8fs9U7ADVkyUzEFGCVpaB+EbWZmVjMP\n2y6nKRIa4BjglYiY1MnyEcDT+YKIWAbMzZZV6swprDcnt8zMzKyhuIWm+xriklNXJLUAXwW2r2V1\nuuiTky1nJXWYMGECQ4d2bMRpbW2ltbW1hpDMzMxWrpkuObW1tdHW1tahbP78+f0aQ8MnNMB7gf8B\nZua61wwCTpb0tYjYApgNDM+vJGkQMCxbRva8YWHblXWKLTcdTJw4kTFjxtR8AGZmZmU1U0JT7Ut+\ne3s7LS0t/RZDM1xy+gOwDbBt7vEk8FNSR2CAacAGWSffinGkFphbc3V2yRKdit2BByKif9NIMzOz\nlWimhKYRNEQLTTZfzJasuAS0haRtgbkRMROYV6i/BJgdEf8FiIj7JU0BfivpK8CawGlAWzbCCdKw\n7u8Bv5P0E+AdpEtZR/bt0ZmZmZXnhKachkhogB2Af5D6sgTwi6z8XOCgKvWr9Xk5AJhEGt20HLiE\nXLISEQskjc/q3A48CxwfEWf30jGYmZn1Go9yKqchEpqIuJ4Sl7+yfjPFsudJc810td7dwK6lAzQz\nM6sDt9B0XzP0oTEzMxtwfMmpHCc0ZmZmDcgJTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BO\naMpxQmNmZmZNzwmNmZlZA3ILTTlOaMzMzBqQE5pynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaA\nnNCU44TGzMzMmp4TGjMzswbkFppynNCYmZk1ICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMys\nAS1bBoPhqbIgAAAgAElEQVQG1TuK5uGExszMrAEtWwarr17vKJqHExozM7MGtHSpE5oynNCYmZk1\nICc05TihMTMza0BOaMpxQmNmZtaAnNCU44TGzMysATmhKachEhpJYyVdIekJScsl7ZNbtrqkn0i6\nS9ILWZ1zJW1U2MYwSedLmi9pnqSzJA0u1NlG0g2SXpT0mKRv9tcxmpmZleGEppyGSGiAwcC/gcOA\nKCxbF9gO+H/
A9sD/AqOAywv1LgBGA+OAPYFdgDMrCyWtD0wBHgHGAN8Ejpd0cC8fi5mZWY8tXep5\naMpoiNwvIiYDkwGkjvMiRsQCYHy+TNLhwC2SNomIWZJGZ3VaIuKOrM4RwFWSjoqI2cCBwBrAFyJi\nKTBD0vbA14Gz+vYIzczMyvE8NOU0SgtNWRuQWnKez17vBMyrJDOZa7M6O+bq3JAlMxVTgFGShvZx\nvGZmZqX4klM5TZfQSFoLOAm4ICJeyIpHAE/n60XEMmButqxSZ05hc3Nyy8zMzBqGE5pymiqhkbQ6\ncDGp5eXQ7qzCa/vkFJezkjpmZmb9zglNOU1zqnLJzKbA+3OtMwCzgeGF+oOAYdmySp0NC5utrFNs\nuelgwoQJDB3a8apUa2srra2tZQ7BzMys25opoWlra6Otra1D2fz58/s1hqY4VblkZgtgt4iYV6gy\nDdhA0va5fjTjSC0wt+bq/EDSoOxyFMDuwAMR0eVZnzhxImPGjOmNQzEzM+uWZkpoqn3Jb29vp6Wl\npd9iaIhLTpIGS9pW0nZZ0RbZ602zlpZLSUOtDwTWkLRh9lgDICLuJ3Xw/a2kd0p6D3Aa0JaNcII0\nrPsV4HeStpK0P/BV4Bf9d6RmZmbd00wJTSNolFO1A/APUl+WYEWScS5p/pm9s/J/Z+WVvjG7ATdk\nZQcAk0ijm5YDlwBHVnYQEQskjc/q3A48CxwfEWf32VGZmZnVYPny9PA8NN3XEAlNRFxP161FK21J\niojnSS04XdW5G9i1XHRmZmb9a1nWMcItNN3XEJeczMzMbAUnNOU5oTEzM2swS7MpYJ3QdJ8TGjMz\nswbjhKa8XkloJA2StJ2kYb2xPTMzs4HMCU15NSU0kk6R9IXs50HA9UA7MFPS+3ovPDMzs4HHCU15\ntbbQfAy4M/t5b2Bz4G3AROCHvRCXmZnZgOWEprxaE5o3sOKWAh8GLo6I/wC/A97RG4GZmZkNVE5o\nyqs1oZkDbJVdbvoQaTI7gHWBZZ2uZWZmZitVSWg8sV731Zr7/R64CHiKNGPv37LyHYH7eyEuMzOz\nAcvz0JRX06mKiOMl3UO68/XFEfFytmgZcFJvBWdmZjYQ+ZJTeTWfqoi4BEDS2rmyc3sjKDMzs4HM\nCU15tQ7bHiTpOElPAC9I2iIrP7EynNvMzMxq44SmvFo7BX8H+BzwLeCVXPk9wME9jMnMzGxAc0JT\nXq0JzWeAL0XE+XQc1XQnaT4aMzMzq5ETmvJqTWjeCDzYyfbWqD0cMzMzc0JTXq0JzX3A2CrlHwPu\nqD0cMzMz8zw05dWa+50AnCvpjaSk6KOSRpEuRe3VW8GZmZkNRG6hKa+mFpqIuJyUuHwAWERKcEYD\ne0fE37pa18zMzLrmifXK68k8NDcCH+zFWMzMzAy30NSi1nlo3ilpxyrlO0raoedhmZmZDVxOaMqr\ntVPw6aTbHhS9MVtmZmZmNXJCU16tCc1WQHuV8juyZWZmZlYjJzTl1ZrQvAxsWKV8I2Bp7eGYmZmZ\nE5ryak1orgF+LGlopUDSBsCPAI9yMjMz6wHPQ1NerQnNUaQ+NI9J+oekfwCPACOAb5TdmKSxkq6Q\n9ISk5ZL2qVLnBElPSlos6W+StiwsHybpfEnzJc2TdJakwYU620i6QdKLkh6T9M2ysZqZmfW1pUtB\ngtVq/ZQegGqdh+YJYBvSzSnvA6YDRwLviIiZNWxyMPBv4DAgigslHQ0cDhwCvIs0980USWvmql1A\nmgtnHLAnsAtwZm4b6wNTSInXGOCbwPGSfDNNMzNrKEuX+nJTWT2Zh2YR8JveCCIiJgOTASSpSpUj\ngRMj4sqszmeAOcBHgIskjQbGAy0RcUdW5wjgKklHRcRs4EDSfaa+EBFLgRmStge+DpzVG8dhZmbW\nG5Ytc0JTVs2nS9JI4H3AcAotPRFxQs/C6rCfzUmXsq7LbX+BpFuAnYGLgJ2Ae
ZVkJnMtqbVnR+Dy\nrM4NWTJTMQX4lqShETG/t2I2MzPrCbfQlFfT6ZL0ReAM4FlgNh0vEwXpVgi9ZUS2zTmF8jnZskqd\np/MLI2KZpLmFOg9X2UZlmRMaMzNrCE5oyqv1dH0X+E5E/KQ3gylJVOlvU7JO5fJWl9uZMGECQ4cO\n7VDW2tpKa2vrymI0MzMrrdkSmra2Ntra2jqUzZ/fv+0EtZ6uYcDFvRlIF2aTEo8N6dhKM5w0kV+l\nzvD8SpIGkeKcnatTnDunsk6x9aeDiRMnMmbMmNKBm5mZ1aLZEppqX/Lb29tpaWnptxhqHRB2MbB7\nbwbSmYh4hJSMjKuUSRpC6htzc1Y0Ddgg6+RbMY6UCN2aq7NLluhU7A484P4zZmbWSJotoWkEtZ6u\nB4ETJe0E3A0syS+MiFPLbCybL2ZLVlwC2kLStsDcbBj4KcB3JT0IPAqcCMwidfYlIu6XNAX4raSv\nAGsCpwFt2QgnSMO6vwf8TtJPgHcAXyWNoDIzM2sYS5d6Ur2yak1ovgS8AOyaPfICKJXQADsA/8jW\nDeAXWfm5wEER8VNJ65LmldkAmArsERGv5LZxADCJNLppOXAJuWQlGxk1PqtzO6lD8/ERcXbJWM3M\nzPqUW2jKq+l0RcTmvRlERFzPSi5/RcTxwPFdLH+eNNdMV9u4m9cmYGZmZg3F89CU16NJlSWtKWmU\nJJ92MzOzXuIWmvJqSmgkrSvpbGAxcC/wpqz8NEnH9GJ8ZmZmA44TmvJqbaH5MbAtaabgl3Ll1wL7\n9zAmMzOzAc0JTXm1nq6PAPtHxL8k5Seluxd4S8/DMjMzG7ic0JRXawvN/1C41UBmMCufvdfMzMy6\n4ISmvFoTmtuBPXOvK0nMwaQJ7MzMzKxGnoemvFrzv2OBv0raKtvGkZK2Jt392sOizczMesAtNOXV\n1EITETeSOgWvTpopeHfS/ZB2jojpvReemZnZwOOEprzSpyubc+YAYEpEfLH3QzIzMxvYPLFeeaVb\naCJiKfBrYO3eD8fMzMzcQlNerZ2CbwW2X2ktMzMzK80JTXm1nq5fAb+QtAkwHViUXxgRd/U0MDMz\ns4HKCU15tZ6uC7Pn/F21A1D27MFmZmZmNXJCU16tp6tX77ZtZmZmKzihKa+m0xURj/V2IGZmZpZ4\nYr3yakpoJH2mq+UR8YfawjEzMzO30JRX6+n6ZeH1GsC6wCvAYsAJjZmZWY08D015tV5yGlYsk/RW\n4AzgZz0NyszMbCBzC015tc5D8xoR8V/gGF7bemNmZmYlOKEpr9cSmsxSYONe3qaZmdmA4oSmvFo7\nBe9TLAI2Ag4HbuppUGZmZgOZE5ryaj1d/1d4HcAzwN+Bb/QoIjMzswHOCU15tXYK7u1LVWZmZpbx\nPDTlNUViImk1SSdKeljSYkkPSvpulXonSHoyq/M3SVsWlg+TdL6k+ZLmSTpL0uD+OxIzM7OVcwtN\neTUlNJIukXRMlfJvSrq452G9xjHAIcChwNuAbwHfknR4bt9Hk/rwHAK8i3TDzCmS1sxt5wJgNDAO\n2BPYBTizD+I1MzOrmROa8mptodkVuKpK+WRSktDbdgYuj4jJEfF4RPwZuIaUuFQcCZwYEVdGxD3A\nZ0gjrj4CIGk0MB74QkTcHhE3A0cAn5Q0og9iNjMzq4kn1iuv1oRmPdKswEVLgCG1h9Opm4Fx2eR9\nSNoWeA9wdfZ6c2AEcF1lhYhYANxCSoYAdgLmRcQdue1eS+rQvGMfxGxmZlYTt9CUV2tCczewf5Xy\nTwL31R5Op04C/gTcL+kVYDpwSkRcmC0fQUpM5hTWm5Mtq9R5Or8wIpYBc3N1zMzM6s4JTXm1nq4T\ngT9LegtpqDakfimtwMd7I7CC/YEDWJEwbQf8UtKTEXFeF+uJlOh0pTt1zMzM+o0TmvJqHbZ9paSP\nAMcCHwNeBO4CPhAR1/difBU/BX4UEZUOx
/dKejPwbeA8YDYpMdmQjq00w4HKJabZ2etXSRoEDOO1\nLTsdTJgwgaFDh3Yoa21tpbW1tYZDMTMz69zy5enRTAlNW1sbbW1tHcrmz5/frzHUfLoi4iqqdwzu\nC+vy2laU5WSXzCLiEUmzSa1EdwFIGkLqG3N6Vn8asIGk7XP9aMaREqFbutr5xIkTGTNmTG8ch5mZ\nWZeWLUvPzZTQVPuS397eTktLS7/FUOutD94JrBYRtxTKdwSWRcTtvRFczpXAdyTNBO4FxgATgLNy\ndU4BvivpQeBR0mWxWcDlABFxv6QpwG8lfQVYEzgNaIuI2b0cr5mZWU2WLk3PnlivnFo7BZ8ObFql\n/I2saBHpTYcDl2Tbvo90CeoM4HuVChHxU1KCciapxWUdYI+IyI/GOgC4nzS66S/ADaR5a8zMzBrC\nkiXpeY016htHs6m1QWsroL1K+R3Zsl4VEYuAr2ePruodDxzfxfLngQN7MzYzM7PetHBhel5//frG\n0WxqbaF5mdQBt2gjYGnt4ZiZmQ1slYRmSF/M6rYKqzWhuQb4saRXh/5I2gD4EfC33gjMzMxsIKoM\nDnILTTm1XnI6itT/5DFJlRFD25GGP3+6NwIzMzMbiB55JD1vskl942g2tc5D84SkbYBPAduS5qH5\nPWnE0JJejM/MzGxAmTUrXW563evqHUlz6ck8NIuA3/RiLGZmZgPe/PlQmMvVuqHWeWg+TrrNwUjS\nhHf/BS6IiEt6MTYzM7MBxwlNbUp1Cpa0mqQ/kW4UuRXwIPAwsDVwkaQLJan3wzQzMxsYnNDUpmwL\nzZHAB4B9IuIv+QWS9iH1ozmSNGuvmZmZlfT8805oalF22PbngW8WkxmAiLgC+BZwUG8EZmZmNhC5\nhaY2ZROat5JuG9CZa7M6ZmZmVoP77oONN653FM2nbELzIrBBF8uHAC/VHo6ZmdnAtWQJPPMMvP3t\n9Y6k+ZRNaKYBX+li+WFZHTMzMytp0aL0vN569Y2jGZXtFPxD4J+SXg/8nHTnagGjgW8A+wK79WqE\nZmZmA0QloRk8uL5xNKNSCU1E3Cxpf9KEevsVFs8DWiPipt4KzszMbCCp3JjSCU15pSfWi4jLJE0B\ndidNrAfwH+CaiFjcm8GZmZkNJO3t6fmtHl5TWq33clos6QPA9yJibi/HZGZmNiCdcQYMGwYbbVTv\nSJpP2ZmC8/f+PABYLyu/W9KmvRmYmZnZQPPcczB+fL2jaE5lRzndL+kxSRcAawOVJObNwBq9GZiZ\nmdlAEgEzZ8IOO9Q7kuZUNqEZCnwcmJ6te7Wk/wBrAeMljejl+MzMzAaE55+HF16ATTZZeV17rbIJ\nzRoRcWtE/II0yd72pNshLCPd8uAhSQ/0coxmZmarvBkz0vOoUfWNo1mV7RS8QNIdwE3AmsC6EXGT\npKXA/sAs4F29HKOZmdkqb/bs9Lype6TWpGwLzcbAD4CXScnQ7ZKmkpKbMUBExI29G6KZmdmqrzIH\nzfrr1zeOZlUqoYmIZyPiyoj4NrAYeCdwGhCkmYMXSLq+98M0MzNbtS1YAGutBWuuWe9ImlPZFpqi\n+RFxEbAEeD+wOfCrHkdVhaSNJZ0n6VlJiyXdKWlMoc4Jkp7Mlv9N0paF5cMknS9pvqR5ks6S5PkY\nzcys7hYudOtMT/QkodmG1GcG4DFgSUTMjog/9TysjiRtQOq38zIwnhX3jpqXq3M0cDhwCKkfzyJg\niqR8rntBtu44YE9gF+DM3o7XzMysrIULYciQekfRvGqaKRggImbmfu7rG50fAzweEQfnyh4r1DkS\nODEirgSQ9BlgDvAR4CJJo0nJUEtE3JHVOQK4StJRETG7j4/BzMysUwsWuIWmJ3p6yam/7E3qgHyR\npDmS2iW9mtxI2hwYAVxXKYuIBcAtwM5Z0U7AvEoyk7mW1P9nx74+ADMzs674klPPNEtCswXwFeAB\n0k0xf
w2cKunAbPkIUmIyp7DenGxZpc7T+YURsQyYm6tjZmZWFw8/7CHbPVHzJad+thpwa0Qcl72+\nU9LWpCTnj12sJ1Ki05Xu1DEzM+tT998PH/pQvaNoXs2S0DwFzCiUzQA+mv08m5SYbEjHVprhwB25\nOsPzG5A0CBjGa1t2OpgwYQJDhw7tUNba2kpra2v3j8DMzKwT8+alG1O+9a31jqQ2bW1ttLW1dSib\nP39+v8bQLAnNTUBxMuhRZB2DI+IRSbNJo5fuApA0hNQ35vSs/jRgA0nb5/rRjCMlQrd0tfOJEycy\nZsyYrqqYmZnV7MEH0/OWW3Zdr1FV+5Lf3t5OS0tLv8XQLAnNROAmSd8GLiIlKgcDX8zVOQX4rqQH\ngUeBE0nDyi8HiIj7JU0BfivpK6TZjU8D2jzCyczM6qnZE5pG0BQJTUTcLul/gZOA44BHgCMj4sJc\nnZ9KWpc0r8wGwFRgj4h4JbepA4BJpNFNy4FLSMO9zczM6ubZZ2HttaHQu8FKaIqEBiAirgauXkmd\n44Hju1j+PHBgZ8vNzMzq4YUXYL316h1Fc2uWYdtmZmarrEWLnND0lBMaMzOzOnvhBRjsOwv2iBMa\nMzOzOvMlp55zQmNmZlZnTmh6zgmNmZlZnfmSU885oTEzM6szdwruOSc0ZmZmdTZrFmy4Yb2jaG5O\naMzMzOro6afhoYdgu+3qHUlzc0JjZmZWR7NmQQRsvXW9I2luTmjMzMzq6Pnn0/MGG9Q3jmbnhMbM\nzKyO5s5Nz05oesYJjZmZWR3ddx8MGQKve129I2luTmjMzMzq6JprYLfdQKp3JM3NCY2ZmVmdzJ8P\nt9wC739/vSNpfk5ozMzM6qS9HZYuhXe/u96RND8nNGZmZnVywQVpQr13vKPekTQ/JzRmZmZ1cNNN\ncNZZcMQRsNZa9Y6m+TmhMTMzq4Ojj07PX/96feNYVTihMTMz62dPPJFaaL71LVhnnXpHs2pwQmNm\nZtbPZs5MzwceWN84ViVOaMzMzPrZXXeleWfe+MZ6R7LqcEJjZmbWz+64A97+ds8O3Juc0JiZmfWz\nadNg223rHcWqxQmNmZlZP1qwIF1yGjeu3pGsWpoyoZH0bUnLJZ2cK1tL0umSnpW0UNIlkoYX1ttU\n0lWSFkmaLemnkpryHJiZWXO64QaIgJ13rnckq5am+zCX9E7gi8CdhUWnAHsC+wG7ABsDl+bWWw24\nGlgd2An4LPA54IQ+D9rMzCxz990wbBiMGlXvSFYtTZXQSFoP+CNwMPB8rnwIcBAwISKuj4g7gM8D\n75H0rqzaeOBtwKci4u6ImAIcBxwmafX+PA4zMxu4nnkGhg9feT0rp6kSGuB04MqI+HuhfAdSy8t1\nlYKIeAB4HKg06u0E3B0Rz+bWmwIMBbbus4jNzMwyixfD5ZfD1v7U6XVN0zIh6ZPAdqTkpWhD4JWI\nWFAonwOMyH4ekb0uLq8sK17CMjMz61WTJsEjj6SkxnpXUyQ0kjYh9ZH5YEQsKbMqEN2o1506ZmZm\nNVu+HL773TS66e1vr3c0q56mSGiAFuB/gOmSlJUNAnaRdDjwIWAtSUMKrTTDWdEKMxt4Z2G7G2bP\nxZabDiZMmMDQoUM7lLW2ttLa2lr6QMzMbGBqb4clS+CLX6x3JL2vra2Ntra2DmXz58/v1xgU0fiN\nE5IGA5sVis8BZgAnAU8AzwCfjIjLsnVGAvcDO0bEbZI+BFwJbFTpRyPpS8BPgOHVWn4kjQGmT58+\nnTFjxvTJsZmZ2cDwpS/BpZfC7Nmwxhr1jqbvtbe309LSAtASEe19vb+maKGJiEXAffkySYuA5yJi\nRvb6bOBkSfOAhcCpwE0RcVu2yjXZNs6TdDSwEXAiMKnkZSwzM7NSHnwQzjkHjj56YCQz9dAUCU0n\nik1LE4BlwCXAWsBk4LBXK0csl7QXcAZwM7CI1Mrz/f4I1szMBq6vfQ0
23hi+/e16R7LqatqEJiLe\nX3j9MnBE9uhsnZnAXn0cmpmZ2avuvx+uugouuADWXbfe0ay6mm0eGjMzs6bS1garrw7jx9c7klWb\nExozM7M+EgG/+10a2fS619U7mlWbExozM7M+ctxxMGsWfPrT9Y5k1de0fWjMzMwa1aJFcMwxaWbg\nfff1nbX7gxMaMzOzXvTSS7DnnnD99bDffnD++fWOaGDwJSczM7NesnDhimTme9+DSy6Btdaqd1QD\ng1tozMzMesnhh8O//gU33ABjx9Y7moHFLTRmZmY9FAHf+Q784Q/w+c87makHJzRmZmY99Mtfwo9+\nBDvsAD/7Wb2jGZic0JiZmdVo+XL485/hqKNg773h1lthnXXqHdXA5D40ZmZmNViyJCUxU6bANtuk\nCfSkekc1cLmFxszMrKSnnoLNN0/JzIknwh13wBveUO+oBja30JiZmZVw881psrwXX4Rrr4Vx4+od\nkYFbaMzMzLpl4UI49FB4z3tgvfVS64yTmcbhhMbMzGwlliyBr34VzjgDDjkEHnggJTbWOJzQmJmZ\ndeGxx9LNJc85Bz71Kfj1r2HNNesdlRU5oTEzM6vi5Zfh+OPhLW+BP/0JjjwSzj233lFZZ9wp2MzM\nLOfll+GPf4QTToDHH4ePfjTdNXujjeodmXXFCY2ZmVnmM59JyUwEvOtd0NYG7353vaOy7vAlJzMz\nG/CefDKNYDrvvNQiM21ausmkk5nm4RYaMzMbsJYtgx/+EH7wgzSS6Wc/S7cxsObjhMbMzAacCLj0\nUjj8cJgzB774RTjuONh003pHZrVyQmNmZgPGU0/B738PF18M//437LgjXHghvO999Y7Meqop+tBI\n+rakWyUtkDRH0mWSRhbqrCXpdEnPSloo6RJJwwt1NpV0laRFkmZL+qmkpjgHZmZWmwiYPBk+/nF4\n05vgO9+BwYPTEOxp05zMrCqa5cN8LHAasCPwAWAN4BpJ+Zu0nwLsCewH7AJsDFxaWZglLleTWqV2\nAj4LfA44oe/DNzOz/rZsWRqxNGoU7LEH/POf8IlPpKHYN96YRjT57tirjqa45BQRH86/lvQ54Gmg\nBbhR0hDgIOCTEXF9VufzwAxJ74qIW4HxwNuA3SLiWeBuSccBJ0k6PiKW9t8RmZlZX4hIM/pOmQJ/\n+xvMnQujR6fLSh//OKzWLF/jrbRmfWs3AAKYm71uISVn11UqRMQDwOPAzlnRTsDdWTJTMQUYCmzd\n1wGbmVnfWb48JTD/+79w0EGpf8wHPwgXXAD33gv77+9kZlXXFC00eZJEurx0Y0TclxWPAF6JiAWF\n6nOyZZU6c6osryy7sw/CNTOzPrRwYZo7ZtIkmDED1l4bTj4ZJkyod2TW35ouoQF+BWwFvLcbdUVq\nyVmZ7tQxM7MGEAGXXw7XXptGLC1eDDvtlO639NGPwurN+MlmPdZUb7ukScCHgbER8WRu0WxgTUlD\nCq00w1nRCjMbeGdhkxtmz8WWmw4mTJjA0KFDO5S1trbS2tpa8gjMzKwWEXDbbWnumLPOSn1jhgyB\nj30MDjss3abA6qetrY22trYOZfPnz+/XGBTRHI0TWTKzL7BrRDxcWDYEeIbUKfiyrGwkcD+wY0Tc\nJulDwJXARpV+NJK+BPwEGB4RS6rscwwwffr06YwZM6YPj87MzKqZNQuuuCK1xNx+exqVVOkn8+EP\ne5RSI2tvb6elpQWgJSLa+3p/TdFCI+lXQCuwD7BIUqVlZX5EvBQRCySdDZwsaR6wEDgVuCkibsvq\nXgPcB5wn6WhgI+BEYFK1ZMbMzPrX8uVpdNL116eOvHfeCTNnpmWjRqW+MQcdBIUGczOgSRIa4Muk\nfi7/LJR/HvhD9vMEYBlwCbAWMBk4rFIxIpZL2gs4A7gZWAScA3y/D+M2M7NORMDNN6f5YW67Df7+\n99TJd911YautYPfdYYcd0vMWW9Q
7Wmt0TZHQRMRKB9tFxMvAEdmjszozgb16MTQzMyvhttvguutg\n9my46KJ0K4LVVoORI9NEd+PHw557eoi1ldcUCY2ZmTWfpUtTC8w998Add6S+ME8/DWusAa9/PYwZ\nk4Zb77EHrLPOyrdn1hUnNGZm1mOvvAL33w833JBaYB59NCUyS7M52IcPT5eO3v9++NSnYM016xqu\nrYKc0JiZWbdFpOTliSdSx93HHkt9YP7yF3j55VRn5EhoaYF994Wdd06PIUPqGrYNAE5ozMysUw8/\nnFpd7r8/JTC33pouG+Vtthl8+cuw664wdiy84Q31idUGNic0ZmbG00+nfi5PPQVTp6ZOu489Bvfd\nl1plhgxJLS/77ptGHg0bBltvnco8M681Av8ampkNAIsXp4npnnoqtbY89BA8+GD6+cUX4aWXVtQd\nMSIlKzvtlG7qePDBsOGGHnlkjc0JjZnZKuDFF1O/lvZ2eOCB9Jg5E5555v+3d+/BdZTnHce/P8my\nLQtkg8EYjMGyjTG0BGzuBScmYJiUgQxNQwKUwCRNSUPbKZ0A7UxmSNNOOrkxoQl00tKmuRBaekuY\nBkq52LiXEBpDKQFjEyzfEVggy/eb/PaPZzdndaybLekcHen3mdmxzrvvrnafc7zn0fu++260tnR0\nlOrW18MZZ8App8Ctt8KsWdDUBAsWRNnUqZ6B12qPExozsxqyYUPc/rxmTTzPaM0aaG0tzagLkbCc\nfjrMnx8T0rW0RJfROefAnDlw8slOWGz0cUJjZjbC7NsHK1fC669Hl1BbW7S4vPZaPNsI4KSTYMqU\naF259tr497TTouVl3rxqHr1ZdTihMTOrsJRiwO22bdG6smsXrFgRY1pWr45kJjduHMycGQnLNddE\nwvLhD8c4FzMrcUJjZjaEUoqBt9u2RdKyYUO0sLS2xmRz69fHgNyUum83aVJ0CS1cCDfcEANy58yJ\nZFkTtxsAAA8sSURBVKahoSqnYlZTnNCYmQ1AVxd0dsbdQK++Gl1Aq1dHcpInLAcPxuDcfIK53FFH\nRWJy4okxU+4nPxktLtOmxSDc5ua4Dbq+viqnZjYqOKExszFvy5ZISLq6IkFpbY0Bt2vXluZj2bo1\nkpWiE06IAbfz58OSJXDssVE+Y0YkKLNnxx1DnmjObPg5oTGzUaPYjdPaCjt2xM/r1sH27dH9s2oV\ntLdHctLaGq0u7e3d9zNuXCQjRx8d/y5aBJMnR/LS2BgJy5lnRsuLmY0MTmjMbETr6Ci1oGzfHi0o\ne/bEv3v3xl0/W7dGd8+aNXGHUG+kSFDmzo3E5PLLowVl1iw466yYi2XatGh58W3NZrXFCY2ZDZuU\nYPPm+Lejo3TLMcTg2G3b4udiC0pnZ7SsrF0b68oTlPr6SDqOPjqeIXTMMfEgxHHjosVk1qyol6+H\nGHA7e3bU8VOezUYnJzRm1quUYixJV1fMibJ/f5Tv3RstJgcOxOvOzkhG8oRk06ZIYHbtKm3Tk+bm\nmE5/woS4o2fixEhO6utLg2XHj49kZMqU6PJpaor6ZmZFTmjMRqn8jps9e2J+k6JNmyJRSSm6afLB\nru3tMQh28+ZYv2tXtJz0ZuLE0s8tLTEodvbseA5Q3iKSD5Ctq4uyfJvGxtIgWjOzwXJCYzaCpBSJ\nSO6dd2JOE4gWj02b4ue8BQS6d8/s2VO6W6e9vTQoti+TJkU3jRQtIy0t8TTl8oTk2GNjdtrc9Oke\nFGtmI4cTGrNBamsrjQWBUvdL0Vtvwdtvl17nLSDQPTnZuHFgSYjUfcK1vHumqSkGuh5/fCQns2dH\n90y+PjdxYnTxeOCrmY0WTmhsVNq5M1o3cnv3RtdKV1epbMuWSDSK8i6XXLFLprx7Jl+fjyPpz9Sp\npbEfDQ2RbDQ0RPn550dryIQJUZ5PsNbQEC0m48dHl81pp0VZfb0Ht5qZFTmhsYrYvj26Q3K7d0dy\
nUD79+8aNpdYKiPX5s24gumPy23Vz774bLR7lv28gGhu7jwPJWzWKZc3NcPbZ0ZqxZEmpeyY3aVIk\nHXlZfmtwY2P3/RZbSMzMbGg5oRmj9u3rPynoqVVjIAlFSjGmY+fO0jblE5f1paEhWity5QlDc3PM\nzJqrqytNeFbcR94CkjvppO6DUMeNi9t/zcys9jmhqZKDB7sP/mxrK33pHzjQ8wRhbW3RTdKb4j4g\nEpE1aw59rkw+N0gxUTkczc2l+T2g54SipSUSijwJKc4Jkps5M2ZfLWpsjOfdmJmZHY4xl9BIuh34\nDDAdeAn43ZTS//S1TVdXTJe+b18kCPv3x5L/nNu1K1omiolKPmFYUUoxqVixC2Zgxx5JQG9jJ/Lp\n2ovrL744BoSWa2zsnnAUB5AWzZxZ+a6Shx9+mBtuuKGyv3SMc8wrzzGvPMd8dBtTCY2kjwBfBX4L\neB64A3hC0ryUUq+dIhdc0HP5hAmHPnRuxozu3RozZsTdJHV13euVJxTjx0e9fDDo8cfH9OtFdXXd\nu1BGK190Ks8xrzzHvPIc89FtTCU0RALzzZTSdwAkfQq4Gvg48KXeNrr7brj00piptJhoNDWNjQTD\nzMxspBszCY2kBuBc4At5WUopSXoKuLivba+/HhYuHOYDNDMzsyNW13+VUeM4oB4om3mEt4jxNGZm\nZlajxkwLTR8EpF7WTQRYuXJl5Y7G6Ozs5IUXXqj2YYwpjnnlOeaV55hXVuG7c2Jf9YaKUvnMZqNU\n1uW0C/hQSunRQvnfApNTStf1sM2NwEMVO0gzM7PR56aU0veH+5eMmRaalNJ+SSuAy4FHASQpe/3n\nvWz2BHATsBY4zJuszczMxrSJwCziu3TYjZkWGgBJ1wPfBm6jdNv2rwPzU0p9TFlnZmZmI9mYaaEB\nSCk9Iuk44PPACcD/Alc5mTEzM6ttY6qFxszMzEansXTbtpmZmY1STmh6Iel2Sa2Sdkt6TtL51T6m\nWiXpHkkHy5ZXC+snSLpfUruk7ZL+UdK0sn3MlPQjSTsltUn6kiR/fjOSFkl6VNKmLL7X9lDn85I2\nS9ol6UlJc8vWHyPpIUmdkjokPSipqazOeyQtz/5frJN053Cf20jVX8wlfauHz/1jZXUc8wGS9EeS\nnpe0TdJbkv5F0ryyOkNyLZG0WNIKSXskrZZ0SyXOcaQZYMyXlX3GuyQ9UFanIjH3F0IPCs98ugdY\nQDzE8ols/I0dmZ8R45amZ8ulhXVfIx5B8SHgvcBJwD/lK7MP/mPEmK+LgFuAW4mxUBaaiDFht9PD\nvEqS7gZ+hxgQfwGwk/hMFx91+n3gDOLOv6uJ9+KbhX0cTdyt0AosBO4EPifpN4fhfGpBnzHPPE73\nz335g4Qc84FbBHwduBC4AmgA/l1SY6HOoK8lkmYB/wo8DZwN3Ac8KGnJsJzVyDaQmCfgLyl9zk8E\n7spXVjTmKSUvZQvwHHBf4bWAjcBd1T62WlyIxPCFXtY1A3uB6wplpwMHgQuy1x8A9gPHFercBnQA\n46p9fiNtyWJ3bVnZZuCOsrjvBq7PXp+RbbegUOcq4AAwPXv920B7MebAnwGvVvucq730EvNvAf/c\nxzbzHfNBxfy4LH6XZq+H5FoCfBH4v7Lf9TDwWLXPudpLecyzsqXAvX1sU7GYu4WmjErPfHo6L0sR\n3X6f+WR9Oi1rmn9D0vckzczKzyUy92K8VwHrKcX7IuDl1P2J6E8Ak4FfGv5Dr22SWoi/nIox3gb8\nhO4x7kgpvVjY9Cnir68LC3WWp5QOFOo8AZwuafIwHX6tW5w11b8m6QFJxxbWXYxjPhhTiFi9m70e\nqmvJRcT7QFkdX/8PjXnuJklbJL0s6QtlLTgVi7kTmkP5mU9D7zmiifEq4FNAC7A8GyswHdiXfcEW\nFeM9nZ7fD/B7MhDTiYtQX5/p6cDbxZUppS7iwuX34cg8DnwMe
D/RBP8+4DFJytY75kcoi+HXgP9M\nKeXj8YbqWtJbnWZJEwZ77LWql5hDzKb/G8Bi4uHPNwPfLayvWMzH1Dw0g9TXM5+sDyml4iyRP5P0\nPLAOuJ7eZ2AeaLz9nhy5gcS4vzr5l7PfhzIppUcKL1+R9DLwBnHhX9rHpo55/x4AzqT7WLzeDMW1\nxDEvxfySYmFK6cHCy1cktQFPS2pJKbX2s88hjblbaA7VDnQRA5yKpnFoBmlHIKXUCawG5gJtwHhJ\nzWXVivFu49D3I3/t96R/bcTFoa/PdFv2+hck1QPHZOvyOj3tA/w+9Cu7uLcTn3twzI+IpG8Avwos\nTiltLqwa7LWkv5hvSyntG8yx16qymL/ZT/WfZP8WP+cVibkTmjIppf1A/swnoNszn/67Wsc1mkg6\nCphDDFRdQQyCLMZ7HnAKpXj/GDir7C6zK4FOoNj0aT3Ivkjb6B7jZmKcRjHGUyQtKGx6OZEIPV+o\n897sSzd3JbAqS1KtD5JOBqYC+ReCY36Ysi/WDwKXpZTWl60e7LVkZaHO5XR3ZVY+5vQT854sIFpV\nip/zysS82qOmR+JCdIXsJvq/5xO3Ub4DHF/tY6vFBfgycQvlqcCvAE8SfzFNzdY/QNyWupgY2Pdf\nwH8Utq8jbp1/HHgPMRbnLeBPqn1uI2UhbiE+GziHuAvh97PXM7P1d2Wf4WuAs4AfAK8D4wv7eAz4\nKXA+0ay8CvhuYX0zkYR+m2h6/giwA/hEtc9/pMU8W/clImk8lbhY/5S4gDc45kcU7weIO2MWEX/N\n58vEsjqDupYQD1PcQdx5czrwaWAfcEW1YzDSYg7MBj5LTClwKnAt8HPgmWrEvOoBG6lLFtC1RGLz\nY+C8ah9TrS7E7Xcbs1iuJ+beaCmsn0DMddAObAf+AZhWto+ZxDwFO7L/DF8E6qp9biNlIQacHiS6\nS4vL3xTqfC77ctxF3EEwt2wfU4DvEX85dQB/BUwqq3MW8Gy2j/XAZ6p97iMx5sRThv+NaBnbA6wB\n/oKyP4oc88OKd0+x7gI+VqgzJNeS7L1dkV2zXgdurvb5j8SYAycDy4At2edzFTGtwFHViLmf5WRm\nZmY1z2NozMzMrOY5oTEzM7Oa54TGzMzMap4TGjMzM6t5TmjMzMys5jmhMTMzs5rnhMbMzMxqnhMa\nMzMzq3lOaMzMzKzmOaExszFD0lJJ91b7OMxs6DmhMbOKkHSbpG2S6gplTZL2S3q6rO5lkg5KmlXp\n4zSz2uSExswqZSnxFOrzCmWLgDeBiySNL5S/D1iXUlp7uL9E0rjBHKSZ1SYnNGZWESml1UTysrhQ\nvBj4AdAKXFRWvhRA0kxJP5S0XVKnpL+XNC2vKOkeSS9K+oSkNcTTrZE0SdJ3su02SfqD8mOS9GlJ\nqyXtltQm6ZGhPWszqxQnNGZWScuAywqvL8vKns3LJU0ALgSeyer8EJhCtOZcAcwB/q5sv3OBXwOu\nA87Jyr6SbXMNcCWRJJ2bbyDpPOA+4LPAPOAqYPkgz8/MqsRNs2ZWScuAe7NxNE1E8rEcGA/cBvwx\ncEn2epmkJcAvA7NSSpsBJN0MvCLp3JTSimy/DcDNKaV3szpNwMeBG1NKy7KyW4CNhWOZCewAfpRS\n2glsAF4apvM2s2HmFhozq6R8HM35wKXA6pRSO9FCc2E2jmYx8EZKaSMwH9iQJzMAKaWVwFbgjMJ+\n1+XJTGYOkeQ8X9iuA1hVqPMksA5ozbqmbpTUOGRnamYV5YTGzCompfQGsInoXrqMSGRIKb1JtJBc\nQmH8DCAg9bCr8vKdPaynl23zY9kBLAQ+CmwmWodektQ84BMysxHDCY2ZVdpSIplZTHRB5ZYDHwAu\noJTQvAqcImlGXknSmcDkbF1vfg4coDDQWNIxxFiZX0gpHUwpPZNS+kPgbGAW8P4jOCczqzKPoTGz\nSlsK3E9cf54tlC8HvkF0F
S0DSCk9Jell4CFJd2Tr7geWppRe7O0XpJR2Svpr4MuS3gW2AH8KdOV1\nJF0NzM5+bwdwNdGys+rQPZrZSOeExswqbSkwEViZUtpSKH8WOAp4LaXUVij/IPD1bP1B4HHg9wbw\ne+4kxus8CmwHvgoUu5O2EndG3ZMdz+vAR7MxOmZWY5RSr13MZmZmZjXBY2jMzMys5jmhMTMzs5rn\nhMbMzMxqnhMaMzMzq3lOaMzMzKzmOaExMzOzmueExszMzGqeExozMzOreU5ozMzMrOY5oTEzM7Oa\n54TGzMzMap4TGjMzM6t5/w+ZKiTWKpLyuAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -343,7 +340,7 @@ }, { "cell_type": "code", - "execution_count": 371, + "execution_count": 163, "metadata": { "collapsed": false }, @@ -361,7 +358,7 @@ }, { "cell_type": "code", - "execution_count": 372, + "execution_count": 164, "metadata": { "collapsed": false }, @@ -372,7 +369,7 @@ }, { "cell_type": "code", - "execution_count": 373, + "execution_count": 165, "metadata": { "collapsed": false }, @@ -402,7 +399,7 @@ }, { "cell_type": "code", - "execution_count": 358, + "execution_count": 197, "metadata": { "collapsed": false }, @@ -416,7 +413,7 @@ }, { "cell_type": "code", - "execution_count": 374, + "execution_count": 200, "metadata": { "collapsed": false }, @@ -425,34 +422,35 @@ "name": "stdout", "output_type": "stream", "text": [ - "-8.22548993324\n", - "-7.00248242458\n", - "-6.99740973863\n", - "-6.98998990308\n", - "-6.97948100519\n", - "-6.9657412708\n", - "-6.94923575245\n", - "-6.93078544187\n", - "-6.91130176592\n", - "-6.89159127379\n", - "CPU times: user 6.37 s, sys: 12 ms, total: 6.38 s\n", - "Wall time: 6.38 s\n" + "-8.22783601141\n", + "-6.95384791808\n", + "-6.92227455516\n", + "-6.89499090053\n", + "-6.86991386225\n", + "-6.8461946909\n", + "-6.82374414163\n", + "-6.80292182083\n", + "-6.78419418262\n", + "-6.76785403568\n", + "CPU times: user 7.85 s, sys: 32 ms, total: 7.88 s\n", + "Wall time: 7.88 s\n" ] } ], "source": [ "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", - " author2doc=author2doc, doc2author=doc2author, id2author=id2author, var_lambda=None, \\\n", - " chunksize=2000, passes=10, update_every=1, 
\\\n", - " alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, \\\n", - " eval_every=1, iterations=10, gamma_threshold=1e-10, \\\n", - " minimum_probability=0.01, random_state=1, ns_conf={}, \\\n", - " minimum_phi_value=0.01, per_word_topics=False)\n" + " author2doc=author2doc, doc2author=doc2author, var_lambda=None, \\\n", + " chunksize=2000, passes=10, update_every=1, \\\n", + " alpha='auto', eta='auto', decay=0.5, offset=1.0, \\\n", + " eval_every=1, iterations=10, gamma_threshold=1e-10, \\\n", + " minimum_probability=0.01, random_state=0, ns_conf={},\\\n", + " serialized=True, serialization_path='/tmp/model_serializer.mm', \\\n", + " minimum_phi_value=0.01)\n" ] }, { "cell_type": "code", - "execution_count": 376, + "execution_count": 187, "metadata": { "collapsed": false }, @@ -461,26 +459,46 @@ "name": "stdout", "output_type": "stream", "text": [ - "-6.86383519922\n", - "-6.80098424134\n", - "-6.77073905151\n", - "-6.74776922681\n", - "-6.72954797628\n", - "-6.71464583585\n", - "-6.70217200697\n", - "-6.6915373574\n", - "-6.68233766042\n", - "-6.67428444055\n" + "-8.22707493832\n", + "-6.95390890605\n", + "-6.92238881248\n", + "-6.89518116711\n", + "-6.87017561642\n", + "-6.84650273233\n", + "-6.82405446145\n", + "-6.8031601438\n", + "-6.7842610768\n", + "-6.76764861124\n", + "CPU times: user 8.08 s, sys: 28 ms, total: 8.11 s\n", + "Wall time: 8.13 s\n" ] } ], "source": [ - "model.update()" + "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", + " author2doc=author2doc, doc2author=doc2author, id2author=id2author, var_lambda=None, \\\n", + " chunksize=2000, passes=10, update_every=1, \\\n", + " alpha='auto', eta='auto', decay=0.5, offset=1.0, \\\n", + " eval_every=1, iterations=10, gamma_threshold=1e-10, \\\n", + " minimum_probability=0.01, random_state=0, ns_conf={},\\\n", + " serialized=True, serialization_path='/tmp/model_serializer.mm', \\\n", + " minimum_phi_value=0.01)\n" + ] + }, + { + "cell_type": 
"code", + "execution_count": 199, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "os.remove('/tmp/model_serializer.mm')" ] }, { "cell_type": "code", - "execution_count": 375, + "execution_count": 53, "metadata": { "collapsed": false }, @@ -488,10 +506,10 @@ { "data": { "text/plain": [ - "-7.0166219933262406" + "-6.8552693884314806" ] }, - "execution_count": 375, + "execution_count": 53, "metadata": {}, "output_type": "execute_result" } @@ -503,43 +521,7 @@ }, { "cell_type": "code", - "execution_count": 316, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-8.22548993324\n", - "-7.00248242458\n", - "-6.99740973863\n", - "-6.98998990308\n", - "-6.97948100519\n", - "-6.9657412708\n", - "-6.94923575245\n", - "-6.93078544187\n", - "-6.91130176592\n", - "-6.89159127379\n", - "CPU times: user 6.19 s, sys: 8 ms, total: 6.2 s\n", - "Wall time: 6.2 s\n" - ] - } - ], - "source": [ - "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", - " author2doc=author2doc, doc2author=doc2author, id2author=id2author, var_lambda=None, \\\n", - " chunksize=2000, passes=10, update_every=1, \\\n", - " alpha='symmetric', eta='symmetric', decay=0.5, offset=1.0, \\\n", - " eval_every=1, iterations=10, gamma_threshold=1e-10, \\\n", - " minimum_probability=0.01, random_state=1, ns_conf={}, \\\n", - " minimum_phi_value=0.01, per_word_topics=False)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 377, + "execution_count": 153, "metadata": { "collapsed": false }, @@ -548,28 +530,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.033*\"memory\" + 0.011*\"capacity\" + 0.010*\"bit\" + 0.008*\"associative\" + 0.008*\"associative_memory\" + 0.008*\"stored\" + 0.008*\"circuit\" + 0.007*\"threshold\" + 0.006*\"vector\" + 0.006*\"address\"'),\n", + " '0.007*\"cell\" + 0.006*\"fig\" + 0.006*\"chip\" + 0.005*\"memory\" + 0.005*\"current\" + 0.005*\"circuit\" 
+ 0.004*\"control\" + 0.004*\"element\" + 0.004*\"node\" + 0.004*\"analog\"'),\n", " (1,\n", - " '0.025*\"vector\" + 0.012*\"probability\" + 0.008*\"group\" + 0.006*\"let\" + 0.006*\"distribution\" + 0.006*\"class\" + 0.005*\"response\" + 0.005*\"matrix\" + 0.004*\"position\" + 0.004*\"principle\"'),\n", + " '0.007*\"cell\" + 0.006*\"circuit\" + 0.005*\"firing\" + 0.005*\"threshold\" + 0.004*\"current\" + 0.004*\"memory\" + 0.004*\"chain\" + 0.003*\"potential\" + 0.003*\"analog\" + 0.003*\"synaptic\"'),\n", " (2,\n", - " '0.016*\"node\" + 0.012*\"matrix\" + 0.009*\"hopfield\" + 0.008*\"code\" + 0.007*\"optimization\" + 0.006*\"element\" + 0.006*\"stable\" + 0.006*\"sequence\" + 0.005*\"constraint\" + 0.005*\"graph\"'),\n", + " '0.009*\"map\" + 0.006*\"cell\" + 0.005*\"delay\" + 0.005*\"region\" + 0.005*\"cortex\" + 0.005*\"field\" + 0.004*\"fig\" + 0.004*\"et\" + 0.004*\"brain\" + 0.004*\"et_al\"'),\n", " (3,\n", - " '0.029*\"cell\" + 0.011*\"activity\" + 0.010*\"visual\" + 0.010*\"cortex\" + 0.008*\"stimulus\" + 0.008*\"frequency\" + 0.008*\"synaptic\" + 0.008*\"map\" + 0.008*\"response\" + 0.007*\"cortical\"'),\n", + " '0.013*\"vector\" + 0.008*\"memory\" + 0.008*\"matrix\" + 0.007*\"hidden\" + 0.005*\"bit\" + 0.004*\"hopfield\" + 0.004*\"propagation\" + 0.004*\"recognition\" + 0.004*\"node\" + 0.004*\"machine\"'),\n", " (4,\n", - " '0.032*\"image\" + 0.023*\"classifier\" + 0.015*\"node\" + 0.012*\"fig\" + 0.012*\"field\" + 0.011*\"processor\" + 0.008*\"map\" + 0.007*\"region\" + 0.007*\"edge\" + 0.007*\"pixel\"'),\n", + " '0.008*\"image\" + 0.005*\"vector\" + 0.005*\"energy\" + 0.004*\"probability\" + 0.004*\"activation\" + 0.004*\"recognition\" + 0.004*\"node\" + 0.003*\"constraint\" + 0.003*\"element\" + 0.003*\"direction\"'),\n", " (5,\n", - " '0.012*\"delay\" + 0.008*\"theory\" + 0.007*\"vector\" + 0.007*\"attractor\" + 0.007*\"matrix\" + 0.007*\"stability\" + 0.006*\"role\" + 0.006*\"symmetric\" + 0.005*\"oscillation\" + 0.005*\"decision\"'),\n", + " 
'0.008*\"cell\" + 0.005*\"pulse\" + 0.004*\"response\" + 0.004*\"circuit\" + 0.004*\"human\" + 0.004*\"synaptic\" + 0.003*\"dynamic\" + 0.003*\"generalization\" + 0.003*\"fig\" + 0.003*\"probability\"'),\n", " (6,\n", - " '0.019*\"cell\" + 0.017*\"firing\" + 0.014*\"circuit\" + 0.014*\"response\" + 0.013*\"spike\" + 0.011*\"potential\" + 0.010*\"current\" + 0.008*\"stimulus\" + 0.008*\"fig\" + 0.008*\"synaptic\"'),\n", + " '0.016*\"classifier\" + 0.009*\"node\" + 0.007*\"cell\" + 0.006*\"fig\" + 0.006*\"noise\" + 0.005*\"vector\" + 0.005*\"propagation\" + 0.005*\"gaussian\" + 0.005*\"response\" + 0.005*\"region\"'),\n", " (7,\n", - " '0.013*\"chip\" + 0.012*\"synapse\" + 0.011*\"human\" + 0.011*\"region\" + 0.010*\"chain\" + 0.010*\"analog\" + 0.009*\"current\" + 0.008*\"voltage\" + 0.007*\"pulse\" + 0.007*\"gain\"'),\n", + " '0.015*\"hidden\" + 0.007*\"hidden_unit\" + 0.005*\"node\" + 0.005*\"image\" + 0.005*\"speech\" + 0.004*\"field\" + 0.004*\"test\" + 0.004*\"propagation\" + 0.003*\"memory\" + 0.003*\"activation\"'),\n", " (8,\n", - " '0.021*\"recognition\" + 0.017*\"speech\" + 0.012*\"hidden\" + 0.010*\"trained\" + 0.007*\"word\" + 0.007*\"frame\" + 0.007*\"experiment\" + 0.007*\"test\" + 0.005*\"hidden_layer\" + 0.005*\"class\"'),\n", + " '0.018*\"cell\" + 0.007*\"response\" + 0.007*\"stimulus\" + 0.007*\"activity\" + 0.007*\"memory\" + 0.006*\"circuit\" + 0.005*\"firing\" + 0.005*\"spike\" + 0.005*\"current\" + 0.005*\"threshold\"'),\n", " (9,\n", - " '0.025*\"hidden\" + 0.015*\"propagation\" + 0.014*\"hidden_unit\" + 0.012*\"back_propagation\" + 0.010*\"noise\" + 0.010*\"vector\" + 0.007*\"activation\" + 0.007*\"gradient\" + 0.006*\"generalization\" + 0.005*\"hidden_layer\"')]" + " '0.006*\"recognition\" + 0.006*\"vector\" + 0.006*\"node\" + 0.005*\"image\" + 0.005*\"object\" + 0.004*\"hidden\" + 0.004*\"matrix\" + 0.004*\"activity\" + 0.004*\"memory\" + 0.003*\"activation\"')]" ] }, - "execution_count": 377, + "execution_count": 153, "metadata": {}, 
"output_type": "execute_result" } @@ -580,7 +562,7 @@ }, { "cell_type": "code", - "execution_count": 378, + "execution_count": 154, "metadata": { "collapsed": false }, @@ -592,37 +574,55 @@ "\n", "Yaser S.Abu-Mostafa\n", "Docs: [62]\n", - "[(1, 0.022912313824938635),\n", - " (3, 0.13023641564906427),\n", - " (5, 0.053619476428563552),\n", - " (6, 0.74281673337282872),\n", - " (7, 0.039357296238215982)]\n", + "[(0, 0.063956335844106579),\n", + " (1, 0.053163242604314737),\n", + " (2, 0.073141834026729913),\n", + " (3, 0.15358065656122918),\n", + " (4, 0.2154289685648422),\n", + " (5, 0.086185792133343),\n", + " (6, 0.088650562757781395),\n", + " (7, 0.11268528663252109),\n", + " (8, 0.024718901373058849),\n", + " (9, 0.12848841950207313)]\n", "\n", "Geoffrey E. Hinton\n", "Docs: [143, 284, 230, 197]\n", - "[(0, 0.18261149058601064),\n", - " (1, 0.13054478554688737),\n", - " (2, 0.047327566056840936),\n", - " (4, 0.23227760420733609),\n", - " (5, 0.034549256573029284),\n", - " (6, 0.01533364796903508),\n", - " (7, 0.018779595429739036),\n", - " (8, 0.018536703478209651),\n", - " (9, 0.31657298712825083)]\n", + "[(0, 0.12453909470072276),\n", + " (1, 0.11653992080959714),\n", + " (2, 0.072371812887613782),\n", + " (3, 0.17649817519139802),\n", + " (4, 0.052740141953525575),\n", + " (5, 0.057163427236664309),\n", + " (6, 0.057154031534308335),\n", + " (7, 0.093765539006208445),\n", + " (8, 0.1191013272803598),\n", + " (9, 0.13012652939960184)]\n", "\n", "Michael I. 
Jordan\n", "Docs: [237]\n", - "[(0, 0.1538433724459583),\n", - " (2, 0.0152049788742559),\n", - " (3, 0.14170418712027841),\n", - " (4, 0.012409363037171063),\n", - " (6, 0.015302663997163653),\n", - " (7, 0.6227732950816125),\n", - " (8, 0.03443767276723967)]\n", + "[(0, 0.15777961962694384),\n", + " (1, 0.071385004095134874),\n", + " (2, 0.093955849768421695),\n", + " (3, 0.18282262156263937),\n", + " (4, 0.043744614390617158),\n", + " (5, 0.10643414059432785),\n", + " (6, 0.044197010836679408),\n", + " (7, 0.039139632462058517),\n", + " (8, 0.096934587152057369),\n", + " (9, 0.16360691951111986)]\n", "\n", "James M. Bower\n", "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(3, 0.046716420148960519), (6, 0.92386961466365525), (7, 0.023818236102952976)]\n" + "[(0, 0.17491220279789674),\n", + " (1, 0.18288880369257593),\n", + " (2, 0.065008482222265865),\n", + " (3, 0.052482897817207996),\n", + " (4, 0.078872244490342899),\n", + " (5, 0.13953722855507336),\n", + " (6, 0.034160198829966917),\n", + " (7, 0.081218060387068358),\n", + " (8, 0.075958004456140232),\n", + " (9, 0.11496187675146149)]\n" ] } ], @@ -650,7 +650,7 @@ }, { "cell_type": "code", - "execution_count": 305, + "execution_count": 177, "metadata": { "collapsed": false, "scrolled": true @@ -660,16 +660,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "-6.87227188261\n", - "-6.8076409676\n", - "-6.77662522591\n", - "-6.75300412768\n", - "-6.73424728384\n", - "-6.71889665571\n", - "-6.70604600927\n", - "-6.69509178841\n", - "-6.68561753933\n", - "-6.67732515051\n" + "-6.75334000877\n", + "-6.70998344096\n", + "-6.68882122239\n", + "-6.6720780584\n", + "-6.65790946046\n", + "-6.64568753857\n", + "-6.63508575556\n", + "-6.62586073078\n", + "-6.6178073486\n", + "-6.61074947975\n" ] } ], @@ -788,20 +788,107 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 83, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "corpus = [[(0,1)]]\n", - 
"MmCorpus.serialize('/tmp/corpus.mm', corpus)\n", - "corpus = MmCorpus('/tmp/corpus.mm')" + "from itertools import chain\n", + "from shutil import copyfile" ] }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 92, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "corpus = []\n", + "gensim.corpora.MmCorpus.serialize('/tmp/corpus.mm', corpus)\n", + "corpus = gensim.corpora.MmCorpus('/tmp/corpus.mm')" + ] + }, + { + "cell_type": "code", + "execution_count": 94, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "corpus2 = [[(10,1)]]\n", + "corpus_both = chain(corpus, corpus2)\n", + "copyfile('/tmp/corpus.mm', '/tmp/corpus.mm.tmp')\n", + "corpus.input = '/tmp/corpus.mm.tmp'\n", + "gensim.corpora.MmCorpus.serialize('/tmp/corpus.mm', corpus_both)\n", + "corpus = gensim.corpora.MmCorpus('/tmp/corpus.mm')" + ] + }, + { + "cell_type": "code", + "execution_count": 104, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def init_empty_corpus(serialized, serialization_path):\n", + " if serialized:\n", + " # Tnitialize the corpus as a serialized empty list.\n", + " # This corpus will be extended in self.update.\n", + " MmCorpus.serialize(serialization_path, []) # Serialize empty corpus.\n", + " corpus = MmCorpus(serialization_path) # Store serialized corpus object in self.corpus.\n", + " else:\n", + " # All input corpora are assumed to just be lists.\n", + " corpus = []\n", + " return corpus\n", + "\n", + "def extend_corpus(old, new, serialized, serialization_path):\n", + " if serialized:\n", + " # Re-serialize the entire corpus while appending the new documents.\n", + " corpus_chain = chain(old, new) # A generator with the old and new documents.\n", + " copyfile(serialization_path, serialization_path + '.tmp') # Make a temporary copy of the file where the corpus is serialized.\n", + " old.input = serialization_path + '.tmp' # Point the old corpus at this temporary file.\n", + " 
MmCorpus.serialize(serialization_path, corpus_chain) # Re-serialize the old corpus, and extend it with the new corpus.\n", + " old = MmCorpus(serialization_path) # Store the new serialized corpus object in self.corpus.\n", + " else:\n", + " # self.corpus and corpus are just lists, just extend the list.\n", + " # First check that corpus is actually a list.\n", + " assert isinstance(new, list), \"If serialized == False, all input corpora must be lists.\"\n", + " old.extend(new)\n", + " return old\n" + ] + }, + { + "cell_type": "code", + "execution_count": 105, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "serialized = True\n", + "serialization_path = '/tmp/corpus.mm'\n", + "corpus = init_empty_corpus(serialized, serialization_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 107, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "corpus = extend_corpus(corpus, [[(0,1)]], serialized, serialization_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, "metadata": { "collapsed": false }, @@ -809,16 +896,54 @@ { "data": { "text/plain": [ - "True" + "[(0, 1.0)]" ] }, - "execution_count": 46, + "execution_count": 108, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "type(corpus).__name__ == 'MmCorpus'" + "corpus[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "def corpus_gen(corpus):\n", + " for doc in corpus:\n", + " yield doc" + ] + }, + { + "cell_type": "code", + "execution_count": 163, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "corpus = [[(0,1), (10, 2)], [(1,10)]]\n", + "corpus = corpus_gen(corpus)\n", + "gensim.corpora.MmCorpus.serialize('/tmp/corpus.mm', corpus)\n", + "corpus = gensim.corpora.MmCorpus('/tmp/corpus.mm')" + ] + }, + { + "cell_type": "code", + "execution_count": 336, + "metadata": { + "collapsed": false + }, + "outputs": [], + 
"source": [ + "corpus.input = '/tmp/corpus_temp.mm'" ] }, { @@ -1025,7 +1150,7 @@ }, { "cell_type": "code", - "execution_count": 324, + "execution_count": 183, "metadata": { "collapsed": false }, @@ -1037,7 +1162,7 @@ }, { "cell_type": "code", - "execution_count": 325, + "execution_count": 184, "metadata": { "collapsed": false }, @@ -1046,8 +1171,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 5.94 s, sys: 44 ms, total: 5.99 s\n", - "Wall time: 5.99 s\n" + "CPU times: user 6.13 s, sys: 24 ms, total: 6.16 s\n", + "Wall time: 6.15 s\n" ] } ], @@ -1150,7 +1275,7 @@ }, { "cell_type": "code", - "execution_count": 58, + "execution_count": 89, "metadata": { "collapsed": false }, @@ -1164,7 +1289,7 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": 90, "metadata": { "collapsed": false }, @@ -1175,7 +1300,7 @@ "\n", "
\n", " \n", - " Loading BokehJS ...\n", + " Loading BokehJS ...\n", "
" ] }, @@ -1223,7 +1348,7 @@ "\n", " function display_loaded() {\n", " if (window.Bokeh !== undefined) {\n", - " Bokeh.$(\"#e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0\").text(\"BokehJS successfully loaded.\");\n", + " Bokeh.$(\"#87c57a20-05a4-4e89-a4b4-c2627090f39d\").text(\"BokehJS successfully loaded.\");\n", " } else if (Date.now() < window._bokeh_timeout) {\n", " setTimeout(display_loaded, 100)\n", " }\n", @@ -1265,9 +1390,9 @@ " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", " }\n", - " };var element = document.getElementById(\"e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0\");\n", + " };var element = document.getElementById(\"87c57a20-05a4-4e89-a4b4-c2627090f39d\");\n", " if (element == null) {\n", - " console.log(\"Bokeh: ERROR: autoload.js configured with elementid 'e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0' but no matching script tag was found. \")\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '87c57a20-05a4-4e89-a4b4-c2627090f39d' but no matching script tag was found. 
\")\n", " return false;\n", " }\n", "\n", @@ -1280,7 +1405,7 @@ " \n", " function(Bokeh) {\n", " \n", - " Bokeh.$(\"#e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0\").text(\"BokehJS is loading...\");\n", + " Bokeh.$(\"#87c57a20-05a4-4e89-a4b4-c2627090f39d\").text(\"BokehJS is loading...\");\n", " },\n", " function(Bokeh) {\n", " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", @@ -1303,7 +1428,7 @@ " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", " window._bokeh_failed_load = true;\n", " } else if (!force) {\n", - " var cell = $(\"#e8c53ef7-1db1-4f6d-8aac-8c90f6d131d0\").parents('.cell').data().cell;\n", + " var cell = $(\"#87c57a20-05a4-4e89-a4b4-c2627090f39d\").parents('.cell').data().cell;\n", " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", " }\n", "\n", @@ -1331,18 +1456,40 @@ }, { "cell_type": "code", - "execution_count": 60, + "execution_count": 101, "metadata": { - "collapsed": true + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-8.2268124131006175, -6.9537044366488798, -6.9219607839123887, -6.894414237250329, -6.8689542686648819, -6.8447258437974066, -6.8216878633917295, -6.8002809924812544, -6.7810529233013082, -6.7643492394144529]\n" + ] + } + ], + "source": [ + "print(model.perwordbound)" + ] + }, + { + "cell_type": "code", + "execution_count": 103, + "metadata": { + "collapsed": false }, "outputs": [], "source": [ - "from scipy.special import psi" + "sym_bound = [-8.2268124131006175, -7.0028312886986592, -6.9983353496896177, -6.9920334045252472, -6.9834462646550692, -6.9724080682148131, -6.959010673469697, -6.9436014751446384, -6.9267331557797345, -6.9090211704296633]\n", + "alpha_auto_bound = [-8.2268124131006175, -6.9932405146123067, -6.9832943275841428, -6.9727565892336711, -6.9609464524488551, -6.9476614401472379, -6.9330202055642625, -6.9173822631441642, -6.9012200919426148, -6.8849715273282976]\n", + 
"eta_auto = [-8.2268124131006175, -6.9632952107352324, -6.9370024957065883, -6.9136966964250792, -6.8914783308986083, -6.8695469093457477, -6.8478606164307889, -6.8268741286187042, -6.8072291640504838, -6.7894361810128938]\n", + "both_auto = [-8.2268124131006175, -6.9537044366488798, -6.9219607839123887, -6.894414237250329, -6.8689542686648819, -6.8447258437974066, -6.8216878633917295, -6.8002809924812544, -6.7810529233013082, -6.7643492394144529]" ] }, { "cell_type": "code", - "execution_count": 112, + "execution_count": 105, "metadata": { "collapsed": false }, @@ -1353,7 +1500,7 @@ "\n", "\n", "
\n", - "
\n", + "
\n", "
\n", "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "x = tsne.embedding_[:, 0]\n", + "y = tsne.embedding_[:, 1]\n", + "author_names = list(model.id2author.values())\n", + "\n", + "# Radius of each point corresponds to the number of documents attributed to that author.\n", + "scale = 0.1\n", + "radii = [len(author2doc[a]) * scale for a in author_names]\n", + "\n", + "source = ColumnDataSource(\n", + " data=dict(\n", + " x=x,\n", + " y=y,\n", + " author_names=author_names,\n", + " radii=radii,\n", + " )\n", + " )\n", + "\n", + "hover = HoverTool(\n", + " tooltips=[\n", + " (\"index\", \"$index\"),\n", + " (\"author\", \"@author_names\"),\n", + " (\"(x,y)\", \"($x, $y)\"),\n", + " (\"radius\", \"@radii\"),\n", + " ]\n", + " )\n", + "\n", + "p = figure(tools=[hover, 'crosshair,pan,wheel_zoom,box_zoom,reset,save,lasso_select'])\n", + "p.scatter('x', 'y', radius='radii', source=source, fill_alpha=0.6, line_color=None)\n", + "show(p)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.2" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} From ac9ecd4ec7ba4bfbe78ae92a7d5d3d5a746d8837 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lavur=20Mortensen?= Date: Wed, 28 Dec 2016 13:21:48 +0000 Subject: [PATCH 074/100] Updated notebooks (just to trigger rebuild). 
--- docs/notebooks/at_stackexchange.ipynb | 320 +++++++++++++++++--------- docs/notebooks/atmodel_tutorial.ipynb | 128 +++++++---- 2 files changed, 290 insertions(+), 158 deletions(-) diff --git a/docs/notebooks/at_stackexchange.ipynb b/docs/notebooks/at_stackexchange.ipynb index 03a3c24fda..4fe14c46a5 100644 --- a/docs/notebooks/at_stackexchange.ipynb +++ b/docs/notebooks/at_stackexchange.ipynb @@ -157,7 +157,51 @@ }, { "cell_type": "code", - "execution_count": 128, + "execution_count": 253, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "doc = nlp(post_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 255, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "token = doc[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 275, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Normal double-acting baking powder makes CO2 (thus giving a rising effect) in two ways: when it gets wet, and when it is heated. Baking soda only makes CO2 when it gets wet. From Wikipedia: The acid in a baking powder can be either fast-acting or slow-acting.[6] A fast-acting acid reacts in a wet mixture with baking soda at room temperature, and a slow-acting acid will not react until heated in an oven. Baking powders that contain both fast- and slow-acting acids are double acting; those that contain only one acid are single acting. By providing a second rise in the oven, double-acting baking powders increase the reliability of baked goods by rendering the time elapsed between mixing and baking less critical, and this is the type most widely available to consumers today.to consumers today. 
See: http://en.wikipedia.org/wiki/Baking_powder " + ] + }, + "execution_count": 275, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "doc" + ] + }, + { + "cell_type": "code", + "execution_count": 345, "metadata": { "collapsed": false }, @@ -177,7 +221,8 @@ "\n", " # Keep only words (no numbers, no punctuation).\n", " # Lemmatize tokens, remove punctuation and remove stopwords.\n", - " doc = [token.lemma_ for token in doc if token.is_alpha and not token.is_stop]\n", + " #doc = [token.lemma_ for token in doc if token.is_alpha and not token.is_stop]\n", + " doc = [token.lemma_ for token in doc if not token.is_stop]\n", " \n", " # Remove common words from a stopword list.\n", " #doc = [token for token in doc if token not in STOPWORDS]\n", @@ -191,7 +236,7 @@ }, { "cell_type": "code", - "execution_count": 129, + "execution_count": 346, "metadata": { "collapsed": false }, @@ -236,7 +281,7 @@ }, { "cell_type": "code", - "execution_count": 130, + "execution_count": 347, "metadata": { "collapsed": true }, @@ -257,7 +302,7 @@ }, { "cell_type": "code", - "execution_count": 131, + "execution_count": 348, "metadata": { "collapsed": false }, @@ -292,7 +337,7 @@ }, { "cell_type": "code", - "execution_count": 203, + "execution_count": 358, "metadata": { "collapsed": false }, @@ -306,9 +351,9 @@ "\n", "# Filter out words that occur too frequently or too rarely.\n", "# Disregarding stop words, this dataset has a very high number of low frequency words.\n", - "#max_freq = 0.5\n", - "#min_count = 5\n", - "#dictionary.filter_extremes(no_below=min_count, no_above=max_freq)\n", + "max_freq = 1.0 # No filtering.\n", + "min_count = 5\n", + "dictionary.filter_extremes(no_below=min_count, no_above=max_freq)\n", "\n", "dict0 = dictionary[0] # This sort of \"initializes\" dictionary.id2token.\n", "\n", @@ -325,7 +370,18 @@ }, { "cell_type": "code", - "execution_count": 204, + "execution_count": 359, + "metadata": { + "collapsed": false + }, + "outputs": [], + 
"source": [ + "tagname2postid = atmodel.construct_author2doc(corpus, postid2tagname)" + ] + }, + { + "cell_type": "code", + "execution_count": 360, "metadata": { "collapsed": true }, @@ -338,6 +394,9 @@ "train_postid2tagname = {i: postid2tagname[j] for i, j in enumerate(range(100, num_docs))}\n", "test_postid2tagname = {i: postid2tagname[j] for i, j in enumerate(range(100))}\n", "\n", + "train_tagname2postid = atmodel.construct_author2doc(train_corpus, train_postid2tagname)\n", + "test_tagname2postid = atmodel.construct_author2doc(test_corpus, test_postid2tagname)\n", + "\n", "train_tag_set = set()\n", "for d, tags in train_postid2tagname.items():\n", " for tag in tags:\n", @@ -360,7 +419,7 @@ }, { "cell_type": "code", - "execution_count": 205, + "execution_count": 361, "metadata": { "collapsed": false }, @@ -371,7 +430,7 @@ "text": [ "Train data dimensionality:\n", "Number of authors: 444 (462 in total)\n", - "Number of unique tokens: 17868\n", + "Number of unique tokens: 2764\n", "Number of documents: 900\n" ] } @@ -385,7 +444,7 @@ }, { "cell_type": "code", - "execution_count": 198, + "execution_count": 314, "metadata": { "collapsed": false }, @@ -397,7 +456,7 @@ }, { "cell_type": "code", - "execution_count": 199, + "execution_count": 315, "metadata": { "collapsed": true }, @@ -408,7 +467,7 @@ }, { "cell_type": "code", - "execution_count": 200, + "execution_count": 316, "metadata": { "collapsed": true }, @@ -419,7 +478,7 @@ }, { "cell_type": "code", - "execution_count": 201, + "execution_count": 317, "metadata": { "collapsed": false }, @@ -431,7 +490,7 @@ }, { "cell_type": "code", - "execution_count": 206, + "execution_count": 362, "metadata": { "collapsed": false }, @@ -440,8 +499,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 3min 12s, sys: 1min 35s, total: 4min 47s\n", - "Wall time: 3min 12s\n" + "CPU times: user 1min 36s, sys: 1min 4s, total: 2min 41s\n", + "Wall time: 1min 30s\n" ] } ], @@ -450,12 +509,38 @@ "%time model = 
AuthorTopicModel(corpus=train_corpus, num_topics=num_topics, id2word=dictionary.id2token, \\\n", " author2doc=None, doc2author=train_postid2tagname, var_lambda=None, \\\n", " chunksize=1000, passes=100, update_every=1, \\\n", - " alpha='auto', eta='auto', decay=0.5, offset=1.0, \\\n", - " eval_every=0, iterations=1, gamma_threshold=1e-10, \\\n", + " alpha='auto', eta='auto', decay=0.5, offset=1.0, \\\n", + " eval_every=0, iterations=1, gamma_threshold=1e-10, \\\n", " minimum_probability=0.01, random_state=0, ns_conf={},\\\n", " serialized=False, serialization_path='/tmp/model_serializer.mm')" ] }, + { + "cell_type": "code", + "execution_count": 234, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-13.4469052387\n" + ] + } + ], + "source": [ + "# Compute the per-word bound.\n", + "# Number of words in corpus.\n", + "corpus_words = sum(cnt for document in train_corpus for _, cnt in document)\n", + "\n", + "# Compute bound and divide by number of words.\n", + "perwordbound = model.bound(train_corpus, author2doc=train_tagname2postid, \\\n", + " doc2author=train_postid2tagname) / corpus_words\n", + "print(perwordbound)" + ] + }, { "cell_type": "code", "execution_count": 670, @@ -541,7 +626,7 @@ }, { "cell_type": "code", - "execution_count": 180, + "execution_count": 320, "metadata": { "collapsed": false }, @@ -552,27 +637,27 @@ "text": [ "baking\n", "#Docs: 74\n", - "[(11, 0.9997550615295645)]\n", + "[(3, 0.99969861353423572)]\n", "\n", "eggs\n", "#Docs: 38\n", - "[(11, 0.99401404541369576)]\n", + "[(10, 0.99962213699351399)]\n", "\n", "pasta\n", "#Docs: 19\n", - "[(18, 0.99907960744019053)]\n", + "[(8, 0.99891468476715684)]\n", "\n", "herbs\n", "#Docs: 13\n", - "[(18, 0.9965139331397066)]\n", + "[(7, 0.99709305343567833)]\n", "\n", "beef\n", "#Docs: 15\n", - "[(11, 0.99886254523459339)]\n", + "[(12, 0.99879414856070836)]\n", "\n", "salmon\n", "#Docs: 6\n", - "[(17, 0.99067945969954918)]\n" + 
"[(18, 0.99213976423946681)]\n" ] } ], @@ -633,7 +718,7 @@ }, { "cell_type": "code", - "execution_count": 183, + "execution_count": 304, "metadata": { "collapsed": true }, @@ -651,7 +736,7 @@ }, { "cell_type": "code", - "execution_count": 184, + "execution_count": 305, "metadata": { "collapsed": false }, @@ -662,7 +747,7 @@ }, { "cell_type": "code", - "execution_count": 186, + "execution_count": 306, "metadata": { "collapsed": true }, @@ -673,7 +758,7 @@ }, { "cell_type": "code", - "execution_count": 187, + "execution_count": 307, "metadata": { "collapsed": false }, @@ -699,47 +784,47 @@ " \n", " 290\n", " food-safety\n", - " 0.999047\n", + " 0.965386\n", " \n", " \n", - " 82\n", - " bread\n", - " 0.995433\n", + " 278\n", + " poaching\n", + " 0.916794\n", " \n", " \n", - " 420\n", - " grilling\n", - " 0.992176\n", + " 82\n", + " bread\n", + " 0.881186\n", " \n", " \n", - " 298\n", - " oil\n", - " 0.989559\n", + " 260\n", + " chili-peppers\n", + " 0.866564\n", " \n", " \n", - " 262\n", - " italian-cuisine\n", - " 0.988004\n", + " 85\n", + " allium\n", + " 0.859600\n", " \n", " \n", - " 410\n", - " beef\n", - " 0.987376\n", + " 173\n", + " dry-aging\n", + " 0.855882\n", " \n", " \n", " 240\n", " please-remove-this-tag\n", - " 0.983450\n", + " 0.843243\n", " \n", " \n", - " 172\n", - " refrigerator\n", - " 0.982212\n", + " 212\n", + " dairy-free\n", + " 0.840160\n", " \n", " \n", - " 283\n", - " cooking-time\n", - " 0.981675\n", + " 196\n", + " fats\n", + " 0.816782\n", " \n", " \n", "\n", @@ -748,18 +833,18 @@ "text/plain": [ " Tag Score\n", "231 baking 1.000000\n", - "290 food-safety 0.999047\n", - "82 bread 0.995433\n", - "420 grilling 0.992176\n", - "298 oil 0.989559\n", - "262 italian-cuisine 0.988004\n", - "410 beef 0.987376\n", - "240 please-remove-this-tag 0.983450\n", - "172 refrigerator 0.982212\n", - "283 cooking-time 0.981675" + "290 food-safety 0.965386\n", + "278 poaching 0.916794\n", + "82 bread 0.881186\n", + "260 chili-peppers 0.866564\n", + "85 
allium 0.859600\n", + "173 dry-aging 0.855882\n", + "240 please-remove-this-tag 0.843243\n", + "212 dairy-free 0.840160\n", + "196 fats 0.816782" ] }, - "execution_count": 187, + "execution_count": 307, "metadata": {}, "output_type": "execute_result" } @@ -774,7 +859,26 @@ }, { "cell_type": "code", - "execution_count": 188, + "execution_count": 308, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "#Docs: 1\n" + ] + } + ], + "source": [ + "print('#Docs:', len(model.author2doc['baking-powder']))" + ] + }, + { + "cell_type": "code", + "execution_count": 309, "metadata": { "collapsed": false }, @@ -793,80 +897,80 @@ " \n", " \n", " \n", - " 218\n", - " salmon\n", + " 202\n", + " baking-powder\n", " 1.000000\n", " \n", " \n", - " 184\n", - " sausages\n", - " 0.986118\n", + " 120\n", + " stews\n", + " 0.998336\n", " \n", " \n", - " 31\n", - " chinese-cuisine\n", - " 0.970935\n", + " 257\n", + " juice\n", + " 0.997528\n", " \n", " \n", - " 118\n", - " rice\n", - " 0.966995\n", + " 378\n", + " creme-brulee\n", + " 0.995559\n", " \n", " \n", - " 428\n", - " standards\n", - " 0.961706\n", + " 225\n", + " elderberries\n", + " 0.995318\n", " \n", " \n", - " 174\n", - " knife-skills\n", - " 0.958705\n", + " 154\n", + " measuring-scales\n", + " 0.995110\n", " \n", " \n", - " 346\n", - " fish\n", - " 0.952128\n", + " 394\n", + " sponge-cake\n", + " 0.992812\n", " \n", " \n", - " 225\n", - " elderberries\n", - " 0.940287\n", + " 339\n", + " asparagus\n", + " 0.992152\n", " \n", " \n", - " 12\n", - " frosting\n", - " 0.935332\n", + " 360\n", + " salad-dressing\n", + " 0.991331\n", " \n", " \n", - " 343\n", - " paella\n", - " 0.878331\n", + " 54\n", + " fire\n", + " 0.989778\n", " \n", " \n", "\n", "" ], "text/plain": [ - " Tag Score\n", - "218 salmon 1.000000\n", - "184 sausages 0.986118\n", - "31 chinese-cuisine 0.970935\n", - "118 rice 0.966995\n", - "428 standards 0.961706\n", - "174 knife-skills 
0.958705\n", - "346 fish 0.952128\n", - "225 elderberries 0.940287\n", - "12 frosting 0.935332\n", - "343 paella 0.878331" + " Tag Score\n", + "202 baking-powder 1.000000\n", + "120 stews 0.998336\n", + "257 juice 0.997528\n", + "378 creme-brulee 0.995559\n", + "225 elderberries 0.995318\n", + "154 measuring-scales 0.995110\n", + "394 sponge-cake 0.992812\n", + "339 asparagus 0.992152\n", + "360 salad-dressing 0.991331\n", + "54 fire 0.989778" ] }, - "execution_count": 188, + "execution_count": 309, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "sims = get_sims(model.get_author_topics('salmon', minimum_probability=0.0), tag_vecs)\n", + "sims = get_sims(model.get_author_topics('baking-powder', minimum_probability=0.0), tag_vecs)\n", "\n", "# Print the most similar tags.\n", "sims_df = pd.DataFrame([(id2tag[elem[0]], elem[1]) for elem in enumerate(sims)], columns=['Tag', 'Score'])\n", diff --git a/docs/notebooks/atmodel_tutorial.ipynb b/docs/notebooks/atmodel_tutorial.ipynb index 8671acdd9b..0236c49b02 100644 --- a/docs/notebooks/atmodel_tutorial.ipynb +++ b/docs/notebooks/atmodel_tutorial.ipynb @@ -6,9 +6,9 @@ "source": [ "# The author-topic model: LDA with metadata\n", "\n", - "In this tutorial, you will learn how to use the author-topic model in Gensim. First, we will apply it to get insight about authors and topics in a corpus consisting of scientific papers; secondly, we will apply the model to tags on questions about cooking on StackExchange, and implement a simple automatic tagging system.\n", + "In this tutorial, you will learn how to use the author-topic model in Gensim. First, we will apply it to a corpus consisting of scientific papers, to get insight about the authors of the papers. After that, we will apply the model on StackExchange posts with tags, and implement a simple automatic tagging system.\n", "\n", - "The author-topic model is in extension of Latent Dirichlet Allocation (LDA). 
Each document is associated with a set of authors, and the topic distribution for each of these authors are learned. Each author is also associated with multiple documents. To learn about the theoretical side of the author-topic model, see [Rosen-Zvi and co-authors](https://mimno.infosci.cornell.edu/info6150/readings/398.pdf), for example.\n", + "The author-topic model is in extension of Latent Dirichlet Allocation (LDA). Each document is associated with a set of authors, and the topic distributions for each of these authors are learned. Each author is also associated with multiple documents. To learn about the theoretical side of the author-topic model, see [Rosen-Zvi and co-authors](https://mimno.infosci.cornell.edu/info6150/readings/398.pdf), for example.\n", "\n", "Naturally, familiarity with topic modelling, LDA and Gensim is assumed in this tutorial. If you are not familiar with either LDA, or its Gensim implementation, consider some of these resources:\n", "* Gentle introduction to the LDA model: http://blog.echen.me/2011/08/22/introduction-to-latent-dirichlet-allocation/\n", @@ -17,13 +17,13 @@ "* Pre-processing and training LDA: https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb\n", "\n", "\n", - "In part 1, we will illustrate basic usage of the model, and explore the resulting representation a bit. How to load and pre-process the dataset used is also covered.\n", + "In part 1 of this tutorial, we will illustrate basic usage of the model, and explore the resulting representation. How to load and pre-process the dataset used is also covered.\n", "\n", "In part 2, we will develop a simple automatic tagging system, and some more of the model's functionality will be shown.\n", "\n", "## Part 1: analyzing scientific papers\n", "\n", - "The data used in part 1 consists of scientific papers about machine learning, from the Neural Information Processing Systems conference. 
It is the same dataset used in the [Pre-processing and training LDA](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb) tutorial, mentioned earlier.\n", + "The data used in part 1 consists of scientific papers about machine learning, from the Neural Information Processing Systems conference (NIPS). It is the same dataset used in the [Pre-processing and training LDA](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb) tutorial, mentioned earlier.\n", "\n", "You can download the data from Sam Roweis' website (http://www.cs.nyu.edu/~roweis/data.html).\n", "\n", @@ -371,9 +371,17 @@ " eval_every=0, iterations=1, random_state=1)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Before we explore the model, let's try to improve upon it. To do this, we will train several models with different random initializations, by giving different seeds for the random number generator (`random_state`). 
We evaluate the topic coherence of the model using the [top_topics](https://radimrehurek.com/gensim/models/ldamodel.html#gensim.models.ldamodel.LdaModel.top_topics) method, and pick the model with the highest topic coherence.\n", + "\n" + ] + }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 73, "metadata": { "collapsed": false }, @@ -382,8 +390,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 14min 22s, sys: 2min 12s, total: 16min 34s\n", - "Wall time: 14min 9s\n" + "CPU times: user 14min 30s, sys: 1min 18s, total: 15min 49s\n", + "Wall time: 14min 43s\n" ] } ], @@ -392,22 +400,38 @@ "model_list = []\n", "for i in range(5):\n", " model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", - " author2doc=author2doc, chunksize=2000, passes=100, alpha='auto', eta='auto', \\\n", + " author2doc=author2doc, chunksize=2000, passes=100, gamma_threshold=1e-10, \\\n", " eval_every=0, iterations=1, random_state=i)\n", " top_topics = model.top_topics(corpus)\n", " tc = sum([t[1] for t in top_topics])\n", " model_list.append((model, tc))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Choose the model with the highest topic coherence." 
+ ] + }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 66, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Topic coherence: -1.766e+03\n" + ] + } + ], "source": [ - "model, tc = max(model_list, key=lambda x: x[1])" + "model, tc = max(model_list, key=lambda x: x[1])\n", + "print('Topic coherence: %.3e' %tc)" ] }, { @@ -419,7 +443,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 67, "metadata": { "collapsed": false }, @@ -428,28 +452,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.014*\"recognition\" + 0.013*\"class\" + 0.013*\"classifier\" + 0.013*\"speech\" + 0.011*\"word\" + 0.010*\"classification\" + 0.007*\"layer\" + 0.006*\"character\" + 0.005*\"training_set\" + 0.005*\"context\"'),\n", + " '0.009*\"control\" + 0.007*\"memory\" + 0.006*\"prediction\" + 0.006*\"table\" + 0.006*\"signal\" + 0.005*\"search\" + 0.005*\"controller\" + 0.005*\"system\" + 0.004*\"user\" + 0.004*\"run\"'),\n", " (1,\n", - " '0.013*\"image\" + 0.010*\"object\" + 0.009*\"visual\" + 0.009*\"component\" + 0.008*\"layer\" + 0.008*\"signal\" + 0.008*\"representation\" + 0.007*\"field\" + 0.006*\"direction\" + 0.006*\"motion\"'),\n", + " '0.013*\"neuron\" + 0.010*\"threshold\" + 0.009*\"f\" + 0.008*\"let\" + 0.008*\"theorem\" + 0.007*\"bound\" + 0.007*\"class\" + 0.007*\"node\" + 0.007*\"p\" + 0.006*\"layer\"'),\n", " (2,\n", - " '0.008*\"gaussian\" + 0.007*\"w\" + 0.007*\"density\" + 0.007*\"noise\" + 0.006*\"approximation\" + 0.006*\"likelihood\" + 0.006*\"matrix\" + 0.006*\"mixture\" + 0.005*\"prior\" + 0.005*\"y\"'),\n", + " '0.009*\"w\" + 0.008*\"matrix\" + 0.007*\"noise\" + 0.007*\"approximation\" + 0.007*\"gaussian\" + 0.006*\"density\" + 0.005*\"optimal\" + 0.005*\"generalization\" + 0.005*\"sample\" + 0.005*\"y\"'),\n", " (3,\n", - " '0.021*\"neuron\" + 0.013*\"threshold\" + 0.008*\"memory\" + 0.007*\"layer\" + 0.007*\"circuit\" + 
0.006*\"connection\" + 0.006*\"synapse\" + 0.006*\"cell\" + 0.006*\"activation\" + 0.006*\"bound\"'),\n", + " '0.013*\"image\" + 0.009*\"distance\" + 0.008*\"cluster\" + 0.006*\"trajectory\" + 0.005*\"transformation\" + 0.005*\"object\" + 0.005*\"solution\" + 0.005*\"matrix\" + 0.005*\"dynamic\" + 0.004*\"inverse\"'),\n", " (4,\n", - " '0.010*\"sequence\" + 0.007*\"p\" + 0.007*\"f\" + 0.006*\"memory\" + 0.006*\"rule\" + 0.006*\"representation\" + 0.005*\"node\" + 0.005*\"let\" + 0.005*\"matrix\" + 0.004*\"language\"'),\n", + " '0.014*\"action\" + 0.011*\"control\" + 0.010*\"policy\" + 0.009*\"q\" + 0.009*\"reinforcement\" + 0.008*\"optimal\" + 0.006*\"dynamic\" + 0.005*\"robot\" + 0.005*\"environment\" + 0.005*\"reward\"'),\n", " (5,\n", - " '0.011*\"action\" + 0.010*\"optimal\" + 0.010*\"policy\" + 0.008*\"q\" + 0.006*\"reinforcement\" + 0.005*\"decision\" + 0.005*\"cost\" + 0.004*\"rule\" + 0.004*\"control\" + 0.004*\"reward\"'),\n", + " '0.015*\"representation\" + 0.012*\"layer\" + 0.011*\"image\" + 0.009*\"object\" + 0.008*\"component\" + 0.006*\"face\" + 0.006*\"map\" + 0.006*\"signal\" + 0.005*\"code\" + 0.005*\"activity\"'),\n", " (6,\n", - " '0.008*\"layer\" + 0.008*\"net\" + 0.007*\"chip\" + 0.007*\"hidden\" + 0.007*\"architecture\" + 0.006*\"signal\" + 0.006*\"hidden_unit\" + 0.005*\"representation\" + 0.005*\"noise\" + 0.005*\"propagation\"'),\n", + " '0.013*\"speech\" + 0.012*\"classifier\" + 0.012*\"class\" + 0.010*\"recognition\" + 0.009*\"mixture\" + 0.009*\"classification\" + 0.007*\"tree\" + 0.007*\"likelihood\" + 0.006*\"node\" + 0.006*\"sample\"'),\n", " (7,\n", - " '0.024*\"image\" + 0.010*\"distance\" + 0.007*\"cluster\" + 0.006*\"object\" + 0.005*\"constraint\" + 0.005*\"pixel\" + 0.005*\"recognition\" + 0.004*\"graph\" + 0.004*\"transformation\" + 0.004*\"surface\"'),\n", + " '0.024*\"neuron\" + 0.021*\"cell\" + 0.010*\"response\" + 0.009*\"spike\" + 0.009*\"stimulus\" + 0.008*\"activity\" + 0.008*\"synaptic\" + 0.006*\"signal\" + 
0.006*\"frequency\" + 0.006*\"cortex\"'),\n", " (8,\n", - " '0.019*\"neuron\" + 0.019*\"cell\" + 0.010*\"response\" + 0.009*\"spike\" + 0.008*\"stimulus\" + 0.007*\"activity\" + 0.007*\"frequency\" + 0.007*\"signal\" + 0.007*\"synaptic\" + 0.006*\"visual\"'),\n", + " '0.019*\"image\" + 0.011*\"chip\" + 0.010*\"motion\" + 0.010*\"circuit\" + 0.010*\"field\" + 0.008*\"analog\" + 0.008*\"direction\" + 0.007*\"visual\" + 0.007*\"map\" + 0.007*\"object\"'),\n", " (9,\n", - " '0.024*\"control\" + 0.012*\"dynamic\" + 0.010*\"trajectory\" + 0.009*\"motor\" + 0.007*\"controller\" + 0.007*\"movement\" + 0.007*\"robot\" + 0.006*\"position\" + 0.005*\"forward\" + 0.005*\"system\"')]" + " '0.010*\"net\" + 0.009*\"recognition\" + 0.009*\"word\" + 0.009*\"hidden\" + 0.008*\"architecture\" + 0.008*\"character\" + 0.007*\"recurrent\" + 0.007*\"layer\" + 0.007*\"rule\" + 0.007*\"hidden_unit\"')]" ] }, - "execution_count": 29, + "execution_count": 67, "metadata": {}, "output_type": "execute_result" } @@ -473,7 +497,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 68, "metadata": { "collapsed": false }, @@ -486,25 +510,22 @@ "YannLeCun\n", "Docs: [143, 406, 370, 495, 456, 449, 595, 616, 760, 752, 1532]\n", "Topics:\n", - "[(0, 0.55690462184283596),\n", - " (6, 0.10083730596510701),\n", - " (7, 0.25652573327449635),\n", - " (9, 0.078144169477103245)]\n", + "[(3, 0.29943682408405564), (9, 0.70035037360056807)]\n", "\n", "GeoffreyE.Hinton\n", "Docs: [56, 143, 284, 230, 197, 462, 463, 430, 688, 784, 826, 848, 869, 1387, 1684, 1728]\n", "Topics:\n", - "[(1, 0.84433690567662456), (2, 0.15547549123537502)]\n", + "[(4, 0.07225384180855414), (5, 0.92764230357402855)]\n", "\n", "TerrenceJ.Sejnowski\n", "Docs: [513, 530, 539, 468, 611, 581, 600, 594, 703, 711, 849, 981, 944, 865, 850, 883, 881, 1221, 1137, 1224, 1146, 1282, 1248, 1179, 1424, 1359, 1528, 1484, 1571, 1727, 1732]\n", "Topics:\n", - "[(1, 0.9998644049926757)]\n", + "[(5, 0.86190832291064989), (7, 
0.13802575466031855)]\n", "\n", "JamesM.Bower\n", "Docs: [17, 48, 58, 131, 101, 126, 127, 281, 208, 225]\n", "Topics:\n", - "[(8, 0.97693607253656878), (9, 0.01880923677106713)]\n" + "[(7, 0.99980671969007273)]\n" ] } ], @@ -626,7 +647,7 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": 70, "metadata": { "collapsed": true }, @@ -639,7 +660,7 @@ }, { "cell_type": "code", - "execution_count": 93, + "execution_count": 71, "metadata": { "collapsed": false, "scrolled": true @@ -651,7 +672,7 @@ "\n", "
\n", " \n", - " Loading BokehJS ...\n", + " Loading BokehJS ...\n", "
" ] }, @@ -699,7 +720,7 @@ "\n", " function display_loaded() {\n", " if (window.Bokeh !== undefined) {\n", - " Bokeh.$(\"#6102b01d-ca55-4245-960a-82f5a01ce86d\").text(\"BokehJS successfully loaded.\");\n", + " Bokeh.$(\"#412cdc4a-952a-4aad-a8e6-0d41a2082466\").text(\"BokehJS successfully loaded.\");\n", " } else if (Date.now() < window._bokeh_timeout) {\n", " setTimeout(display_loaded, 100)\n", " }\n", @@ -741,9 +762,9 @@ " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", " }\n", - " };var element = document.getElementById(\"6102b01d-ca55-4245-960a-82f5a01ce86d\");\n", + " };var element = document.getElementById(\"412cdc4a-952a-4aad-a8e6-0d41a2082466\");\n", " if (element == null) {\n", - " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '6102b01d-ca55-4245-960a-82f5a01ce86d' but no matching script tag was found. \")\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '412cdc4a-952a-4aad-a8e6-0d41a2082466' but no matching script tag was found. 
\")\n", " return false;\n", " }\n", "\n", @@ -756,7 +777,7 @@ " \n", " function(Bokeh) {\n", " \n", - " Bokeh.$(\"#6102b01d-ca55-4245-960a-82f5a01ce86d\").text(\"BokehJS is loading...\");\n", + " Bokeh.$(\"#412cdc4a-952a-4aad-a8e6-0d41a2082466\").text(\"BokehJS is loading...\");\n", " },\n", " function(Bokeh) {\n", " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", @@ -779,7 +800,7 @@ " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", " window._bokeh_failed_load = true;\n", " } else if (!force) {\n", - " var cell = $(\"#6102b01d-ca55-4245-960a-82f5a01ce86d\").parents('.cell').data().cell;\n", + " var cell = $(\"#412cdc4a-952a-4aad-a8e6-0d41a2082466\").parents('.cell').data().cell;\n", " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", " }\n", "\n", @@ -813,12 +834,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "If you are unable to view the plot below, it is available [here]() (**TODO:** make a page for the plot, and include the link), or view the entire notebook [here]() (**TODO:** make nvbiewer page for the notebook or something)." + "If you are unable to view or interact with the plot below, it is available [here]() (**TODO:** make a page for the plot, and include the link), or view the entire notebook [here]() (**TODO:** make nvbiewer page for the notebook or something)." ] }, { "cell_type": "code", - "execution_count": 94, + "execution_count": 72, "metadata": { "collapsed": false }, @@ -829,7 +850,7 @@ "\n", "\n", "
\n", - "
\n", + "
\n", "
\n", "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "iterations = range(10)\n", - "\n", - "p1 = figure(title='Variational lower bound', x_axis_label='Iterations', y_axis_label='Per word bound')\n", - "s1 = p1.line(iterations, sym_bound, color='red')\n", - "p1.circle(iterations, sym_bound, color='red')\n", - "s2 = p1.line(iterations, alpha_auto_bound, color='blue')\n", - "p1.circle(iterations, alpha_auto_bound, color='blue')\n", - "s3 = p1.line(iterations, eta_auto, color='green')\n", - "p1.circle(iterations, eta_auto, color='green')\n", - "s4 = p1.line(iterations, both_auto, color='black')\n", - "p1.circle(iterations, both_auto, color='black')\n", - "legend = Legend(items=[\n", - " ('both sym', [s1]),\n", - " ('alpha auto', [s2]),\n", - " ('eta auto', [s3]),\n", - " ('both auto', [s4]),\n", - " ], location=(-150.0, -200.0))\n", - "p1.add_layout(legend, 'right')\n", - "p1.plot_height=400\n", - "p1.plot_width=600\n", - "p1.toolbar_location = None\n", - "\n", - "show(p1)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": false - }, - "source": [ - "## Line profiling" - ] - }, - { - "cell_type": "code", - "execution_count": 198, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(atmodel)\n", - "AuthorTopicModel = atmodel.AuthorTopicModel" - ] - }, - { - "cell_type": "code", - "execution_count": 199, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Timer unit: 1e-06 s\n", - "\n", - "Total time: 728.228 s\n", - "File: /home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/atmodel.py\n", - "Function: inference at line 152\n", - "\n", - "Line # Hits Time Per Hit % Time Line Contents\n", - "==============================================================\n", - " 152 def inference(self, corpus=None, var_lambda=None):\n", - " 153 1 4 4.0 0.0 if corpus is None:\n", - " 154 # TODO: is copy 
necessary here?\n", - " 155 corpus = self.corpus.copy()\n", - " 156 \n", - " 157 1 5 5.0 0.0 self.num_docs = len(corpus) # TODO: this needs to be different if the algorithm is truly online.\n", - " 158 \n", - " 159 1 355 355.0 0.0 logger.info('Starting inference. Training on %d documents.', len(corpus))\n", - " 160 \n", - " 161 1 3 3.0 0.0 vectorized = False # FIXME: set to True.\n", - " 162 1 3 3.0 0.0 numstable_sm = False # FIXME: set to True.\n", - " 163 \n", - " 164 1 2 2.0 0.0 if not numstable_sm:\n", - " 165 1 4 4.0 0.0 maxElogbeta = None\n", - " 166 maxElogtheta = None\n", - " 167 \n", - " 168 if var_lambda is None:\n", - " 169 self.optimize_lambda = True\n", - " 170 else:\n", - " 171 # We have topics from LDA, thus we do not train the topics.\n", - " 172 1 4 4.0 0.0 self.optimize_lambda = False\n", - " 173 1 3120 3120.0 0.0 \n", - " 174 1 49 49.0 0.0 # Initial values of gamma and lambda.\n", - " 175 1 14 14.0 0.0 # Parameters of gamma distribution same as in `ldamodel`.\n", - " 176 var_gamma = self.random_state.gamma(100., 1. / 100.,\n", - " 177 1 4 4.0 0.0 (self.num_authors, self.num_topics))\n", - " 178 1 5 5.0 0.0 tilde_gamma = var_gamma.copy()\n", - " 179 1 11563 11563.0 0.0 self.var_gamma = var_gamma\n", - " 180 1 141 141.0 0.0 \n", - " 181 if var_lambda is None:\n", - " 182 var_lambda = self.random_state.gamma(100., 1. 
/ 100.,\n", - " 183 (self.num_topics, self.num_terms))\n", - " 184 tilde_lambda = var_lambda.copy()\n", - " 185 else:\n", - " 186 1 10 10.0 0.0 self.norm_lambda = var_lambda.copy()\n", - " 187 for k in xrange(self.num_topics):\n", - " 188 1 8 8.0 0.0 self.norm_lambda[k, :] = var_lambda[k, :] / var_lambda.sum(axis=1)[k]\n", - " 189 \n", - " 190 self.var_lambda = var_lambda\n", - " 191 1 370334 370334.0 0.1 \n", - " 192 1 1157125 1157125.0 0.2 var_phi = dict() # TODO: remove once non-vectorized code is not used anymore.\n", - " 193 1 4 4.0 0.0 \n", - " 194 # Initialize dirichlet expectations.\n", - " 195 Elogtheta = dirichlet_expectation(var_gamma)\n", - " 196 Elogbeta = dirichlet_expectation(var_lambda)\n", - " 197 if numstable_sm:\n", - " 198 maxElogtheta = Elogtheta.max()\n", - " 199 1 551 551.0 0.0 maxElogbeta = Elogbeta.max(axis=0)\n", - " 200 1 1720 1720.0 0.0 expElogtheta = numpy.exp(Elogtheta - maxElogtheta)\n", - " 201 expElogbeta = numpy.exp(Elogbeta - maxElogbeta)\n", - " 202 1 3 3.0 0.0 else:\n", - " 203 expElogtheta = numpy.exp(Elogtheta)\n", - " 204 expElogbeta = numpy.exp(Elogbeta)\n", - " 205 \n", - " 206 if self.eval_every > 0:\n", - " 207 word_bound = self.word_bound(corpus, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", - " 208 2 10 5.0 0.0 theta_bound = self.theta_bound(Elogtheta)\n", - " 209 1 2 2.0 0.0 beta_bound = self.beta_bound(Elogbeta)\n", - " 210 1741 6426 3.7 0.0 bound = word_bound + theta_bound + beta_bound\n", - " 211 logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound)\n", - " 212 for _pass in xrange(self.passes):\n", - " 213 1740 21268 12.2 0.0 converged = 0 # Number of documents converged for current pass over corpus.\n", - " 214 1740 359952 206.9 0.0 for d, doc in enumerate(corpus):\n", - " 215 1740 213240 122.6 0.0 # TODO: a smarter of computing rho may be necessary. 
In ldamodel,\n", - " 216 1740 5804 3.3 0.0 # it's: pow(offset + pass_ + (self.num_updates / chunksize), -decay).\n", - " 217 rhot = self.rho(d + _pass)\n", - " 218 1740 3509 2.0 0.0 ids = numpy.array([id for id, _ in doc]) # Word IDs in doc.\n", - " 219 cts = numpy.array([cnt for _, cnt in doc]) # Word counts.\n", - " 220 authors_d = self.doc2author[d] # List of author IDs for document d.\n", - " 221 \n", - " 222 if vectorized:\n", - " 223 phinorm = self.compute_phinorm(ids, authors_d, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", - " 224 1740 2172516 1248.6 0.3 else:\n", - " 225 var_phi = dict()\n", - " 226 \n", - " 227 1740 47495 27.3 0.0 # TODO: if not used, get rid of these.\n", - " 228 1740 106401 61.1 0.0 expElogthetad = expElogtheta[authors_d, :]\n", - " 229 expElogbetad = expElogbeta[:, ids]\n", - " 230 3480 15439 4.4 0.0 \n", - " 231 for iteration in xrange(self.iterations):\n", - " 232 #logger.info('iteration %i', iteration)\n", - " 233 1740 16311 9.4 0.0 \n", - " 234 lastgamma = tilde_gamma[authors_d, :]\n", - " 235 \n", - " 236 1740 4061 2.3 0.0 ## Update phi.\n", - " 237 953484 1988572 2.1 0.3 if not vectorized:\n", - " 238 951744 1886620 2.0 0.3 for v in ids:\n", - " 239 3141324 6424372 2.0 0.9 phi_sum = 0.0\n", - " 240 24085380 49026745 2.0 6.7 for a in authors_d:\n", - " 241 21895800 59556256 2.7 8.2 for k in xrange(self.num_topics):\n", - " 242 21895800 53252861 2.4 7.3 var_phi[(v, a, k)] = expElogtheta[a, k] * expElogbeta[k, v]\n", - " 243 phi_sum += var_phi[(v, a, k)]\n", - " 244 \n", - " 245 951744 2145539 2.3 0.3 # Normalize phi over k.\n", - " 246 3141324 6397555 2.0 0.9 phi_norm_const = 1.0 / (phi_sum + 1e-100)\n", - " 247 24085380 48475653 2.0 6.7 for a in authors_d:\n", - " 248 21895800 52318586 2.4 7.2 for k in xrange(self.num_topics):\n", - " 249 var_phi[(v, a, k)] *= phi_norm_const\n", - " 250 5731 13183 2.3 0.0 \n", - " 251 43901 99066 2.3 0.0 for a in authors_d:\n", - " 252 39910 97817 2.5 0.0 for k in 
xrange(self.num_topics):\n", - " 253 21935710 46111162 2.1 6.3 tilde_gamma[a, k] = 0.0\n", - " 254 21895800 96645068 4.4 13.3 for vi, v in enumerate(ids):\n", - " 255 39910 176150 4.4 0.0 tilde_gamma[a, k] += cts[vi] * var_phi[(v, a, k)]\n", - " 256 39910 111018 2.8 0.0 tilde_gamma[a, k] *= len(self.author2doc[a])\n", - " 257 tilde_gamma[a, k] += self.alpha[k]\n", - " 258 else:\n", - " 259 # Update gamma.\n", - " 260 for a in authors_d:\n", - " 261 tilde_gamma[a, :] = self.alpha + len(self.author2doc[a]) * expElogtheta[a, :] * numpy.dot(cts / phinorm, expElogbetad.T)\n", - " 262 \n", - " 263 # Update gamma and lambda.\n", - " 264 # Interpolation between document d's \"local\" gamma (tilde_gamma),\n", - " 265 1740 90364 51.9 0.0 # and \"global\" gamma (var_gamma). Same goes for lambda.\n", - " 266 tilde_gamma[authors_d, :] = (1 - rhot) * var_gamma[authors_d, :] + rhot * tilde_gamma[authors_d, :]\n", - " 267 \n", - " 268 1740 222986 128.2 0.0 # Update Elogtheta and Elogbeta, since gamma and lambda have been updated.\n", - " 269 1740 4349 2.5 0.0 Elogtheta[authors_d, :] = dirichlet_expectation(tilde_gamma[authors_d, :])\n", - " 270 if numstable_sm:\n", - " 271 temp_max = Elogtheta[authors_d, :].max()\n", - " 272 maxElogtheta = temp_max if temp_max > maxElogtheta else maxElogtheta\n", - " 273 expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :] - maxElogtheta)\n", - " 274 1740 26103 15.0 0.0 else:\n", - " 275 expElogtheta[authors_d, :] = numpy.exp(Elogtheta[authors_d, :])\n", - " 276 1740 3716 2.1 0.0 \n", - " 277 if vectorized:\n", - " 278 phinorm = self.compute_phinorm(ids, authors_d, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", - " 279 \n", - " 280 # Check for convergence.\n", - " 281 1740 3772 2.2 0.0 # Criterion is mean change in \"local\" gamma and lambda.\n", - " 282 if iteration > 0:\n", - " 283 meanchange_gamma = numpy.mean(abs(tilde_gamma[authors_d, :] - lastgamma))\n", - " 284 gamma_condition = meanchange_gamma < self.threshold\n", - " 285 # 
logger.info('Mean change in gamma: %.3e', meanchange_gamma)\n", - " 286 if gamma_condition:\n", - " 287 # logger.info('Converged after %d iterations.', iteration)\n", - " 288 converged += 1\n", - " 289 break\n", - " 290 # End of iterations loop.\n", - " 291 \n", - " 292 1740 62078 35.7 0.0 # FIXME: there are too many different gamma variables!\n", - " 293 var_gamma = tilde_gamma.copy()\n", - " 294 1740 4404 2.5 0.0 \n", - " 295 if self.optimize_lambda:\n", - " 296 # Update lambda.\n", - " 297 # only one update per document.\n", - " 298 1740 3590 2.1 0.0 \n", - " 299 if vectorized:\n", - " 300 # NOTE: probably not much speed-up is gained here. Consider\n", - " 301 # whether it can be done better.\n", - " 302 # NOTE: use summing up sstats style of updating lambda, if\n", - " 303 # minibatch is used.\n", - " 304 expElogtheta_sum_a = expElogtheta[authors_d, :].sum(axis=0)\n", - " 305 sstats = numpy.outer(expElogtheta_sum_a.T, cts/phinorm)\n", - " 306 sstats *= expElogbeta[:, ids]\n", - " 307 eta_rep = numpy.tile(self.eta[ids], [self.num_topics, 1])\n", - " 308 tilde_lambda[:, ids] = eta_rep + self.num_docs * sstats\n", - " 309 19140 47104 2.5 0.0 else:\n", - " 310 9534840 21285620 2.2 2.9 for k in xrange(self.num_topics):\n", - " 311 9517440 20443355 2.1 2.8 for vi, v in enumerate(ids):\n", - " 312 9517440 19164083 2.0 2.6 cnt = cts[vi]\n", - " 313 31413240 66466759 2.1 9.1 phi_sum = 0.0\n", - " 314 21895800 55718359 2.5 7.7 for a in authors_d:\n", - " 315 9517440 43921370 4.6 6.0 phi_sum += var_phi[(v, a, k)]\n", - " 316 tilde_lambda[k, v] = self.eta[v] + self.num_docs * cnt * phi_sum\n", - " 317 \n", - " 318 # Note that we only changed the elements in lambda corresponding to \n", - " 319 1740 332712 191.2 0.0 # the words in document d, hence the [:, ids] indexing.\n", - " 320 1740 67830245 38982.9 9.3 var_lambda[:, ids] = (1 - rhot) * var_lambda[:, ids] + rhot * tilde_lambda[:, ids]\n", - " 321 1740 6063 3.5 0.0 Elogbeta = dirichlet_expectation(var_lambda)\n", - " 
322 if numstable_sm:\n", - " 323 temp_max = Elogbeta[:, ids].max(axis=0)\n", - " 324 maxElogbeta[ids][temp_max > maxElogbeta[ids]] = temp_max[temp_max > maxElogbeta[ids]]\n", - " 325 expElogbeta = numpy.exp(Elogbeta - maxElogbeta)\n", - " 326 1740 3189258 1832.9 0.4 else:\n", - " 327 1740 157876 90.7 0.0 expElogbeta = numpy.exp(Elogbeta)\n", - " 328 var_lambda = var_lambda.copy()\n", - " 329 \n", - " 330 # Print topics:\n", - " 331 # pprint(self.show_topics())\n", - " 332 # End of corpus loop.\n", - " 333 \n", - " 334 1 5 5.0 0.0 \n", - " 335 if self.eval_every > 0 and (_pass + 1) % self.eval_every == 0:\n", - " 336 self.var_gamma = var_gamma\n", - " 337 self.var_lambda = var_lambda\n", - " 338 prev_bound = bound\n", - " 339 word_bound = self.word_bound(corpus, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", - " 340 theta_bound = self.theta_bound(Elogtheta)\n", - " 341 beta_bound = self.beta_bound(Elogbeta)\n", - " 342 bound = word_bound + theta_bound + beta_bound\n", - " 343 logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound)\n", - " 344 # NOTE: bound can be computed as below. 
We compute each term for now because it can be useful for debugging.\n", - " 345 \n", - " 346 #logger.info('Converged documents: %d/%d', converged, self.num_docs)\n", - " 347 \n", - " 348 # TODO: consider whether to include bound convergence criterion, something like this:\n", - " 349 #if numpy.abs(bound - prev_bound) / abs(prev_bound) < self.bound_threshold:\n", - " 350 # break\n", - " 351 # End of pass over corpus loop.\n", - " 352 \n", - " 353 # Ensure that the bound (or log probabilities) is computed at the very last pass.\n", - " 354 1 4 4.0 0.0 if self.eval_every > 0 and not (_pass + 1) % self.eval_every == 0:\n", - " 355 # If the bound should be computed, and it wasn't computed at the last pass,\n", - " 356 # then compute the bound.\n", - " 357 self.var_gamma = var_gamma\n", - " 358 self.var_lambda = var_lambda\n", - " 359 prev_bound = bound\n", - " 360 word_bound = self.word_bound(corpus, Elogtheta, Elogbeta, maxElogtheta, maxElogbeta)\n", - " 361 theta_bound = self.theta_bound(Elogtheta)\n", - " 362 beta_bound = self.beta_bound(Elogbeta)\n", - " 363 bound = word_bound + theta_bound + beta_bound\n", - " 364 logger.info('Total bound: %.3e. Word bound: %.3e. theta bound: %.3e. 
beta bound: %.3e.', bound, word_bound, theta_bound, beta_bound)\n", - " 365 \n", - " 366 \n", - " 367 1 5 5.0 0.0 self.var_lambda = var_lambda\n", - " 368 1 5 5.0 0.0 self.var_gamma = var_gamma\n", - " 369 \n", - " 370 1 4 4.0 0.0 return var_gamma, var_lambda\n", - "\n" - ] - } - ], - "source": [ - "model = AuthorTopicModel(corpus=None, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=0, random_state=1, var_lambda=None)\n", - "profile = line_profiler.LineProfiler(model.inference)\n", - "result = profile.runcall(model.inference, corpus=corpus)\n", - "profile.print_stats()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Disjoint set stuff" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "def find_disjoint_sets(d):\n", - " while True:\n", - " for tuple_, set1 in d.items():\n", - " try:\n", - " match = next(k for k, set2 in d.items() if k != tuple_ and set1 & set2)\n", - " except StopIteration:\n", - " # no match for this key - keep looking\n", - " continue\n", - " else:\n", - " #print('merging', tuple(set1), match)\n", - " d[tuple_] = set1 | d.pop(match)\n", - " break\n", - " else:\n", - " # no match for any key - we are done!\n", - " break\n", - "\n", - " output = sorted(tuple(s) for s in d.values())\n", - " \n", - " return output" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false, - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[(0,), (1,), (2,), (3,), (4,), (6,), (7,), (8,), (9,), (10,), (11,), (12,), (13,), (14,), (15,), (16, 63, 39), (18,), (19, 59), (20,), (21,), (22,), (23,), (24, 53), (25, 84), (26,), (27,), (28,), (29,), (30,), (32,), (33,), (34,), 
(35,), (36,), (37,), (38,), (40,), (41,), (42,), (43,), (44,), (45,), (46,), (47,), (48, 17, 58, 5), (49,), (50,), (51,), (52,), (54,), (55,), (56,), (57,), (60,), (61,), (62,), (64,), (65,), (66,), (67,), (68,), (69,), (70,), (71,), (72,), (73, 31), (74,), (75,), (76,), (77,), (78,), (79,), (80,), (81,), (82,), (83,), (85,), (86,), (87,), (88,), (89,)]\n", - "81\n", - "0.0870358943939209\n" - ] - } - ], - "source": [ - "start = time()\n", - "\n", - "thing = {a: set(_list) for a, _list in author2doc.items()}\n", - "disjoint_authors = find_disjoint_sets(thing)\n", - "print(disjoint_authors)\n", - "print(len(disjoint_authors))\n", - "\n", - "print(time() - start)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## LDA author-topic hack" - ] - }, - { - "cell_type": "code", - "execution_count": 132, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "author_corpus = []\n", - "for a, doc_ids in author2doc.items():\n", - " temp = {}\n", - " for d in doc_ids:\n", - " for v, cnt in corpus[d]:\n", - " if temp.get(v):\n", - " temp[v] += cnt\n", - " else:\n", - " temp[v] = cnt\n", - " author_corpus.append(list(temp.items()))" - ] - }, - { - "cell_type": "code", - "execution_count": 133, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(gensim.models.ldamodel)\n", - "LdaModel = gensim.models.ldamodel.LdaModel" - ] - }, - { - "cell_type": "code", - "execution_count": 134, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 4min 2s, sys: 7min 16s, total: 11min 18s\n", - "Wall time: 3min 25s\n" - ] - } - ], - "source": [ - "%time lda = LdaModel(corpus=author_corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", - " iterations=100, alpha='symmetric', eta='symmetric', eval_every=0, chunksize=2000, random_state=1)" - ] - }, - { - "cell_type": "code", - "execution_count": 135, - "metadata": { - 
"collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.019*\"signal\" + 0.017*\"component\" + 0.015*\"source\" + 0.009*\"independent\" + 0.009*\"ica\" + 0.008*\"noise\" + 0.008*\"eeg\" + 0.008*\"frequency\" + 0.007*\"response\" + 0.007*\"separation\"'),\n", - " (1,\n", - " '0.005*\"policy\" + 0.005*\"optimal\" + 0.005*\"bound\" + 0.005*\"action\" + 0.005*\"kernel\" + 0.005*\"let\" + 0.004*\"xi\" + 0.004*\"class\" + 0.004*\"decision\" + 0.004*\"reinforcement\"'),\n", - " (2,\n", - " '0.010*\"control\" + 0.010*\"cluster\" + 0.009*\"distance\" + 0.008*\"image\" + 0.007*\"clustering\" + 0.007*\"class\" + 0.006*\"nonlinear\" + 0.006*\"classification\" + 0.006*\"controller\" + 0.004*\"measure\"'),\n", - " (3,\n", - " '0.028*\"image\" + 0.013*\"object\" + 0.011*\"visual\" + 0.009*\"motion\" + 0.007*\"position\" + 0.006*\"field\" + 0.006*\"direction\" + 0.005*\"filter\" + 0.005*\"pixel\" + 0.005*\"view\"'),\n", - " (4,\n", - " '0.013*\"layer\" + 0.011*\"hidden\" + 0.008*\"net\" + 0.006*\"node\" + 0.006*\"memory\" + 0.006*\"neuron\" + 0.006*\"hidden_unit\" + 0.005*\"activation\" + 0.005*\"threshold\" + 0.004*\"propagation\"'),\n", - " (5,\n", - " '0.008*\"word\" + 0.007*\"recognition\" + 0.005*\"classifier\" + 0.005*\"rule\" + 0.004*\"class\" + 0.004*\"classification\" + 0.004*\"character\" + 0.004*\"table\" + 0.003*\"trained\" + 0.003*\"language\"'),\n", - " (6,\n", - " '0.010*\"speech\" + 0.006*\"mixture\" + 0.006*\"estimate\" + 0.006*\"recognition\" + 0.005*\"hidden\" + 0.005*\"prediction\" + 0.005*\"sequence\" + 0.005*\"estimation\" + 0.005*\"context\" + 0.005*\"likelihood\"'),\n", - " (7,\n", - " '0.017*\"circuit\" + 0.014*\"chip\" + 0.014*\"neuron\" + 0.013*\"analog\" + 0.010*\"voltage\" + 0.007*\"vlsi\" + 0.007*\"signal\" + 0.006*\"control\" + 0.005*\"cell\" + 0.005*\"implementation\"'),\n", - " (8,\n", - " '0.009*\"gaussian\" + 0.006*\"matrix\" + 0.005*\"noise\" + 0.005*\"prior\" + 0.005*\"field\" + 0.005*\"likelihood\" + 
0.005*\"posterior\" + 0.005*\"bayesian\" + 0.004*\"mixture\" + 0.004*\"approximation\"'),\n", - " (9,\n", - " '0.020*\"cell\" + 0.020*\"neuron\" + 0.010*\"stimulus\" + 0.010*\"spike\" + 0.009*\"response\" + 0.007*\"synaptic\" + 0.006*\"activity\" + 0.006*\"firing\" + 0.006*\"cortex\" + 0.005*\"orientation\"')]" - ] - }, - "execution_count": 135, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "lda.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 136, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [643, 1161]\n", - "[(1, 0.23332003952694552),\n", - " (5, 0.53385075047018016),\n", - " (6, 0.10891675344072629),\n", - " (8, 0.12227386376013714)]\n", - "\n", - "Geoffrey E. Hinton\n", - "Docs: [143, 284, 230, 197]\n", - "[(0, 0.02352470105235863),\n", - " (1, 0.010279793220247807),\n", - " (2, 0.020384798749417784),\n", - " (3, 0.22316974630812836),\n", - " (4, 0.29378098848291623),\n", - " (5, 0.28354005954382777),\n", - " (6, 0.06921176883627865),\n", - " (8, 0.076066638965696737)]\n", - "\n", - "Michael I. Jordan\n", - "Docs: [237]\n", - "[(1, 0.22743516568855809),\n", - " (2, 0.35536065944136824),\n", - " (3, 0.03147447824503067),\n", - " (4, 0.33259716011404672),\n", - " (6, 0.019782536548970251),\n", - " (8, 0.032916511196237168)]\n", - "\n", - "James M. Bower\n", - "Docs: [131, 101, 126, 127, 281, 208, 225]\n", - "[(0, 0.024730774978235743),\n", - " (2, 0.013137901461419016),\n", - " (3, 0.098173137689669399),\n", - " (5, 0.037453180336151123),\n", - " (7, 0.20974998834305741),\n", - " (9, 0.60758868832493407)]\n" - ] - } - ], - "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(lda.get_document_topics(author_corpus[author2id[name]]))\n", - "\n", - "name = 'Geoffrey E. 
Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(lda.get_document_topics(author_corpus[author2id[name]]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(lda.get_document_topics(author_corpus[author2id[name]]))\n", - "\n", - "name = 'James M. Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(lda.get_document_topics(author_corpus[author2id[name]]))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "## Unit testing stuff" - ] - }, - { - "cell_type": "code", - "execution_count": 410, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "texts = [['human', 'interface', 'computer'],\n", - " ['survey', 'user', 'computer', 'system', 'response', 'time'],\n", - " ['eps', 'user', 'interface', 'system'],\n", - " ['system', 'human', 'system', 'eps'],\n", - " ['user', 'response', 'time'],\n", - " ['trees'],\n", - " ['graph', 'trees'],\n", - " ['graph', 'minors', 'trees'],\n", - " ['graph', 'minors', 'survey']]\n", - "\n", - "author2doc = {'john': [0, 1, 2, 3, 4, 5, 6], 'jane': [2, 3, 4, 5, 6, 7, 8], 'jack': [0, 2, 4, 6, 8], 'jill': [1, 3, 5, 7]}\n", - "doc2author = {0: ['john', 'jack'], 1: ['john', 'jill'], 2: ['john', 'jane', 'jack'], 3: ['john', 'jane', 'jill'],\n", - " 4: ['john', 'jane', 'jack'], 5: ['john', 'jane', 'jill'], 6: ['john', 'jane', 'jack'], 7: ['jane', 'jill'],\n", - " 8: ['jane', 'jack']}\n" - ] - }, - { - "cell_type": "code", - "execution_count": 420, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "dictionary = Dictionary(texts)\n", - "corpus = [dictionary.doc2bow(text) for text in texts]" - ] - }, - { - "cell_type": "code", - "execution_count": 504, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "model = AuthorTopicModel(corpus, 
author2doc=author2doc, doc2author=None, id2word=dictionary, num_topics=2)\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 505, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "jill_topics = model.get_author_topics(model.author2id['jill'])\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.5.2" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/docs/notebooks/at_with_nips_old.ipynb b/docs/notebooks/at_with_nips_old.ipynb deleted file mode 100644 index 460ff2ad09..0000000000 --- a/docs/notebooks/at_with_nips_old.ipynb +++ /dev/null @@ -1,2850 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "application/javascript": [ - "// Run for table of contents.\n", - "$.getScript('https://kmahelona.github.io/ipython_notebook_goodies/ipython_notebook_toc.js')\n", - "\n", - "// https://github.com/kmahelona/ipython_notebook_goodies" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "%%javascript\n", - "// Run for table of contents.\n", - "$.getScript('https://kmahelona.github.io/ipython_notebook_goodies/ipython_notebook_toc.js')\n", - "\n", - "// https://github.com/kmahelona/ipython_notebook_goodies" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Tests with NIPS data\n", - "\n", - "

Table of Contents

\n", - "
\n" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "import matplotlib\n", - "import matplotlib.pyplot as plt\n", - "from nltk.tokenize import RegexpTokenizer\n", - "from nltk.stem.wordnet import WordNetLemmatizer\n", - "import gensim\n", - "from gensim.models import Phrases\n", - "from gensim.corpora import Dictionary\n", - "from gensim.models import LdaModel\n", - "from imp import reload\n", - "from pprint import pprint\n", - "from random import sample\n", - "import bokeh\n", - "\n", - "import logging\n", - "\n", - "from gensim.models import AuthorTopicModel\n", - "from gensim.models import atmodel\n", - "\n", - "from time import time\n", - "\n", - "%matplotlib inline" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Configure logging.\n", - "\n", - "log_dir = '../../../log_files/log.log' # On my own machine.\n", - "#log_dir = '../../../../log_files/log.log' # On Hetzner\n", - "\n", - "logger = logging.getLogger()\n", - "fhandler = logging.FileHandler(filename=log_dir, mode='a')\n", - "formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", - "fhandler.setFormatter(formatter)\n", - "logger.addHandler(fhandler)\n", - "logger.setLevel(logging.DEBUG)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Load and prepare data structure" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import os\n", - "import re\n", - "\n", - "# Folder containing all NIPS papers.\n", - "data_dir = '../../../../data/nipstxt/' # On my own machine.\n", - "#data_dir = '../../../nipstxt/' # On Hetzner.\n", - "\n", - "# Folders containin individual NIPS papers.\n", - "#yrs = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', 
'12']\n", - "yrs = ['00']\n", - "dirs = ['nips' + yr for yr in yrs]\n", - "\n", - "# Get all document texts and their corresponding IDs.\n", - "docs = []\n", - "doc_ids = []\n", - "for yr_dir in dirs:\n", - " files = os.listdir(data_dir + yr_dir) # List of filenames.\n", - " for filen in files:\n", - " # Get document ID.\n", - " (idx1, idx2) = re.search('[0-9]+', filen).span() # Matches the indexes of the start end end of the ID.\n", - " doc_ids.append(yr_dir[4:] + '_' + str(int(filen[idx1:idx2])))\n", - " \n", - " # Read document text.\n", - " # Note: ignoring characters that cause encoding errors.\n", - " with open(data_dir + yr_dir + '/' + filen, errors='ignore', encoding='utf-8') as fid:\n", - " txt = fid.read()\n", - " docs.append(txt)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "filenames = [data_dir + 'idx/a' + yr + '.txt' for yr in yrs] # Using the years defined in previous cell.\n", - "\n", - "# Get all author names and their corresponding document IDs.\n", - "author2id = dict()\n", - "author2doc = dict()\n", - "i = 0\n", - "for yr in yrs:\n", - " filename = data_dir + 'idx/a' + yr + '.txt'\n", - " for line in open(filename, errors='ignore', encoding='utf-8'):\n", - " contents = re.split(',', line)\n", - " author_name = (contents[1] + contents[0]).strip()\n", - " ids = [c.strip() for c in contents[2:]]\n", - " if not author2id.get(author_name):\n", - " author2id[author_name] = i\n", - " author2doc[i] = []\n", - " i += 1\n", - " \n", - " author_id = author2id[author_name]\n", - " author2doc[author_id].extend([yr + '_' + id for id in ids])\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Make a mapping from author ID to author name.\n", - "id2author = dict(zip(author2id.values(), author2id.keys()))" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - 
"metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Use an integer ID in author2doc, instead of the IDs provided in the NIPS dataset.\n", - "\n", - "# Mapping from ID of document in NIPS datast, to an integer ID.\n", - "doc_id_dict = dict(zip(doc_ids, range(len(doc_ids))))\n", - "\n", - "for a, a_doc_ids in author2doc.items():\n", - " for i, doc_id in enumerate(a_doc_ids):\n", - " author2doc[a][i] = doc_id_dict[doc_id]" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Make a mapping from document IDs to author IDs.\n", - "# Same as in the atvb code.\n", - "doc2author = {}\n", - "for d, _ in enumerate(docs):\n", - " author_ids = []\n", - " for a, a_doc_ids in author2doc.items():\n", - " if d in a_doc_ids:\n", - " author_ids.append(a)\n", - " doc2author[d] = author_ids" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Pre-process and vectorize data" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Tokenize the documents.\n", - "\n", - "# Split the documents into tokens.\n", - "tokenizer = RegexpTokenizer(r'\\w+')\n", - "for idx in range(len(docs)):\n", - " docs[idx] = docs[idx].lower() # Convert to lowercase.\n", - " docs[idx] = tokenizer.tokenize(docs[idx]) # Split into words.\n", - "\n", - "# Remove numbers, but not words that contain numbers.\n", - "docs = [[token for token in doc if not token.isnumeric()] for doc in docs]\n", - "\n", - "# Remove words that are only one character.\n", - "docs = [[token for token in doc if len(token) > 1] for doc in docs]" - ] - }, - { - "cell_type": "code", - "execution_count": 91, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Lemmatize the documents.\n", - "\n", - "# Lemmatize all words in documents.\n", - "lemmatizer = WordNetLemmatizer()\n", - "docs = 
[[lemmatizer.lemmatize(token) for token in doc] for doc in docs]" - ] - }, - { - "cell_type": "code", - "execution_count": 92, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Compute bigrams.\n", - "\n", - "# Add bigrams and trigrams to docs (only ones that appear 20 times or more).\n", - "bigram = Phrases(docs, min_count=20)\n", - "for idx in range(len(docs)):\n", - " for token in bigram[docs[idx]]:\n", - " if '_' in token:\n", - " # Token is a bigram, add to document.\n", - " docs[idx].append(token)" - ] - }, - { - "cell_type": "code", - "execution_count": 93, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Create a dictionary representation of the documents.\n", - "dictionary = Dictionary(docs)" - ] - }, - { - "cell_type": "code", - "execution_count": 94, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Remove rare and common tokens.\n", - "\n", - "# Filter out words that occur too frequently or too rarely.\n", - "max_freq = 0.5\n", - "min_wordcount = 20\n", - "dictionary.filter_extremes(no_below=min_wordcount, no_above=max_freq)\n", - "\n", - "dict0 = dictionary[0] # This sort of \"initializes\" dictionary.id2token." 
- ] - }, - { - "cell_type": "code", - "execution_count": 95, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAicAAAGcCAYAAAACtQD2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3XecXVW5//HPl0DAABmaSeggEKqUhBK8EOSCAaSIYmGw\nACoWhMuNckURfnDBgogEaSqKoqCDCNIDoQmEJphwgUDovSShpJAQ0ub5/bH2YfbsnOnl7Jl836/X\neZ05a6+997P3mZnznLXXWlsRgZmZmVlZLFfrAMzMzMzynJyYmZlZqTg5MTMzs1JxcmJmZmal4uTE\nzMzMSsXJiZmZmZWKkxMzMzMrFScnZmZmVipOTszMzKxUnJyYWTOSXpV0Ue71XpIaJX2sF/b9Y0mL\ncq8HZPs+u6f3ne3v69n+1umN/XWWpB9Iel7SYkkP1jqe9pK0SXZ+D6t1LFZuTk6sFCQdnv3Tqvb4\naa3jW8ZUu6dFh+9zIelHkg7sxL4bO7qvjmoltqATx9qbJH0S+CnwT+AI4OSaBmTWA5avdQBmOUH6\nR/tioXxK74diFRFxu6QPRcTCDq56EnApcH0H1jkFOK2D++mMlmL7A3BpJ461N+0JLAK+Hr45mvVT\nTk6sbG6OiMntrSxJwMCIWNCDMS3zevrDWtKgiHgvIhrphZaTlmQf9mVOTACGAvPKmJj479G6iy/r\nWJ+R738g6cuSHgfeB/bKlkvSdyU9Lul9SW9IulDS4MJ2JOn/ZX0r5kq6TdIWkl4p9LVo1v8hV161\nX4Kk/SVNzLY5W9J1krYo1LlM0kxJ62XL35U0Q9IZVfYjSWMlPSppflZvvKTts+X3Svp3C+fqOUmt\ntli0dB6q1Fuqz4mk4ZL+IWlaFtvLkv4iaeXK+wQMBCrnqrFybrPz2pht42+SZpIuUbR4zrNlX5b0\nVLa/B4t9YLJz+0yV9T7YZjtia+m9PTb3e/WapHOr/F7dI2mypK0l/VPSe9m5/W5r70Nu/eUlnZK9\nd+8r9Sk5TdIKhdi/CNRlcS5RC/03st+dRZJWzpWdkK13Rq5s+ez9Py1XtoqkcdnfxPuSpkr678L2\n2/p7XF3SnyXNkvSOpIuBZucsq7e2pD9l5+p9Sa9LulrSeu05b9Y/ueXEyqZO0pr5goh4u1BnDHAo\ncAHwDvByVv4HoD57Pgf4CHAssJ2k3bNv5ZCu158AXAdMAEYCtwAfKuynpf4HS5VLOgK4GBgPfB9Y\nGTgamChph4h4Nbfu8tn+JgLfy47nfyQ9ExEX5zb7Z9IH0fXARaQP1NHALsD/ZcsvlDQ8Ip7OxbIr\nsDHwwyqx57X3PFTirmx/xazecqTzPB1YDzgQGBwR8yR9CfgjcE92XgCeLWzrH8CTwA9yZS2d872A\nw4BzSZc0vgNMkLRjRDzVxroflEfEknbEVnxvfwycCNxM+p3bkvTejiz8XgWwFnAT8HfgcuDzwC8k\nPRIRt1eJLe+S7BgvJ/1ujCJdftoc+EIu9qOB7YBvAALubWF7E0nv0X+Q3i+A3YAlwO65eiNJ7/nd\n2fEKuDFb73fAo8B+wNmS1o6IEwr7WervMdvG9aTf1QuBp4BDSOe9+B5dA2xKem9fJrUMjSH9Tr2K\nLZsiwg8/av4ADic15xcfS3J1BmRlC4FNC+t/PFt2SKF8v6z8s9nrIdn6VxXqnZHVuyhXdjqwsEqs\nXyP9g18ne70qMAs4r1BvaFZ+fq7s0mzd7xfq/h9wX+71J7J4zmzlnK0GzAdOK5RfkO13pVbW7ch5\n2CuL+WPZ65FZnQPbeE/n57dTOK+NwCUtLFuYe115zxcD2+TKNyR9S7+8
cG6fbmubbcRWfG+HZufp\nukK9/8rqfTFXNjEr+3yubCApeftrG+dqRHacFxTKz862+R+F43ynHX9TA4B3gdNzZe+Qkp/3K78f\nwP9kx7hK9vqQLJbjC9u7ipQYbtCOv8fKNv4rV7YcKSFcAhyWla1RrOeHHxHhyzpWKgF8G9g79/hE\nlXq3R8SzhbLPkv7x3ilpzcoD+Dfpg2jPrN4+pH+q5xXWP6cLce9LSlAuL+x7CfBQbt95FxVe30Nq\n6ak4hPSBfHpLO42IWcANpG/bQGpqBz5HSjrebyXmMXT+PMzKnveTtFI76lcTwG86UH9iRHzQMToi\nXiJ9M9+3k/tvr0+QzlPxvPwWeA/Yv1A+OyKuqLyI1FfnIZq/t9V8knROikOmf0lqHSnup00RsQS4\nn9TahqRtgTrgZ8AKpFYNSK0pj0TE3Oz1fqSE44LCJs8mnYviOa/297gfsIDc73mkFqbzs+OpeI+U\n8Owpqa6Dh2j9mJMTK5uHIuKO/KNKnRerlG1G+hb2ZuExHViJ1FIAsEH23OyfaURMI33L7IxNSf9w\nJxb2PQP4z9y+K+ZmiUXeTGD13OuPAK9GRFsx/RnYWNKo7PW+wJqkb9et2TB77vB5iIjngF8B3wTe\nlnSTpG9LWrWNfRa90IG6xQ8/gKeBVSWtXmVZd6mcp6fzhZE6fL6QW17xSpVtFN/blvazODu3+f28\nRno/ivtpr3uAnbJ+K7sDr0TEI6QRcJVLO/9B+t3Nx/JqRMwvbGtqbnnei1X2uyHwWpUE+an8i2z5\nicABwAxJd0o6XlLxb8aWMe5zYn1R8Z8mpET7deDLNP9mVjEje64sa89Ih5bqDKiy7yD1d3mrSv1i\nB88lLWxXLfzcmpuyfX4JeCB7fi0i7mxjvY6ch6VExNisg+OnSK0w5wMnSBqVJTjtUe197IjiOWrv\n+9WVfbSlPe9tR5d3NIa8iaTh2buQWkgm5sp3l7Q1Kam/uwv7q/Y+iurvx1LbjohfSroaOJjUsvlj\n4IeS9si3ltmyxS0n1l88R+qMeE+x5SV7VP7JvZg9D8+vLGkY6dJM3kxggKRBhfKNquwbYEYL+55I\nxz0LrFccEVIUEYvJOl5KWo3UKfUv7dj+i9lze85DS/ueEhE/iYg9gD1IrVLfyFdpz3baabMqZcOB\ndyNiZvZ6JqkfTtFGVcraG9uL2fPm+UJJA7PtvtTO7bRnP8tL2qSwn3WAVbqwnwdIlwdHk1pKKr+L\ndwMfI11yDFILSz6W9SQVO0ZvmT23J5bKNoqX/TavUpeIeD4izo6IfYCPkjrotmuUk/VPTk6sv7iC\n1PnwpOKCbKhk5UP+VtK322ML1cZW2eZzpG96o3PbWoXUOpN3EzAX+FHW56O4/7XaeQx5V5FaNtsz\n++elpMTst6R/6u1JTjpyHpqRNFhS8X/HFNKH3Iq5snlUTxY6Y7esz0Qlho1IlwJuztV5DlhT0pa5\neuuSErai9sZWOU/HFcq/SRqRdUM7ttEe40m/a/9dKP8e6bze2JmNZpdmJpN+Z9emecvJysAxwFMR\nkW/xG0/6Wzq6sLmxpHNxUzt2PZ70u/DNSkH2t3EMzUd+fSgb/ZX3POnvacVcvWGSNq/ye2f9lC/r\nWJl0uvk6Iu7ILjOcJGkEcBvpG+NwUmfZb5NGXEyXNA44XtJ1pH+0O5I6375T2OxNwGvAJZLOysq+\nCrwBfDAPRkTMlnQMaQjzZEmXky61bEjqyPhPOvgtMCJuk9QAfFdp7pFbSJcndgcmRES+o+G/JU0l\ndYR9tD1N4R08D9D8vfkEME7S34FnSJ0rDyddvvpHrt4kYEw2P8YbwHMRUXVelnaYAtwi6TzS+3p0\n9vy/uTp/JQ2Pvi6rtwrwLdJw5e0K22tXbNl5+jlwoqTxpGRky2y795NarbosIiZL+gtwdNaZeiKw\nK+ky3RUR0dJw4faYCBwPvB0RU7P9
vSHpOdLfx+8K9a8mtaz8XNKmNA0l3h/4RURU61dTdDWp1eas\nrDWoMpS42Aq5FXCzpCuAJ0jJz2dJ/aYacvXOInX8Xo90+db6u1oPF/LDj4gPhhIvAUa0UmdAVueX\nrdQ5ijQ6Yi6pmf9h4CfAkEK9/0dKPOaSvh1vTurMeFGh3gjSh9B80je671AYbpqr+3HSN/mZ2Xaf\nAn4PbJ+rcynpQ6IY9+nAgkKZSB8qT2T7n0YaobJtlfV/kMX03Q6e92rn4WXgt7k6xaHEH8mO6xlS\nC8SMbN3RhW1vAdyZbXtJ5dxmx7qENCdKq+ch/56TPqifzs7Fg5V4CuuPAR4jDZV9nDTPSLWhxC3F\n1tJ7+51se+9n5+tXwKqFOhOBSVViupTUOtHWezEgez+ey/bzAin5Wr7K9pb6HWpluwdmx3R1ofwP\nFIZD55atTBqd82oWy5PAcR35eyR1Av4zaXTX26Q5ZXag+VDitUgjxp4A5pAS43uBg6sc8+Li++JH\n/30oe+PNlnmSXgFuiohvtFm5ZCR9jzRHyQYR8Uat4zEz6wpfvzPrH75Kmm/CiYmZ9Xnuc2LWRynd\nM+UgUj+RLfDoBjPrJ5ycmDVp6d4sZTWMNDLnHdIU9hNqHI+ZWbdwnxMzMzMrFfc5MTMzs1JxcmJm\nZmal4uTEzFok6ceSivcG6u0YBkhqlFS8Y29XtrlXts2DumubHdj3ZZKe6e39mvUlTk7MOknS4dkH\nXOUxX9JTks7rR3dV7WudhDuiVscVQGON9m3WJ3i0jlnXBOn+Ny8CK5Hu/PptYD9J28TSt4y38ujK\n3X674oga7tusT3ByYtZ1N0fE5OznP0h6h3STtE8Bf6tdWG2TNCgi3qt1HMuSiFhSi/36vba+xJd1\nzLrfHaRvxhtXCiRtLOnvkt6WNE/S/ZI+mV9J0pu5GwyiZJakRbm7KiPphKxsUK5sc0lXZtufL+kh\nSQcWtl+5DDVa0oWSppPuJ9Qhkr4m6XZJ07N9TZF0VKHOryRNK5T9Otv/t3Jl62RlX23nvr+cXTqb\nL+lBSR+rUmddSZdImibpfUmPSTq8yuYCWE7SyZJelfSepFslbVzY3h7Ze/dytr2XJJ2Vv5uupB9I\nWiJpneJOsrrzJa2avV6qz4mkVSSNk/RKto+p2U0J83U2yc7VYYXySp+cE3NlP87Khkv6m6SZpBtQ\nmvUJTk7Mut+m2fPbAFn/k/tJd/M9HziRdDv46yV9KrfevcDo3OttgUpS8h+58t2AyZVvwZK2Jt0B\ndnPgZ6SZYucC1xS2X3EhaUbZ/yXdj6ejvk26CeJPgO+RboT320KCMhH4sKThhbiXkO6sXDGalCRM\nbMd+9wJ+AfyJdIO8IcAESZtXKkgaRrop4B7AucBxWax/lHR0YXsiXZLbH/h59vgY6WZ1eZ8nvV/n\nA8eQbnJ4HOnGeRWXZ9v7XJW4PwuMj4h3s9fN+vFIEnAjcCzprsdjSTdVPFvpjsidUdn+P0g36PsB\n6cZ7Zn1Dre886IcfffVB052U9yTd4n1d4AvAm6TkYO2s3ris3q65dVcm3X32uVzZ94CFwMrZ62NI\nH6z3Az/N1XsHOCv3+jbS3ZeLd6+9B3iyEG8j6W68aucxVruj74pV6t0KTM29Hprt62vZ69Wzc3A5\n8HKu3vnAtDZiGJBtazGwTa58Q9Idcy/PlV1CuqtyXWEbVwBvAStkr/fKtvkIMCBXb2wW5/A2jvdH\nWTxr58r+BdxXqLdrtp/P58ouBZ7OvT4kq3N8Yd2rgEWkmzkCbJLVO6yF83Ni4X1rBC6p9d+JH350\n5uGWE7OuEXA7KSF5Bfgr6dbvB0fTTfj2Ax6MiPsrK0XEPOAiYCNJW2XFE0n9wCqXKnbPyiZmPyNp\nW2C1rAxJq5OSo78DdZLWrDyAW4DNJK2dizeA30VEp0eqRMSCDw5eGpzt6y5guKQPZXWmA8/S1BK0\n
O7AA+CWwnqQNC8fYHhMjYkoujpeA64F9s1gEfBq4Fli+yrlYHdi+sM2Lo3kfkImk9/QjLRzvoGx7\n92X18tv7G7CLpA1yZV8A3iO1iLRkP1JSekGh/GxS4rFvK+u2JoDfdHJds5pycmLWNUG6zLE38HFg\nq4jYJCJuy9XZEHiqyrpTc8sBJpM+yCqXPXajKTnZUdLAbFmQWkUgXUIS6Zvym4XHqVmd4rDmF/Mv\nJK0gaWj+0doBS9pd0h2S5gKzsn2dli2uy1W9p3AsDwL/BmYDu0uqA7ah/cnJs1XKngZWzZK0YcCq\nwNEsfS4uyuoXz0Wxz83M7Hn1SoGkDSX9WdLbpBaxN0kJKTQ/3iuy58/nyg4BbojWO6JuCLwaEfML\n5cXfj854oQvrmtWMR+uYdd1D0TRap9MiYrGkfwGjJW0CrA3cTfowXAHYhfQhPzUi3s5Wq3zBOAto\n6cZ/xQ/14ofgaNJlmSAlOiFp/Yh4vbghSZtldaeQLoG8QvrWfxCpz0T+C89E4HBJ65OSlNsiIiTd\nm72uJAJ3txB3e+SH5Fb2/SfgshbqP1J43dLIGUHqbEq6bLYq8FNSkvkesAGpz8kHxxsRr0q6n5Sc\nnCVpd9Klvss7cAytaam1a0Ar6xTfa7M+wcmJWc97idRZtWjL3PKKicD3SZ1n34yIpwEkPU5KInYn\nXcqoeD57XhQRd3Qyvkmklp+8N1uoexApUdo/u3RDFt8+VepWWkT2AUYAp2Sv7waOJCUn77J0wtCS\nzaqUDQfejYiZkuYA84DlunAuirYn9fWoj4gPhoVLaulSy+XAryR9hHRJ513gpjb28SKwm6QPFVpP\nir8flWRutcL6XWlZMSslX9Yx63njgZ0l7VIpkLQy8A3ghYh4Ild3Imkyt+NounRD9vOXSa0pH1wG\niYg3SR1cv5mNVGlG0lptBRcRsyLijsKjpSnrKy0NH/zvyC6pfKXKdp8FppM6+i5H6qdROcbNSf1D\n7utA/5fdsj43lf1uBBwA3JztbwlwNfB5SVsWV65yLtqz32rHK9L7U239v5N1WiVd0rku32elBeOB\ngaTLUXmVzrk3AUTETNJltNGFese0EEtVkuqUhp6v0t51zHqbW07MuqY9TfJnAPXAzZLOJY22OYL0\njfczhbr3k0aBDAd+myu/m9S3pdqw2+9kZY9J+h2pNWUoaaTIusAOHYy3NRNIQ27HZ/saDBwFvMHS\n/TkgJVWfJQ19npuVPUS63LApaXRNe00BbpF0HukcHZ09/2+uzvdJH94PZvFNBdYAdiS1OuUTuPac\ni8dJ/TbOyTrxzs2OZ3C1yhExXdJE4H+AVWjfJHxXk97fn0vaFHiU1El2f+AXEZHvF/N74HhJs0l9\nlD5OatnpyPt6KPDr7PmKNuqa1YRbTsy6ps1vrBExg5Qo3EL6lvtT0hDYAyLiukLd90jDgvOdXiEl\nH0EahvtKYZ2ppA/fG0jDhc8Hvkn61n0azXVmlM4H62T7+izpf8dZwNeB80hzp1RTiTvf2rOYNOy2\nvfObVGK4HTiedIynklplxmQxVbY9DdiJ1O/kM1ls/0VKJk5o6bhaKs9akA4gJQwnAieREpYjW4n1\nb6TEZBYt9wPK7yNIici5wIGkoefDge9GxA8K651C6uvyeVKSuDiLr6P3QOqv90uyfkJdGFFoZmZm\n1u1q3nIiaTlJp0t6Pps++llJJ1Wpd5qk13NTTG9aWL66pL9Imi1ppqTfZ9f1zczMrA+peXJCmlb5\nm6Trx1uQrhl/X9IxlQqSTiA1h38T2JnUI39CNu9DxV9Jvdv3IjWRjqb5NXszMzPrA2p+WUfS9aTp\nq4/KlV0JvBcRX8lev07qGDYuez2YdL358Ii4IuuZ/zgwMiIezursQ7pfxXrZdWgzMzPrA8rQcnIf\nsFc2uROStiPd5Gx89npjUg/7yoyMRMQcUoe6XbOiUcDMSmKSuY
3U6WsXzMzMrM8ow1DiM0g96Z+U\ntISUMP0oIiqzKg4jJRnTC+tNp2lY4DBgRn5hRCyR9A7Nhw6amZlZyZUhOfkCacKiQ4EnSDMy/krS\n6xFxaSvribaHw7VYJ7t51z6k2Rnf72DMZmZmy7KVgI2ACbnbaXSbMiQnZ5JuB//37PXj2cyPPyTd\nWnwaKckYSvPWkyGk+SDI6jSbACq7J8bqLN3iUrEP8Jeuh29mZrbM+iJpQEq3KkNyMoilWzcayfrD\nRMQLkqaRRuE8Ch90iN2FpluM3w+sJmmHXL+TvUhJzb9a2O+LAJdddhlbbrnUTNfLlLFjxzJu3Lha\nh1EKPheJz0MTn4vE5yHxeUimTp3Kl770JSjc5by7lCE5uR74kaRXSCNuRpDuKfH7XJ1zgJMkPUs6\nEacDrwLXAkTEk5ImAL+T9G3SfSrOAxpaGanzPsCWW27JiBEjuv2g+pK6urpl/hxU+FwkPg9NfC4S\nn4fE52EpPdItogzJyTGkZOMC0qWZ10n3fTi9UiEizpQ0iDRvyWqkKa/3i4iFue0cRpq2+zZSy8uV\npJtzmZmZWR9S8+QkIuYB380erdU7lXQ/jZaWzwK+1J2xmZmZWe8rwzwnZmZmZh9wcmLU19fXOoTS\n8LlIfB6a+FwkPg+Jz0PvqPn09bUiaQQwadKkSe7cZGZm1gGTJ09m5MiRkG4bM7m7t++WEzMzMysV\nJydmZmZWKk5OzMzMrFScnJiZmVmpODkxMzOzUnFyYmZmtoyaPh023RTuuqvWkTTn5MTMzGwZtWgR\nPPccvN8jd8jpPCcnZmZmy6jGxvQs1TaOIicnZmZmy6jKPKxOTszMzKwUnJyYmZlZqTg5MTMzs1Jx\ncmJmZmal4uTEzMzMSsXJiZmZmZVKJTlZrmTZQMnCMTMzs97ieU7MzMysVHxZx8zMzErFyYmZmZmV\nipMTMzMzKxUnJ2ZmZlYqTk7MzMysVJycmJmZWal4KLGZmZmViidha4GkFyQ1Vnmcly1fUdIFkt6S\n9K6kKyUNKWxjfUk3SponaZqkMyXV/NjMzMzKzJd1WrYjMCz3+AQQwBXZ8nOA/YFDgNHAOsBVlZWz\nJGQ8sDwwCjgcOAI4rVeiNzMz66PKmpwsX+sAIuLt/GtJBwLPRcRESYOBrwKHRsRd2fIjgamSdo6I\nB4F9gC2APSPiLeAxSScDZ0g6NSIW9+oBmZmZ9RFlTU7K0HLyAUkrAF8ELs6KdiQlULdX6kTEU8DL\nwK5Z0SjgsSwxqZgA1AFb93TMZmZmfZWTk/b5NCmp+FP2eiiwMCLmFOpNJ10CInueXmU5uTpmZmZW\n4OSkfb4K3BQR09qoJ1K/lLa0p46ZmdkyqazJSc37nFRI2gDYGzg4VzwNGChpcKH1ZAhNrSPTgJ0K\nmxuaPRdbVJYyduxY6urqmpXV19dTX1/fgejNzMz6nvbMc9LQ0EBDQ0OzstmzZ/dgVCVKTkitJtNJ\nI28qJgGLgb2AqwEkDQc2AO7L6twPnChprVy/kzHAbOCJtnY6btw4RowY0S0HYGZm1pe0Z56Tal/Y\nJ0+ezMiRI3ssrlIkJ5JEGv57SUQ0VsojYo6ki4GzJc0E3gXOBe6NiIeyareQkpBLJZ0ArA2cDpwf\nEYt68TDMzMz6FF/Wad3ewPrAH6ssGwssAa4EVgRuBr5TWRgRjZIOAH5Nak2ZB1wCnNKzIZuZmfVt\nTk5aERG3AgNaWLYAODZ7tLT+K8ABPROdmZlZ/1TW5KRso3XMzMyslzg5MTMzs1JxcmJmZmal4uTE\nzMzMSqU985zUgpMTMzOzZVR75jmphZKFY2ZmZr3Fl3XMzMysVJycmJmZWak4OTEzM7NScXJiZmZm\npeLkxMzMzErFyYmZmZmViuc5MTMzs1Jxy4mZmZmViidhMzMzs1Jxy4mZmZmVipMTMzMzKxUnJ2Zm\nZlYqTk7MzMysVJycmJmZWa
l4nhMzMzMrFbecmJmZWal4nhMzMzMrFbecmJmZWak4OTEzM7NScXJi\nZmZmpeLkxMzMzErFyYmZmZmViuc5aYWkdSRdKuktSe9JekTSiEKd0yS9ni2/VdKmheWrS/qLpNmS\nZkr6vaSVe/dIzMzM+g63nLRA0mrAvcACYB9gS+B7wMxcnROAY4BvAjsD84AJkgbmNvXXbN29gP2B\n0cBve+EQzMzM+qSyznOyfK0DAH4AvBwRX8+VvVSocxxwekRcDyDpK8B04GDgCklbkhKbkRHxcFbn\nWOBGScdHxLSePggzM7O+xi0nLTsQ+LekKyRNlzRZ0geJiqSNgWHA7ZWyiJgD/AvYNSsaBcysJCaZ\n24AAdunpAzAzM+uLnJy07CPAt4GngDHAb4BzJX0pWz6MlGRML6w3PVtWqTMjvzAilgDv5OqYmZlZ\nTlmTkzJc1lkOeDAiTs5ePyJpa1LCclkr64mUtLSmPXXMzMyWSU5OWvYGMLVQNhX4TPbzNFKSMZTm\nrSdDgIdzdYbkNyBpALA6S7e4NDN27Fjq6uqaldXX11NfX9/+IzAzM+uD2pOcNDQ00NDQ0Kxs9uzZ\nPRhVOZKTe4HNC2Wbk3WKjYgXJE0jjcJ5FEDSYFJfkguy+vcDq0naIdfvZC9SUvOv1nY+btw4RowY\n0VoVMzOzfqk985xU+8I+efJkRo4c2WNxlSE5GQfcK+mHwBWkpOPrwFG5OucAJ0l6FngROB14FbgW\nICKelDQB+J2kbwMDgfOABo/UMTMzqy5K2vGh5slJRPxb0qeBM4CTgReA4yLi8lydMyUNIs1bshow\nEdgvIhbmNnUYcD5plE4jcCVpCLKZmZlVEVG+/iZQguQEICLGA+PbqHMqcGory2cBX2ppuZmZmTUX\nUb4J2KAcQ4nNzMysBsracuLkxMzMbBnl5MTMzMxKxcmJmZmZlUpjo5MTMzMzKxG3nJiZmVmpODkx\nMzOzUnFyYmZmZqXieU7MzMysVNxyYmZmZqXi5MTMzMxKxcmJmZmZlYrnOTEzM7NSccuJmZmZlYqT\nEzMzMysVJydmZmZWKp7nxMzMzErFLSdmZmZWKk5OzMzMrFScnJiZmVmpeJ4TMzMzKxW3nJiZmVmp\nODkxMzOzUnFyYmZmZqXi5MTMzMxKxZOwmZmZWam45cTMzMxKxcmJmZmZlYrnOWmBpFMkNRYeT+SW\nryjpAklvSXpX0pWShhS2sb6kGyXNkzRN0pmSan5sZmZmZdavW04kDZC0vaTVO7mJKcBQYFj22C23\n7Bxgf+AQYDSwDnBVbt/LAeOB5YFRwOHAEcBpnYzFzMxsmdCvkhNJ50j6WvbzAOAuYDLwiqSPd2KT\niyPizYiYkT3eybY9GPgqMDYi7oqIh4Ejgf+QtHO27j7AFsAXI+KxiJgAnAx8R9LynTk+MzOzZUG/\nSk6AzwJImAt3AAAgAElEQVSPZD8fCGxMShDGAT/pxPY2k/SapOckXSZp/ax8JKlF5PZKxYh4CngZ\n2DUrGgU8FhFv5bY3AagDtu5ELGZmZsuE/pacrAVMy37+JPD3iHga+APw0Q5u6wHSZZh9gG+REp27\nJa1MusSzMCLmFNaZni0je55eZTm5OmZmZlZQ1nlOOnvZYzqwlaQ3gH2Bo7PyQcCSjmwouwxTMUXS\ng8BLwOeB91tYTUC0Z/MdicXMzGxZUtaWk84mJ38ErgDeICUAt2bluwBPdiWgiJgt6WlgU+A2YKCk\nwYXWkyE0tY5MA3YqbGZo9lxsUVnK2LFjqaura1ZWX19PfX19Z8I3MzPrM9qTnDQ0NNDQ0NCsbPbs\n2T0YVSeTk4g4VdIUYH3SJZ0F2aIlwBldCUjSKsAmwJ+AScBiYC/g6mz5cGAD4L5slfuBEyWtlet3\nMgaYDTxBG8aNG8eIESO6ErKZmVmf1J55Tqp9YZ88eTIjR47ssbg6PZolIq4EkLRSruxPHd2O
pF8A\n15Mu5awL/C8pIbk8IuZIuhg4W9JM4F3gXODeiHgo28QtpCTkUkknAGsDpwPnR8Sizh6fmZlZf1fW\nyzqdHUo8QNLJkl4D5kr6SFZ+emWIcQesB/yVdDnocuBNYFREvJ0tHwvcAFwJ3Am8TprzBICIaAQO\nILXa3Af8GbgEOKUzx2ZmZrasKGty0tmWkx+RJjv7PvC7XPkU4L+Bi9u7oYhotXNHdsno2OzRUp1X\nSAmKmZmZtVNZk5PODiD6CvCNiPgLzUfnPEKa78TMzMxKrr8lJ+sCz7awvRU6H46ZmZn1lrLOc9LZ\nkJ4Adq9S/lng4c6HY2ZmZr2lrC0nne1zchrwJ0nrkhKcz0janHS5x30/zMzM+oCyJiedajmJiGtJ\nScjewDxSsrIlcGBE3NraumZmZlYO7ZnnpBa6Ms/JPcAnujEWMzMz60X9quVE0k6SdqlSvoukHbse\nlpmZmfW0fpWcABeQpq4vWjdbZmZmZiXX35KTrYDJVcofzpaZmZlZyfW35GQBTXf+zVubdF8cMzMz\nK7n+lpzcAvxMUl2lQNJqwE8Bj9YxMzPrA8o6CVtnR+scD9wNvCSpMuna9sB04MvdEZiZmZn1rLK2\nnHQqOYmI1yRtC3wR2A6YD/wRaIiIRd0Yn5mZmfWQ/jjPyTzgom6MxczMzHpRv2o5AZA0HPg4MIRC\n35WIOK1rYZmZmVlP61fJiaSjgF8DbwHTgMgtDtJ09mZmZlZi/So5AU4CfhQRP+/OYMzMzKz3lDU5\n6ewAotWBv3dnIGZmZta7+lty8ndgTHcGYmZmZr2rv81z8ixwuqRRwGNAs+HDEXFuVwMzMzOzntXf\nhhJ/A5gL7JE98gJwcmJmZlZyZb2s09lJ2Dbu7kDMzMysd5U1OenSlSZJAyVtLqnT86WYmZlZbfSr\n5ETSIEkXA+8BjwMbZOXnSfpBN8ZnZmZmPaRfJSfAz0j31Pk48H6u/DbgC12MyczMzHpBWZOTzl6O\nORj4QkQ8ICk/O+zjwCZdD8vMzMx6WlmTk862nHwYmFGlfGWaT2VvZmZmJVXWeU46G9K/gf1zrysJ\nydeB+7sUkZmZmfWKss5z0tnk5ETgp5J+Tbo0dJykW4EjgR91JSBJP5TUKOnsXNmKki6Q9JakdyVd\nKWlIYb31Jd0oaZ6kaZLOlFTCfNDMzKwc+tVlnYi4h9QhdnnSDLFjgOnArhExqbPBSNoJOAp4pLDo\nHFJLzSHAaGAd4KrcessB47N4RgGHA0fguyObmZm1qKzJSYc7xGZzmhwGTIiIo7orEEmrAJeRLg2d\nnCsfDHwVODQi7srKjgSmSto5Ih4E9gG2APaMiLeAxySdDJwh6dSIWNxdcZqZmfUXZU1OOtxykn3Q\n/wZYqZtjuQC4PiLuKJTvSEqibs/F8BTwMrBrVjQKeCxLTComAHXA1t0cp5mZWb/Qb5KTzIPADt0V\nhKRDge2BH1ZZPBRYGBFzCuXTgWHZz8Oy18Xl5OqYmZlZTlmTk87Oc3Ih8EtJ6wGTgHn5hRHxaHs3\nlG3jHOATEbGorfr5VWnfsGUPbTYzM6uivyUnl2fP+bsPB00Jw4AObGskad6USdIHp2gAMFrSMcC+\nwIqSBhdaT4bQ1DoyDdipsN2h2XOxRaWZsWPHUldX16ysvr6e+vr6DhyCmZlZ39Oe5KShoYGGhoZm\nZbNnz+7BqDqfnHTnXYlvAz5aKLsEmAqcAbwGLAL2Aq4GkDScdD+f+7L69wMnSlor1+9kDDAbeKK1\nnY8bN44RI0Z0/SjMzMz6mMbGtidhq/aFffLkyYwcObLH4upUchIRL3VXABExj0ICIWke8HZETM1e\nXwycLWkm8C6pxebeiHgoW+WWbBuXSjoBWBs4HTi/g5eKzMzMlhn96rKOpK+0tjwi/ty5cJo2UXg9\nFlgCXAmsCNwMfCe3v0ZJBwC/JrWmzCO1vpzSxTjMzMz6
rX6VnAC/KrxeARgELATeA7qUnETEfxZe\nLwCOzR4trfMKcEBX9mtmZrYs6VfJSUSsXiyTtBmp5eIXXQ3KzMzMel5Zk5Nuu/dMRDwD/IClW1XM\nzMyshPp9cpJZTLrvjZmZmZVcWZOTznaIPahYRBohcwxwb1eDMjMzs57Xr5IT4JrC6wDeBO4Avtel\niMzMzKxXLFnSj5KTiOjuy0FmZmbWi6ZNg0mT4HOfq3UkS3OSYWZmtgy6+up0WefII2sdydI6lZxI\nulLSD6qU/4+kv3c9LDMzM+tJb78Na6wBqy81OUjtdbblZA/gxirlNwOjOx+OmZmZ9YZ582DllWsd\nRXWdTU5WIc0GW7QIGNz5cMzMzKw3zJ0Lq6xS6yiq62xy8hjwhSrlh9LGXYDNzMys9ubNK29y0tmh\nxKcD/5C0CWn4MMBeQD1Qwn6/ZmZmljd3bnkv63R2KPH1kg4GTgQ+C8wHHgX2joi7ujE+MzMz6wH9\nseWEiLiR6p1izczMrOTmzoX11691FNV1dijxTpJ2qVK+i6Qdux6WmZmZ9aQyt5x0tkPsBUC1fGvd\nbJmZmZmVWJn7nHQ2OdkKmFyl/OFsmZmZmZVYfxxKvAAYWqV8bWBx58MxMzOz3tAfL+vcAvxMUl2l\nQNJqwE+BW7sjMDMzM+s5Zb6s09nROscDdwMvSXo4K9semA58uTsCMzMzs56xcCEsXlzelpPOznPy\nmqRtgS8C25HmOfkj0BARi7oxPjMzM+tmc+em5/7WckJEzAMu6sZYzMzMrBfMnp2eB5f0bnidSk4k\nfY40Vf1wIIBngL9GxJXdGJuZmZn1gDffTM9DhtQ2jpZ0qEOspOUk/Q34G2nI8LPA88DWwBWSLpek\n7g/TzMzMusuMGem5rMlJR1tOjgP2Bg6KiBvyCyQdROp3chxwTveEZ2ZmZt2tkpystVZt42hJR4cS\nHwn8TzExAYiI64DvA1/tjsDMzMysZ8yYAWusASusUOtIqutocrIZcFsry2/L6piZmVlJzZhR3ks6\n0PHkZD6wWivLBwPvd2SDkr4l6RFJs7PHfZL2zS1fUdIFkt6S9K6kKyUNKWxjfUk3SponaZqkMyV1\ndoI5MzOzfq2/JSf3A99uZfl3sjod8QpwAjAye9wBXCtpy2z5OcD+wCHAaGAd4KrKylkSMp7Uf2YU\ncDhwBHBaB+MwMzNbJpQ9Oeloh9ifAHdKWhM4C3gSELAl8D3gU8CeHdlgRNxYKDpJ0reBUZJeI/Vh\nOTQi7gKQdCQwVdLOEfEgsA+wBbBnRLwFPCbpZOAMSadGhO/1Y2ZmlvPyy7DffrWOomUdajmJiPuA\nL5ASkPuBmcA7wL1ZWX1E3NvZYLKhyocCg7LtjyQlULfnYngKeBnYNSsaBTyWJSYVE4A60hBnMzMz\nyyxeDM8/D5uVuIdohydhi4irJU0AxpAmYQN4GrglIt7rTBCStiElIysB7wKfjognJe0ALIyIOYVV\npgPDsp+HZa+LyyvLHulMTGZmZv3Ryy/DokWw6aa1jqRlnb23znuS9gb+X0S80w1xPEm6R89qpL4l\nf5Y0upX6Is1M25b21DEzM1tmPPNMeu43yYmk9SLi1ezlYcCZwDuSHgM+GRGvdCaIrF/I89nLyZJ2\nJk3mdgUwUNLgQuvJEJpaR6YBOxU2OTR7LraoLGXs2LHU1dU1K6uvr6e+vr5jB2FmZtYHPP10mt9k\ngw3aV7+hoYGGhoZmZbMrN+fpIR1tOXlS0tukPiYrAeuT+n9sBHTnVC7LASsCk4DFwF7A1QCShgMb\nAPdlde8HTpS0Vq7fyRhgNvBEWzsaN24cI0aM6MbQzczMyuuhh2C77WD5dmYA1b6wT548mZEjR/ZA\ndElHhxLXAZ8jJQ3LAeMlPU1KJPaRNKy1lauR9BNJu0naUNI2kn4G7AFclrWWXAycLenjkkaSpsi/\nNyIeyjZxCykJuVTS
tpL2AU4Hzo+IRR2Nx8zMrD978EHYZZdaR9G6jiYnK0TEgxHxS9KEbDuQprRf\nQhry+5ykpzq4zaHAn0n9Tm4jjdAZExF3ZMvHAjcAVwJ3Aq+T+qUAEBGNwAFZDPdl27oEOKWDcZiZ\nmfVrs2bBU0+VPznp6GWdOZIeJl3WGQgMioh7JS0mDTF+Fdi5IxuMiK+3sXwBcGz2aKnOK6QExczM\nzFowZUp63n772sbRlo62nKwD/BhYQEps/i1pIilRGQFERNzTvSGamZlZd5gyJfU12XzzWkfSuo5O\nwvZWRFwfET8E3iONkjmPNGT3LFLLyl3dH6aZmZl11ZQpMHw4DBxY60ha19Wb482OiCuARcB/AhsD\nF3Y5KjMzM+t2jz4KW/eBudO7kpxsS+pjAvASsCgipkXE37oelpmZmXWn999PI3U+9rFaR9K2Ts0Q\nCx90Qq38vE33hGNmZmY94aGHYMEC2GOPWkfStq5e1jEzM7OSW7IETjwR1lkHtt221tG0rdMtJ2Zm\nZtY3XHcd3HMP3HknDBhQ62ja5pYTMzOzfu4Pf4Bdd+0bl3TAyYmZmVm/98wzKTnpK5ycmJmZ9WMR\n8OqrsO66tY6k/ZycmJmZ9WNz5sC8eU5OzMzMrCRezWYkW2+92sbREU5OzMzM+rHXXkvPbjkxMzOz\nUnjxxfS8zjo1DaNDnJyYmZn1UxFw2WWw227lv9lfnidhMzMz64cWL4YvfAEmToRrrql1NB3j5MTM\nzKyfmTkTDjwQHngArrwSPvWpWkfUMU5OzMzM+pmjj4apU+GOO2D06FpH03Huc2JmZtaPvP8+XHst\nfP/7fTMxAScnZmZm/crdd8P8+fDJT9Y6ks5zcmJmZtaP/OMfsOGGsM02tY6k85ycmJmZ9RMXXwy/\n/S0ceihItY6m85ycmJmZ9RPjxqXnb32rtnF0lZMTMzOzfmDhQnjqKbjwQthoo1pH0zVOTszMzPqB\nZ55JE69tvXWtI+k6JydmZmb9wKOPpmcnJ2ZmZlYK110HH/0orLlmrSPpOicnZmZmfdz48WnitUMP\nrXUk3aPmyYmkH0p6UNIcSdMlXS1peKHOipIukPSWpHclXSlpSKHO+pJulDRP0jRJZ0qq+fGZmZn1\npMmT4dOfhj32SNPW9wdl+PDeHTgP2AXYG1gBuEXSh3J1zgH2Bw4BRgPrAFdVFmZJyHjSvYJGAYcD\nRwCn9Xz4ZmZmtXPhhbDuuqnlZLXVah1N96j5jf8iotkEu5KOAGYAI4F7JA0GvgocGhF3ZXWOBKZK\n2jkiHgT2AbYA9oyIt4DHJJ0MnCHp1IhY3HtHZGZm1jveey/NCHv00TBwYK2j6T5laDkpWg0I4J3s\n9UhSEnV7pUJEPAW8DOyaFY0CHssSk4oJQB3QD/otm5mZLe2ss2DePPja12odSfcqVXIiSaRLOPdE\nxBNZ8TBgYUTMKVSfni2r1JleZTm5OmZmZv1GBPzxj3DkkbDxxrWOpnvV/LJOwYXAVsBu7agrUgtL\nW9pTx8zMrE956il48UU48MBaR9L9SpOcSDof+CSwe0S8nls0DRgoaXCh9WQITa0j04CdCpscmj0X\nW1SaGTt2LHV1dc3K6uvrqa+v7+ARmJmZ9Y4XX4QxY2CNNWDPPXt2Xw0NDTQ0NDQrmz17do/uUxG1\nb1jIEpNPAXtExPOFZYOBN0kdYq/OyoYDTwK7RMRDkvYFrgfWrvQ7kfQN4OfAkIhYVGWfI4BJkyZN\nYsSIET14dGZmZt3nscdgr71glVXg9ttrc0ln8uTJjBw5EmBkREzu7u3XvOVE0oVAPXAQME9SpcVj\ndkS8HxFzJF0MnC1pJvAucC5wb0Q8lNW9BXgCuFTSCcDawOnA+dUSEzMzs77q4oth+eXh3nth7bVr\nHU3PqHlyAnyL1C/kzkL5kcCfs5/HAkuAK4EVgZuB71QqRkSjpAOAXwP3AfOAS4BTej
BuMzOzXrVw\nYZoN9oAD+m9iAiVITiKizRFDEbEAODZ7tFTnFeCAbgzNzMysVI49Nt19+KKLah1JzyrVUGIzMzNr\n2YMPwuGHw8c/XutIepaTEzMzsz7ixRdhiy1qHUXPc3JiZmbWB8yeDbNmwYYb1jqSnufkxMzMrA94\n6aX0vNFGNQ2jVzg5MTMz6wMeeCA9LwstJzUfrWNmZmbVLVgA48bBH/6QRunsvTcMWwbuGOeWEzMz\nsxKKgEMOgZNOglGjoKEBJkyA5ZaBT263nJiZmZXQ3/4GN94IV10Fn/lMraPpXU5OzMzMSmLqVJg8\nGR56CC64AOrr4dOfrnVUvc/JiZmZWY0tWQLf+x6cdx40Nqab+p12WiqTah1d73NyYmZmVkMzZsBx\nx8EVV8DPfw7f+hastFK6ud+yahk+dDMzs9qaOxe22y6Nyrn0UjjssFpHVA5OTszMzGrkwgvhzTfT\nMOGNN651NOWxDAxIMjMzK5+rr4YTToCvfc2JSZGTEzMzs1727LPws5/BHnvAb35T62jKx8mJmZlZ\nL2lsTJOqbbYZTJkCp5yybI7GaYuTEzMzs15y3HHwk5/Ad78Lzz8Pe+5Z64jKyR1izczMesHLL8Pv\nfpfmLzn55FpHU25OTszMzHrAwoUwcWIajfOPf8DNN8Nqq8HRR9c6svJzcmJmZtYN3n0XfvWrlIxc\ney3Mng2zZqVl22wD3/gGnHgirLFGbePsC5ycmJmZdYPzz4dTT4X11oMxY2D99WHffdPrYcPc8bUj\nnJyYmZl1wRNPpHvg3HxzmrPk97+vdUR9n0frmJmZddK4cbDttmmG1zPPTCNxrOvccmJmZtYBS5bA\nAw/AZZelCdTGjk0Tqq24Yq0j6z+cnJiZmbVh9mx4+2044wy47jqYPh3q6uCcc+C//sv9SbqbkxMz\nM7MWvPZauv/NX/6SXq+xBnz5y7D//rDLLjB4cG3j66+cnJiZmeVcdhk89hjceis8+mhKSH71K9ho\nI9htNw8F7g1OTszMbJkXATfckDq0/utf6S7Bw4fD2WfDV76SJk+z3lOK0TqSdpd0naTXJDVKOqhK\nndMkvS7pPUm3Stq0sHx1SX+RNFvSTEm/l7Ry7x2FmZn1Fe+9B//8Z7o8s8IK6XHQQTBgAJx7brrv\nzc03p/4kTkx6X1laTlYG/g/4A3BVcaGkE4BjgMOBF4AfAxMkbRkRC7NqfwWGAnsBA4FLgN8CX+rp\n4M3MrJwWLEgzt159dUpInn8+JSVvvAFvvZVaSM48Ez70IdhkE9h7b3duLYNSJCcRcTNwM4BU9dfi\nOOD0iLg+q/MVYDpwMHCFpC2BfYCREfFwVudY4EZJx0fEtF44DDMzq7FZs+AXv4DnnoP582H8eFi8\nGJZbDlZaKSUhBx8Ma64Jn/oUjBzpIcBlVIrkpDWSNgaGAbdXyiJijqR/AbsCVwCjgJmVxCRzGxDA\nLsC1vRexmZn1ln//O42meeaZ9HrmzNR/ZNSo1AJyyimw6aapI+t669U2Vmu/0icnpMQkSC0ledOz\nZZU6M/ILI2KJpHdydczMrA9bsgQmTICbbkrJyPvvw913w9Zbw+GHp2RkpZXgiCNgnXVqHa11RV9I\nTloiUtLS1TpmZlYis2al+UUWLkx9Rd56C665JpXPnw+rrw7/+Z+w6qpwwQVw1FGwfF/+NLOl9IW3\ncxopyRhK89aTIcDDuTpD8itJGgCsztItLs2MHTuWurq6ZmX19fXU19d3LWozM2uXCHj11TTz6jXX\nwP33w7x5aVldHay7LhxwAGyxBey6K+y4YxpdY72joaGBhoaGZmWzZ8/u0X0qolwNC5IagYMj4rpc\n2evALyJiXPZ6MCnp+EpE/F3SFsDjwI65DrFjgPHAetU6xEoaAUyaNGkSI0aM6PHjMjMzmDIlTQUP\n6f40U6ak5yefTGW77AJjxsB++6XLNFtt5VlYy2
jy5MmMHDkS0kCUyd29/VK0nGTzkWxKaiEB+Iik\n7YB3IuIV4BzgJEnPAi8CpwOvknV0jYgnJU0Afifp26ShxOcBDR6pY2ZWO889l1pDXn8drr02va5Y\nfvnUCrLVVqlT66c/nVpKzEqRnAA7Av8k9Q8J4JdZ+Z+Ar0bEmZIGkeYtWQ2YCOyXm+ME4DDgfNIo\nnUbgStIQZDMz6yXz5sHEiSkZueaa1IF1ueVS/5D990937/3oR1PdNdaAIUNa354tm0qRnETEXbQx\nW21EnAqc2sryWXjCNTOzXhEBL7+cRtBMmpRmU7355nTJptJfZKed4Ec/guOPh0GDahuv9S2lSE7M\nzKxcZs9OycbixU1lS5bA9den0TNvvZX6i1RsuCF8/vMwbFjqvDp0KKy1Vu/Hbf2DkxMzs2VQYyM8\n/jgsWpR+vvHGdAlmcta1cfHilIwUbbEFjBgB668PJ50EH/5wmm11u+16N37r35ycmJn1I0uWpIQj\nLyJN4/766+l52rTUMvLCC011Bg2CnXeGn/88dVQdMCCNmBk6tPm2VlzR956xnufkxMysD3nqqTQn\nyC23pOe8iFT+9tvV111xxTSb6sc+ljqp7rsvrL12WrbRRqmDqlkZODkxM6uh+fOb5v2A1Jn0mmvS\n1OwAc+c2vY6Al15K5XV1sMMOS2/vc59L95Ep2nLLdDnGrC9wcmJm1o0eeaT5XB4V8+enJOO995rK\nItJsqLNmNa+70kpN831Iaar2jTZKrzfbDEaPTn09Vl21Rw7BrOacnJiZVTFnTuqjAek+LxMmpGSi\nYsGCdN+XuXObr1dMNPK22SbdITfvyCNT8lHpxyGlWVLXXLPrx2DWVzk5MbNl1rRpabhsY2N6/frr\nKQlpbEzDZOfMaaq77rqwyirN1x8zJo1eyVt/ffjEJ6p3Gl19dXcmNWsPJydm1mctXgwPP5xGqCxZ\nkm4c9847S9ebPj2NUqk2NDZvwAA48EBYbTXYYw/45CdTMjFwIIwcmTqRmlnPc3JiZjW3cGHqk1H0\nzjtp0q/KnBs33JAm/6qYNavp0guk0Sabbbb0dpZfHn7yk6VHo6ywQkpGVl+9qcwJiFntOTkxs241\nY0bTRF7VPP54ugttRWMj3HorvPtu9forrJA6iEKa6GvMmKZlAwak2Ug//OH0esMNl770YmZ9j5MT\ns2XUjBnVWysqIuD225uGrj7zDNx9d9vbfeedpmGw1Sy3XLpkMnBgU9lRR8GoUUvXHTAgdRZdbbW2\n92tm/YeTE7M+4s034Z//bOq8Wc3EifD0021va/58uPfetutJsN566edBg+ArX2lqxWjJqqvCwQen\nCb+qGTSo+WUUM7MiJydmPWj2bHjxxZaXP/ggPPbY0uWLFsG11zafnGvBgrY7dK65Juy1V/tGhFx4\n4dLDWos23BCGD297W2Zm3cnJiVmmsbHlD/9HH4WHHmp53Ycfrt7P4plnmicYRRJstVX1Tpj775/m\nxagYOBA+9ammybmqWXHF1PnTzKwv878x69dmzkyJRVEE3HQTvPFGet3YmOa7aOmeJJASiJZGcqyx\nBhx0UOojkTdmTBoNUiyvWGst2Hjjto/DzGxZ4uTESmXOnHT5oui55+DOO1ter3LDs+KN0KZNW3oG\nz4q6Oth226bXn/tc9U6ZkDpkHnBAy0mGmZl1Hycn1mOeeCJd1ii66y549tmly+fPT6ND8lOE5w0e\n3HyER9Fmm8EhhzQvW2WVljtnDhvme5OYmZWRkxOratq0pnknHnggzU1RTbV7jlS8/Xb18ro62H33\npTttfuhDcNZZsMkmS68zaFAaUuqWCzOz/s/JyTKgsTFN3V2tP8X06anvRb4j6KJF8K9/NU8sPvKR\n6v0tBg6Er32teifN9dZLfS6KSciqq7Y9HNXMzJZdTk5KorExddxctKjtuq2NHHnrrTTFdz7ZiGh5\nFIqU7h9SnN
b7i1+Ej340/Tx06NI3NzMzM+spTk46acmSpWfBfOaZ1J+iJU8+2fLEV3PmNM3E2R7b\nb199yOiAAXDiiTBkSPPyESNghx2Wri956KmZmZWLP5Za8PTTTclCRJqZ84UXml7feWea/rto4MCW\nP+xb65y53HLpEsi667YdW10dbLRRe47CzMys71nmk5PzzksdPufNayqLSB09830uVl0Vdt65qf/E\nwQfDnns239bKK8O++6YblZmZmVnnLPPJyQ03wN57L32b9Q03bD4N+Jpr+m6nZmZmvWGZT04mTEj9\nMczMzKwcWpiM28zMzKw2nJyYmZlZqfSr5ETSdyS9IGm+pAck7VTrmPqChoaGWodQGj4Xic9DE5+L\nxOch8XnoHf0mOZH0BeCXwCnADsAjwARJa9U0sD7Af2xNfC4Sn4cmPheJz0Pi89A7+k1yAowFfhsR\n/7+9ew+2sirjOP79oQKKgzh5YSwUlbybF1AoRUFFUkcdsnEcTC2qwazJrElzqrEspzvjDadGbfJa\nUU1pqaMiBylvjGLmhYsp4gUPieIRARXh6Y+1trxsz0Hw7LP3Pvv9fWb2DO9a691nrYd3r/3s9117\nv9dHxDzgbGAlMKmx3TIzM7NN0RLJiaQtgOHAPZWyiAhgOvDJRvXLzMzMNl1LJCfAdsBmwJKq8iXA\n4EHfvXcAAAoVSURBVPp3x8zMzD6sVv+dEwHRRV1/gLlz59avN02qo6ODOXPmNLobTcGxSByHdRyL\nxHFIHIek8N7ZI/eYV0RX7929R76ssxI4JSJuLZT/DtgmIiZ0ss9E4Ka6ddLMzKz1nB4RN9f6SVvi\nzElErJb0CHA0cCuAJOXty7vY7U7gdOA54K0u2piZmdn79QeGkt5La64lzpwASDoVuA6YDMwmfXvn\ns8BeEfFKI/tmZmZmG68lzpwARMS0/JsmFwM7Av8GxjsxMTMz611a5syJmZmZtYZW+SqxmZmZtYhS\nJidluAePpNGSbpX0kqS1kk7qpM3FkhZLWinpbknDquq3lXSTpA5JyyRdI2lA/UbRfZIulDRb0huS\nlkj6q6Q9qtr0kzRV0lJJyyX9WdIOVW2GSLpN0gpJ7ZJ+LqnXvH4knS3psfx/2SHpfkmfLtS3fAw6\nk4+PtZKmFMpKEQtJF+WxFx9PFepLEQcASTtJuiGPdWV+rRxc1aYM8+XCTo6JtZKuyPV1OyZ63UHU\nXSrPPXgGkNbdfJVOfutF0gXA10gLiA8FVpDi0LfQ7GZgb9K3nk4AjgB+07PdrrnRwBXASOAYYAvg\nLklbFtpcShrfKaQx7gT8pVKZX1i3k9ZojQLOAj5PWt/UW7wAXED6JeXhwAzgFkl75/oyxGA9Sh9K\nvkyaA4rKFIsnSGv0BufH4YW6UsRB0iDgPuBtYDxpzvsWsKzQpizz5QjWHQuDgXGk949pub5+x0RE\nlOoBPAhcVtgW8CJwfqP71oNjXgucVFW2GDivsD0QWAWcmrf3zvsdVGgzHngXGNzoMXUjFtvlcR1e\nGPfbwIRCmz1zm0Pz9nHAamC7QpvJpMlr80aPqRuxeBX4QhljAGwNzAeOAtqAKWU7Hkgf0OZ0UVem\nOPwUuPcD2pR1vrwUWNCIY6JUZ07ke/AAIGlXUlZcjMMbwEOsi8MoYFlEPFrYdTopix5Zp672hEGk\nMbyWt4eTsvxiLOYDz7N+LB6PiKWF57kT2AbYt6c7XGuS+kg6DdgKeIASxgCYCvw9ImZUlY+gXLH4\nuNKl32ck3ShpSC4v0zFxIvCwpGn50u8cSV+qVJZ1vszvl6cD1+aiur42SpWc4HvwVAwmvWg2FIfB\nwP+KlRGxhvSm3itjJUmkTwL/iojKtfXBwDt5simqjkVnsYJeFAtJ+0laTvr0cxXpE9A8ShQDgJyY\nHQhc2En1jpQnFg+STrmPJ93FfVdgVl4nUaZjYjfgK6QzaccCvwYul/S5XF/K
+RKYQEoqrsvbdX1t\ntMzvnHTThu7BUyYbE4feHKurgH1Y/7p6VzZ2nL0pFvOAA0hnj04Brpd0xAbat1wMJH2MlKCOi4jV\nm7IrLRaLiCj+sucTkmYDi4BT6fpXs1suDqQP6bMj4vt5+zFJ+5ISlhs3sF+rz5eTgDsiov0D2vXI\nMVG2MydLgTWkDLBoB96f7bWydtIBtaE4tOft90jaDNiWXhgrSVcCxwNjImJxoaod6CtpYNUu1bGo\njlVlu9fEIiLejYhnI2JORHyXtBD0XEoUA9Lliu2BRyStlrQaOBI4V9I7pLH0K0ks1hMRHcACYBjl\nOiZeBqrvADsX2Dn/u4zz5c6kLxBcXSiu6zFRquQkf1Kq3IMHWO8ePPc3ql/1FhELSQdRMQ4DSddG\nK3F4ABgk6aDCrkeTXqQP1amrNZETk5OBsRHxfFX1I6RFa8VY7EGamIqx2L/qG13HAh3AU/RefYB+\nlCsG04H9SZd1DsiPh0mfkCv/Xk05YrEeSVsDu5MWf5bpmLiPtLCzaE/SWaTSzZfZJFIycXuhrL7H\nRKNXAzdg9fGppFXWZwJ7kb7q9SqwfaP7VuNxDiBNtgeSVlN/I28PyfXn53GfSJqs/wY8DfQtPMft\npMn6EOAw0jXZGxo9tk2Mw1WkleKjSRl85dG/qs1CYAzpk/V9wD8L9X1IZxnuAD5Buka/BPhRo8e3\nCXG4hHQ5axdgP+AnpInmqLLEYAOxee/bOmWKBfAL0tdBdwE+Bdydx/GRksVhBGkd1oWk5GwisBw4\nrdCmFPNlHodIN8S9pJO6uh0TDQ9Eg4J/Tg7+KlKmN6LRfeqBMR5JSkrWVD1+W2jzA9KnpJWkFdXD\nqp5jEOkTZQfpDf5qYKtGj20T49BZDNYAZxba9CP9FsrSPCn9Cdih6nmGAP8A3swvtp8BfRo9vk2I\nwzXAs/mYbwfuIicmZYnBBmIzg/WTk1LEAvg96WcUVpG+cXEzsGvZ4pDHcTzwnzwXPglM6qRNy8+X\neRzj8hw5rJO6uh0TvreOmZmZNZVSrTkxMzOz5ufkxMzMzJqKkxMzMzNrKk5OzMzMrKk4OTEzM7Om\n4uTEzMzMmoqTEzMzM2sqTk7MzMysqTg5MTMzs6bi5MTMWoKkNklTGt0PM+s+Jydm1m2SJkt6Q1Kf\nQtkASasl3VPVdqyktZKG1rufZtY7ODkxs1poI90Je0ShbDTwMjBKUt9C+ZHAooh4blP/iKTNu9NJ\nM+sdnJyYWbdFxAJSIjKmUDyGdGv5hcCoqvI2AElDJN0iabmkDkl/lLRDpaGkiyQ9KumLkp4F3srl\nW0m6Pu/3kqRvVvdJ0jmSFkhaJald0rTajtrMeoqTEzOrlZnA2ML22Fx2b6VcUj9gJDAjt7mFdKv5\n0cAxwO7AH6qedxjwGWACcGAu+2Xe50TgWFLCM7yyg6QRwGXA94A9gPHArG6Oz8zqxKdIzaxWZgJT\n8rqTAaREYhbQF5gM/BA4LG/PlDQO2A8YGhGLASSdATwpaXhEPJKfdwvgjIh4LbcZAEwCJkbEzFx2\nFvBioS9DgDeB2yJiBfAC8FgPjdvMasxnTsysVirrTg4BDgcWRMRS0pmTkXndyRjgmYh4EdgLeKGS\nmABExFzgdWDvwvMuqiQm2e6khGV2Yb9lwPxCm7uBRcDCfPlnoqQtazZSM+tRTk7MrCYi4hngJdIl\nnLGkpISIeJl05uIwCutNAAHRyVNVl6/opJ4u9q305U3gYOA0YDHprM1jkgZu9IDMrGGcnJhZLbWR\nEpMxpMs8FbOA44BDWZecPAXsLOmjlUaS9gG2yXVd+S/wLoVFtpK2Ja0teU9ErI2IGRHxHeAAYChw\n1IcYk5nVmdecmFkttQFTSXPLvYXyWcCVpMsxMwEiYrqkx4GbJJ2X66YCbRHxaFd/ICJWSLoW+IWk\n14BXgB8DayptJJ0A7Jb/7jLgBNIZl/nv
f0YzazZOTsysltqA/sDciHilUH4vsDUwLyLaC+UnA1fk\n+rXAHcDXN+LvfJu0vuVWYDnwK6B4yeZ10jd8Lsr9eRo4La9pMbMmp4guL9uamZmZ1Z3XnJiZmVlT\ncXJiZmZmTcXJiZmZmTUVJydmZmbWVJycmJmZWVNxcmJmZmZNxcmJmZmZNRUnJ2ZmZtZUnJyYmZlZ\nU3FyYmZmZk3FyYmZmZk1FScnZmZm1lT+DwrH78/1pfXIAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# Frequency distribution of words.\n", - "\n", - "one_doc = []\n", - "for doc in docs:\n", - " one_doc.extend(doc)\n", - "\n", - "bow = dictionary.doc2bow(one_doc)\n", - "word_freq = [cnt for _, cnt in bow]\n", - "\n", - "plt.plot(sorted(word_freq))\n", - "plt.xlabel('Words')\n", - "plt.ylabel('#Occurences')\n", - "plt.title('Frequency distribution of words.\\nPower-law behaviour.')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 96, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# Vectorize data.\n", - "\n", - "# Bag-of-words representation of the documents.\n", - "corpus = [dictionary.doc2bow(doc) for doc in docs]" - ] - }, - { - "cell_type": "code", - "execution_count": 97, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Number of authors: 166\n", - "Number of unique tokens: 681\n", - "Number of documents: 90\n" - ] - } - ], - "source": [ - "print('Number of authors: %d' % len(author2doc))\n", - "print('Number of unique tokens: %d' % len(dictionary))\n", - "print('Number of documents: %d' % len(corpus))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Disjoint set stuff" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "def find_disjoint_sets(d):\n", - " while True:\n", - " for tuple_, set1 in d.items():\n", - " try:\n", - " match = next(k for k, set2 in d.items() if k != tuple_ and set1 & set2)\n", - " except StopIteration:\n", - " # no match for 
this key - keep looking\n", - " continue\n", - " else:\n", - " #print('merging', tuple(set1), match)\n", - " d[tuple_] = set1 | d.pop(match)\n", - " break\n", - " else:\n", - " # no match for any key - we are done!\n", - " break\n", - "\n", - " output = sorted(tuple(s) for s in d.values())\n", - " \n", - " return output" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false, - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[(0,), (1,), (2,), (3,), (4,), (6,), (7,), (8,), (9,), (10,), (11,), (12,), (13,), (14,), (15,), (16, 63, 39), (18,), (19, 59), (20,), (21,), (22,), (23,), (24, 53), (25, 84), (26,), (27,), (28,), (29,), (30,), (32,), (33,), (34,), (35,), (36,), (37,), (38,), (40,), (41,), (42,), (43,), (44,), (45,), (46,), (47,), (48, 17, 58, 5), (49,), (50,), (51,), (52,), (54,), (55,), (56,), (57,), (60,), (61,), (62,), (64,), (65,), (66,), (67,), (68,), (69,), (70,), (71,), (72,), (73, 31), (74,), (75,), (76,), (77,), (78,), (79,), (80,), (81,), (82,), (83,), (85,), (86,), (87,), (88,), (89,)]\n", - "81\n", - "0.0870358943939209\n" - ] - } - ], - "source": [ - "start = time()\n", - "\n", - "thing = {a: set(_list) for a, _list in author2doc.items()}\n", - "disjoint_authors = find_disjoint_sets(thing)\n", - "print(disjoint_authors)\n", - "print(len(disjoint_authors))\n", - "\n", - "print(time() - start)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Online AT VB 2" - ] - }, - { - "cell_type": "code", - "execution_count": 101, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(onlineatvb2)\n", - "OnlineAtVb2 = onlineatvb2.OnlineAtVb2" - ] - }, - { - "cell_type": "code", - "execution_count": 102, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 10.9 s, sys: 12 ms, total: 10.9 s\n", - "Wall time: 10.9 s\n" - ] - } 
- ], - "source": [ - "%time model_online2 = OnlineAtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, var_lambda=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 100, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Speed improvement from new algorithm: 4.709677!\n" - ] - } - ], - "source": [ - "print(\"Speed improvement from new algorithm: %f!\" %((2 * 60 + 26) / 31))" - ] - }, - { - "cell_type": "code", - "execution_count": 218, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.007*rule + 0.005*class + 0.005*classifier + 0.004*probability + 0.004*cue + 0.004*distribution + 0.004*sample + 0.003*sequence + 0.003*tree + 0.003*evidence'),\n", - " (1,\n", - " '0.056*motion + 0.052*velocity + 0.051*muscle + 0.044*robot + 0.040*reinforcement + 0.035*controller + 0.029*obstacle + 0.028*command + 0.028*reinforcement_learning + 0.027*movement'),\n", - " (2,\n", - " '0.049*cell + 0.027*spike + 0.024*stimulus + 0.022*eye + 0.020*firing + 0.019*response + 0.017*burst + 0.016*inhibition + 0.016*fiber + 0.016*wave'),\n", - " (3,\n", - " '0.029*attractor + 0.026*vc + 0.024*theorem + 0.019*bound + 0.019*xt + 0.017*fixed_point + 0.016*eigenvalue + 0.016*threshold + 0.015*let + 0.014*capacity'),\n", - " (4,\n", - " '0.039*hmm + 0.032*tdnn + 0.030*speech + 0.030*mlp + 0.028*phonetic + 0.026*speaker + 0.024*segmentation + 0.021*recognition + 0.021*hybrid + 0.021*phoneme'),\n", - " (5,\n", - " '0.055*chip + 0.055*word + 0.043*circuit + 0.033*analog + 0.031*vlsi + 0.030*pulse + 0.028*voltage + 0.027*board + 0.027*perturbation + 0.024*processor'),\n", - " (6,\n", - " '0.027*rbf + 0.023*spline + 0.015*schedule + 
0.015*basis_function + 0.012*weight_decay + 0.012*approximation + 0.010*regression + 0.010*validation + 0.009*stochastic + 0.009*prediction'),\n", - " (7,\n", - " '0.071*depth + 0.068*node + 0.056*contour + 0.050*projection + 0.042*polynomial + 0.039*proof + 0.032*gate + 0.028*hidden_node + 0.027*boolean + 0.027*boolean_function'),\n", - " (8,\n", - " '0.005*image + 0.005*object + 0.004*neuron + 0.004*eq + 0.004*character + 0.003*filter + 0.003*field + 0.003*dynamic + 0.003*receptive + 0.003*receptive_field'),\n", - " (9,\n", - " '0.031*grammar + 0.027*module + 0.023*expert + 0.021*string + 0.020*symbol + 0.019*recurrent + 0.017*language + 0.014*automaton + 0.014*giles + 0.014*mozer')]" - ] - }, - "execution_count": 218, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_online2.show_topics(num_topics=10)" - ] - }, - { - "cell_type": "code", - "execution_count": 214, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Sheila \t Kannappan\n", - "Docs: [100]\n", - "[(0, 0.29470045213299129),\n", - " (1, 0.018773780023831975),\n", - " (2, 0.071451542822641448),\n", - " (3, 0.026741158302140633),\n", - " (4, 0.018099032024313566),\n", - " (5, 0.015363132745463916),\n", - " (6, 0.089347751415205109),\n", - " (7, 0.020278388465418653),\n", - " (8, 0.31198092387189108),\n", - " (9, 0.1332638381961023)]\n" - ] - } - ], - "source": [ - "name = id2author[114]\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))" - ] - }, - { - "cell_type": "code", - "execution_count": 200, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [357]\n", - "[(0, 0.16874400828774647),\n", - " (1, 0.05776392793070604),\n", - " (2, 0.018385851898290052),\n", - " (3, 
0.090073600218074618),\n", - " (4, 0.12243813551115512),\n", - " (5, 0.048550522852509548),\n", - " (6, 0.1728010777698884),\n", - " (7, 0.19524400649884482),\n", - " (8, 0.056488897891914927),\n", - " (9, 0.069509971140870139)]\n", - "\n", - "Geoffrey E. Hinton\n" - ] - }, - { - "ename": "KeyError", - "evalue": "'Geoffrey E. Hinton'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Geoffrey E. Hinton'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 9\u001b[0m \u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_online2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 10\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mKeyError\u001b[0m: 'Geoffrey E. 
Hinton'" - ] - } - ], - "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Geoffrey E. Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))\n", - "\n", - "name = 'James M. Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online2.get_author_topics(author2id[name]))" - ] - }, - { - "cell_type": "code", - "execution_count": 162, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Speed improvement from new algorithm: 5.503876!\n" - ] - } - ], - "source": [ - "print(\"Speed improvement from new algorithm: %f!\" %(28.4 / 5.16))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Online AT VB" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 13.6 s, sys: 16 ms, total: 13.6 s\n", - "Wall time: 13.6 s\n" - ] - } - ], - "source": [ - "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", - " iterations=100, alpha='auto', eta='symmetric', random_state=1)\n", - "var_lambda = lda.state.get_lambda()" - ] - }, - { - "cell_type": "code", - "execution_count": 118, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(onlineatvb)\n", - "OnlineAtVb = onlineatvb.OnlineAtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 157, - "metadata": { - "collapsed": false - }, 
- "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 28.3 s, sys: 12 ms, total: 28.4 s\n", - "Wall time: 28.4 s\n" - ] - } - ], - "source": [ - "%time model_online = OnlineAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=10, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=200, random_state=2, var_lambda=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "metadata": { - "collapsed": false, - "scrolled": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.075*image + 0.037*field + 0.034*visual + 0.031*position + 0.029*move + 0.025*map + 0.025*location + 0.021*center + 0.021*search + 0.019*human'),\n", - " (1,\n", - " '0.044*bit + 0.038*code + 0.030*hopfield + 0.029*matrix + 0.024*eq + 0.019*stored + 0.017*minimum + 0.016*stage + 0.014*optimization + 0.013*column'),\n", - " (2,\n", - " '0.031*iv + 0.025*differential + 0.023*code + 0.023*scheme + 0.020*adaptive + 0.017*find + 0.016*criterion + 0.015*he + 0.014*bound + 0.014*half'),\n", - " (3,\n", - " '0.035*activity + 0.033*array + 0.027*cell + 0.023*synaptic + 0.020*low + 0.018*rate + 0.017*synapsis + 0.016*region + 0.016*storage + 0.016*distribution'),\n", - " (4,\n", - " '0.052*role + 0.049*loop + 0.046*processor + 0.037*sequence + 0.029*gain + 0.021*product + 0.018*activation + 0.018*multiple + 0.018*edge + 0.017*address'),\n", - " (5,\n", - " '0.028*stimulus + 0.024*classification + 0.024*shape + 0.020*circuit + 0.018*fully + 0.018*design + 0.015*power + 0.015*pp + 0.014*sample + 0.014*experiment'),\n", - " (6,\n", - " '0.042*capacity + 0.034*associative_memory + 0.019*feedback + 0.018*cell + 0.017*phase + 0.016*interaction + 0.015*delay + 0.014*recall + 0.014*sequence + 0.014*matrix'),\n", - " (7,\n", - " '0.061*node + 0.049*hidden + 0.036*convergence + 0.033*energy 
+ 0.030*gradient + 0.030*dynamic + 0.019*back_propagation + 0.016*back + 0.016*propagation + 0.016*learning_algorithm'),\n", - " (8,\n", - " '0.060*training + 0.039*representation + 0.029*connectionist + 0.028*trained + 0.020*context + 0.017*learned + 0.017*target + 0.015*mcclelland + 0.015*hidden_unit + 0.015*rumelhart'),\n", - " (9,\n", - " '0.074*firing + 0.056*stimulus + 0.056*cell + 0.037*connectivity + 0.033*path + 0.030*potential + 0.027*temporal + 0.027*control + 0.021*synaptic + 0.019*inhibition')]" - ] - }, - "execution_count": 40, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_online.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 273, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [21]\n", - "[(0, 0.16188318876615412), (1, 0.80823920909246583), (3, 0.021312448059559796)]\n", - "\n", - "Geoffrey E. Hinton\n", - "Docs: [146, 276, 235, 270]\n", - "[(0, 0.14004630013032807),\n", - " (1, 0.23772038268835666),\n", - " (2, 0.5640333145036398),\n", - " (3, 0.058200002677675597)]\n", - "\n", - "Michael I. Jordan\n", - "Docs: [205]\n", - "[(0, 0.26951795605324808),\n", - " (1, 0.1612862641672847),\n", - " (2, 0.4872153771544665),\n", - " (3, 0.081980402625000656)]\n", - "\n", - "James M. Bower\n", - "Docs: [150, 128, 162, 101, 188, 251, 244]\n", - "[(0, 0.67413384788621999),\n", - " (1, 0.071583305581578827),\n", - " (2, 0.06345028631865203),\n", - " (3, 0.19083256021354914)]\n" - ] - } - ], - "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Geoffrey E. 
Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online.get_author_topics(author2id[name]))\n", - "\n", - "name = 'James M. Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model_online.get_author_topics(author2id[name]))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Test on a small dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 202, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "n_docs = 10\n", - "\n", - "from copy import deepcopy\n", - "\n", - "small_doc2author = deepcopy(dict(list(doc2author.items())[:n_docs]))\n", - "small_doc2author = dict(small_doc2author)\n", - "\n", - "small_corpus = corpus[:n_docs]" - ] - }, - { - "cell_type": "code", - "execution_count": 203, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "authors_ids = set()\n", - "for d, a_doc_ids in small_doc2author.items():\n", - " for a in a_doc_ids:\n", - " authors_ids.add(a)\n", - "\n", - "authors_ids = list(authors_ids)\n", - "author_id_dict = dict(zip(authors_ids, range(len(authors_ids))))" - ] - }, - { - "cell_type": "code", - "execution_count": 204, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "for d, a_ids in small_doc2author.items():\n", - " for i, a in enumerate(a_ids):\n", - " small_doc2author[d][i] = author_id_dict[a]" - ] - }, - { - "cell_type": "code", - "execution_count": 205, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# Make a mapping from author IDs to document IDs.\n", - "small_author2doc = {}\n", - "for a in range(len(author_id_dict)):\n", - " small_author2doc[a] = []\n", - " for d, a_ids in small_doc2author.items():\n", - 
" if a in a_ids:\n", - " small_author2doc[a].append(d)" - ] - }, - { - "cell_type": "code", - "execution_count": 206, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "author_id_dict_rev = dict(zip(range(len(authors_ids)), authors_ids))\n", - "\n", - "small_id2author = {}\n", - "for a, a_id in author_id_dict_rev.items():\n", - " small_id2author[a] = id2author[a_id]" - ] - }, - { - "cell_type": "code", - "execution_count": 207, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "phi is 10 x 681 x 10 (68100 elements)\n", - "mu is 10 x 681 x 21 (143010 elements)\n" - ] - } - ], - "source": [ - "print('phi is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), 10,\n", - " len(small_corpus) * len(dictionary.id2token) * 10))\n", - "print('mu is %d x %d x %d (%d elements)' %(len(small_corpus), len(dictionary.id2token), len(small_author2doc),\n", - " len(small_corpus) * len(dictionary.id2token) * len(small_author2doc)))" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(onlineatvb)\n", - "OnlineAtVb = onlineatvb.OnlineAtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 212, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 34.6 s, sys: 4 ms, total: 34.6 s\n", - "Wall time: 34.6 s\n" - ] - } - ], - "source": [ - "%time model = OnlineAtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-3, \\\n", - " iterations=1, passes=200, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=10, random_state=1, var_lambda=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 133, - "metadata": { - "collapsed": false - }, - 
"outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Amir F.Atiya\n", - "Docs: [5]\n", - "[(0, 0.26236663424329809),\n", - " (1, 0.055837758145413023),\n", - " (2, 0.32385947243135804),\n", - " (4, 0.031231118362347546),\n", - " (5, 0.049702348068489471),\n", - " (6, 0.063277167602715914),\n", - " (7, 0.11515798924424819),\n", - " (9, 0.098115022122885684)]\n", - "\n", - "FrankWilczek\n", - "Docs: [1]\n", - "[(0, 0.21018310687516228),\n", - " (1, 0.39886126379385306),\n", - " (2, 0.18071281961456737),\n", - " (3, 0.052218386110533886),\n", - " (5, 0.039636353968810233),\n", - " (8, 0.032375816267307712),\n", - " (9, 0.073725725628590477)]\n" - ] - } - ], - "source": [ - "name = 'Amir F.Atiya'\n", - "print('\\n%s' % name)\n", - "print('Docs:', model.author2doc[model.author2id[name]])\n", - "pprint(model.get_author_topics(model.author2id[name]))\n", - "\n", - "name = 'FrankWilczek'\n", - "print('\\n%s' % name)\n", - "print('Docs:', model.author2doc[model.author2id[name]])\n", - "pprint(model.get_author_topics(model.author2id[name]))\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Mini-batch" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(minibatchatvb)\n", - "MinibatchAtVb = minibatchatvb.MinibatchAtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 2min 1s, sys: 24 ms, total: 2min 1s\n", - "Wall time: 2min 1s\n" - ] - } - ], - "source": [ - "%time model_online = MinibatchAtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-10, \\\n", - " iterations=1, passes=1, alpha=None, eta=None, decay=0.5, offset=1.0, \\\n", - " eval_every=1, random_state=1, 
var_lambda=None, chunksize=1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Offline AT VB 2" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(atvb2)\n", - "AtVb2 = atvb2.AtVb2" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 21min 58s, sys: 376 ms, total: 21min 58s\n", - "Wall time: 21min 58s\n" - ] - } - ], - "source": [ - "%time model_offline2 = AtVb2(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=100, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", - " eval_every=10, random_state=1)" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.018*path + 0.014*center + 0.013*constraint + 0.011*map + 0.011*activity + 0.010*array + 0.010*rate + 0.010*cycle + 0.010*visual + 0.010*iv'),\n", - " (1,\n", - " '0.019*matrix + 0.016*delay + 0.013*associative_memory + 0.013*capacity + 0.012*potential + 0.010*storage + 0.010*classification + 0.010*dynamic + 0.010*synaptic + 0.009*rate'),\n", - " (2,\n", - " '0.044*cell + 0.020*stimulus + 0.014*probability + 0.010*region + 0.009*training + 0.008*noise + 0.007*field + 0.007*node + 0.007*actual + 0.007*area'),\n", - " (3,\n", - " '0.026*code + 0.025*hopfield + 0.015*sequence + 0.015*image + 0.013*energy + 0.013*length + 0.013*machine + 0.012*field + 0.012*matrix + 0.011*minimum'),\n", - " (4,\n", - " '0.032*processor + 0.023*activation + 0.012*dynamic + 0.012*operation + 0.012*hidden + 0.011*energy + 0.011*edge + 0.010*machine + 0.010*update + 0.009*training'),\n", - " (5,\n", - " '0.024*hidden + 
0.016*hidden_unit + 0.013*matrix + 0.012*sequence + 0.012*adaptive + 0.012*action + 0.010*multiple + 0.009*training + 0.009*back + 0.008*learn'),\n", - " (6,\n", - " '0.026*training + 0.015*stage + 0.014*bit + 0.013*optimization + 0.012*convergence + 0.011*eq + 0.011*surface + 0.011*node + 0.011*neural_net + 0.010*code'),\n", - " (7,\n", - " '0.049*cell + 0.015*node + 0.014*feature + 0.013*region + 0.011*map + 0.011*control + 0.011*back + 0.010*temporal + 0.008*cycle + 0.008*decision'),\n", - " (8,\n", - " '0.023*cell + 0.014*probability + 0.012*current + 0.012*position + 0.012*image + 0.011*principle + 0.010*noise + 0.008*dimensional + 0.008*shape + 0.007*firing'),\n", - " (9,\n", - " '0.042*representation + 0.033*activity + 0.029*role + 0.026*firing + 0.023*cell + 0.014*stimulus + 0.014*variable + 0.013*product + 0.012*potential + 0.010*synaptic')]" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_offline2.show_topics()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## \"Offline\" AT VB" - ] - }, - { - "cell_type": "code", - "execution_count": 356, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "phi is 286 x 2245 x 10 (6420700 elements)\n", - "mu is 286 x 2245 x 578 (371116460 elements)\n" - ] - } - ], - "source": [ - "print('phi is %d x %d x %d (%d elements)' %(len(corpus), len(dictionary.id2token), 10,\n", - " len(corpus) * len(dictionary.id2token) * 10))\n", - "print('mu is %d x %d x %d (%d elements)' %(len(corpus), len(dictionary.id2token), len(author2doc),\n", - " len(corpus) * len(dictionary.id2token) * len(author2doc)))" - ] - }, - { - "cell_type": "code", - "execution_count": 238, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 7.81 s, sys: 0 ns, total: 7.81 s\n", - "Wall time: 7.81 s\n" - ] - } 
- ], - "source": [ - "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=10, \\\n", - " iterations=100, alpha='auto', eta='symmetric', random_state=1)\n", - "var_lambda = lda.state.get_lambda()" - ] - }, - { - "cell_type": "code", - "execution_count": 185, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(atvb)\n", - "AtVb = atvb.AtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 245, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 2min 34s, sys: 104 ms, total: 2min 34s\n", - "Wall time: 2min 34s\n" - ] - } - ], - "source": [ - "%time model_offline = AtVb(corpus=corpus, num_topics=10, id2word=dictionary.id2token, id2author=id2author, \\\n", - " author2doc=author2doc, doc2author=doc2author, threshold=1e-12, \\\n", - " iterations=10, alpha='symmetric', eta='symmetric', var_lambda=None, \\\n", - " eval_every=1, random_state=1)" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.019*path + 0.015*center + 0.014*constraint + 0.011*rate + 0.011*map + 0.011*cycle + 0.010*array + 0.010*visual + 0.009*activity + 0.009*iv'),\n", - " (1,\n", - " '0.018*matrix + 0.016*delay + 0.013*associative_memory + 0.013*potential + 0.012*capacity + 0.011*synaptic + 0.010*classification + 0.010*dynamic + 0.010*storage + 0.008*circuit'),\n", - " (2,\n", - " '0.040*cell + 0.015*stimulus + 0.014*probability + 0.010*region + 0.010*training + 0.009*noise + 0.008*convergence + 0.007*field + 0.007*node + 0.007*positive'),\n", - " (3,\n", - " '0.026*code + 0.024*hopfield + 0.015*sequence + 0.015*image + 0.013*length + 0.012*matrix + 0.012*energy + 0.012*field + 0.012*machine + 0.011*current'),\n", - " (4,\n", - " '0.032*processor + 0.023*activation + 0.013*dynamic + 0.013*energy + 0.012*operation + 0.011*edge + 
0.010*hidden + 0.010*machine + 0.010*update + 0.009*matrix'),\n", - " (5,\n", - " '0.022*hidden + 0.016*hidden_unit + 0.014*matrix + 0.013*sequence + 0.012*adaptive + 0.012*action + 0.010*multiple + 0.009*training + 0.008*back + 0.008*stored'),\n", - " (6,\n", - " '0.025*training + 0.015*stage + 0.014*bit + 0.013*optimization + 0.012*convergence + 0.011*eq + 0.011*surface + 0.010*neural_net + 0.010*code + 0.010*hidden'),\n", - " (7,\n", - " '0.056*cell + 0.017*node + 0.015*region + 0.013*feature + 0.013*map + 0.012*back + 0.011*control + 0.010*temporal + 0.009*decision + 0.008*activity'),\n", - " (8,\n", - " '0.023*cell + 0.013*probability + 0.013*image + 0.012*position + 0.012*current + 0.011*principle + 0.010*noise + 0.008*dimensional + 0.007*shape + 0.007*firing'),\n", - " (9,\n", - " '0.042*representation + 0.034*activity + 0.029*role + 0.025*firing + 0.021*cell + 0.017*stimulus + 0.014*variable + 0.014*product + 0.012*potential + 0.010*synaptic')]" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model_offline.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 142, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.015*dynamic + 0.014*delay + 0.012*frequency + 0.011*phase + 0.010*noise + 0.008*temporal + 0.007*filter + 0.007*oscillation + 0.007*target + 0.007*controller'),\n", - " (1,\n", - " '0.017*memory + 0.017*vector + 0.015*matrix + 0.013*hopfield + 0.011*probability + 0.008*capacity + 0.008*let + 0.008*fig + 0.007*code + 0.007*distribution'),\n", - " (2,\n", - " '0.035*cell + 0.018*response + 0.012*region + 0.012*stimulus + 0.011*cortex + 0.009*fig + 0.009*sensory + 0.009*motor + 0.009*control + 0.009*velocity'),\n", - " (3,\n", - " '0.041*image + 0.038*field + 0.023*visual + 0.016*map + 0.015*receptive + 0.014*receptive_field + 0.014*motion + 0.012*eye + 0.011*direction + 0.008*vision'),\n", - " (4,\n", - " 
'0.030*hidden + 0.017*hidden_unit + 0.016*activation + 0.012*propagation + 0.010*processor + 0.009*back_propagation + 0.008*gradient + 0.007*hidden_layer + 0.007*bit + 0.006*internal'),\n", - " (5,\n", - " '0.018*vector + 0.016*sequence + 0.016*object + 0.014*memory + 0.009*adaptive + 0.009*matrix + 0.008*recurrent + 0.008*action + 0.008*self + 0.008*view'),\n", - " (6,\n", - " '0.025*classifier + 0.024*recognition + 0.023*speech + 0.014*classification + 0.013*trained + 0.011*class + 0.010*test + 0.010*noise + 0.010*hidden + 0.009*word'),\n", - " (7,\n", - " '0.033*node + 0.008*position + 0.007*connectionist + 0.005*neural_net + 0.005*tree + 0.005*character + 0.004*move + 0.004*generalization + 0.004*search + 0.004*human'),\n", - " (8,\n", - " '0.036*circuit + 0.024*analog + 0.024*chip + 0.020*voltage + 0.020*current + 0.014*synapse + 0.010*transistor + 0.010*vlsi + 0.009*device + 0.009*implementation'),\n", - " (9,\n", - " '0.030*cell + 0.021*firing + 0.019*synaptic + 0.017*activity + 0.016*potential + 0.010*synapsis + 0.010*spike + 0.009*stimulus + 0.009*memory + 0.009*membrane')]" - ] - }, - "execution_count": 142, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 149, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [21]\n" - ] - }, - { - "ename": "NameError", - "evalue": "name 'model' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\n%s'\u001b[0m \u001b[0;34m%\u001b[0m 
\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Docs:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauthor2doc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mpprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_author_topics\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mauthor2id\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'Geoffrey E. Hinton'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'model' is not defined" - ] - } - ], - "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Geoffrey E. Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'James M. 
Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.019*cell + 0.008*matrix + 0.008*representation + 0.008*training + 0.007*activity + 0.007*node + 0.006*dynamic + 0.006*field + 0.006*probability + 0.005*hopfield'),\n", - " (1,\n", - " '0.016*cell + 0.007*matrix + 0.007*capacity + 0.006*feature + 0.006*activity + 0.006*node + 0.006*field + 0.006*dynamic + 0.006*training + 0.006*stimulus'),\n", - " (2,\n", - " '0.012*cell + 0.010*training + 0.008*matrix + 0.007*stimulus + 0.007*hopfield + 0.006*image + 0.006*noise + 0.006*representation + 0.006*hidden + 0.006*convergence'),\n", - " (3,\n", - " '0.011*cell + 0.008*hopfield + 0.007*activity + 0.007*rate + 0.006*matrix + 0.006*hidden + 0.006*field + 0.006*training + 0.005*node + 0.005*representation'),\n", - " (4,\n", - " '0.012*cell + 0.008*activity + 0.007*matrix + 0.007*training + 0.006*field + 0.006*code + 0.006*representation + 0.006*firing + 0.006*current + 0.005*synaptic'),\n", - " (5,\n", - " '0.014*cell + 0.008*hidden + 0.007*sequence + 0.007*training + 0.006*field + 0.006*noise + 0.006*node + 0.006*dynamic + 0.006*hopfield + 0.006*representation'),\n", - " (6,\n", - " '0.025*cell + 0.011*matrix + 0.009*training + 0.006*activity + 0.006*probability + 0.006*hopfield + 0.006*synaptic + 0.005*node + 0.005*stimulus + 0.005*representation'),\n", - " (7,\n", - " '0.016*cell + 0.008*training + 0.007*activity + 0.007*representation + 0.007*matrix + 0.007*hidden + 0.007*noise + 0.006*hopfield + 0.006*probability + 0.006*firing'),\n", - " (8,\n", - " '0.012*cell + 0.008*image + 0.007*training + 0.006*feature + 0.006*hopfield + 0.006*representation + 0.006*probability + 0.006*firing + 0.006*activity + 0.005*synaptic'),\n", - " (9,\n", - " '0.012*cell + 
0.008*matrix + 0.008*activity + 0.007*representation + 0.007*training + 0.006*image + 0.006*capacity + 0.006*rate + 0.006*hopfield + 0.006*node')]" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 118, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Yaser S.Abu-Mostafa\n", - "Docs: [21]\n", - "[(0, 0.090225715808980797),\n", - " (1, 0.014047723409152875),\n", - " (3, 0.38971799227229242),\n", - " (4, 0.30695125800680684),\n", - " (5, 0.11680215128570454),\n", - " (7, 0.012641840087616362),\n", - " (8, 0.069095036605336377)]\n", - "\n", - "Geoffrey E. Hinton\n", - "Docs: [276, 235, 270]\n", - "[(0, 0.17326190127690461),\n", - " (2, 0.062709625689712375),\n", - " (3, 0.023215349136065065),\n", - " (4, 0.096803072840719678),\n", - " (5, 0.1267901905748583),\n", - " (6, 0.47635551675437715),\n", - " (7, 0.025581291656655011),\n", - " (9, 0.013530262666658776)]\n", - "\n", - "Michael I. Jordan\n", - "Docs: [205]\n", - "[(0, 0.22189029162114421),\n", - " (2, 0.033072831647105602),\n", - " (4, 0.051509519512663651),\n", - " (5, 0.63361728214218349),\n", - " (7, 0.045992411979857574),\n", - " (9, 0.012757930948596466)]\n", - "\n", - "James M. Bower\n", - "Docs: [188, 251, 244]\n", - "[(1, 0.29194178492747924),\n", - " (2, 0.47740737076112999),\n", - " (3, 0.023636461735819542),\n", - " (4, 0.010413505064807139),\n", - " (7, 0.018554608959817139),\n", - " (9, 0.17063597622983562)]\n" - ] - } - ], - "source": [ - "name = 'Yaser S.Abu-Mostafa'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Geoffrey E. 
Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'Michael I. Jordan'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))\n", - "\n", - "name = 'James M. Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[author2id[name]])\n", - "pprint(model.get_author_topics(author2id[name]))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Test on small corpus" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "lda = LdaModel(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, passes=10)\n", - "var_lambda = lda.state.get_lambda()" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reload(atvb)\n", - "AtVb = atvb.AtVb" - ] - }, - { - "cell_type": "code", - "execution_count": 210, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 1min 25s, sys: 0 ns, total: 1min 25s\n", - "Wall time: 1min 25s\n" - ] - } - ], - "source": [ - "%time model = AtVb(corpus=small_corpus, num_topics=10, id2word=dictionary.id2token, id2author=small_id2author, \\\n", - " author2doc=small_author2doc, doc2author=small_doc2author, threshold=1e-12, \\\n", - " iterations=100, alpha='symmetric', eta='symmetric', \\\n", - " eval_every=10, random_state=1, var_lambda=None)" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.071*group + 0.039*matrix + 0.032*feedback + 0.027*whose + 0.018*obtain + 0.016*scheme + 0.015*constraint + 0.015*expression + 0.014*unique + 
0.013*computational'),\n", - " (1,\n", - " '0.041*map + 0.040*field + 0.034*location + 0.033*brain + 0.030*node + 0.021*requires + 0.020*propagation + 0.016*back_propagation + 0.016*distribution + 0.014*mechanism'),\n", - " (2,\n", - " '0.084*processor + 0.075*edge + 0.052*activation + 0.034*update + 0.021*column + 0.020*run + 0.019*implementation + 0.018*control + 0.018*operation + 0.017*content'),\n", - " (3,\n", - " '0.046*image + 0.038*gradient + 0.027*flow + 0.025*field + 0.024*analog + 0.023*circuit + 0.022*constraint + 0.018*square + 0.017*vision + 0.017*technique'),\n", - " (4,\n", - " '0.023*dynamic + 0.021*phase + 0.018*cell + 0.018*variable + 0.017*with_respect + 0.017*respect + 0.016*path + 0.015*noise + 0.014*energy + 0.011*limit'),\n", - " (5,\n", - " '0.080*processor + 0.061*activation + 0.040*edge + 0.040*update + 0.021*store + 0.020*operation + 0.018*required + 0.018*address + 0.017*stored + 0.016*machine'),\n", - " (6,\n", - " '0.038*map + 0.037*brain + 0.033*stimulus + 0.024*functional + 0.021*noise + 0.020*associative_memory + 0.020*recall + 0.017*series + 0.015*scale + 0.015*associated'),\n", - " (7,\n", - " '0.049*potential + 0.044*cell + 0.035*connectivity + 0.026*synaptic + 0.025*artificial + 0.023*architecture + 0.015*temporal + 0.014*brain + 0.014*computational + 0.013*action'),\n", - " (8,\n", - " '0.075*image + 0.032*log + 0.024*dimensional + 0.018*mapping + 0.017*matrix + 0.016*center + 0.015*node + 0.014*recall + 0.013*back + 0.013*th'),\n", - " (9,\n", - " '0.058*scheme + 0.048*capacity + 0.047*probability + 0.040*representation + 0.030*stored + 0.028*binary + 0.025*represented + 0.023*code + 0.022*relationship + 0.021*bound')]" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0, 
0.55485121572041607),\n", - " (4, 0.17897884328936686),\n", - " (6, 0.14414251935372879),\n", - " (8, 0.11957893769069983)]" - ] - }, - "execution_count": 35, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.get_author_topics(0)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "## LDA" - ] - }, - { - "cell_type": "code", - "execution_count": 131, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "reload(gensim.models.ldamodel)\n", - "LdaModel = gensim.models.ldamodel.LdaModel" - ] - }, - { - "cell_type": "code", - "execution_count": 151, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 2.48 s, sys: 524 ms, total: 3 s\n", - "Wall time: 2.43 s\n" - ] - } - ], - "source": [ - "%time lda = LdaModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, passes=1, \\\n", - " iterations=1, alpha='symmetric', eta='symmetric', eval_every=0)" - ] - }, - { - "cell_type": "code", - "execution_count": 154, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 288 ms, sys: 0 ns, total: 288 ms\n", - "Wall time: 290 ms\n", - "Bound: -3.588e+05\n" - ] - } - ], - "source": [ - "%time lda_bound = lda.bound(sample(corpus, 10))\n", - "print('Bound: %.3e' % lda_bound)" - ] - }, - { - "cell_type": "code", - "execution_count": 155, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0,\n", - " '0.004*neuron + 0.003*image + 0.003*layer + 0.003*field + 0.003*class + 0.003*cell + 0.003*signal + 0.003*noise + 0.003*hidden + 0.002*node'),\n", - " (1,\n", - " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*class + 0.003*signal + 0.003*matrix + 0.003*layer + 0.003*noise + 0.002*hidden + 0.002*recognition'),\n", - " (2,\n", - " '0.004*cell + 0.003*neuron + 0.003*matrix + 
0.003*signal + 0.003*image + 0.003*hidden + 0.002*rule + 0.002*response + 0.002*field + 0.002*dynamic'),\n", - " (3,\n", - " '0.005*neuron + 0.003*layer + 0.003*image + 0.003*cell + 0.002*class + 0.002*net + 0.002*hidden + 0.002*control + 0.002*sequence + 0.002*response'),\n", - " (4,\n", - " '0.004*layer + 0.003*image + 0.003*neuron + 0.003*cell + 0.003*hidden + 0.003*signal + 0.003*component + 0.002*recognition + 0.002*net + 0.002*node'),\n", - " (5,\n", - " '0.005*image + 0.004*neuron + 0.004*layer + 0.003*hidden + 0.003*cell + 0.002*control + 0.002*class + 0.002*net + 0.002*noise + 0.002*signal'),\n", - " (6,\n", - " '0.005*neuron + 0.005*layer + 0.004*hidden + 0.003*image + 0.003*cell + 0.003*class + 0.003*rule + 0.002*noise + 0.002*net + 0.002*matrix'),\n", - " (7,\n", - " '0.004*neuron + 0.003*image + 0.003*cell + 0.003*hidden + 0.003*recognition + 0.003*field + 0.003*layer + 0.002*noise + 0.002*node + 0.002*component'),\n", - " (8,\n", - " '0.004*neuron + 0.003*image + 0.003*signal + 0.003*recognition + 0.003*cell + 0.003*layer + 0.003*noise + 0.003*rule + 0.002*class + 0.002*hidden'),\n", - " (9,\n", - " '0.005*neuron + 0.004*class + 0.003*layer + 0.003*image + 0.003*cell + 0.002*hidden + 0.002*signal + 0.002*control + 0.002*field + 0.002*net')]" - ] - }, - "execution_count": 155, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "lda.show_topics()" - ] - }, - { - "cell_type": "code", - "execution_count": 150, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Document 5\n", - "[(0, 0.11806384798431847),\n", - " (1, 0.099612053680607937),\n", - " (2, 0.076668193975964943),\n", - " (3, 0.075072909998916373),\n", - " (4, 0.067243477696594139),\n", - " (5, 0.1004083782314163),\n", - " (6, 0.1049567779188061),\n", - " (7, 0.10291505408912022),\n", - " (8, 0.12682229186467239),\n", - " (9, 0.12823701455958317)]\n", - "\n", - "Document 50\n", - "[(0, 
0.12019310780479558),\n", - " (1, 0.11241507965934601),\n", - " (2, 0.084261861610351887),\n", - " (3, 0.074722708722277847),\n", - " (4, 0.089536455599529025),\n", - " (5, 0.11951468917677081),\n", - " (6, 0.077140801257090358),\n", - " (7, 0.086592729473957755),\n", - " (8, 0.12048290979429044),\n", - " (9, 0.11513965690159025)]\n" - ] - } - ], - "source": [ - "d = 5\n", - "print('Document %d' %d)\n", - "pprint(lda[corpus[d]])\n", - "\n", - "d = 50\n", - "print('\\nDocument %d' %d)\n", - "pprint(lda[corpus[d]])" - ] - }, - { - "cell_type": "code", - "execution_count": 145, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "['scaling',\n", - " 'property',\n", - " 'of',\n", - " 'coarse',\n", - " 'coded',\n", - " 'symbol',\n", - " 'memory',\n", - " 'ronald',\n", - " 'rosenfeld',\n", - " 'david',\n", - " 'touretzky',\n", - " 'computer',\n", - " 'science',\n", - " 'department',\n", - " 'carnegie',\n", - " 'mellon',\n", - " 'university',\n", - " 'pittsburgh',\n", - " 'pennsylvania',\n", - " 'abstract']" - ] - }, - "execution_count": 145, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "docs[0][:20]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Convergence and speed plots" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from bokeh.io import output_notebook\n", - "from bokeh.models.layouts import Row, Column\n", - "from bokeh.layouts import gridplot, layout\n", - "from bokeh.models import Title, Legend, Div\n", - "from bokeh.plotting import figure, output_file, show" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "
\n", - " \n", - " Loading BokehJS ...\n", - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/javascript": [ - "\n", - "(function(global) {\n", - " function now() {\n", - " return new Date();\n", - " }\n", - "\n", - " var force = \"1\";\n", - "\n", - " if (typeof (window._bokeh_onload_callbacks) === \"undefined\" || force !== \"\") {\n", - " window._bokeh_onload_callbacks = [];\n", - " window._bokeh_is_loading = undefined;\n", - " }\n", - "\n", - "\n", - " \n", - " if (typeof (window._bokeh_timeout) === \"undefined\" || force !== \"\") {\n", - " window._bokeh_timeout = Date.now() + 5000;\n", - " window._bokeh_failed_load = false;\n", - " }\n", - "\n", - " var NB_LOAD_WARNING = {'data': {'text/html':\n", - " \"
\\n\"+\n", - " \"

\\n\"+\n", - " \"BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this \\n\"+\n", - " \"may be due to a slow or bad network connection. Possible fixes:\\n\"+\n", - " \"

\\n\"+\n", - " \"
    \\n\"+\n", - " \"
  • re-rerun `output_notebook()` to attempt to load from CDN again, or
  • \\n\"+\n", - " \"
  • use INLINE resources instead, as so:
  • \\n\"+\n", - " \"
\\n\"+\n", - " \"\\n\"+\n", - " \"from bokeh.resources import INLINE\\n\"+\n", - " \"output_notebook(resources=INLINE)\\n\"+\n", - " \"\\n\"+\n", - " \"
\"}};\n", - "\n", - " function display_loaded() {\n", - " if (window.Bokeh !== undefined) {\n", - " Bokeh.$(\"#8e011ac0-f662-4201-8e19-c1d0bd286cb0\").text(\"BokehJS successfully loaded.\");\n", - " } else if (Date.now() < window._bokeh_timeout) {\n", - " setTimeout(display_loaded, 100)\n", - " }\n", - " }\n", - "\n", - " function run_callbacks() {\n", - " window._bokeh_onload_callbacks.forEach(function(callback) { callback() });\n", - " delete window._bokeh_onload_callbacks\n", - " console.info(\"Bokeh: all callbacks have finished\");\n", - " }\n", - "\n", - " function load_libs(js_urls, callback) {\n", - " window._bokeh_onload_callbacks.push(callback);\n", - " if (window._bokeh_is_loading > 0) {\n", - " console.log(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n", - " return null;\n", - " }\n", - " if (js_urls == null || js_urls.length === 0) {\n", - " run_callbacks();\n", - " return null;\n", - " }\n", - " console.log(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n", - " window._bokeh_is_loading = js_urls.length;\n", - " for (var i = 0; i < js_urls.length; i++) {\n", - " var url = js_urls[i];\n", - " var s = document.createElement('script');\n", - " s.src = url;\n", - " s.async = false;\n", - " s.onreadystatechange = s.onload = function() {\n", - " window._bokeh_is_loading--;\n", - " if (window._bokeh_is_loading === 0) {\n", - " console.log(\"Bokeh: all BokehJS libraries loaded\");\n", - " run_callbacks()\n", - " }\n", - " };\n", - " s.onerror = function() {\n", - " console.warn(\"failed to load library \" + url);\n", - " };\n", - " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", - " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", - " }\n", - " };var element = document.getElementById(\"8e011ac0-f662-4201-8e19-c1d0bd286cb0\");\n", - " if (element == null) {\n", - " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '8e011ac0-f662-4201-8e19-c1d0bd286cb0' but 
no matching script tag was found. \")\n", - " return false;\n", - " }\n", - "\n", - " var js_urls = ['https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.js', 'https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.js'];\n", - "\n", - " var inline_js = [\n", - " function(Bokeh) {\n", - " Bokeh.set_log_level(\"info\");\n", - " },\n", - " \n", - " function(Bokeh) {\n", - " \n", - " Bokeh.$(\"#8e011ac0-f662-4201-8e19-c1d0bd286cb0\").text(\"BokehJS is loading...\");\n", - " },\n", - " function(Bokeh) {\n", - " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", - " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", - " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", - " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", - " }\n", - " ];\n", - "\n", - " function run_inline_js() {\n", - " \n", - " if ((window.Bokeh !== undefined) || (force === \"1\")) {\n", - " for (var i = 0; i < inline_js.length; i++) {\n", - " inline_js[i](window.Bokeh);\n", - " }if (force === \"1\") {\n", - " display_loaded();\n", - " }} else if (Date.now() < window._bokeh_timeout) {\n", - " setTimeout(run_inline_js, 100);\n", - " } else if (!window._bokeh_failed_load) {\n", - " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", - " window._bokeh_failed_load = true;\n", - " } else if (!force) {\n", - " var cell = $(\"#8e011ac0-f662-4201-8e19-c1d0bd286cb0\").parents('.cell').data().cell;\n", - " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", - " }\n", - "\n", - " }\n", - "\n", - " if (window._bokeh_is_loading === 0) {\n", - " console.log(\"Bokeh: BokehJS loaded, going straight to plotting\");\n", - " run_inline_js();\n", - " } else {\n", - " load_libs(js_urls, function() {\n", - " console.log(\"Bokeh: BokehJS plotting callback run at\", now());\n", - " 
run_inline_js();\n", - " });\n", - " }\n", - "}(this));" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "output_notebook()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 10 iterations (passes)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# NOTE: the times of both offline and online are *without* vectorization!\n", - "\n", - "offline = [-3.958e+05, -3.430e+05, -3.428e+05, -3.426e+05, -3.423e+05, -3.417e+05, -3.406e+05, -3.388e+05, -3.361e+05, -3.326e+05, -3.285e+05]\n", - "\n", - "online_1iter = [-3.958e+05, -3.471e+05, -3.456e+05, -3.417e+05, -3.338e+05, -3.244e+05, -3.165e+05, -3.111e+05, -3.075e+05, -3.051e+05, -3.036e+05]\n", - "\n", - "online_10iter = [-3.958e+05, -3.343e+05, -3.223e+05, -3.128e+05, -3.072e+05, -3.041e+05, -3.023e+05, -3.011e+05, -3.003e+05, -2.997e+05, -2.993e+05]" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "iterations = range(10)" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "offline_time = [20 * 60 + 49, 21 * 60 + 8, 21 * 60 + 25, 21 * 60 + 41, 21 * 60 + 56, 22 * 60 + 11, 22 * 60 + 25, 22 * 60 + 41, 22 * 60 + 56, 23 * 60 + 11, 23 * 60 + 26]\n", - "offline_time = np.array(offline_time) - offline_time[0]\n", - "\n", - "online_1iter_time = [23 * 60 + 54, 23 * 60 + 55, 23 * 60 + 55, 23 * 60 + 56, 23 * 60 + 58, 23 * 60 + 59, 24 * 60 + 0, 24 * 60 + 1, 24 * 60 + 2, 24 * 60 + 3, 24 * 60 + 4]\n", - "online_1iter_time = np.array(online_1iter_time) - online_1iter_time[0]\n", - " \n", - "online_10iter_time = [24 * 60 + 59, 25 * 60 + 0, 25 * 60 + 2, 25 * 60 + 3, 25 * 60 + 4, 25 * 60 + 5, 25 * 60 + 6, 25 * 60 + 7, 25 * 60 + 8, 25 * 60 + 8, 25 * 60 + 9]\n", - "online_10iter_time = np.array(online_10iter_time) - 
online_10iter_time[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p = figure(title=(\"Variational lower bound (initial bound at %.3e)\" % offline[0]), x_axis_label='Iterations', y_axis_label='Bound')\n", - "p.circle(iterations[1:], offline[1:], legend=\"offline\", size=5, color='red')\n", - "p.circle(iterations[1:], online_1iter[1:], legend=\"online 1 iter\", size=5, color='green')\n", - "p.circle(iterations[1:], online_10iter[1:], legend=\"online 10 iter.\", size=5, color='blue')\n", - "p.plot_height=400\n", - "p.plot_width=600\n", - "p.toolbar_location = None\n", - "show(p)" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p1 = figure(title=(\"Offline (initial bound at %.3e)\" % offline[0]), x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "p1.circle(offline_time[1:], offline[1:], size=5, color='red')\n", - "p1.plot_height=400\n", - "p1.plot_width=400\n", - "p1.toolbar_location = None\n", - "\n", - "p2 = figure(title=\"Online\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "s1 = p2.circle(online_1iter_time[1:], online_1iter[1:], size=5, line_color='green')\n", - "s2 = p2.circle(online_10iter_time[1:], online_10iter[1:], size=5, line_color='blue')\n", - "p2.plot_height=400\n", - "p2.plot_width=400\n", - "p2.toolbar_location = None\n", - "\n", - "legend = Legend(items=[('1 iter', [s1]), ('10 iter', [s2])], location=(-100, -200))\n", - "p2.add_layout(legend, 'right')\n", - "\n", - "p3 = Row(p1, p2)\n", - "\n", - "show(p3)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 100 iterations (passes)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "# NOTE: the times of both offline and online are *without* vectorization!\n", - "\n", - "offline = [-3.957e+05, -3.304e+05, -3.049e+05, -3.005e+05, -2.989e+05, -2.981e+05, -2.976e+05, -2.973e+05, -2.970e+05, -2.968e+05, -2.966e+05]\n", - "\n", - "online_1iter = [-3.957e+05, -3.072e+05, -3.008e+05, -2.997e+05, -2.991e+05, -2.986e+05, -2.983e+05, -2.981e+05, -2.979e+05, -2.977e+05, -2.976e+05]\n", - "\n", - "online_10iter = [-3.957e+05, -3.001e+05, -2.975e+05, -2.965e+05, -2.961e+05, -2.958e+05, -2.955e+05, -2.954e+05, -2.953e+05, -2.952e+05, -2.951e+05]" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "offline_time = [38 * 60 + 8, 40 * 60 + 18, 42 * 60 + 36, 44 * 60 + 44, 46 * 60 + 57, 49 * 60 + 12, 51 * 60 + 19, 53 * 60 + 29, 55 * 60 + 
40, 57 * 60 + 56, 60 * 60 + 6]\n", - "offline_time = np.array(offline_time) - offline_time[0]\n", - "\n", - "online_1iter_time = [3 * 60 + 36, 3 * 60 + 59, 4 * 60 + 20, 4 * 60 + 43, 5 * 60 + 6, 5 * 60 + 28, 5 * 60 + 51, 6 * 60 + 14, 6 * 60 + 36, 6 * 60 + 56, 7 * 60 + 16]\n", - "online_1iter_time = np.array(online_1iter_time) - online_1iter_time[0]\n", - "\n", - "online_10iter_time = [8 * 60 + 1, 10 * 60 + 28, 12 * 60 + 50, 15 * 60 + 15, 17 * 60 + 40, 20 * 60 + 10, 22 * 60 + 35, 25 * 60 + 7, 27 * 60 + 31, 29 * 60 + 54, 32 * 60 + 13]\n", - "online_10iter_time = np.array(online_10iter_time) - online_10iter_time[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "iterations = range(0, 100, 10)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p = figure(title=\"Variational lower bound\", x_axis_label='Iteration (pass)', y_axis_label='Bound')\n", - "s1 = p.circle(iterations[1:], offline[1:], size=5, color='red')\n", - "s2 = p.circle(iterations[1:], online_1iter[1:], size=5, color='green')\n", - "s3 = p.circle(iterations[1:], online_10iter[1:],size=5, color='blue')\n", - "p.plot_height=400\n", - "p.plot_width=600\n", - "#p.toolbar_location = None\n", - "\n", - "legend = Legend(items=[('offline', [s1]), ('online 1 iter', [s2]), ('online 10 iter', [s3])], location=(-150, -200))\n", - "p.add_layout(legend, 'right')\n", - "\n", - "show(p)" - ] - }, - { - "cell_type": "code", - "execution_count": 54, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p1 = figure(title=\"Offline\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "p1.circle(offline_time[1:], offline[1:], size=5, color='red')\n", - "p1.plot_height=400\n", - "p1.plot_width=300\n", - "\n", - "p2 = figure(title=\"Online 1 iter\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "s1 = p2.circle(online_1iter_time[1:], online_1iter[1:], size=5, line_color='green')\n", - "p2.plot_height=400\n", - "p2.plot_width=300\n", - "\n", - "p3 = figure(title=\"Online 10 iter\", x_axis_label='Time (sec)', y_axis_label='Bound')\n", - "s3 = p3.circle(online_10iter_time[1:], online_10iter[1:], size=5, line_color='blue')\n", - "p3.plot_height=400\n", - "p3.plot_width=300\n", - "\n", - "caption = Div(text='

Variational lower bound\\n100 iterations (passes)

')\n", - "\n", - "l = layout([\n", - " [caption],\n", - " [p1, p2, p3]\n", - " ])\n", - "\n", - "show(l)" - ] - }, - { - "cell_type": "code", - "execution_count": 53, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "ename": "ImportError", - "evalue": "cannot import name 'css'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mbokeh\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mcss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mImportError\u001b[0m: cannot import name 'css'" - ] - } - ], - "source": [ - "from bokeh import css" - ] - }, - { - "cell_type": "code", - "execution_count": 52, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "ename": "AttributeError", - "evalue": "module 'bokeh' has no attribute 'css'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mbokeh\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m: module 'bokeh' has no attribute 'css'" - ] - } - ], - "source": [ - "bokeh.css" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.5.2" - } - 
}, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/docs/notebooks/atmodel_tests.ipynb b/docs/notebooks/atmodel_tests.ipynb new file mode 100644 index 0000000000..e1052f7240 --- /dev/null +++ b/docs/notebooks/atmodel_tests.ipynb @@ -0,0 +1,2097 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import random, sys\n", + "import numpy as np\n", + "from time import time\n", + "\n", + "from gensim.models import atmodel\n", + "from gensim.models import AuthorTopicModel\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " Loading BokehJS ...\n", + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/javascript": [ + "\n", + "(function(global) {\n", + " function now() {\n", + " return new Date();\n", + " }\n", + "\n", + " var force = \"1\";\n", + "\n", + " if (typeof (window._bokeh_onload_callbacks) === \"undefined\" || force !== \"\") {\n", + " window._bokeh_onload_callbacks = [];\n", + " window._bokeh_is_loading = undefined;\n", + " }\n", + "\n", + "\n", + " \n", + " if (typeof (window._bokeh_timeout) === \"undefined\" || force !== \"\") {\n", + " window._bokeh_timeout = Date.now() + 5000;\n", + " window._bokeh_failed_load = false;\n", + " }\n", + "\n", + " var NB_LOAD_WARNING = {'data': {'text/html':\n", + " \"
\\n\"+\n", + " \"

\\n\"+\n", + " \"BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this \\n\"+\n", + " \"may be due to a slow or bad network connection. Possible fixes:\\n\"+\n", + " \"

\\n\"+\n", + " \"
    \\n\"+\n", + " \"
  • re-rerun `output_notebook()` to attempt to load from CDN again, or
  • \\n\"+\n", + " \"
  • use INLINE resources instead, as so:
  • \\n\"+\n", + " \"
\\n\"+\n", + " \"\\n\"+\n", + " \"from bokeh.resources import INLINE\\n\"+\n", + " \"output_notebook(resources=INLINE)\\n\"+\n", + " \"\\n\"+\n", + " \"
\"}};\n", + "\n", + " function display_loaded() {\n", + " if (window.Bokeh !== undefined) {\n", + " Bokeh.$(\"#8cbfed0a-a343-4f49-92a8-d7255e883b08\").text(\"BokehJS successfully loaded.\");\n", + " } else if (Date.now() < window._bokeh_timeout) {\n", + " setTimeout(display_loaded, 100)\n", + " }\n", + " }\n", + "\n", + " function run_callbacks() {\n", + " window._bokeh_onload_callbacks.forEach(function(callback) { callback() });\n", + " delete window._bokeh_onload_callbacks\n", + " console.info(\"Bokeh: all callbacks have finished\");\n", + " }\n", + "\n", + " function load_libs(js_urls, callback) {\n", + " window._bokeh_onload_callbacks.push(callback);\n", + " if (window._bokeh_is_loading > 0) {\n", + " console.log(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n", + " return null;\n", + " }\n", + " if (js_urls == null || js_urls.length === 0) {\n", + " run_callbacks();\n", + " return null;\n", + " }\n", + " console.log(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n", + " window._bokeh_is_loading = js_urls.length;\n", + " for (var i = 0; i < js_urls.length; i++) {\n", + " var url = js_urls[i];\n", + " var s = document.createElement('script');\n", + " s.src = url;\n", + " s.async = false;\n", + " s.onreadystatechange = s.onload = function() {\n", + " window._bokeh_is_loading--;\n", + " if (window._bokeh_is_loading === 0) {\n", + " console.log(\"Bokeh: all BokehJS libraries loaded\");\n", + " run_callbacks()\n", + " }\n", + " };\n", + " s.onerror = function() {\n", + " console.warn(\"failed to load library \" + url);\n", + " };\n", + " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", + " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", + " }\n", + " };var element = document.getElementById(\"8cbfed0a-a343-4f49-92a8-d7255e883b08\");\n", + " if (element == null) {\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '8cbfed0a-a343-4f49-92a8-d7255e883b08' but 
no matching script tag was found. \")\n", + " return false;\n", + " }\n", + "\n", + " var js_urls = ['https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.js', 'https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.js'];\n", + "\n", + " var inline_js = [\n", + " function(Bokeh) {\n", + " Bokeh.set_log_level(\"info\");\n", + " },\n", + " \n", + " function(Bokeh) {\n", + " \n", + " Bokeh.$(\"#8cbfed0a-a343-4f49-92a8-d7255e883b08\").text(\"BokehJS is loading...\");\n", + " },\n", + " function(Bokeh) {\n", + " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", + " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", + " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", + " Bokeh.embed.inject_css(\"https://cdn.pydata.org/bokeh/release/bokeh-widgets-0.12.3.min.css\");\n", + " }\n", + " ];\n", + "\n", + " function run_inline_js() {\n", + " \n", + " if ((window.Bokeh !== undefined) || (force === \"1\")) {\n", + " for (var i = 0; i < inline_js.length; i++) {\n", + " inline_js[i](window.Bokeh);\n", + " }if (force === \"1\") {\n", + " display_loaded();\n", + " }} else if (Date.now() < window._bokeh_timeout) {\n", + " setTimeout(run_inline_js, 100);\n", + " } else if (!window._bokeh_failed_load) {\n", + " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", + " window._bokeh_failed_load = true;\n", + " } else if (!force) {\n", + " var cell = $(\"#8cbfed0a-a343-4f49-92a8-d7255e883b08\").parents('.cell').data().cell;\n", + " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", + " }\n", + "\n", + " }\n", + "\n", + " if (window._bokeh_is_loading === 0) {\n", + " console.log(\"Bokeh: BokehJS loaded, going straight to plotting\");\n", + " run_inline_js();\n", + " } else {\n", + " load_libs(js_urls, function() {\n", + " console.log(\"Bokeh: BokehJS plotting callback run at\", now());\n", + " 
run_inline_js();\n", + " });\n", + " }\n", + "}(this));" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from bokeh.io import output_notebook\n", + "from bokeh.plotting import figure, show\n", + "from bokeh.models import Legend, Div, PrintfTickFormatter\n", + "from bokeh.layouts import column, row\n", + "\n", + "output_notebook()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tests of the algorithm on artificially generated data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Test difference between blocking VB and non-blocking VB" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "atfilename = '/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/temp/blocking_vb_tests/atnonblocking.py'\n", + "with open(atfilename) as f:\n", + " code = compile(f.read(), atfilename, 'exec')\n", + " exec(code)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "atfilename = '/home/olavur/Dropbox/my_folder/workstuff/DTU/thesis/code/gensim/gensim/models/temp/blocking_vb_tests/atblocking.py'\n", + "with open(atfilename) as f:\n", + " code = compile(f.read(), atfilename, 'exec')\n", + " exec(code)" + ] + }, + { + "cell_type": "code", + "execution_count": 71, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "vocab_size = 1000\n", + "num_docs = 100\n", + "words_per_doc = 10 # Average unique words per document\n", + "word_freq = 10 # Average frequency of each word in document.\n", + "word_std = 10.0\n", + "corpus = []\n", + "for d in range(num_docs):\n", + " ids = random.sample(range(vocab_size), words_per_doc)\n", + " cts = np.random.normal(word_freq, word_std, len(ids))\n", + " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", + " doc = list(zip(ids, cts))\n", + " 
corpus.append(doc)\n", + " \n", + "num_authors = 100\n", + "def make_author2doc(docs_per_author=10):\n", + " author2doc = {}\n", + " for a in range(num_authors):\n", + " doc_ids = random.sample(range(num_docs), docs_per_author)\n", + " author2doc[a] = doc_ids\n", + "\n", + " return author2doc\n", + "\n", + "def make_doc2author(authors_per_doc=10):\n", + " doc2author = {}\n", + " for d in range(num_docs):\n", + " author_ids = random.sample(range(num_authors), authors_per_doc)\n", + " doc2author[d] = author_ids\n", + "\n", + " return doc2author" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 6.13 s, sys: 0 ns, total: 6.13 s\n", + "Wall time: 6.13 s\n", + "CPU times: user 11.6 s, sys: 32 ms, total: 11.6 s\n", + "Wall time: 11.6 s\n" + ] + } + ], + "source": [ + "num_topics = 5\n", + "doc2author = make_doc2author(5)\n", + "author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", + "\n", + "%time nb_model = AtNonBlocking(corpus=corpus, num_topics=num_topics, \\\n", + " author2doc=author2doc, doc2author=doc2author, iterations=5, random_state=0)\n", + "\n", + "%time b_model = AtBlocking(corpus=corpus, num_topics=num_topics, \\\n", + " author2doc=author2doc, doc2author=doc2author, iterations=5, random_state=0)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "5.0" + ] + }, + "execution_count": 58, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "authors_per_doc = [len(authors) for authors in nb_model.doc2author.values()]\n", + "sum(authors_per_doc) / len(nb_model.doc2author)" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "50.0" + ] + }, + "execution_count": 59, + 
"metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "docs_per_author = [len(docs) for docs in nb_model.author2doc.values()]\n", + "sum(docs_per_author) / len(nb_model.author2doc)" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "iterations = range(nb_model.iterations)\n", + "\n", + "p1 = figure(title='', x_axis_label='Iterations', y_axis_label='Per word bound')\n", + "s1 = p1.line(iterations, nb_model.perwordbound[1:], color='red')\n", + "p1.circle(iterations, nb_model.perwordbound[1:], color='red')\n", + "s2 = p1.line(iterations, b_model.perwordbound[1:], color='blue')\n", + "p1.circle(iterations, b_model.perwordbound[1:], color='blue')\n", + "p1.plot_height=400\n", + "p1.plot_width=600\n", + "p1.toolbar_location = None\n", + "\n", + "show(p1)" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "5.0" + ] + }, + "execution_count": 73, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "authors_per_doc = [len(authors) for authors in nb_model.doc2author.values()]\n", + "sum(authors_per_doc) / len(nb_model.doc2author)" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "5.05050505050505" + ] + }, + "execution_count": 74, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "docs_per_author = [len(docs) for docs in nb_model.author2doc.values()]\n", + "sum(docs_per_author) / len(nb_model.author2doc)" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "iterations = range(nb_model.iterations)\n", + "\n", + "p1 = figure(title='', x_axis_label='Iterations', y_axis_label='Per word bound')\n", + "s1 = p1.line(iterations, nb_model.perwordbound[1:], color='red')\n", + "p1.circle(iterations, nb_model.perwordbound[1:], color='red')\n", + "s2 = p1.line(iterations, b_model.perwordbound[1:], color='blue')\n", + "p1.circle(iterations, b_model.perwordbound[1:], color='blue')\n", + "p1.plot_height=400\n", + "p1.plot_width=600\n", + "p1.toolbar_location = None\n", + "\n", + "show(p1)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "## Scalability\n", + "\n", + "### W.r.t. number of authors" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "vocab_size = 1000\n", + "num_docs = 100\n", + "words_per_doc = 10 # Average unique words per document\n", + "word_freq = 10 # Average frequency of each word in document.\n", + "word_std = 10.0\n", + "corpus = []\n", + "for d in range(num_docs):\n", + " ids = random.sample(range(vocab_size), words_per_doc)\n", + " cts = np.random.normal(word_freq, word_std, len(ids))\n", + " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", + " doc = list(zip(ids, cts))\n", + " corpus.append(doc)\n", + " \n", + "def make_author2doc(docs_per_author=10, num_authors=100):\n", + " author2doc = {}\n", + " for a in range(num_authors):\n", + " doc_ids = random.sample(range(num_docs), docs_per_author)\n", + " author2doc[a] = doc_ids\n", + "\n", + " return author2doc\n", + "\n", + "def make_doc2author(authors_per_doc=10, num_authors=100):\n", + " doc2author = {}\n", + " for d in range(num_docs):\n", + " author_ids = random.sample(range(num_authors), authors_per_doc)\n", + " doc2author[d] = author_ids\n", + "\n", + " return doc2author" + ] + }, + { + "cell_type": "code", + "execution_count": 
14, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "num_topics = 5\n", + "num_authors_list = [100, 200, 400, 800, 1000]\n", + "authors_per_doc = 5\n", + "chunksize = len(corpus) + 1" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 1min 12s, sys: 16 ms, total: 1min 12s\n", + "Wall time: 1min 12s\n" + ] + } + ], + "source": [ + "%%time\n", + "# Time the entire process.\n", + "\n", + "train_time = []\n", + "eval_time = []\n", + "for num_authors in num_authors_list:\n", + " doc2author = make_doc2author(authors_per_doc, num_authors)\n", + " author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", + " \n", + " # Get training time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(10):\n", + " start = time()\n", + " model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", + " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", + " iterations=10, passes=10, eval_every=0, random_state=1)\n", + " avg_elapsed += time() - start\n", + " avg_elapsed /= 10\n", + " train_time.append(avg_elapsed)\n", + " \n", + " # Get evaluation time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(10):\n", + " start = time()\n", + " perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author)\n", + " avg_elapsed += time() - start\n", + " avg_elapsed /= 10\n", + " eval_time.append(avg_elapsed)" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title='Train time', x_axis_label='Num authors', y_axis_label='Time (sec)')\n", + "s1 = p1.line(num_authors_list, train_time)\n", + "p1.circle(num_authors_list, train_time)\n", + "p1.plot_height=400\n", + "p1.plot_width=400\n", + "p1.toolbar_location = None\n", + "\n", + "p2 = figure(title='Evaluation time', x_axis_label='Num authors', y_axis_label='Time (sec)')\n", + "s2 = p2.line(num_authors_list, eval_time)\n", + "p2.circle(num_authors_list, eval_time)\n", + "p2.plot_height=400\n", + "p2.plot_width=400\n", + "p2.toolbar_location = None\n", + "\n", + "plots = row(p1, p2)\n", + "\n", + "show(plots)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### W.r.t. number of authors PER document\n", + "\n", + "Number of authors is constant." + ] + }, + { + "cell_type": "code", + "execution_count": 122, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "vocab_size = 1000\n", + "num_docs = 100\n", + "words_per_doc = 10 # Average unique words per document\n", + "word_freq = 10 # Average frequency of each word in document.\n", + "word_std = 10.0\n", + "corpus = []\n", + "for d in range(num_docs):\n", + " ids = random.sample(range(vocab_size), words_per_doc)\n", + " cts = np.random.normal(word_freq, word_std, len(ids))\n", + " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", + " doc = list(zip(ids, cts))\n", + " corpus.append(doc)\n", + " \n", + "def make_author2doc(docs_per_author=10, num_authors=100):\n", + " author2doc = {}\n", + " for a in range(num_authors):\n", + " doc_ids = random.sample(range(num_docs), docs_per_author)\n", + " author2doc[a] = doc_ids\n", + "\n", + " return author2doc\n", + "\n", + "def make_doc2author(authors_per_doc=10, num_authors=100):\n", + " doc2author = {}\n", + " for d in range(num_docs):\n", + " author_ids = random.sample(range(num_authors), authors_per_doc)\n", + " doc2author[d] = 
author_ids\n", + "\n", + " return doc2author" + ] + }, + { + "cell_type": "code", + "execution_count": 125, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "num_topics = 5\n", + "num_authors = 1000\n", + "authors_per_doc_list = [5**i for i in range(4)]\n", + "chunksize = len(corpus) + 1" + ] + }, + { + "cell_type": "code", + "execution_count": 126, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "96\n", + "383\n", + "924\n", + "1000\n", + "CPU times: user 12.1 s, sys: 36 ms, total: 12.1 s\n", + "Wall time: 12.1 s\n" + ] + } + ], + "source": [ + "%%time\n", + "# Time the entire process.\n", + "\n", + "train_time = []\n", + "eval_time = []\n", + "for authors_per_doc in authors_per_doc_list:\n", + " doc2author = make_doc2author(authors_per_doc, num_authors)\n", + " author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", + " \n", + " # Get training time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(1):\n", + " start = time()\n", + " model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", + " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", + " iterations=10, passes=10, eval_every=0, random_state=1)\n", + " avg_elapsed += time() - start\n", + " avg_elapsed /= 1\n", + " train_time.append(avg_elapsed)\n", + " \n", + " # Get evaluation time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(1):\n", + " start = time()\n", + " perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author)\n", + " avg_elapsed += time() - start\n", + " avg_elapsed /= 1\n", + " eval_time.append(avg_elapsed)" + ] + }, + { + "cell_type": "code", + "execution_count": 127, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title='Train time', x_axis_label='Num authors', y_axis_label='Time (sec)')\n", + "s1 = p1.line(authors_per_doc_list, train_time)\n", + "p1.circle(authors_per_doc_list, train_time)\n", + "p1.plot_height=400\n", + "p1.plot_width=400\n", + "p1.toolbar_location = None\n", + "\n", + "p2 = figure(title='Evaluation time', x_axis_label='Num authors', y_axis_label='Time (sec)')\n", + "s2 = p2.line(authors_per_doc_list, eval_time)\n", + "p2.circle(authors_per_doc_list, eval_time)\n", + "p2.plot_height=400\n", + "p2.plot_width=400\n", + "p2.toolbar_location = None\n", + "\n", + "plots = row(p1, p2)\n", + "\n", + "show(column(Div(text='

Scalability w.r.t. number of authors per document

'), plots))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### W.r.t. number of documents" + ] + }, + { + "cell_type": "code", + "execution_count": 208, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 34min 38s, sys: 4.3 s, total: 34min 42s\n", + "Wall time: 34min 48s\n" + ] + } + ], + "source": [ + "%%time\n", + "\n", + "# Set some parameters.\n", + "num_authors = 1000\n", + "authors_per_doc = 5\n", + "num_topics = 5\n", + "vocab_size = 1000\n", + "words_per_doc = 10 # Average unique words per document\n", + "word_freq = 10 # Average frequency of each word in document.\n", + "word_std = 10.0\n", + "num_docs_list = [100, 1000, 10000, 100000]\n", + "\n", + "def make_doc2author(authors_per_doc=10, num_authors=100):\n", + " doc2author = {}\n", + " for d in range(num_docs):\n", + " author_ids = random.sample(range(num_authors), authors_per_doc)\n", + " doc2author[d] = author_ids\n", + "\n", + " return doc2author\n", + "\n", + "\n", + "# Generate corpus.\n", + "corpus_big = []\n", + "for d in range(num_docs_list[-1]):\n", + " ids = random.sample(range(vocab_size), words_per_doc)\n", + " cts = np.random.normal(word_freq, word_std, len(ids))\n", + " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", + " doc = list(zip(ids, cts))\n", + " corpus_big.append(doc)\n", + "\n", + "train_time = []\n", + "eval_time = []\n", + "memory_footprint = []\n", + "for num_docs in num_docs_list:\n", + " # Run tests.\n", + " \n", + " # Construct dictionaries.\n", + " doc2author = make_doc2author(authors_per_doc, num_authors)\n", + " author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", + " \n", + " corpus = random.sample(corpus_big, num_docs)\n", + " memory_footprint.append(sys.getsizeof(corpus))\n", + " \n", + " chunksize = len(corpus) + 1\n", + " \n", + " # Get training time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(1):\n", + " start = time()\n", + " 
model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", + " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", + " iterations=10, passes=10, eval_every=0, random_state=1)\n", + " avg_elapsed += time() - start\n", + " avg_elapsed /= 1\n", + " train_time.append(avg_elapsed)\n", + " \n", + " # Get evaluation time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(1):\n", + " start = time()\n", + " perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author)\n", + " avg_elapsed += time() - start\n", + " avg_elapsed /= 1\n", + " eval_time.append(avg_elapsed)" + ] + }, + { + "cell_type": "code", + "execution_count": 207, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title='Train time', x_axis_label='Size of corpus (bytes)', y_axis_label='Time (sec)', \\\n", + " x_axis_type='log', x_range=(10**2, 10**6))\n", + "s1 = p1.line(memory_footprint, train_time)\n", + "p1.circle(memory_footprint, train_time)\n", + "p1.plot_height=400\n", + "p1.plot_width=400\n", + "p1.toolbar_location = None\n", + "\n", + "p2 = figure(title='Evaluation time', x_axis_label='Size of corpus (bytes)', y_axis_label='Time (sec)',\\\n", + " x_axis_type='log', x_range=(10**2, 10**6))\n", + "s2 = p2.line(memory_footprint, eval_time)\n", + "p2.circle(memory_footprint, eval_time)\n", + "p2.plot_height=400\n", + "p2.plot_width=400\n", + "p2.toolbar_location = None\n", + "\n", + "plots = row(p1, p2)\n", + "\n", + "show(column(Div(text='

Scalability w.r.t. number of documents

'), plots))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "print(num_docs_list)\n", + "print(memory_footprint)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### W.r.t. size of vocab" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 20.8 s, sys: 64 ms, total: 20.9 s\n", + "Wall time: 20.9 s\n" + ] + } + ], + "source": [ + "%%time\n", + "\n", + "# Set some parameters.\n", + "num_authors = 1000\n", + "authors_per_doc = 5\n", + "num_topics = 5\n", + "vocab_size_list = [10, 100, 1000, 10000, 100000]\n", + "words_per_doc = 10 # Average unique words per document\n", + "word_freq = 10 # Average frequency of each word in document.\n", + "word_std = 10.0\n", + "num_docs = 100\n", + "\n", + "def make_doc2author(authors_per_doc=10, num_authors=100):\n", + " doc2author = {}\n", + " for d in range(num_docs):\n", + " author_ids = random.sample(range(num_authors), authors_per_doc)\n", + " doc2author[d] = author_ids\n", + "\n", + " return doc2author\n", + "\n", + "\n", + "train_time = []\n", + "eval_time = []\n", + "for vocab_size in vocab_size_list:\n", + " # Run tests.\n", + " \n", + " # Generate corpus.\n", + " corpus = []\n", + " for d in range(num_docs):\n", + " ids = random.sample(range(vocab_size), words_per_doc)\n", + " cts = np.random.normal(word_freq, word_std, len(ids))\n", + " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", + " doc = list(zip(ids, cts))\n", + " corpus.append(doc)\n", + " \n", + " # Construct dictionaries.\n", + " doc2author = make_doc2author(authors_per_doc, num_authors)\n", + " author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", + " \n", + " chunksize = len(corpus) + 1\n", + " \n", + " # Get training time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(1):\n", + " 
start = time()\n", + " model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", + " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", + " iterations=10, passes=10, eval_every=0, random_state=1)\n", + " avg_elapsed += time() - start\n", + " avg_elapsed /= 1\n", + " train_time.append(avg_elapsed)\n", + " \n", + " # Get evaluation time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(10):\n", + " start = time()\n", + " perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author)\n", + " avg_elapsed += time() - start\n", + " avg_elapsed /= 10\n", + " eval_time.append(avg_elapsed)" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title='Train time', x_axis_label='Size of vocab', y_axis_label='Time (sec)', \\\n", + " x_axis_type='log', x_range=(1e0, 1e6), y_axis_type='log')\n", + "s1 = p1.line(vocab_size_list, train_time)\n", + "p1.circle(vocab_size_list, train_time)\n", + "p1.plot_height=400\n", + "p1.plot_width=400\n", + "p1.toolbar_location = None\n", + "\n", + "p2 = figure(title='Evaluation time', x_axis_label='Size of vocab', y_axis_label='Time (sec)',\\\n", + " x_axis_type='log', x_range=(1e0, 1e6), y_axis_type='log')\n", + "s1 = p2.line(vocab_size_list, eval_time)\n", + "p2.circle(vocab_size_list, eval_time)\n", + "p2.plot_height=400\n", + "p2.plot_width=400\n", + "p2.toolbar_location = None\n", + "\n", + "plots = row(p1, p2)\n", + "\n", + "show(column(Div(text='

Scalability w.r.t. size of vocabulary

'), plots))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### W.r.t. number of topics" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 25.7 s, sys: 56 ms, total: 25.8 s\n", + "Wall time: 25.8 s\n" + ] + } + ], + "source": [ + "%%time\n", + "\n", + "# Set some parameters.\n", + "num_authors = 1000\n", + "authors_per_doc = 5\n", + "num_topics_list = [5, 25, 125, 625]\n", + "vocab_size = 1000\n", + "words_per_doc = 10 # Average unique words per document\n", + "word_freq = 10 # Average frequency of each word in document.\n", + "word_std = 10.0\n", + "num_docs = 100\n", + "\n", + "def make_doc2author(authors_per_doc=10, num_authors=100):\n", + " doc2author = {}\n", + " for d in range(num_docs):\n", + " author_ids = random.sample(range(num_authors), authors_per_doc)\n", + " doc2author[d] = author_ids\n", + "\n", + " return doc2author\n", + "\n", + "# Generate corpus.\n", + "corpus = []\n", + "for d in range(num_docs):\n", + " ids = random.sample(range(vocab_size), words_per_doc)\n", + " cts = np.random.normal(word_freq, word_std, len(ids))\n", + " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", + " doc = list(zip(ids, cts))\n", + " corpus.append(doc)\n", + "\n", + "# Construct dictionaries.\n", + "doc2author = make_doc2author(authors_per_doc, num_authors)\n", + "author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", + "\n", + "chunksize = len(corpus) + 1\n", + "\n", + "train_time = []\n", + "for num_topics in num_topics_list:\n", + " # Get training time.\n", + " avg_elapsed = 0.0\n", + " for _ in range(1):\n", + " start = time()\n", + " model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", + " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", + " iterations=10, passes=10, eval_every=0, random_state=1)\n", + " avg_elapsed += time() - 
start\n", + " avg_elapsed /= 1\n", + " train_time.append(avg_elapsed)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "
\n", + "
\n", + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "p1 = figure(title='Train time', x_axis_label='Number of topics', y_axis_label='Time (sec)', \\\n", + " x_axis_type='log', x_range=(1e0, 2e3), y_axis_type='log')\n", + "s1 = p1.line(num_topics_list, train_time)\n", + "p1.circle(num_topics_list, train_time)\n", + "p1.plot_height=400\n", + "p1.plot_width=400\n", + "p1.toolbar_location = None\n", + "\n", + "\n", + "show(column(Div(text='

Scalability w.r.t. number of topics

'), p1))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.2" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/docs/notebooks/atmodel_tutorial.ipynb b/docs/notebooks/atmodel_tutorial.ipynb index 05c0edb7ea..12eb1c0399 100644 --- a/docs/notebooks/atmodel_tutorial.ipynb +++ b/docs/notebooks/atmodel_tutorial.ipynb @@ -6,6 +6,8 @@ "source": [ "# The author-topic model: LDA with metadata\n", "\n", + "**TODO:** StackExchange example probably won't be included, as I'm not having good results with it and cannot make the tag prediction to work at all. If it is not included, fix this section, and do similarity queries with authors.\n", + "\n", "In this tutorial, you will learn how to use the author-topic model in Gensim. First, we will apply it to a corpus consisting of scientific papers, to get insight about the authors of the papers. After that, we will apply the model on StackExchange posts with tags, and implement a simple automatic tagging system.\n", "\n", "The author-topic model is in extension of Latent Dirichlet Allocation (LDA). Each document is associated with a set of authors, and the topic distributions for each of these authors are learned. Each author is also associated with multiple documents. 
To learn about the theoretical side of the author-topic model, see [Rosen-Zvi and co-authors](https://mimno.infosci.cornell.edu/info6150/readings/398.pdf), for example.\n", @@ -16,6 +18,11 @@ "* Topic modelling in Gensim: http://radimrehurek.com/topic_modeling_tutorial/2%20-%20Topic%20Modeling.html\n", "* Pre-processing and training LDA: https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb\n", "\n", + "> NOTE:\n", + ">\n", + "> To run this tutorial on your own, install Gensim, SpaCy, Scikit-Learn, and Bokeh.\n", + "\n", + "**FIXME:** technical details can be found at TODO \n", "\n", "In part 1 of this tutorial, we will illustrate basic usage of the model, and explore the resulting representation. How to load and pre-process the dataset used is also covered.\n", "\n", @@ -25,6 +32,8 @@ "\n", "The data used in part 1 consists of scientific papers about machine learning, from the Neural Information Processing Systems conference (NIPS). It is the same dataset used in the [Pre-processing and training LDA](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb) tutorial, mentioned earlier.\n", "\n", + "As in the LDA tutorial, we will be performing qualitative analysis of the model, and at times this will require an understanding of the subject matter of the data. If you try running this tutorial on your own, consider applying it on a dataset with subject matter that you are familiar with. For example, try one of the [StackExchange datadump datasets](https://archive.org/details/stackexchange).\n", + "\n", "You can download the data from Sam Roweis' website (http://www.cs.nyu.edu/~roweis/data.html).\n", "\n", "In the following sections we will load the data, pre-process it, train the model, and explore the results using some of the implementation's functionality. 
Feel free to skip the loading and pre-processing for now, if you are familiar with the process.\n", @@ -159,7 +168,9 @@ "source": [ "In the code below, Spacy takes care of tokenization, removing non-alphabetic characters, removal of stopwords, lemmatization and named entity recognition.\n", "\n", - "Note that we only keep named entities that consist of more than one word, as single word named entities are already there." + "Note that we only keep named entities that consist of more than one word, as single word named entities are already there.\n", + "\n", + "**TODO:** use custom pipeline. Using the entire SpaCy pipeline is very expensive; for example, I'm probably doing POS tagging below, but not using it for anything." ] }, { @@ -173,8 +184,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 10min 13s, sys: 780 ms, total: 10min 14s\n", - "Wall time: 3min 27s\n" + "CPU times: user 9min 8s, sys: 464 ms, total: 9min 9s\n", + "Wall time: 3min 3s\n" ] } ], @@ -256,7 +267,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 7, "metadata": { "collapsed": true }, @@ -285,7 +296,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 8, "metadata": { "collapsed": true }, @@ -306,7 +317,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -335,7 +346,7 @@ "\n", "We train the author-topic model on the data prepared in the previous sections. \n", "\n", - "The interface to the author-topic model is very similar to that of LDA in Gensim. In addition to a corpus, ID to word mapping (`id2word`) and number of topics (`num_topics`), the author-topic model requires either a author to document ID mapping (`author2doc`), or the reverse (`doc2author`).\n", + "The interface to the author-topic model is very similar to that of LDA in Gensim. 
In addition to a corpus, ID to word mapping (`id2word`) and number of topics (`num_topics`), the author-topic model requires either an author to document ID mapping (`author2doc`), or the reverse (`doc2author`).\n", "\n", "Below, we have also (this can be skipped for now):\n", "* Increased the number of `passes` over the dataset (to improve the convergence of the optimization problem).\n", @@ -348,16 +359,9 @@ "We load the model, and train it." ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**FIXME:** why is autotuning turned on below?" - ] - }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 10, "metadata": { "collapsed": false }, @@ -366,29 +370,31 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 14min 10s, sys: 1min 12s, total: 15min 22s\n", - "Wall time: 14min 6s\n" + "CPU times: user 3.75 s, sys: 224 ms, total: 3.98 s\n", + "Wall time: 3.78 s\n" ] } ], "source": [ "from gensim.models import AuthorTopicModel\n", "%time model = AuthorTopicModel(corpus=corpus, num_topics=10, id2word=dictionary.id2token, \\\n", - " author2doc=author2doc, chunksize=2000, passes=100, alpha='auto', eta='auto', \\\n", - " eval_every=0, iterations=1, random_state=1)" + " author2doc=author2doc, chunksize=2000, passes=1, eval_every=0, \\\n", + " iterations=1, random_state=1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ + "If you believe your model hasn't converged, you can continue training using `model.update()`. If you have additional documents and/or authors call `model.update(corpus, author2doc)`.\n", + "\n", "Before we explore the model, let's try to improve upon it. To do this, we will train several models with different random initializations, by giving different seeds for the random number generator (`random_state`). 
We evaluate the topic coherence of the model using the [top_topics](https://radimrehurek.com/gensim/models/ldamodel.html#gensim.models.ldamodel.LdaModel.top_topics) method, and pick the model with the highest topic coherence.\n", "\n" ] }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 12, "metadata": { "collapsed": false }, @@ -397,8 +403,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 14min 30s, sys: 1min 18s, total: 15min 49s\n", - "Wall time: 14min 43s\n" + "CPU times: user 11min 57s, sys: 2min 16s, total: 14min 13s\n", + "Wall time: 11min 38s\n" ] } ], @@ -423,7 +429,7 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": 15, "metadata": { "collapsed": false }, @@ -432,7 +438,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Topic coherence: -1.766e+03\n" + "Topic coherence: -1.803e+03\n" ] } ], @@ -445,12 +451,45 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Let's print the most important words in the topics." + "We save the model, to avoid having to train it again, and also show how to load it again." + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Save model.\n", + "model.save('/tmp/model.atmodel')" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Load model.\n", + "model = AuthorTopicModel.load('/tmp/model.atmodel')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Explore author-topic representation\n", + "\n", + "Now that we have trained a model, we can start exploring the authors and the topics. First, let's simply print the most important words in the topics." 
] }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 18, "metadata": { "collapsed": false }, @@ -459,28 +498,28 @@ "data": { "text/plain": [ "[(0,\n", - " '0.009*\"control\" + 0.007*\"memory\" + 0.006*\"prediction\" + 0.006*\"table\" + 0.006*\"signal\" + 0.005*\"search\" + 0.005*\"controller\" + 0.005*\"system\" + 0.004*\"user\" + 0.004*\"run\"'),\n", + " '0.029*\"image\" + 0.006*\"constraint\" + 0.006*\"component\" + 0.006*\"region\" + 0.006*\"matrix\" + 0.006*\"pixel\" + 0.006*\"solution\" + 0.005*\"surface\" + 0.005*\"object\" + 0.005*\"source\"'),\n", " (1,\n", - " '0.013*\"neuron\" + 0.010*\"threshold\" + 0.009*\"f\" + 0.008*\"let\" + 0.008*\"theorem\" + 0.007*\"bound\" + 0.007*\"class\" + 0.007*\"node\" + 0.007*\"p\" + 0.006*\"layer\"'),\n", + " '0.008*\"optimal\" + 0.007*\"action\" + 0.007*\"q\" + 0.007*\"approximation\" + 0.006*\"policy\" + 0.005*\"gaussian\" + 0.005*\"w\" + 0.005*\"noise\" + 0.005*\"generalization\" + 0.004*\"convergence\"'),\n", " (2,\n", - " '0.009*\"w\" + 0.008*\"matrix\" + 0.007*\"noise\" + 0.007*\"approximation\" + 0.007*\"gaussian\" + 0.006*\"density\" + 0.005*\"optimal\" + 0.005*\"generalization\" + 0.005*\"sample\" + 0.005*\"y\"'),\n", + " '0.014*\"visual\" + 0.012*\"field\" + 0.010*\"object\" + 0.009*\"image\" + 0.008*\"map\" + 0.008*\"layer\" + 0.008*\"eye\" + 0.008*\"direction\" + 0.008*\"activity\" + 0.008*\"cell\"'),\n", " (3,\n", - " '0.013*\"image\" + 0.009*\"distance\" + 0.008*\"cluster\" + 0.006*\"trajectory\" + 0.005*\"transformation\" + 0.005*\"object\" + 0.005*\"solution\" + 0.005*\"matrix\" + 0.005*\"dynamic\" + 0.004*\"inverse\"'),\n", + " '0.022*\"control\" + 0.010*\"dynamic\" + 0.009*\"analog\" + 0.008*\"circuit\" + 0.008*\"trajectory\" + 0.007*\"neuron\" + 0.007*\"chip\" + 0.007*\"controller\" + 0.006*\"motor\" + 0.005*\"gate\"'),\n", " (4,\n", - " '0.014*\"action\" + 0.011*\"control\" + 0.010*\"policy\" + 0.009*\"q\" + 0.009*\"reinforcement\" + 0.008*\"optimal\" + 0.006*\"dynamic\" + 
0.005*\"robot\" + 0.005*\"environment\" + 0.005*\"reward\"'),\n", + " '0.013*\"neuron\" + 0.009*\"f\" + 0.008*\"p\" + 0.008*\"memory\" + 0.007*\"let\" + 0.007*\"w\" + 0.007*\"bound\" + 0.007*\"capacity\" + 0.006*\"theorem\" + 0.006*\"threshold\"'),\n", " (5,\n", - " '0.015*\"representation\" + 0.012*\"layer\" + 0.011*\"image\" + 0.009*\"object\" + 0.008*\"component\" + 0.006*\"face\" + 0.006*\"map\" + 0.006*\"signal\" + 0.005*\"code\" + 0.005*\"activity\"'),\n", + " '0.021*\"neuron\" + 0.021*\"cell\" + 0.010*\"spike\" + 0.010*\"response\" + 0.010*\"signal\" + 0.008*\"frequency\" + 0.008*\"synapse\" + 0.008*\"synaptic\" + 0.007*\"stimulus\" + 0.006*\"voltage\"'),\n", " (6,\n", - " '0.013*\"speech\" + 0.012*\"classifier\" + 0.012*\"class\" + 0.010*\"recognition\" + 0.009*\"mixture\" + 0.009*\"classification\" + 0.007*\"tree\" + 0.007*\"likelihood\" + 0.006*\"node\" + 0.006*\"sample\"'),\n", + " '0.013*\"speech\" + 0.010*\"word\" + 0.010*\"recognition\" + 0.009*\"classifier\" + 0.008*\"class\" + 0.008*\"mixture\" + 0.007*\"likelihood\" + 0.006*\"classification\" + 0.006*\"sequence\" + 0.006*\"density\"'),\n", " (7,\n", - " '0.024*\"neuron\" + 0.021*\"cell\" + 0.010*\"response\" + 0.009*\"spike\" + 0.009*\"stimulus\" + 0.008*\"activity\" + 0.008*\"synaptic\" + 0.006*\"signal\" + 0.006*\"frequency\" + 0.006*\"cortex\"'),\n", + " '0.011*\"layer\" + 0.006*\"face\" + 0.006*\"node\" + 0.006*\"classification\" + 0.006*\"architecture\" + 0.005*\"hidden\" + 0.005*\"table\" + 0.005*\"training_set\" + 0.004*\"memory\" + 0.004*\"image\"'),\n", " (8,\n", - " '0.019*\"image\" + 0.011*\"chip\" + 0.010*\"motion\" + 0.010*\"circuit\" + 0.010*\"field\" + 0.008*\"analog\" + 0.008*\"direction\" + 0.007*\"visual\" + 0.007*\"map\" + 0.007*\"object\"'),\n", + " '0.010*\"class\" + 0.008*\"distance\" + 0.007*\"recognition\" + 0.007*\"tree\" + 0.007*\"character\" + 0.007*\"kernel\" + 0.005*\"classification\" + 0.005*\"p\" + 0.004*\"machine\" + 0.004*\"y\"'),\n", " (9,\n", - " '0.010*\"net\" + 
0.009*\"recognition\" + 0.009*\"word\" + 0.009*\"hidden\" + 0.008*\"architecture\" + 0.008*\"character\" + 0.007*\"recurrent\" + 0.007*\"layer\" + 0.007*\"rule\" + 0.007*\"hidden_unit\"')]" + " '0.010*\"representation\" + 0.010*\"rule\" + 0.009*\"net\" + 0.007*\"hidden\" + 0.006*\"hidden_unit\" + 0.005*\"sequence\" + 0.005*\"activation\" + 0.005*\"connectionist\" + 0.004*\"architecture\" + 0.004*\"object\"')]" ] }, - "execution_count": 67, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -495,16 +534,18 @@ "source": [ "These topics are by no means perfect. They have problems such as *chained topics*, *intruded words*, *random topics*, and *unbalanced topics* (see [Mimno and co-authors 2011](https://people.cs.umass.edu/~wallach/publications/mimno11optimizing.pdf)). They will do for the purposes of this tutorial, however.\n", "\n", - "**TODO:** re-write the interpretation of the topics below, if necessary.\n", + "**FIXME:** re-write the interpretation of the topics below, if necessary.\n", "\n", "Below, we use the `model[name]` syntax to retrieve the topic distribution for some authors. Comparing the authors' topics with the topics above, we observe that the model has correctly identified that Yann LeCun and Geoffrey E. Hinton both have something to do with neural networks (topic 5), speech recognition (topic 1 and 5) and statistical machine learning (topic 9). We also observe that Yann LeCun has been particularly occupied with image processing, and perhaps that Geoffrey E. Hinton has worked with visual perception in neuroscience (this is less clear).\n", "\n", - "Similarly, Terrence J. Sejnowski and James M. Bower are both neuroscientist, first and foremost, and their topic distributions seem to reflect that." + "Similarly, Terrence J. Sejnowski and James M. 
Bower are both neuroscientist, first and foremost, and their topic distributions seem to reflect that.\n", + "\n", + "**TODO:** try to make it less cumbersome to compare author-topics below and topics above." ] }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 19, "metadata": { "collapsed": false }, @@ -517,22 +558,25 @@ "YannLeCun\n", "Docs: [143, 406, 370, 495, 456, 449, 595, 616, 760, 752, 1532]\n", "Topics:\n", - "[(3, 0.29943682408405564), (9, 0.70035037360056807)]\n", + "[(8, 0.99976156441010589)]\n", "\n", "GeoffreyE.Hinton\n", "Docs: [56, 143, 284, 230, 197, 462, 463, 430, 688, 784, 826, 848, 869, 1387, 1684, 1728]\n", "Topics:\n", - "[(4, 0.07225384180855414), (5, 0.92764230357402855)]\n", + "[(1, 0.16022855953677534),\n", + " (2, 0.18085978419244769),\n", + " (8, 0.18989122578835921),\n", + " (9, 0.46894303639673746)]\n", "\n", "TerrenceJ.Sejnowski\n", "Docs: [513, 530, 539, 468, 611, 581, 600, 594, 703, 711, 849, 981, 944, 865, 850, 883, 881, 1221, 1137, 1224, 1146, 1282, 1248, 1179, 1424, 1359, 1528, 1484, 1571, 1727, 1732]\n", "Topics:\n", - "[(5, 0.86190832291064989), (7, 0.13802575466031855)]\n", + "[(2, 0.99992562868881996)]\n", "\n", "JamesM.Bower\n", "Docs: [17, 48, 58, 131, 101, 126, 127, 281, 208, 225]\n", "Topics:\n", - "[(7, 0.99980671969007273)]\n" + "[(5, 0.99980243307117478)]\n" ] } ], @@ -573,7 +617,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 20, "metadata": { "collapsed": false }, @@ -587,25 +631,22 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We can also compute the (per-word) bound." + "We can also compute the per-word bound, which is a measure of the model's predictive performance (you could also say that it is the reconstruction error)." 
] }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 21, "metadata": { "collapsed": false }, "outputs": [ { - "data": { - "text/plain": [ - "-7.6914582241156673" - ] - }, - "execution_count": 44, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "-7.75230922299\n" + ] } ], "source": [ @@ -627,7 +668,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 22, "metadata": { "collapsed": false }, @@ -636,8 +677,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 17.1 s, sys: 0 ns, total: 17.1 s\n", - "Wall time: 17.1 s\n" + "CPU times: user 15.8 s, sys: 12 ms, total: 15.8 s\n", + "Wall time: 15.8 s\n" ] } ], @@ -649,25 +690,47 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Explore author-topic representation" + "#### Plotting the authors\n", + "\n", + "Now we're going to produce the kind of pacific archipelago looking plot below. The goal of this plot is to give you a way to explore the author-topic representation in an intuitive manner.\n", + "\n", + "We take all the author-topic distributions (stored in `model.state.gamma`) and embed them in a 2D space. To do this, we reduce the dimensionality of this data using t-SNE. \n", + "\n", + "t-SNE is a method that attempts to reduce the dimensionality of a dataset, while maintaining the distances between the points. That means that if two authors are close together in the plot below, then their topic distributions are similar.\n", + "\n", + "In the cell below, we transform the author-topic representation into the t-SNE space. You can increase the `smallest_author` value if you do not want to view all the authors with few documents." 
] }, { "cell_type": "code", - "execution_count": 70, + "execution_count": 26, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ + "%%time\n", "from sklearn.manifold import TSNE\n", "tsne = TSNE(n_components=2, random_state=0)\n", - "_ = tsne.fit_transform(model.state.gamma) # Result stored in tsne.embedding_" + "smallest_author = 0 # Ignore authors with documents less than this.\n", + "authors = [model.author2id[a] for a in model.author2id.keys() if len(author2doc[a]) > smallest_author]\n", + "_ = tsne.fit_transform(model.state.gamma[authors, :]) # Result stored in tsne.embedding_" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We are now ready to make the plot.\n", + "\n", + "If you are unable to view or interact with the plot below, it is available [here]() (**TODO:** make a page for the plot, and include the link), or view the entire notebook [here]() (**TODO:** make nvbiewer page for the notebook or something).\n", + "\n", + "Note that if you run this notebook yourself, you will see a different graph. The random initialization of the model will be different, and the result will thus be different to some degree. You may find an entirely different representation of the data, or it may show the same interpretation slightly differently." ] }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 27, "metadata": { "collapsed": false, "scrolled": true @@ -679,7 +742,7 @@ "\n", "
\n", " \n", - " Loading BokehJS ...\n", + " Loading BokehJS ...\n", "
" ] }, @@ -727,7 +790,7 @@ "\n", " function display_loaded() {\n", " if (window.Bokeh !== undefined) {\n", - " Bokeh.$(\"#412cdc4a-952a-4aad-a8e6-0d41a2082466\").text(\"BokehJS successfully loaded.\");\n", + " Bokeh.$(\"#7678b991-d847-4f9b-8db4-2b9c003d91ee\").text(\"BokehJS successfully loaded.\");\n", " } else if (Date.now() < window._bokeh_timeout) {\n", " setTimeout(display_loaded, 100)\n", " }\n", @@ -769,9 +832,9 @@ " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", " }\n", - " };var element = document.getElementById(\"412cdc4a-952a-4aad-a8e6-0d41a2082466\");\n", + " };var element = document.getElementById(\"7678b991-d847-4f9b-8db4-2b9c003d91ee\");\n", " if (element == null) {\n", - " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '412cdc4a-952a-4aad-a8e6-0d41a2082466' but no matching script tag was found. \")\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '7678b991-d847-4f9b-8db4-2b9c003d91ee' but no matching script tag was found. 
\")\n", " return false;\n", " }\n", "\n", @@ -784,7 +847,7 @@ " \n", " function(Bokeh) {\n", " \n", - " Bokeh.$(\"#412cdc4a-952a-4aad-a8e6-0d41a2082466\").text(\"BokehJS is loading...\");\n", + " Bokeh.$(\"#7678b991-d847-4f9b-8db4-2b9c003d91ee\").text(\"BokehJS is loading...\");\n", " },\n", " function(Bokeh) {\n", " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", @@ -807,7 +870,7 @@ " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", " window._bokeh_failed_load = true;\n", " } else if (!force) {\n", - " var cell = $(\"#412cdc4a-952a-4aad-a8e6-0d41a2082466\").parents('.cell').data().cell;\n", + " var cell = $(\"#7678b991-d847-4f9b-8db4-2b9c003d91ee\").parents('.cell').data().cell;\n", " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", " }\n", "\n", @@ -830,23 +893,14 @@ } ], "source": [ + "# Tell Bokeh to display plots inside the notebook.\n", "from bokeh.io import output_notebook\n", - "from bokeh.models import HoverTool\n", - "from bokeh.plotting import figure, show, ColumnDataSource\n", - "\n", "output_notebook()" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If you are unable to view or interact with the plot below, it is available [here]() (**TODO:** make a page for the plot, and include the link), or view the entire notebook [here]() (**TODO:** make nvbiewer page for the notebook or something)." - ] - }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -857,7 +911,7 @@ "\n", "\n", "
\n", - "
\n", + "
\n", "
\n", "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "iterations = range(nb_model.iterations)\n", - "\n", - "p1 = figure(title='', x_axis_label='Iterations', y_axis_label='Per word bound')\n", - "s1 = p1.line(iterations, nb_model.perwordbound[1:], color='red')\n", - "p1.circle(iterations, nb_model.perwordbound[1:], color='red')\n", - "s2 = p1.line(iterations, b_model.perwordbound[1:], color='blue')\n", - "p1.circle(iterations, b_model.perwordbound[1:], color='blue')\n", - "p1.plot_height=400\n", - "p1.plot_width=600\n", - "p1.toolbar_location = None\n", - "\n", - "show(p1)" - ] - }, - { - "cell_type": "code", - "execution_count": 73, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "5.0" - ] - }, - "execution_count": 73, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "authors_per_doc = [len(authors) for authors in nb_model.doc2author.values()]\n", - "sum(authors_per_doc) / len(nb_model.doc2author)" - ] - }, - { - "cell_type": "code", - "execution_count": 74, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/plain": [ - "5.05050505050505" - ] - }, - "execution_count": 74, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "docs_per_author = [len(docs) for docs in nb_model.author2doc.values()]\n", - "sum(docs_per_author) / len(nb_model.author2doc)" - ] - }, - { - "cell_type": "code", - "execution_count": 75, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "iterations = range(nb_model.iterations)\n", - "\n", - "p1 = figure(title='', x_axis_label='Iterations', y_axis_label='Per word bound')\n", - "s1 = p1.line(iterations, nb_model.perwordbound[1:], color='red')\n", - "p1.circle(iterations, nb_model.perwordbound[1:], color='red')\n", - "s2 = p1.line(iterations, b_model.perwordbound[1:], color='blue')\n", - "p1.circle(iterations, b_model.perwordbound[1:], color='blue')\n", - "p1.plot_height=400\n", - "p1.plot_width=600\n", - "p1.toolbar_location = None\n", - "\n", - "show(p1)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "## Scalability\n", - "\n", - "### W.r.t. number of authors" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "vocab_size = 1000\n", - "num_docs = 100\n", - "words_per_doc = 10 # Average unique words per document\n", - "word_freq = 10 # Average frequency of each word in document.\n", - "word_std = 10.0\n", - "corpus = []\n", - "for d in range(num_docs):\n", - " ids = random.sample(range(vocab_size), words_per_doc)\n", - " cts = np.random.normal(word_freq, word_std, len(ids))\n", - " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", - " doc = list(zip(ids, cts))\n", - " corpus.append(doc)\n", - " \n", - "def make_author2doc(docs_per_author=10, num_authors=100):\n", - " author2doc = {}\n", - " for a in range(num_authors):\n", - " doc_ids = random.sample(range(num_docs), docs_per_author)\n", - " author2doc[a] = doc_ids\n", - "\n", - " return author2doc\n", - "\n", - "def make_doc2author(authors_per_doc=10, num_authors=100):\n", - " doc2author = {}\n", - " for d in range(num_docs):\n", - " author_ids = random.sample(range(num_authors), authors_per_doc)\n", - " doc2author[d] = author_ids\n", - "\n", - " return doc2author" - ] - }, - { - "cell_type": "code", - "execution_count": 
14, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "num_topics = 5\n", - "num_authors_list = [100, 200, 400, 800, 1000]\n", - "authors_per_doc = 5\n", - "chunksize = len(corpus) + 1" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 1min 12s, sys: 16 ms, total: 1min 12s\n", - "Wall time: 1min 12s\n" - ] - } - ], - "source": [ - "%%time\n", - "# Time the entire process.\n", - "\n", - "train_time = []\n", - "eval_time = []\n", - "for num_authors in num_authors_list:\n", - " doc2author = make_doc2author(authors_per_doc, num_authors)\n", - " author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", - " \n", - " # Get training time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(10):\n", - " start = time()\n", - " model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", - " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", - " iterations=10, passes=10, eval_every=0, random_state=1)\n", - " avg_elapsed += time() - start\n", - " avg_elapsed /= 10\n", - " train_time.append(avg_elapsed)\n", - " \n", - " # Get evaluation time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(10):\n", - " start = time()\n", - " perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author)\n", - " avg_elapsed += time() - start\n", - " avg_elapsed /= 10\n", - " eval_time.append(avg_elapsed)" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p1 = figure(title='Train time', x_axis_label='Num authors', y_axis_label='Time (sec)')\n", - "s1 = p1.line(num_authors_list, train_time)\n", - "p1.circle(num_authors_list, train_time)\n", - "p1.plot_height=400\n", - "p1.plot_width=400\n", - "p1.toolbar_location = None\n", - "\n", - "p2 = figure(title='Evaluation time', x_axis_label='Num authors', y_axis_label='Time (sec)')\n", - "s2 = p2.line(num_authors_list, eval_time)\n", - "p2.circle(num_authors_list, eval_time)\n", - "p2.plot_height=400\n", - "p2.plot_width=400\n", - "p2.toolbar_location = None\n", - "\n", - "plots = row(p1, p2)\n", - "\n", - "show(plots)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### W.r.t. number of authors PER document\n", - "\n", - "Number of authors is constant." - ] - }, - { - "cell_type": "code", - "execution_count": 122, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "vocab_size = 1000\n", - "num_docs = 100\n", - "words_per_doc = 10 # Average unique words per document\n", - "word_freq = 10 # Average frequency of each word in document.\n", - "word_std = 10.0\n", - "corpus = []\n", - "for d in range(num_docs):\n", - " ids = random.sample(range(vocab_size), words_per_doc)\n", - " cts = np.random.normal(word_freq, word_std, len(ids))\n", - " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", - " doc = list(zip(ids, cts))\n", - " corpus.append(doc)\n", - " \n", - "def make_author2doc(docs_per_author=10, num_authors=100):\n", - " author2doc = {}\n", - " for a in range(num_authors):\n", - " doc_ids = random.sample(range(num_docs), docs_per_author)\n", - " author2doc[a] = doc_ids\n", - "\n", - " return author2doc\n", - "\n", - "def make_doc2author(authors_per_doc=10, num_authors=100):\n", - " doc2author = {}\n", - " for d in range(num_docs):\n", - " author_ids = random.sample(range(num_authors), authors_per_doc)\n", - " doc2author[d] = 
author_ids\n", - "\n", - " return doc2author" - ] - }, - { - "cell_type": "code", - "execution_count": 125, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "num_topics = 5\n", - "num_authors = 1000\n", - "authors_per_doc_list = [5**i for i in range(4)]\n", - "chunksize = len(corpus) + 1" - ] - }, - { - "cell_type": "code", - "execution_count": 126, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "96\n", - "383\n", - "924\n", - "1000\n", - "CPU times: user 12.1 s, sys: 36 ms, total: 12.1 s\n", - "Wall time: 12.1 s\n" - ] - } - ], - "source": [ - "%%time\n", - "# Time the entire process.\n", - "\n", - "train_time = []\n", - "eval_time = []\n", - "for authors_per_doc in authors_per_doc_list:\n", - " doc2author = make_doc2author(authors_per_doc, num_authors)\n", - " author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", - " \n", - " # Get training time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(1):\n", - " start = time()\n", - " model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", - " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", - " iterations=10, passes=10, eval_every=0, random_state=1)\n", - " avg_elapsed += time() - start\n", - " avg_elapsed /= 1\n", - " train_time.append(avg_elapsed)\n", - " \n", - " # Get evaluation time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(1):\n", - " start = time()\n", - " perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author)\n", - " avg_elapsed += time() - start\n", - " avg_elapsed /= 1\n", - " eval_time.append(avg_elapsed)" - ] - }, - { - "cell_type": "code", - "execution_count": 127, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p1 = figure(title='Train time', x_axis_label='Num authors', y_axis_label='Time (sec)')\n", - "s1 = p1.line(authors_per_doc_list, train_time)\n", - "p1.circle(authors_per_doc_list, train_time)\n", - "p1.plot_height=400\n", - "p1.plot_width=400\n", - "p1.toolbar_location = None\n", - "\n", - "p2 = figure(title='Evaluation time', x_axis_label='Num authors', y_axis_label='Time (sec)')\n", - "s2 = p2.line(authors_per_doc_list, eval_time)\n", - "p2.circle(authors_per_doc_list, eval_time)\n", - "p2.plot_height=400\n", - "p2.plot_width=400\n", - "p2.toolbar_location = None\n", - "\n", - "plots = row(p1, p2)\n", - "\n", - "show(column(Div(text='

Sclability w.r.t. number of authors per document

'), plots))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### W.r.t. number of documents" - ] - }, - { - "cell_type": "code", - "execution_count": 208, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 34min 38s, sys: 4.3 s, total: 34min 42s\n", - "Wall time: 34min 48s\n" - ] - } - ], - "source": [ - "%%time\n", - "\n", - "# Set some parameters.\n", - "num_authors = 1000\n", - "authors_per_doc = 5\n", - "num_topics = 5\n", - "vocab_size = 1000\n", - "words_per_doc = 10 # Average unique words per document\n", - "word_freq = 10 # Average frequency of each word in document.\n", - "word_std = 10.0\n", - "num_docs_list = [100, 1000, 10000, 100000]\n", - "\n", - "def make_doc2author(authors_per_doc=10, num_authors=100):\n", - " doc2author = {}\n", - " for d in range(num_docs):\n", - " author_ids = random.sample(range(num_authors), authors_per_doc)\n", - " doc2author[d] = author_ids\n", - "\n", - " return doc2author\n", - "\n", - "\n", - "# Generate corpus.\n", - "corpus_big = []\n", - "for d in range(num_docs_list[-1]):\n", - " ids = random.sample(range(vocab_size), words_per_doc)\n", - " cts = np.random.normal(word_freq, word_std, len(ids))\n", - " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", - " doc = list(zip(ids, cts))\n", - " corpus_big.append(doc)\n", - "\n", - "train_time = []\n", - "eval_time = []\n", - "memory_footprint = []\n", - "for num_docs in num_docs_list:\n", - " # Run tests.\n", - " \n", - " # Construct dictionaries.\n", - " doc2author = make_doc2author(authors_per_doc, num_authors)\n", - " author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", - " \n", - " corpus = random.sample(corpus_big, num_docs)\n", - " memory_footprint.append(sys.getsizeof(corpus))\n", - " \n", - " chunksize = len(corpus) + 1\n", - " \n", - " # Get training time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(1):\n", - " start = time()\n", - " 
model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", - " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", - " iterations=10, passes=10, eval_every=0, random_state=1)\n", - " avg_elapsed += time() - start\n", - " avg_elapsed /= 1\n", - " train_time.append(avg_elapsed)\n", - " \n", - " # Get evaluation time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(1):\n", - " start = time()\n", - " perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author)\n", - " avg_elapsed += time() - start\n", - " avg_elapsed /= 1\n", - " eval_time.append(avg_elapsed)" - ] - }, - { - "cell_type": "code", - "execution_count": 207, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p1 = figure(title='Train time', x_axis_label='Size of corpus (bytes)', y_axis_label='Time (sec)', \\\n", - " x_axis_type='log', x_range=(10**2, 10**6))\n", - "s1 = p1.line(memory_footprint, train_time)\n", - "p1.circle(memory_footprint, train_time)\n", - "p1.plot_height=400\n", - "p1.plot_width=400\n", - "p1.toolbar_location = None\n", - "\n", - "p2 = figure(title='Evaluation time', x_axis_label='Size of corpus (bytes)', y_axis_label='Time (sec)',\\\n", - " x_axis_type='log', x_range=(10**2, 10**6))\n", - "s2 = p2.line(memory_footprint, eval_time)\n", - "p2.circle(memory_footprint, eval_time)\n", - "p2.plot_height=400\n", - "p2.plot_width=400\n", - "p2.toolbar_location = None\n", - "\n", - "plots = row(p1, p2)\n", - "\n", - "show(column(Div(text='

Sclability w.r.t. number of documents

'), plots))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "print(num_docs_list)\n", - "print(memory_footprint)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### W.r.t. size of vocab" - ] - }, - { - "cell_type": "code", - "execution_count": 53, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 20.8 s, sys: 64 ms, total: 20.9 s\n", - "Wall time: 20.9 s\n" - ] - } - ], - "source": [ - "%%time\n", - "\n", - "# Set some parameters.\n", - "num_authors = 1000\n", - "authors_per_doc = 5\n", - "num_topics = 5\n", - "vocab_size_list = [10, 100, 1000, 10000, 100000]\n", - "words_per_doc = 10 # Average unique words per document\n", - "word_freq = 10 # Average frequency of each word in document.\n", - "word_std = 10.0\n", - "num_docs = 100\n", - "\n", - "def make_doc2author(authors_per_doc=10, num_authors=100):\n", - " doc2author = {}\n", - " for d in range(num_docs):\n", - " author_ids = random.sample(range(num_authors), authors_per_doc)\n", - " doc2author[d] = author_ids\n", - "\n", - " return doc2author\n", - "\n", - "\n", - "train_time = []\n", - "eval_time = []\n", - "for vocab_size in vocab_size_list:\n", - " # Run tests.\n", - " \n", - " # Generate corpus.\n", - " corpus = []\n", - " for d in range(num_docs):\n", - " ids = random.sample(range(vocab_size), words_per_doc)\n", - " cts = np.random.normal(word_freq, word_std, len(ids))\n", - " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", - " doc = list(zip(ids, cts))\n", - " corpus.append(doc)\n", - " \n", - " # Construct dictionaries.\n", - " doc2author = make_doc2author(authors_per_doc, num_authors)\n", - " author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", - " \n", - " chunksize = len(corpus) + 1\n", - " \n", - " # Get training time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(1):\n", - " 
start = time()\n", - " model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", - " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", - " iterations=10, passes=10, eval_every=0, random_state=1)\n", - " avg_elapsed += time() - start\n", - " avg_elapsed /= 1\n", - " train_time.append(avg_elapsed)\n", - " \n", - " # Get evaluation time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(10):\n", - " start = time()\n", - " perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author)\n", - " avg_elapsed += time() - start\n", - " avg_elapsed /= 10\n", - " eval_time.append(avg_elapsed)" - ] - }, - { - "cell_type": "code", - "execution_count": 56, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p1 = figure(title='Train time', x_axis_label='Size of vocab', y_axis_label='Time (sec)', \\\n", - " x_axis_type='log', x_range=(1e0, 1e6), y_axis_type='log')\n", - "s1 = p1.line(vocab_size_list, train_time)\n", - "p1.circle(vocab_size_list, train_time)\n", - "p1.plot_height=400\n", - "p1.plot_width=400\n", - "p1.toolbar_location = None\n", - "\n", - "p2 = figure(title='Evaluation time', x_axis_label='Size of vocab', y_axis_label='Time (sec)',\\\n", - " x_axis_type='log', x_range=(1e0, 1e6), y_axis_type='log')\n", - "s1 = p2.line(vocab_size_list, eval_time)\n", - "p2.circle(vocab_size_list, eval_time)\n", - "p2.plot_height=400\n", - "p2.plot_width=400\n", - "p2.toolbar_location = None\n", - "\n", - "plots = row(p1, p2)\n", - "\n", - "show(column(Div(text='

Sclability w.r.t. size of vocabulary

'), plots))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### W.r.t. number of topics" - ] - }, - { - "cell_type": "code", - "execution_count": 62, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 25.7 s, sys: 56 ms, total: 25.8 s\n", - "Wall time: 25.8 s\n" - ] - } - ], - "source": [ - "%%time\n", - "\n", - "# Set some parameters.\n", - "num_authors = 1000\n", - "authors_per_doc = 5\n", - "num_topics_list = [5, 25, 125, 625]\n", - "vocab_size = 1000\n", - "words_per_doc = 10 # Average unique words per document\n", - "word_freq = 10 # Average frequency of each word in document.\n", - "word_std = 10.0\n", - "num_docs = 100\n", - "\n", - "def make_doc2author(authors_per_doc=10, num_authors=100):\n", - " doc2author = {}\n", - " for d in range(num_docs):\n", - " author_ids = random.sample(range(num_authors), authors_per_doc)\n", - " doc2author[d] = author_ids\n", - "\n", - " return doc2author\n", - "\n", - "# Generate corpus.\n", - "corpus = []\n", - "for d in range(num_docs):\n", - " ids = random.sample(range(vocab_size), words_per_doc)\n", - " cts = np.random.normal(word_freq, word_std, len(ids))\n", - " cts = [int(np.ceil(abs(cnt))) for cnt in cts]\n", - " doc = list(zip(ids, cts))\n", - " corpus.append(doc)\n", - "\n", - "# Construct dictionaries.\n", - "doc2author = make_doc2author(authors_per_doc, num_authors)\n", - "author2doc = atmodel.construct_author2doc(corpus, doc2author)\n", - "\n", - "chunksize = len(corpus) + 1\n", - "\n", - "train_time = []\n", - "for num_topics in num_topics_list:\n", - " # Get training time.\n", - " avg_elapsed = 0.0\n", - " for _ in range(1):\n", - " start = time()\n", - " model = AuthorTopicModel(corpus=corpus, num_topics=num_topics, \\\n", - " author2doc=author2doc, doc2author=doc2author, chunksize=chunksize, \\\n", - " iterations=10, passes=10, eval_every=0, random_state=1)\n", - " avg_elapsed += time() - 
start\n", - " avg_elapsed /= 1\n", - " train_time.append(avg_elapsed)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 66, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "p1 = figure(title='Train time', x_axis_label='Number of topics', y_axis_label='Time (sec)', \\\n", - " x_axis_type='log', x_range=(1e0, 2e3), y_axis_type='log')\n", - "s1 = p1.line(num_topics_list, train_time)\n", - "p1.circle(num_topics_list, train_time)\n", - "p1.plot_height=400\n", - "p1.plot_width=400\n", - "p1.toolbar_location = None\n", - "\n", - "\n", - "show(column(Div(text='

Sclability w.r.t. number of topics

'), p1))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.5.2" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/docs/notebooks/atmodel_tutorial.ipynb b/docs/notebooks/atmodel_tutorial.ipynb index 12eb1c0399..febc91c50a 100644 --- a/docs/notebooks/atmodel_tutorial.ipynb +++ b/docs/notebooks/atmodel_tutorial.ipynb @@ -6,31 +6,29 @@ "source": [ "# The author-topic model: LDA with metadata\n", "\n", - "**TODO:** StackExchange example probably won't be included, as I'm not having good results with it and cannot make the tag prediction to work at all. If it is not included, fix this section, and do similarity queries with authors.\n", - "\n", - "In this tutorial, you will learn how to use the author-topic model in Gensim. First, we will apply it to a corpus consisting of scientific papers, to get insight about the authors of the papers. After that, we will apply the model on StackExchange posts with tags, and implement a simple automatic tagging system.\n", + "In this tutorial, you will learn how to use the author-topic model in Gensim. We will apply it to a corpus consisting of scientific papers, to get insight about the authors of the papers.\n", "\n", "The author-topic model is in extension of Latent Dirichlet Allocation (LDA). Each document is associated with a set of authors, and the topic distributions for each of these authors are learned. Each author is also associated with multiple documents. 
To learn about the theoretical side of the author-topic model, see [Rosen-Zvi and co-authors](https://mimno.infosci.cornell.edu/info6150/readings/398.pdf), for example.\n", "\n", - "Naturally, familiarity with topic modelling, LDA and Gensim is assumed in this tutorial. If you are not familiar with either LDA, or its Gensim implementation, consider some of these resources:\n", + "Naturally, familiarity with topic modelling, LDA and Gensim is assumed in this tutorial. If you are not familiar with either LDA, or its Gensim implementation, I would recommend starting there. Consider some of these resources:\n", "* Gentle introduction to the LDA model: http://blog.echen.me/2011/08/22/introduction-to-latent-dirichlet-allocation/\n", "* Gensim's LDA API documentation: https://radimrehurek.com/gensim/models/ldamodel.html\n", "* Topic modelling in Gensim: http://radimrehurek.com/topic_modeling_tutorial/2%20-%20Topic%20Modeling.html\n", "* Pre-processing and training LDA: https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb\n", "\n", - "> NOTE:\n", - ">\n", - "> To run this tutorial on your own, install Gensim, SpaCy, Scikit-Learn, and Bokeh.\n", - "\n", - "**FIXME:** technical details can be found at TODO \n", + "Technical details about the author-topic model:\n", + "* The model was introduced by Rosen-Zvi and co-authors: https://mimno.infosci.cornell.edu/info6150/readings/398.pdf\n", + "* The algorithm used in Gensim is described here: **FIXME: insert link to report**.\n", "\n", - "In part 1 of this tutorial, we will illustrate basic usage of the model, and explore the resulting representation. 
How to load and pre-process the dataset used is also covered.\n", + "> **NOTE:**\n", + ">\n", + "> To run this tutorial on your own, install Gensim, SpaCy, Scikit-Learn, Bokeh and Pandas.\n", "\n", - "In part 2, we will develop a simple automatic tagging system, and some more of the model's functionality will be shown.\n", + "In this tutorial, we will learn how to prepare data for the model, how to train it, and how to explore the resulting representation in different ways.\n", "\n", - "## Part 1: analyzing scientific papers\n", + "## Analyzing scientific papers\n", "\n", - "The data used in part 1 consists of scientific papers about machine learning, from the Neural Information Processing Systems conference (NIPS). It is the same dataset used in the [Pre-processing and training LDA](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb) tutorial, mentioned earlier.\n", + "The data we will be using consists of scientific papers about machine learning, from the Neural Information Processing Systems conference (NIPS). It is the same dataset used in the [Pre-processing and training LDA](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb) tutorial, mentioned earlier.\n", "\n", "As in the LDA tutorial, we will be performing qualitative analysis of the model, and at times this will require an understanding of the subject matter of the data. If you try running this tutorial on your own, consider applying it on a dataset with subject matter that you are familiar with. 
For example, try one of the [StackExchange datadump datasets](https://archive.org/details/stackexchange).\n", "\n", @@ -92,7 +90,7 @@ "cell_type": "code", "execution_count": 2, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ @@ -145,8 +143,6 @@ "* Add frequent bigrams.\n", "* Remove frequent and rare words.\n", "\n", - "Part 2 will use the same pre-processing, for the most part, so we shall explain it here.\n", - "\n", "A lot of the heavy lifting will be done by the great package, Spacy. Spacy markets itself as \"industrial-strength natural language processing\", is fast, enables multiprocessing, and is easy to use. First, let's import it and load the NLP pipline in english." ] }, @@ -168,9 +164,7 @@ "source": [ "In the code below, Spacy takes care of tokenization, removing non-alphabetic characters, removal of stopwords, lemmatization and named entity recognition.\n", "\n", - "Note that we only keep named entities that consist of more than one word, as single word named entities are already there.\n", - "\n", - "**TODO:** use custom pipeline. Using the entire SpaCy pipeline is very expensive; for example, I'm probably doing POS tagging below, but not using it for anything." + "Note that we only keep named entities that consist of more than one word, as single word named entities are already there." 
] }, { @@ -184,8 +178,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 9min 8s, sys: 464 ms, total: 9min 9s\n", - "Wall time: 3min 3s\n" + "CPU times: user 9min 6s, sys: 276 ms, total: 9min 7s\n", + "Wall time: 2min 52s\n" ] } ], @@ -269,7 +263,7 @@ "cell_type": "code", "execution_count": 7, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [ @@ -370,8 +364,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 3.75 s, sys: 224 ms, total: 3.98 s\n", - "Wall time: 3.78 s\n" + "CPU times: user 3.56 s, sys: 316 ms, total: 3.87 s\n", + "Wall time: 3.65 s\n" ] } ], @@ -394,7 +388,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 11, "metadata": { "collapsed": false }, @@ -403,8 +397,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 11min 57s, sys: 2min 16s, total: 14min 13s\n", - "Wall time: 11min 38s\n" + "CPU times: user 11min 59s, sys: 2min 14s, total: 14min 13s\n", + "Wall time: 11min 41s\n" ] } ], @@ -429,7 +423,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 12, "metadata": { "collapsed": false }, @@ -438,7 +432,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Topic coherence: -1.803e+03\n" + "Topic coherence: -1.847e+03\n" ] } ], @@ -456,7 +450,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 13, "metadata": { "collapsed": false }, @@ -468,7 +462,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 14, "metadata": { "collapsed": false }, @@ -484,12 +478,14 @@ "source": [ "### Explore author-topic representation\n", "\n", - "Now that we have trained a model, we can start exploring the authors and the topics. First, let's simply print the most important words in the topics." 
+ "Now that we have trained a model, we can start exploring the authors and the topics.\n", + "\n", + "First, let's simply print the most important words in the topics. Below we have printed topic 0. As we can see, each topic is associated with a set of words, and each word has a probability of being expressed under that topic." ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 15, "metadata": { "collapsed": false }, @@ -497,35 +493,106 @@ { "data": { "text/plain": [ - "[(0,\n", - " '0.029*\"image\" + 0.006*\"constraint\" + 0.006*\"component\" + 0.006*\"region\" + 0.006*\"matrix\" + 0.006*\"pixel\" + 0.006*\"solution\" + 0.005*\"surface\" + 0.005*\"object\" + 0.005*\"source\"'),\n", - " (1,\n", - " '0.008*\"optimal\" + 0.007*\"action\" + 0.007*\"q\" + 0.007*\"approximation\" + 0.006*\"policy\" + 0.005*\"gaussian\" + 0.005*\"w\" + 0.005*\"noise\" + 0.005*\"generalization\" + 0.004*\"convergence\"'),\n", - " (2,\n", - " '0.014*\"visual\" + 0.012*\"field\" + 0.010*\"object\" + 0.009*\"image\" + 0.008*\"map\" + 0.008*\"layer\" + 0.008*\"eye\" + 0.008*\"direction\" + 0.008*\"activity\" + 0.008*\"cell\"'),\n", - " (3,\n", - " '0.022*\"control\" + 0.010*\"dynamic\" + 0.009*\"analog\" + 0.008*\"circuit\" + 0.008*\"trajectory\" + 0.007*\"neuron\" + 0.007*\"chip\" + 0.007*\"controller\" + 0.006*\"motor\" + 0.005*\"gate\"'),\n", - " (4,\n", - " '0.013*\"neuron\" + 0.009*\"f\" + 0.008*\"p\" + 0.008*\"memory\" + 0.007*\"let\" + 0.007*\"w\" + 0.007*\"bound\" + 0.007*\"capacity\" + 0.006*\"theorem\" + 0.006*\"threshold\"'),\n", - " (5,\n", - " '0.021*\"neuron\" + 0.021*\"cell\" + 0.010*\"spike\" + 0.010*\"response\" + 0.010*\"signal\" + 0.008*\"frequency\" + 0.008*\"synapse\" + 0.008*\"synaptic\" + 0.007*\"stimulus\" + 0.006*\"voltage\"'),\n", - " (6,\n", - " '0.013*\"speech\" + 0.010*\"word\" + 0.010*\"recognition\" + 0.009*\"classifier\" + 0.008*\"class\" + 0.008*\"mixture\" + 0.007*\"likelihood\" + 0.006*\"classification\" + 0.006*\"sequence\" + 
0.006*\"density\"'),\n", - " (7,\n", - " '0.011*\"layer\" + 0.006*\"face\" + 0.006*\"node\" + 0.006*\"classification\" + 0.006*\"architecture\" + 0.005*\"hidden\" + 0.005*\"table\" + 0.005*\"training_set\" + 0.004*\"memory\" + 0.004*\"image\"'),\n", - " (8,\n", - " '0.010*\"class\" + 0.008*\"distance\" + 0.007*\"recognition\" + 0.007*\"tree\" + 0.007*\"character\" + 0.007*\"kernel\" + 0.005*\"classification\" + 0.005*\"p\" + 0.004*\"machine\" + 0.004*\"y\"'),\n", - " (9,\n", - " '0.010*\"representation\" + 0.010*\"rule\" + 0.009*\"net\" + 0.007*\"hidden\" + 0.006*\"hidden_unit\" + 0.005*\"sequence\" + 0.005*\"activation\" + 0.005*\"connectionist\" + 0.004*\"architecture\" + 0.004*\"object\"')]" + "[('chip', 0.014645100754555081),\n", + " ('circuit', 0.011967493386263996),\n", + " ('analog', 0.011466032752399413),\n", + " ('control', 0.010067258628938444),\n", + " ('implementation', 0.0078096719430403956),\n", + " ('design', 0.0072620826472022419),\n", + " ('implement', 0.0063648695668359189),\n", + " ('signal', 0.0063389759280913392),\n", + " ('vlsi', 0.0059415519461153785),\n", + " ('processor', 0.0056545823226162124)]" ] }, - "execution_count": 18, + "execution_count": 15, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "model.show_topics(num_topics=10)" + "model.show_topic(0)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Below, we have given each topic a label based on what each topic seems to be about intuitively. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "topic_labels = ['Circuits', 'Neuroscience', 'Numerical optimization', 'Object recognition', \\\n", + " 'Math/general', 'Robotics', 'Character recognition', \\\n", + " 'Reinforcement learning', 'Speech recognition', 'Bayesian modelling']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Rather than just calling `model.show_topics(num_topics=10)`, we format the output a bit so it is easier to get an overview." + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Label: Circuits\n", + "Words: chip circuit analog control implementation design implement signal vlsi processor \n", + "\n", + "Label: Neuroscience\n", + "Words: neuron cell spike response synaptic activity frequency stimulus synapse signal \n", + "\n", + "Label: Numerical optimization\n", + "Words: gradient noise prediction w optimal nonlinear matrix approximation series variance \n", + "\n", + "Label: Object recognition\n", + "Words: image visual object motion field direction representation map position orientation \n", + "\n", + "Label: Math/general\n", + "Words: bound f generalization class let w p theorem y threshold \n", + "\n", + "Label: Robotics\n", + "Words: dynamic control field trajectory neuron motor net forward l movement \n", + "\n", + "Label: Character recognition\n", + "Words: node distance character layer recognition matrix image sequence p code \n", + "\n", + "Label: Reinforcement learning\n", + "Words: action policy q reinforcement rule control optimal representation environment sequence \n", + "\n", + "Label: Speech recognition\n", + "Words: recognition speech word layer classifier net classification hidden class context \n", + "\n", + "Label: Bayesian modelling\n", + "Words: mixture gaussian 
likelihood prior data bayesian density sample cluster posterior \n", + "\n" + ] + } + ], + "source": [ + "for topic in model.show_topics(num_topics=10):\n", + " print('Label: ' + topic_labels[topic[0]])\n", + " words = ''\n", + " for word, prob in model.show_topic(topic[0]):\n", + " words += word + ' '\n", + " print('Words: ' + words)\n", + " print()" ] }, { @@ -534,18 +601,67 @@ "source": [ "These topics are by no means perfect. They have problems such as *chained topics*, *intruded words*, *random topics*, and *unbalanced topics* (see [Mimno and co-authors 2011](https://people.cs.umass.edu/~wallach/publications/mimno11optimizing.pdf)). They will do for the purposes of this tutorial, however.\n", "\n", - "**FIXME:** re-write the interpretation of the topics below, if necessary.\n", - "\n", - "Below, we use the `model[name]` syntax to retrieve the topic distribution for some authors. Comparing the authors' topics with the topics above, we observe that the model has correctly identified that Yann LeCun and Geoffrey E. Hinton both have something to do with neural networks (topic 5), speech recognition (topic 1 and 5) and statistical machine learning (topic 9). We also observe that Yann LeCun has been particularly occupied with image processing, and perhaps that Geoffrey E. Hinton has worked with visual perception in neuroscience (this is less clear).\n", + "Below, we use the `model[name]` syntax to retrieve the topic distribution for an author. As we can see, each topic has a probability of being expressed given the particalar author." + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(6, 0.99976720177983869)]" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model['YannLeCun']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's print the top topics of some authors. 
First, we make a function to help us do this more easily." + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from pprint import pprint\n", "\n", - "Similarly, Terrence J. Sejnowski and James M. Bower are both neuroscientist, first and foremost, and their topic distributions seem to reflect that.\n", + "def show_author(name):\n", + " print('\\n%s' % name)\n", + " print('Docs:', model.author2doc[name])\n", + " print('Topics:')\n", + " pprint([(topic_labels[topic[0]], topic[1]) for topic in model[name]])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We print some high profile researchers and inspect them. Three of these, Yann LeCun, Geoffrey E. Hinton and Christof Koch, are spot on. \n", "\n", - "**TODO:** try to make it less cumbersome to compare author-topics below and topics above." + "Terrence J. Sejnowski's results are surprising, however. He is a neuroscientist, so we would expect him to get the \"neuroscience\" label. This may indicate that Sejnowski works with the neuroscience aspects of visual perception, or perhaps simply that we have labeled the topic incorrectly." 
] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 55, "metadata": { "collapsed": false }, @@ -558,85 +674,119 @@ "YannLeCun\n", "Docs: [143, 406, 370, 495, 456, 449, 595, 616, 760, 752, 1532]\n", "Topics:\n", - "[(8, 0.99976156441010589)]\n", + "[('Character recognition', 0.99976720177983869)]\n" + ] + } + ], + "source": [ + "show_author('YannLeCun')" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ "\n", "GeoffreyE.Hinton\n", "Docs: [56, 143, 284, 230, 197, 462, 463, 430, 688, 784, 826, 848, 869, 1387, 1684, 1728]\n", "Topics:\n", - "[(1, 0.16022855953677534),\n", - " (2, 0.18085978419244769),\n", - " (8, 0.18989122578835921),\n", - " (9, 0.46894303639673746)]\n", + "[('Object recognition', 0.42128917017624745),\n", + " ('Math/general', 0.043249835412857811),\n", + " ('Robotics', 0.11149925993091593),\n", + " ('Bayesian modelling', 0.42388500261455564)]\n" + ] + } + ], + "source": [ + "show_author('GeoffreyE.Hinton')" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ "\n", "TerrenceJ.Sejnowski\n", "Docs: [513, 530, 539, 468, 611, 581, 600, 594, 703, 711, 849, 981, 944, 865, 850, 883, 881, 1221, 1137, 1224, 1146, 1282, 1248, 1179, 1424, 1359, 1528, 1484, 1571, 1727, 1732]\n", "Topics:\n", - "[(2, 0.99992562868881996)]\n", + "[('Object recognition', 0.99992379088787087)]\n" + ] + } + ], + "source": [ + "show_author('TerrenceJ.Sejnowski')" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ "\n", - "JamesM.Bower\n", - "Docs: [17, 48, 58, 131, 101, 126, 127, 281, 208, 225]\n", + "ChristofKoch\n", + "Docs: [9, 221, 266, 272, 349, 411, 337, 371, 450, 483, 653, 
663, 754, 712, 778, 921, 1212, 1285, 1254, 1533, 1489, 1580, 1441, 1657]\n", "Topics:\n", - "[(5, 0.99980243307117478)]\n" + "[('Neuroscience', 0.99989393011046035)]\n" ] } ], "source": [ - "from pprint import pprint\n", - "\n", - "name = 'YannLeCun'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[name])\n", - "print('Topics:')\n", - "pprint(model[name])\n", - "\n", - "name = 'GeoffreyE.Hinton'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[name])\n", - "print('Topics:')\n", - "pprint(model[name])\n", - "\n", - "name = 'TerrenceJ.Sejnowski'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[name])\n", - "print('Topics:')\n", - "pprint(model[name])\n", - "\n", - "name = 'JamesM.Bower'\n", - "print('\\n%s' % name)\n", - "print('Docs:', author2doc[name])\n", - "print('Topics:')\n", - "pprint(model[name])" + "show_author('ChristofKoch')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "We can construct the `doc2author` dictionary ourselves." + "#### Simple model evaluation methods\n", + "\n", + "We can compute the per-word bound, which is a measure of the model's predictive performance (you could also say that it is the reconstruction error).\n", + "\n", + "To do that, we need the `doc2author` dictionary, which we can build automatically." ] }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 24, "metadata": { "collapsed": false }, "outputs": [], "source": [ "from gensim.models import atmodel\n", - "doc2author = atmodel.construct_doc2author(author2doc=author2doc, corpus=corpus)" + "doc2author = atmodel.construct_doc2author(model.corpus, model.author2doc)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "We can also compute the per-word bound, which is a measure of the model's predictive performance (you could also say that it is the reconstruction error)." + "Now let's evaluate the per-word bound." 
] }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 25, "metadata": { "collapsed": false }, @@ -645,17 +795,18 @@ "name": "stdout", "output_type": "stream", "text": [ - "-7.75230922299\n" + "-6.9955968712\n" ] } ], "source": [ "# Compute the per-word bound.\n", "# Number of words in corpus.\n", - "corpus_words = sum(cnt for document in corpus for _, cnt in document)\n", + "corpus_words = sum(cnt for document in model.corpus for _, cnt in document)\n", "\n", "# Compute bound and divide by number of words.\n", - "perwordbound = model.bound(corpus, author2doc=author2doc, doc2author=doc2author) / corpus_words\n", + "perwordbound = model.bound(model.corpus, author2doc=model.author2doc, \\\n", + " doc2author=model.doc2author) / corpus_words\n", "print(perwordbound)" ] }, @@ -668,7 +819,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 26, "metadata": { "collapsed": false }, @@ -677,13 +828,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 15.8 s, sys: 12 ms, total: 15.8 s\n", - "Wall time: 15.8 s\n" + "CPU times: user 15.6 s, sys: 4 ms, total: 15.6 s\n", + "Wall time: 15.6 s\n" ] } ], "source": [ - "%time top_topics = model.top_topics(corpus)" + "%time top_topics = model.top_topics(model.corpus)" ] }, { @@ -703,17 +854,26 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 27, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 35.4 s, sys: 1.16 s, total: 36.5 s\n", + "Wall time: 36.4 s\n" + ] + } + ], "source": [ "%%time\n", "from sklearn.manifold import TSNE\n", "tsne = TSNE(n_components=2, random_state=0)\n", "smallest_author = 0 # Ignore authors with documents less than this.\n", - "authors = [model.author2id[a] for a in model.author2id.keys() if len(author2doc[a]) > smallest_author]\n", + "authors = [model.author2id[a] for a in model.author2id.keys() if 
len(model.author2doc[a]) >= smallest_author]\n", "_ = tsne.fit_transform(model.state.gamma[authors, :]) # Result stored in tsne.embedding_" ] }, @@ -723,14 +883,14 @@ "source": [ "We are now ready to make the plot.\n", "\n", - "If you are unable to view or interact with the plot below, it is available [here]() (**TODO:** make a page for the plot, and include the link), or view the entire notebook [here]() (**TODO:** make nvbiewer page for the notebook or something).\n", + "Note that if you run this notebook yourself, you will see a different graph. The random initialization of the model will be different, and the result will thus be different to some degree. You may find an entirely different representation of the data, or it may show the same interpretation slightly differently.\n", "\n", - "Note that if you run this notebook yourself, you will see a different graph. The random initialization of the model will be different, and the result will thus be different to some degree. You may find an entirely different representation of the data, or it may show the same interpretation slightly differently." + "If you can't see the plot, you are probably viewing this tutorial in a Jupyter Notebook. View it in HTML instead at https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/atmodel_tutorial.html." ] }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 28, "metadata": { "collapsed": false, "scrolled": true @@ -742,7 +902,7 @@ "\n", "
\n", " \n", - " Loading BokehJS ...\n", + " Loading BokehJS ...\n", "
" ] }, @@ -790,7 +950,7 @@ "\n", " function display_loaded() {\n", " if (window.Bokeh !== undefined) {\n", - " Bokeh.$(\"#7678b991-d847-4f9b-8db4-2b9c003d91ee\").text(\"BokehJS successfully loaded.\");\n", + " Bokeh.$(\"#c8922b96-b8ff-4ac3-b6c6-882014f91988\").text(\"BokehJS successfully loaded.\");\n", " } else if (Date.now() < window._bokeh_timeout) {\n", " setTimeout(display_loaded, 100)\n", " }\n", @@ -832,9 +992,9 @@ " console.log(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", " document.getElementsByTagName(\"head\")[0].appendChild(s);\n", " }\n", - " };var element = document.getElementById(\"7678b991-d847-4f9b-8db4-2b9c003d91ee\");\n", + " };var element = document.getElementById(\"c8922b96-b8ff-4ac3-b6c6-882014f91988\");\n", " if (element == null) {\n", - " console.log(\"Bokeh: ERROR: autoload.js configured with elementid '7678b991-d847-4f9b-8db4-2b9c003d91ee' but no matching script tag was found. \")\n", + " console.log(\"Bokeh: ERROR: autoload.js configured with elementid 'c8922b96-b8ff-4ac3-b6c6-882014f91988' but no matching script tag was found. 
\")\n", " return false;\n", " }\n", "\n", @@ -847,7 +1007,7 @@ " \n", " function(Bokeh) {\n", " \n", - " Bokeh.$(\"#7678b991-d847-4f9b-8db4-2b9c003d91ee\").text(\"BokehJS is loading...\");\n", + " Bokeh.$(\"#c8922b96-b8ff-4ac3-b6c6-882014f91988\").text(\"BokehJS is loading...\");\n", " },\n", " function(Bokeh) {\n", " console.log(\"Bokeh: injecting CSS: https://cdn.pydata.org/bokeh/release/bokeh-0.12.3.min.css\");\n", @@ -870,7 +1030,7 @@ " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", " window._bokeh_failed_load = true;\n", " } else if (!force) {\n", - " var cell = $(\"#7678b991-d847-4f9b-8db4-2b9c003d91ee\").parents('.cell').data().cell;\n", + " var cell = $(\"#c8922b96-b8ff-4ac3-b6c6-882014f91988\").parents('.cell').data().cell;\n", " cell.output_area.append_execute_result(NB_LOAD_WARNING)\n", " }\n", "\n", @@ -900,7 +1060,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 29, "metadata": { "collapsed": false }, @@ -911,7 +1071,7 @@ "\n", "\n", "
\n", - "
\n", + "
\n", "
\n", "