From ff8bec22ad97b7936d9bcb02d76dcbec0733b4cb Mon Sep 17 00:00:00 2001 From: Andrey Kutuzov Date: Fri, 13 Jul 2018 21:36:35 +0200 Subject: [PATCH 01/66] Update docstring: new analogy evaluation method (#2130) * Update docstring: new analogy evaluation method * Same for fasttext --- gensim/models/fasttext.py | 2 +- gensim/models/keyedvectors.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gensim/models/fasttext.py b/gensim/models/fasttext.py index 0b69ddcc8b..6616cab90e 100644 --- a/gensim/models/fasttext.py +++ b/gensim/models/fasttext.py @@ -70,7 +70,7 @@ And on word analogies ->>> analogies_result = model.wv.accuracy(datapath('questions-words.txt')) +>>> analogies_result = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) """ diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index 3ac3bff062..963e9e79c5 100644 --- a/gensim/models/keyedvectors.py +++ b/gensim/models/keyedvectors.py @@ -140,7 +140,7 @@ And on word analogies ->>> analogy_scores = model.wv.accuracy(datapath('questions-words.txt')) +>>> analogy_scores = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) and so on. From e4bf94cb83720a2dac5bb1bf129b9ecef051672e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Radim=20=C5=98eh=C5=AF=C5=99ek?= Date: Tue, 31 Jul 2018 09:09:23 +0300 Subject: [PATCH 02/66] Additional documentation fixes (#2121) --- docs/src/_index.rst.unused | 100 +++++++++++++++++++++++++++++++++++ docs/src/_license.rst.unused | 26 +++++++++ gensim/models/doc2vec.py | 14 ++--- gensim/models/fasttext.py | 13 ++--- gensim/models/word2vec.py | 13 ++--- 5 files changed, 147 insertions(+), 19 deletions(-) create mode 100644 docs/src/_index.rst.unused create mode 100644 docs/src/_license.rst.unused diff --git a/docs/src/_index.rst.unused b/docs/src/_index.rst.unused new file mode 100644 index 0000000000..71390c1060 --- /dev/null +++ b/docs/src/_index.rst.unused @@ -0,0 +1,100 @@ + +:github_url: https://github.com/RaRe-Technologies/gensim + +Gensim documentation +=================================== + +============ +Introduction +============ + +Gensim is a free Python library designed to automatically extract semantic +topics from documents, as efficiently (computer-wise) and painlessly (human-wise) as possible. + +Gensim is designed to process raw, unstructured digital texts ("plain text"). + +The algorithms in Gensim, such as **Word2Vec**, **FastText**, **Latent Semantic Analysis**, **Latent Dirichlet Allocation** and **Random Projections**, discover semantic structure of documents by examining statistical co-occurrence patterns within a corpus of training documents. These algorithms are **unsupervised**, which means no human input is necessary -- you only need a corpus of plain text documents. + +Once these statistical patterns are found, any plain text documents can be succinctly +expressed in the new, semantic representation and queried for topical similarity +against other documents, words or phrases. + +.. note:: + If the previous paragraphs left you confused, you can read more about the `Vector + Space Model `_ and `unsupervised + document analysis `_ on Wikipedia. + + +.. _design: + +Features +-------- + +* **Memory independence** -- there is no need for the whole training corpus to + reside fully in RAM at any one time (can process large, web-scale corpora). +* **Memory sharing** -- trained models can be persisted to disk and loaded back via mmap. Multiple processes can share the same data, cutting down RAM footprint. 
+* Efficient implementations for several popular vector space algorithms, + including Word2Vec, Doc2Vec, FastText, TF-IDF, Latent Semantic Analysis (LSI, LSA), + Latent Dirichlet Allocation (LDA) or Random Projection. +* I/O wrappers and readers from several popular data formats. +* Fast similarity queries for documents in their semantic representation. + +The **principal design objectives** behind Gensim are: + +1. Straightforward interfaces and low API learning curve for developers. Good for prototyping. +2. Memory independence with respect to the size of the input corpus; all intermediate + steps and algorithms operate in a streaming fashion, accessing one document + at a time. + +.. seealso:: + + We built a high performance server for NLP, document analysis, indexing, search and clustering: https://scaletext.ai. + ScaleText is a commercial product, available both on-prem or as SaaS. + Reach out at info@scaletext.com if you need an industry-grade tool with professional support. + +.. _availability: + +Availability +------------ + +Gensim is licensed under the OSI-approved `GNU LGPLv2.1 license `_ and can be downloaded either from its `github repository `_ or from the `Python Package Index `_. + +.. seealso:: + + See the :doc:`install ` page for more info on Gensim deployment. + + +.. toctree:: + :glob: + :maxdepth: 1 + :caption: Getting started + + install + intro + support + about + license + citing + + +.. toctree:: + :maxdepth: 1 + :caption: Tutorials + + tutorial + tut1 + tut2 + tut3 + + +.. toctree:: + :maxdepth: 1 + :caption: API Reference + + apiref + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` diff --git a/docs/src/_license.rst.unused b/docs/src/_license.rst.unused new file mode 100644 index 0000000000..d85983aa44 --- /dev/null +++ b/docs/src/_license.rst.unused @@ -0,0 +1,26 @@ +:orphan: + +.. _license: + +Licensing +--------- + +Gensim is licensed under the OSI-approved `GNU LGPLv2.1 license `_. + +This means that it's free for both personal and commercial use, but if you make any +modification to Gensim that you distribute to other people, you have to disclose +the source code of these modifications. + +Apart from that, you are free to redistribute Gensim in any way you like, though you're +not allowed to modify its license (doh!). + +My intent here is to **get more help and community involvement** with the development of Gensim. +The legalese is therefore less important to me than your input and contributions. + +`Contact me `_ if LGPL doesn't fit your bill but you'd like the LGPL restrictions liften. + +.. seealso:: + + We built a high performance server for NLP, document analysis, indexing, search and clustering: https://scaletext.ai. + ScaleText is a commercial product, available both on-prem or as SaaS. + Reach out at info@scaletext.com if you need an industry-grade tool with professional support. diff --git a/gensim/models/doc2vec.py b/gensim/models/doc2vec.py index 57638e71bc..08cbf7f106 100644 --- a/gensim/models/doc2vec.py +++ b/gensim/models/doc2vec.py @@ -20,13 +20,13 @@ `_. **Make sure you have a C compiler before installing Gensim, to use the optimized doc2vec routines** (70x speedup -compared to plain NumPy implementation `_). +compared to plain NumPy implementation, https://rare-technologies.com/parallelizing-word2vec-in-python/). 
-Examples --------- +Usage examples +============== -Initialize & train a model +Initialize & train a model: >>> from gensim.test.utils import common_texts >>> from gensim.models.doc2vec import Doc2Vec, TaggedDocument @@ -34,7 +34,7 @@ >>> documents = [TaggedDocument(doc, [i]) for i, doc in enumerate(common_texts)] >>> model = Doc2Vec(documents, vector_size=5, window=2, min_count=1, workers=4) -Persist a model to disk +Persist a model to disk: >>> from gensim.test.utils import get_tmpfile >>> @@ -43,11 +43,11 @@ >>> model.save(fname) >>> model = Doc2Vec.load(fname) # you can continue training with the loaded model! -If you're finished training a model (=no more updates, only querying, reduce memory usage), you can do +If you're finished training a model (=no more updates, only querying, reduce memory usage), you can do: >>> model.delete_temporary_training_data(keep_doctags_vectors=True, keep_inference=True) -Infer vector for new document +Infer vector for a new document: >>> vector = model.infer_vector(["system", "response"]) diff --git a/gensim/models/fasttext.py b/gensim/models/fasttext.py index 73a223f4a7..5ac973599d 100644 --- a/gensim/models/fasttext.py +++ b/gensim/models/fasttext.py @@ -13,6 +13,7 @@ This module contains a fast native C implementation of Fasttext with Python interfaces. It is **not** only a wrapper around Facebook's implementation. + For a tutorial see `this noteboook `_. @@ -22,14 +23,14 @@ Usage examples -------------- -Initialize and train a model +Initialize and train a model: >>> from gensim.test.utils import common_texts >>> from gensim.models import FastText >>> >>> model = FastText(common_texts, size=4, window=3, min_count=1, iter=10) -Persist a model to disk with +Persist a model to disk with: >>> from gensim.test.utils import get_tmpfile >>> @@ -38,7 +39,7 @@ >>> model.save(fname) >>> model = FastText.load(fname) # you can continue training with the loaded model! -Retrieve word-vector for vocab and out-of-vocab word +Retrieve word-vector for vocab and out-of-vocab word: >>> existent_word = "computer" >>> existent_word in model.wv.vocab @@ -50,7 +51,7 @@ False >>> oov_vec = model.wv[oov_word] # numpy vector for OOV word -You can perform various NLP word tasks with the model, some of them are already built-in +You can perform various NLP word tasks with the model, some of them are already built-in: >>> similarities = model.wv.most_similar(positive=['computer', 'human'], negative=['interface']) >>> most_similar = similarities[0] @@ -62,13 +63,13 @@ >>> >>> sim_score = model.wv.similarity('computer', 'human') -Correlation with human opinion on word similarity +Correlation with human opinion on word similarity: >>> from gensim.test.utils import datapath >>> >>> similarities = model.wv.evaluate_word_pairs(datapath('wordsim353.tsv')) -And on word analogies +And on word analogies: >>> analogies_result = model.wv.accuracy(datapath('questions-words.txt')) diff --git a/gensim/models/word2vec.py b/gensim/models/word2vec.py index 933e35e0bd..a84531b57c 100755 --- a/gensim/models/word2vec.py +++ b/gensim/models/word2vec.py @@ -27,12 +27,12 @@ visit https://rare-technologies.com/word2vec-tutorial/. **Make sure you have a C compiler before installing Gensim, to use the optimized word2vec routines** -(70x speedup compared to plain NumPy implementation, https://rare-technologies.com/parallelizing-word2vec-in-python/. +(70x speedup compared to plain NumPy implementation, https://rare-technologies.com/parallelizing-word2vec-in-python/). 
Usage examples ============== -Initialize a model with e.g. +Initialize a model with e.g.: >>> from gensim.test.utils import common_texts, get_tmpfile >>> from gensim.models import Word2Vec @@ -45,13 +45,13 @@ The training is streamed, meaning `sentences` can be a generator, reading input data from disk on-the-fly, without loading the entire corpus into RAM. -It also means you can continue training the model later +It also means you can continue training the model later: >>> model = Word2Vec.load("word2vec.model") >>> model.train([["hello", "world"]], total_examples=1, epochs=1) (0, 2) -The trained word vectors are stored in a :class:`~gensim.models.KeyedVectors` instance in `model.wv`: +The trained word vectors are stored in a :class:`~gensim.models.keyedvectors.KeyedVectors` instance in `model.wv`: >>> vector = model.wv['computer'] # numpy vector of a word @@ -68,7 +68,8 @@ >>> wv = KeyedVectors.load("model.wv", mmap='r') >>> vector = wv['computer'] # numpy vector of a word -Gensim can also load word vectors in the "word2vec C format", as this :class:`~gensim.models.KeyedVectors` instance:: +Gensim can also load word vectors in the "word2vec C format", as a +:class:`~gensim.models.keyedvectors.KeyedVectors` instance:: >>> from gensim.test.utils import datapath >>> @@ -84,7 +85,7 @@ are already built-in - you can see it in :mod:`gensim.models.keyedvectors`. If you're finished training a model (i.e. no more updates, only querying), -you can switch to the :class:`~gensim.models.KeyedVectors` instance +you can switch to the :class:`~gensim.models.keyedvectors.KeyedVectors` instance: >>> word_vectors = model.wv >>> del model From 4d921da6e34dc6c7c037ba1e4a36bf09ba56143d Mon Sep 17 00:00:00 2001 From: lopusz Date: Tue, 31 Jul 2018 09:25:41 +0200 Subject: [PATCH 03/66] Fix `min_count` handling in phrases detection using `npmi_scorer` (#2072) * Fix min_count handling in phrases detection using npmi * Refactor min_count handling in npmi phrases detection * Fix min_count inequality for compatiblity with the rest of the gensim API * Fix misleading min_count doc_string * Fix misleading min_count comment --- gensim/models/phrases.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/gensim/models/phrases.py b/gensim/models/phrases.py index ed6f4c44df..1730d769c4 100644 --- a/gensim/models/phrases.py +++ b/gensim/models/phrases.py @@ -668,7 +668,7 @@ def npmi_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count, co len_vocab : int Not used. min_count: int - Not used. + Ignore all bigrams with total collected count lower than this value. corpus_word_count : int Total number of words in the corpus. 
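For illustration (a minimal sketch, not taken from the patch itself), the corrected
`min_count` handling is exercised through the public `Phrases` API whenever NPMI scoring
is selected; the corpus and threshold values below are arbitrary examples.

>>> from gensim.models.phrases import Phrases
>>> from gensim.test.utils import common_texts
>>>
>>> # With scoring='npmi', bigrams collected fewer than min_count times now score -inf,
>>> # so they can never pass the threshold (valid NPMI scores lie in [-1, 1]).
>>> bigram = Phrases(common_texts, min_count=2, threshold=0.3, scoring='npmi')
>>> phrased = bigram[common_texts[0]]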
@@ -678,10 +678,15 @@ def npmi_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count, co where :math:`prob(word) = \\frac{word\_count}{corpus\_word\_count}` """ - pa = worda_count / corpus_word_count - pb = wordb_count / corpus_word_count - pab = bigram_count / corpus_word_count - return log(pab / (pa * pb)) / -log(pab) + if bigram_count >= min_count: + pa = worda_count / corpus_word_count + pb = wordb_count / corpus_word_count + pab = bigram_count / corpus_word_count + return log(pab / (pa * pb)) / -log(pab) + else: + # Return -infinity to make sure that no phrases will be created + # from bigrams less frequent than min_count + return float('-inf') def pseudocorpus(source_vocab, sep, common_terms=frozenset()): From a6c4ea4fc625174dd9fb12d739b20fe400edd3d6 Mon Sep 17 00:00:00 2001 From: Yu Yin Date: Tue, 31 Jul 2018 16:14:55 +0800 Subject: [PATCH 04/66] Improve `prune_at` parameter description for `gensim.corpora.Dictionary` (#2128) * Make clear `prune_at` documentation According to the code, the `prune_at` parameter in `Dictionary.__init__` and `add_documents` is only for reducing memory usage, and has no guarantee on correctness, but the documentation of this parameter was confusing to users. * add link to method --- gensim/corpora/dictionary.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/gensim/corpora/dictionary.py b/gensim/corpora/dictionary.py index 84e2ed9945..1e13692a2d 100644 --- a/gensim/corpora/dictionary.py +++ b/gensim/corpora/dictionary.py @@ -56,7 +56,9 @@ def __init__(self, documents=None, prune_at=2000000): documents : iterable of iterable of str, optional Documents to be used to initialize the mapping and collect corpus statistics. prune_at : int, optional - Dictionary will keep no more than `prune_at` words in its mapping, to limit its RAM footprint. + Dictionary will try to keep no more than `prune_at` words in its mapping, to limit its RAM + footprint, the correctness is not guaranteed. + Use :meth:`~gensim.corpora.dictionary.Dictionary.filter_extremes` to perform proper filtering. Examples -------- @@ -172,7 +174,9 @@ def add_documents(self, documents, prune_at=2000000): documents : iterable of iterable of str Input corpus. All tokens should be already **tokenized and normalized**. prune_at : int, optional - Dictionary will keep no more than `prune_at` words in its mapping, to limit its RAM footprint. + Dictionary will try to keep no more than `prune_at` words in its mapping, to limit its RAM + footprint, the correctness is not guaranteed. + Use :meth:`~gensim.corpora.dictionary.Dictionary.filter_extremes` to perform proper filtering. 
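A minimal sketch of the pattern this change points users to (illustrative only; the
thresholds are arbitrary example values): treat `prune_at` purely as a memory cap while
collecting, then apply :meth:`~gensim.corpora.dictionary.Dictionary.filter_extremes`
for the actual, deterministic filtering.

>>> from gensim.corpora import Dictionary
>>> from gensim.test.utils import common_texts
>>>
>>> dct = Dictionary(common_texts, prune_at=2000000)  # prune_at only bounds RAM usage
>>> dct.filter_extremes(no_below=1, no_above=0.5, keep_n=100000)  # guaranteed filtering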
Examples -------- From 61728a07e428f486aaa9c84160b9daff0bc8109d Mon Sep 17 00:00:00 2001 From: Philip Date: Wed, 1 Aug 2018 21:21:56 -0700 Subject: [PATCH 05/66] Correctly process empty documents in `AuthorTopicModel` (#2133) * test for #1589 * bugfix #1589 * ignore unused assigned varaible * PR review * Update test_atmodel.py --- gensim/models/atmodel.py | 11 ++++++----- gensim/test/test_atmodel.py | 11 ++++++++++- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index daa7ea4ab2..d0a5940512 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -461,10 +461,11 @@ def inference(self, chunk, author2doc, doc2author, rhot, collect_sstats=False, c ids = [int(idx) for idx, _ in doc] else: ids = [idx for idx, _ in doc] - cts = np.array([cnt for _, cnt in doc]) + ids = np.array(ids, dtype=np.integer) + cts = np.array([cnt for _, cnt in doc], dtype=np.integer) # Get all authors in current document, and convert the author names to integer IDs. - authors_d = [self.author2id[a] for a in self.doc2author[doc_no]] + authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.integer) gammad = self.state.gamma[authors_d, :] # gamma of document d before update. tilde_gamma = gammad.copy() # gamma that will be updated. @@ -972,9 +973,9 @@ def bound(self, chunk, chunk_doc_idx=None, subsample_ratio=1.0, author2doc=None, else: doc_no = d # Get all authors in current document, and convert the author names to integer IDs. - authors_d = [self.author2id[a] for a in self.doc2author[doc_no]] - ids = np.array([id for id, _ in doc]) # Word IDs in doc. - cts = np.array([cnt for _, cnt in doc]) # Word counts. + authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.integer) + ids = np.array([id for id, _ in doc], dtype=np.integer) # Word IDs in doc. + cts = np.array([cnt for _, cnt in doc], dtype=np.integer) # Word counts. if d % self.chunksize == 0: logger.debug("bound: at document #%i in chunk", d) diff --git a/gensim/test/test_atmodel.py b/gensim/test/test_atmodel.py index 00d4d2aafa..50e6a32ea9 100644 --- a/gensim/test/test_atmodel.py +++ b/gensim/test/test_atmodel.py @@ -35,7 +35,6 @@ # increases the bound. # Test that models are compatiple across versions, as done in LdaModel. - # Assign some authors randomly to the documents above. author2doc = { 'john': [0, 1, 2, 3, 4, 5, 6], @@ -110,6 +109,16 @@ def testBasic(self): jill_topics = matutils.sparse2full(jill_topics, model.num_topics) self.assertTrue(all(jill_topics > 0)) + def testEmptyDocument(self): + local_texts = common_texts + [['only_occurs_once_in_corpus_and_alone_in_doc']] + dictionary = Dictionary(local_texts) + dictionary.filter_extremes(no_below=2) + corpus = [dictionary.doc2bow(text) for text in local_texts] + a2d = author2doc.copy() + a2d['joaquin'] = [len(local_texts) - 1] + + self.class_(corpus, author2doc=a2d, id2word=dictionary, num_topics=2) + def testAuthor2docMissing(self): # Check that the results are the same if author2doc is constructed automatically from doc2author. 
model = self.class_( From 4520adf534c6f82ebd5fcb695ecc41a8283bc4c3 Mon Sep 17 00:00:00 2001 From: Aneesh Joshi Date: Fri, 3 Aug 2018 07:02:05 +0530 Subject: [PATCH 06/66] Add `name_only` option for downloader api (#2143) * handle deprecation * handle max_count * change flag name * make flake8 compatible * move max_vocab to prepare vocab * correct max_vocab semantics * remove unnecessary nextline * fix bug and make flake8 complaint * refactor code and change sorting to key based * add tests * introduce effective_min_count * make flake8 compliant * remove clobbering of min_count * remove min_count assertion * .\gensim\models\word2vec.py * Revert ".\gensim\models\word2vec.py" This reverts commit 6c06fbccf1c2c44cdba4cd385a4cd335f53c69db. * rename max_vocab to max_final_vocab * update test to max_final_vocab * move and modify comment docs * make flake8 compliant * refactor word2vec.py * handle possible old model load errors * include effective_min_count tests * make flake compliant * remove check for max_final_vocab * include backward compat for 3.3 models * remove unnecessary newline * add test case for max_final_vocab * add name only option to downloader api * add tests * make single argument option for name_only * make name_only into name --- gensim/downloader.py | 15 ++++++++++++--- gensim/test/test_api.py | 3 +++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/gensim/downloader.py b/gensim/downloader.py index f3672ead01..3b2cf34ffa 100644 --- a/gensim/downloader.py +++ b/gensim/downloader.py @@ -29,6 +29,7 @@ Also, this API available via CLI:: python -m gensim.downloader --info # same as api.info(dataname) + python -m gensim.downloader --info name # same as api.info(name_only=True) python -m gensim.downloader --download # same as api.load(dataname, return_path=True) """ @@ -154,7 +155,7 @@ def _calculate_md5_checksum(fname): return hash_md5.hexdigest() -def info(name=None, show_only_latest=True): +def info(name=None, show_only_latest=True, name_only=False): """Provide the information related to model/dataset. Parameters @@ -164,6 +165,8 @@ def info(name=None, show_only_latest=True): show_only_latest : bool, optional If storage contains different versions for one data/model, this flag allow to hide outdated versions. Affects only if `name` is None. + name_only : bool, optional + If True, will return only the names of available models and corpora. 
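As a quick illustration (a sketch mirroring the accompanying test, with placeholder
output): the new flag returns only the catalogue names, and the same listing is
available from the command line via ``python -m gensim.downloader --info name``.

>>> import gensim.downloader as api
>>>
>>> catalogue = api.info(name_only=True)  # {'corpora': [...], 'models': [...]}
>>> sorted(catalogue)
['corpora', 'models']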
Returns ------- @@ -205,6 +208,9 @@ def info(name=None, show_only_latest=True): if not show_only_latest: return information + if name_only: + return {"corpora": list(information['corpora'].keys()), "models": list(information['models'])} + return { "corpora": {name: data for (name, data) in information['corpora'].items() if data.get("latest", True)}, "models": {name: data for (name, data) in information['models'].items() if data.get("latest", True)} @@ -444,5 +450,8 @@ def load(name, return_path=False): data_path = load(args.download[0], return_path=True) logger.info("Data has been installed and data path is %s", data_path) elif args.info is not None: - output = info() if (args.info == full_information) else info(name=args.info) - print(json.dumps(output, indent=4)) + if args.info == 'name': + print(json.dumps(info(name_only=True), indent=4)) + else: + output = info() if (args.info == full_information) else info(name=args.info) + print(json.dumps(output, indent=4)) diff --git a/gensim/test/test_api.py b/gensim/test/test_api.py index bf84800205..13245b2205 100644 --- a/gensim/test/test_api.py +++ b/gensim/test/test_api.py @@ -72,6 +72,9 @@ def test_info(self): self.assertEqual(sorted(data.keys()), sorted(['models', 'corpora'])) self.assertTrue(len(data['models'])) self.assertTrue(len(data['corpora'])) + name_only_data = api.info(name_only=True) + self.assertEqual(len(name_only_data.keys()), 2) + self.assertTrue({'models', 'corpora'} == set(name_only_data)) if __name__ == '__main__': From 9c6db73919d032ab2f6ea35b3a9043e3b0d2aed5 Mon Sep 17 00:00:00 2001 From: Ivan Menshikh Date: Fri, 3 Aug 2018 08:12:54 +0500 Subject: [PATCH 07/66] Replace `np.integer` -> `np.int` in `AuthorTopicModel` (#2145) --- gensim/models/atmodel.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index d0a5940512..412f630099 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -461,11 +461,11 @@ def inference(self, chunk, author2doc, doc2author, rhot, collect_sstats=False, c ids = [int(idx) for idx, _ in doc] else: ids = [idx for idx, _ in doc] - ids = np.array(ids, dtype=np.integer) - cts = np.array([cnt for _, cnt in doc], dtype=np.integer) + ids = np.array(ids, dtype=np.int) + cts = np.array([cnt for _, cnt in doc], dtype=np.int) # Get all authors in current document, and convert the author names to integer IDs. - authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.integer) + authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.int) gammad = self.state.gamma[authors_d, :] # gamma of document d before update. tilde_gamma = gammad.copy() # gamma that will be updated. @@ -973,9 +973,9 @@ def bound(self, chunk, chunk_doc_idx=None, subsample_ratio=1.0, author2doc=None, else: doc_no = d # Get all authors in current document, and convert the author names to integer IDs. - authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.integer) - ids = np.array([id for id, _ in doc], dtype=np.integer) # Word IDs in doc. - cts = np.array([cnt for _, cnt in doc], dtype=np.integer) # Word counts. + authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.int) + ids = np.array([id for id, _ in doc], dtype=np.int) # Word IDs in doc. + cts = np.array([cnt for _, cnt in doc], dtype=np.int) # Word counts. 
if d % self.chunksize == 0: logger.debug("bound: at document #%i in chunk", d) From 2ee7facede554c960b36447439dfe7d616ee6eb9 Mon Sep 17 00:00:00 2001 From: RunHorst Date: Tue, 7 Aug 2018 15:48:25 +0200 Subject: [PATCH 08/66] Fix minor semantic issue in docs for phrases (#2148) * Fix minor semantic issue in docs for phrases * Fix minor issues in docstrings for phrases --- gensim/models/phrases.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gensim/models/phrases.py b/gensim/models/phrases.py index ed6f4c44df..603769957d 100644 --- a/gensim/models/phrases.py +++ b/gensim/models/phrases.py @@ -113,7 +113,7 @@ def score_item(self, worda, wordb, components, scorer): Returns ------- float - Score for given bi-gram, if bi-gram not presented in dictionary - return -1. + Score for given bi-gram. If bi-gram not present in dictionary - return -1. """ vocab = self.vocab @@ -136,7 +136,7 @@ def analyze_sentence(self, sentence, threshold, common_terms, scorer): threshold : float The minimum score for a bigram to be taken into account. common_terms : list of object - List of common terms, they have special treatment. + List of common terms, they receive special treatment. scorer : function Scorer function, as given to :class:`~gensim.models.phrases.Phrases`. See :func:`~gensim.models.phrases.npmi_scorer` and :func:`~gensim.models.phrases.original_scorer`. @@ -224,7 +224,7 @@ def load(cls, *args, **kwargs): else: raise ValueError( 'failed to load %s model with unknown scoring setting %s' % (cls.__name__, model.scoring)) - # if there is non common_terms attribute, initialize + # if there is no common_terms attribute, initialize if not hasattr(model, "common_terms"): logger.info('older version of %s loaded without common_terms attribute', cls.__name__) logger.info('setting common_terms to empty set') @@ -252,7 +252,7 @@ def __init__(self, sentences=None, min_count=5, threshold=10.0, threshold : float, optional Represent a score threshold for forming the phrases (higher means fewer phrases). A phrase of words `a` followed by `b` is accepted if the score of the phrase is greater than threshold. - Hardly depends on concrete socring-function, see the `scoring` parameter. + Heavily depends on concrete scoring-function, see the `scoring` parameter. max_vocab_size : int, optional Maximum size (number of tokens) of the vocabulary. Used to control pruning of less common words, to keep memory under control. The default of 40M needs about 3.6GB of RAM. Increase/decrease @@ -641,7 +641,7 @@ def original_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count len_vocab : int Size of vocabulary. min_count: int - Minimum score threshold. + Minimum collocation count threshold. corpus_word_count : int Not used in this particular scoring technique. 
From 010780003c873938b3fdd9f8ead7bd1ce7f23975 Mon Sep 17 00:00:00 2001 From: Rob Guinness Date: Fri, 10 Aug 2018 13:40:29 +0300 Subject: [PATCH 09/66] Remove duplicate count from `Phraser` log message (#2151) Remove duplicate count from `Phraser` log message --- gensim/models/phrases.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gensim/models/phrases.py b/gensim/models/phrases.py index 17ee648e30..09d709b193 100644 --- a/gensim/models/phrases.py +++ b/gensim/models/phrases.py @@ -776,7 +776,7 @@ def __init__(self, phrases_model): count += 1 if not count % 50000: logger.info('Phraser added %i phrasegrams', count) - logger.info('Phraser built with %i %i phrasegrams', count, len(self.phrasegrams)) + logger.info('Phraser built with %i phrasegrams', len(self.phrasegrams)) def pseudocorpus(self, phrases_model): """Alias for :func:`gensim.models.phrases.pseudocorpus`. From 17fa0dcea8bb7824f0e709fd3ff60007bcdd85f6 Mon Sep 17 00:00:00 2001 From: Luka Shostenko Date: Fri, 10 Aug 2018 14:41:26 +0300 Subject: [PATCH 10/66] Fix `ZeroDivisionError` `keywords` issue with short input (#2154) * Return keywords before matrix processing if no graph edges. * Add testcase for empty graph. --- gensim/summarization/keywords.py | 3 +++ gensim/test/test_keywords.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/gensim/summarization/keywords.py b/gensim/summarization/keywords.py index 4074088a04..9f43158146 100644 --- a/gensim/summarization/keywords.py +++ b/gensim/summarization/keywords.py @@ -512,6 +512,9 @@ def keywords(text, ratio=0.2, words=None, split=False, scores=False, pos_filter= _remove_unreachable_nodes(graph) + if not graph.edges(): + return _format_results([], [], split, scores) + # Ranks the tokens using the PageRank algorithm. Returns dict of lemma -> score pagerank_scores = _pagerank(graph) diff --git a/gensim/test/test_keywords.py b/gensim/test/test_keywords.py index c8fae400da..79df82fba6 100644 --- a/gensim/test/test_keywords.py +++ b/gensim/test/test_keywords.py @@ -95,6 +95,12 @@ def test_text_keywords_with_small_graph(self): kwds = keywords(text, words=1, split=True) self.assertTrue(len(kwds)) + def test_text_keywords_without_graph_edges(self): + # regression test, we get graph with no edges on this text + text = 'Sitio construcción. Estaremos línea.' 
+ kwds = keywords(text, deacc=False, scores=True) + self.assertFalse(len(kwds)) + if __name__ == '__main__': logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG) From 466b32fe6b5631403fe5be8c16233df3d61a373b Mon Sep 17 00:00:00 2001 From: Shiki-H Date: Mon, 13 Aug 2018 00:02:33 -0400 Subject: [PATCH 11/66] Add multiprocessing support for `BM25` (#2146) * added multiprocessing support for bm25 * added effective_n_job check * added comment for helper function * fixed minor error * deleted unwanted comments * updated example with new api * updated example with new api * updated support for multiprocessing * updated docstring * fixed typo * fixed formatting * changed assertEqual to assertAlmostEqual * fixed docstrings to numpy-style * removed space from blank lines * moved effective_n_jobs to utils --- gensim/summarization/bm25.py | 52 ++++++++++++++++++++++++++++++------ gensim/test/test_BM25.py | 9 +++++++ gensim/utils.py | 28 +++++++++++++++++++ 3 files changed, 81 insertions(+), 8 deletions(-) diff --git a/gensim/summarization/bm25.py b/gensim/summarization/bm25.py index 3a2bf5bbf6..50019c32fe 100644 --- a/gensim/summarization/bm25.py +++ b/gensim/summarization/bm25.py @@ -22,7 +22,7 @@ ... ["cat", "outer", "space"], ... ["wag", "dog"] ... ] ->>> result = get_bm25_weights(corpus) +>>> result = get_bm25_weights(corpus, n_jobs=-1) Data: @@ -37,7 +37,9 @@ import math from six import iteritems from six.moves import xrange - +from functools import partial +from multiprocessing import Pool +from ..utils import effective_n_jobs PARAM_K1 = 1.5 PARAM_B = 0.75 @@ -152,7 +154,33 @@ def get_scores(self, document, average_idf): return scores -def get_bm25_weights(corpus): +def _get_scores(bm25, document, average_idf): + """Helper function for retrieving bm25 scores of given `document` in parallel + in relation to every item in corpus. + + Parameters + ---------- + bm25 : BM25 object + BM25 object fitted on the corpus where documents are retrieved. + document : list of str + Document to be scored. + average_idf : float + Average idf in corpus. + + Returns + ------- + list of float + BM25 scores. + + """ + scores = [] + for index in xrange(bm25.corpus_size): + score = bm25.get_score(document, index, average_idf) + scores.append(score) + return scores + + +def get_bm25_weights(corpus, n_jobs=1): """Returns BM25 scores (weights) of documents in corpus. Each document has to be weighted with every document in given corpus. @@ -160,6 +188,8 @@ def get_bm25_weights(corpus): ---------- corpus : list of list of str Corpus of documents. + n_jobs : int + The number of processes to use for computing bm25. Returns ------- @@ -174,15 +204,21 @@ def get_bm25_weights(corpus): ... ["cat", "outer", "space"], ... ["wag", "dog"] ... 
] - >>> result = get_bm25_weights(corpus) + >>> result = get_bm25_weights(corpus, n_jobs=-1) """ bm25 = BM25(corpus) average_idf = sum(float(val) for val in bm25.idf.values()) / len(bm25.idf) - weights = [] - for doc in corpus: - scores = bm25.get_scores(doc, average_idf) - weights.append(scores) + n_processes = effective_n_jobs(n_jobs) + if n_processes == 1: + weights = [bm25.get_scores(doc, average_idf) for doc in corpus] + return weights + + get_score = partial(_get_scores, bm25, average_idf=average_idf) + pool = Pool(n_processes) + weights = pool.map(get_score, corpus) + pool.close() + pool.join() return weights diff --git a/gensim/test/test_BM25.py b/gensim/test/test_BM25.py index a96302e8c9..e37efc2920 100644 --- a/gensim/test/test_BM25.py +++ b/gensim/test/test_BM25.py @@ -44,6 +44,15 @@ def test_disjoint_docs_if_weight_zero(self): self.assertAlmostEqual(weights[0][1], 0) self.assertAlmostEqual(weights[1][0], 0) + def test_multiprocessing(self): + """ Result should be the same using different processes """ + weights1 = get_bm25_weights(common_texts) + weights2 = get_bm25_weights(common_texts, n_jobs=2) + weights3 = get_bm25_weights(common_texts, n_jobs=-1) + self.assertAlmostEqual(weights1, weights2) + self.assertAlmostEqual(weights1, weights3) + self.assertAlmostEqual(weights2, weights3) + if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) diff --git a/gensim/utils.py b/gensim/utils.py index ec02cf4bb2..35abc203d8 100644 --- a/gensim/utils.py +++ b/gensim/utils.py @@ -44,6 +44,8 @@ from smart_open import smart_open +from multiprocessing import cpu_count + if sys.version_info[0] >= 3: unicode = str @@ -2025,3 +2027,29 @@ def lazy_flatten(nested_list): yield sub else: yield el + + +def effective_n_jobs(n_jobs): + """Determines the number of jobs can run in parallel. + + Just like in sklearn, passing n_jobs=-1 means using all available + CPU cores. + + Parameters + ---------- + n_jobs : int + Number of workers requested by caller. + + Returns + ------- + int + Number of effective jobs. + + """ + if n_jobs == 0: + raise ValueError('n_jobs == 0 in Parallel has no meaning') + elif n_jobs is None: + return 1 + elif n_jobs < 0: + n_jobs = max(cpu_count() + 1 + n_jobs, 1) + return n_jobs From f9beeaa9fff68edf83443f6cd0c14732895c4f44 Mon Sep 17 00:00:00 2001 From: Laubeee Date: Mon, 13 Aug 2018 17:06:46 +0200 Subject: [PATCH 12/66] Fix `default` -> `auto` prior parameter in documentation for lda-related models (#2156) * update documentation to match actual code Change "default" to "auto" since there is no handling on the value "default". * Update ldamodel.py * Update ldamulticore.py * Update ldamodel.py * Update atmodel.py * correct assymetric -> asymmetric * Update ldamulticore.py * Update atmodel.py * Update ldamodel.py --- gensim/models/ldamodel.py | 6 +++--- gensim/models/ldamulticore.py | 4 ++-- gensim/sklearn_api/atmodel.py | 4 ++-- gensim/sklearn_api/ldamodel.py | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index 2f8fca4768..2f66f30c52 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -369,7 +369,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, Alternatively default prior selecting strategies can be employed by supplying a string: * 'asymmetric': Uses a fixed normalized asymmetric prior of `1.0 / topicno`. - * 'default': Learns an asymmetric prior from the corpus. + * 'auto': Learns an asymmetric prior from the corpus. 
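For example (an illustrative sketch only; the topic count is an arbitrary value),
learning the prior from the data is requested by passing the string directly:

>>> from gensim.test.utils import common_corpus, common_dictionary
>>> from gensim.models import LdaModel
>>>
>>> lda = LdaModel(common_corpus, id2word=common_dictionary, num_topics=10, alpha='auto')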
eta : {float, np.array, str}, optional A-priori belief on word probability, this can be: @@ -530,8 +530,8 @@ def init_dir_prior(self, prior, name): If `name` == 'alpha', then the prior can be: * an 1D array of length equal to the number of expected topics, - * 'asymmetric': Uses a fixed normalized assymetric prior of `1.0 / topicno`. - * 'default': Learns an assymetric prior from the corpus. + * 'asymmetric': Uses a fixed normalized asymmetric prior of `1.0 / topicno`. + * 'auto': Learns an asymmetric prior from the corpus. name : {'alpha', 'eta'} Whether the `prior` is parameterized by the alpha vector (1 parameter per topic) or by the eta (1 parameter per unique term in the vocabulary). diff --git a/gensim/models/ldamulticore.py b/gensim/models/ldamulticore.py index d32a709f80..168a2752c0 100644 --- a/gensim/models/ldamulticore.py +++ b/gensim/models/ldamulticore.py @@ -128,8 +128,8 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, workers=None, our a-priori belief for the each topics' probability. Alternatively default prior selecting strategies can be employed by supplying a string: - * 'asymmetric': Uses a fixed normalized assymetric prior of `1.0 / topicno`. - * 'default': Learns an assymetric prior from the corpus. + * 'asymmetric': Uses a fixed normalized asymmetric prior of `1.0 / topicno`. + * 'auto': Learns an asymmetric prior from the corpus. eta : {float, np.array, str}, optional A-priori belief on word probability, this can be: diff --git a/gensim/sklearn_api/atmodel.py b/gensim/sklearn_api/atmodel.py index 69397833c0..085ed9a745 100644 --- a/gensim/sklearn_api/atmodel.py +++ b/gensim/sklearn_api/atmodel.py @@ -82,8 +82,8 @@ def __init__(self, num_topics=100, id2word=None, author2doc=None, doc2author=Non our a-priori belief for the each topics' probability. Alternatively default prior selecting strategies can be employed by supplying a string: - * 'asymmetric': Uses a fixed normalized assymetric prior of `1.0 / topicno`. - * 'default': Learns an assymetric prior from the corpus. + * 'asymmetric': Uses a fixed normalized asymmetric prior of `1.0 / topicno`. + * 'auto': Learns an asymmetric prior from the corpus. eta : {float, np.array, str}, optional A-priori belief on word probability, this can be: diff --git a/gensim/sklearn_api/ldamodel.py b/gensim/sklearn_api/ldamodel.py index 3e5d65dcc6..33f2575acc 100644 --- a/gensim/sklearn_api/ldamodel.py +++ b/gensim/sklearn_api/ldamodel.py @@ -60,8 +60,8 @@ def __init__(self, num_topics=100, id2word=None, chunksize=2000, passes=1, updat our a-priori belief for the each topics' probability. Alternatively default prior selecting strategies can be employed by supplying a string: - * 'asymmetric': Uses a fixed normalized assymetric prior of `1.0 / topicno`. - * 'default': Learns an assymetric prior from the corpus. + * 'asymmetric': Uses a fixed normalized asymmetric prior of `1.0 / topicno`. + * 'auto': Learns an asymmetric prior from the corpus. 
eta : {float, np.array, str}, optional A-priori belief on word probability, this can be: From 27c524db60828e5ab1580a46a8ca5520dfd4d352 Mon Sep 17 00:00:00 2001 From: Vimig Socrates Date: Mon, 13 Aug 2018 21:53:41 -0400 Subject: [PATCH 13/66] Make `word2vec2tensor` script compatible with `python3` (#2147) * encoded strings to unicode * added test scripts for word2vec2tensor * added acknowledgement * removed windows CRs * changed filenotfound to exception to appease flake8 * addressed comments, added key-wise assert * forgot to check flake8 again * added dec param to pass test --- gensim/scripts/word2vec2tensor.py | 13 +- gensim/test/test_scripts.py | 263 ++++++++++++++++++------------ 2 files changed, 162 insertions(+), 114 deletions(-) diff --git a/gensim/scripts/word2vec2tensor.py b/gensim/scripts/word2vec2tensor.py index 2618bdcae0..5bf8d2e23b 100644 --- a/gensim/scripts/word2vec2tensor.py +++ b/gensim/scripts/word2vec2tensor.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # +# Copyright (C) 2018 Vimig Socrates # Copyright (C) 2016 Loreto Parisi # Copyright (C) 2016 Silvio Olivastri # Copyright (C) 2016 Radim Rehurek @@ -43,6 +44,7 @@ import logging import argparse +from smart_open import smart_open import gensim logger = logging.getLogger(__name__) @@ -67,12 +69,11 @@ def word2vec2tensor(word2vec_model_path, tensor_filename, binary=False): outfiletsv = tensor_filename + '_tensor.tsv' outfiletsvmeta = tensor_filename + '_metadata.tsv' - with open(outfiletsv, 'w+') as file_vector: - with open(outfiletsvmeta, 'w+') as file_metadata: - for word in model.index2word: - file_metadata.write(gensim.utils.to_utf8(word) + gensim.utils.to_utf8('\n')) - vector_row = '\t'.join(str(x) for x in model[word]) - file_vector.write(vector_row + '\n') + with smart_open(outfiletsv, 'wb') as file_vector, smart_open(outfiletsvmeta, 'wb') as file_metadata: + for word in model.index2word: + file_metadata.write(gensim.utils.to_utf8(word) + gensim.utils.to_utf8('\n')) + vector_row = '\t'.join(str(x) for x in model[word]) + file_vector.write(gensim.utils.to_utf8(vector_row) + gensim.utils.to_utf8('\n')) logger.info("2D tensor file saved to %s", outfiletsv) logger.info("Tensor metadata file saved to %s", outfiletsvmeta) diff --git a/gensim/test/test_scripts.py b/gensim/test/test_scripts.py index 001283e3c4..2fa625e942 100644 --- a/gensim/test/test_scripts.py +++ b/gensim/test/test_scripts.py @@ -1,108 +1,155 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (C) 2018 Manos Stergiadis -# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html - -""" -Automated tests for checking the output of gensim.scripts. 
-""" - -from __future__ import unicode_literals - -import json -import logging -import os.path -import unittest - -from gensim.scripts.segment_wiki import segment_all_articles, segment_and_write_all_articles -from smart_open import smart_open -from gensim.test.utils import datapath, get_tmpfile - - -class TestSegmentWiki(unittest.TestCase): - - def setUp(self): - self.fname = datapath('enwiki-latest-pages-articles1.xml-p000000010p000030302-shortened.bz2') - self.expected_title = 'Anarchism' - self.expected_section_titles = [ - 'Introduction', - 'Etymology and terminology', - 'History', - 'Anarchist schools of thought', - 'Internal issues and debates', - 'Topics of interest', - 'Criticisms', - 'References', - 'Further reading', - 'External links' - ] - - def tearDown(self): - # remove all temporary test files - fname = get_tmpfile('script.tst') - extensions = ['', '.json'] - for ext in extensions: - try: - os.remove(fname + ext) - except OSError: - pass - - def test_segment_all_articles(self): - title, sections, interlinks = next(segment_all_articles(self.fname, include_interlinks=True)) - - # Check title - self.assertEqual(title, self.expected_title) - - # Check section titles - section_titles = [s[0] for s in sections] - self.assertEqual(section_titles, self.expected_section_titles) - - # Check text - first_section_text = sections[0][1] - first_sentence = "'''Anarchism''' is a political philosophy that advocates self-governed societies" - self.assertTrue(first_sentence in first_section_text) - - # Check interlinks - self.assertTrue(interlinks['self-governance'] == 'self-governed') - self.assertTrue(interlinks['Hierarchy'] == 'hierarchical') - self.assertTrue(interlinks['Pierre-Joseph Proudhon'] == 'Proudhon') - - def test_generator_len(self): - expected_num_articles = 106 - num_articles = sum(1 for x in segment_all_articles(self.fname)) - - self.assertEqual(num_articles, expected_num_articles) - - def test_json_len(self): - tmpf = get_tmpfile('script.tst.json') - segment_and_write_all_articles(self.fname, tmpf, workers=1) - - expected_num_articles = 106 - num_articles = sum(1 for line in smart_open(tmpf)) - self.assertEqual(num_articles, expected_num_articles) - - def test_segment_and_write_all_articles(self): - tmpf = get_tmpfile('script.tst.json') - segment_and_write_all_articles(self.fname, tmpf, workers=1, include_interlinks=True) - - # Get the first line from the text file we created. - with open(tmpf) as f: - first = next(f) - - # decode JSON line into a Python dictionary object - article = json.loads(first) - title, section_titles, interlinks = article['title'], article['section_titles'], article['interlinks'] - - self.assertEqual(title, self.expected_title) - self.assertEqual(section_titles, self.expected_section_titles) - - # Check interlinks - self.assertTrue(interlinks['self-governance'] == 'self-governed') - self.assertTrue(interlinks['Hierarchy'] == 'hierarchical') - self.assertTrue(interlinks['Pierre-Joseph Proudhon'] == 'Proudhon') - - -if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG) - unittest.main() +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2018 Vimig Socrates heavily influenced from @AakaashRao +# Copyright (C) 2018 Manos Stergiadis +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +""" +Automated tests for checking the output of gensim.scripts. 
+""" + +from __future__ import unicode_literals + +import json +import logging +import os.path +import unittest + +from smart_open import smart_open +import numpy as np + +from gensim.scripts.segment_wiki import segment_all_articles, segment_and_write_all_articles +from gensim.test.utils import datapath, get_tmpfile + +from gensim.scripts.word2vec2tensor import word2vec2tensor +from gensim.models import KeyedVectors + + +class TestSegmentWiki(unittest.TestCase): + + def setUp(self): + self.fname = datapath('enwiki-latest-pages-articles1.xml-p000000010p000030302-shortened.bz2') + self.expected_title = 'Anarchism' + self.expected_section_titles = [ + 'Introduction', + 'Etymology and terminology', + 'History', + 'Anarchist schools of thought', + 'Internal issues and debates', + 'Topics of interest', + 'Criticisms', + 'References', + 'Further reading', + 'External links' + ] + + def tearDown(self): + # remove all temporary test files + fname = get_tmpfile('script.tst') + extensions = ['', '.json'] + for ext in extensions: + try: + os.remove(fname + ext) + except OSError: + pass + + def test_segment_all_articles(self): + title, sections, interlinks = next(segment_all_articles(self.fname, include_interlinks=True)) + + # Check title + self.assertEqual(title, self.expected_title) + + # Check section titles + section_titles = [s[0] for s in sections] + self.assertEqual(section_titles, self.expected_section_titles) + + # Check text + first_section_text = sections[0][1] + first_sentence = "'''Anarchism''' is a political philosophy that advocates self-governed societies" + self.assertTrue(first_sentence in first_section_text) + + # Check interlinks + self.assertTrue(interlinks['self-governance'] == 'self-governed') + self.assertTrue(interlinks['Hierarchy'] == 'hierarchical') + self.assertTrue(interlinks['Pierre-Joseph Proudhon'] == 'Proudhon') + + def test_generator_len(self): + expected_num_articles = 106 + num_articles = sum(1 for x in segment_all_articles(self.fname)) + + self.assertEqual(num_articles, expected_num_articles) + + def test_json_len(self): + tmpf = get_tmpfile('script.tst.json') + segment_and_write_all_articles(self.fname, tmpf, workers=1) + + expected_num_articles = 106 + num_articles = sum(1 for line in smart_open(tmpf)) + self.assertEqual(num_articles, expected_num_articles) + + def test_segment_and_write_all_articles(self): + tmpf = get_tmpfile('script.tst.json') + segment_and_write_all_articles(self.fname, tmpf, workers=1, include_interlinks=True) + + # Get the first line from the text file we created. 
+ with open(tmpf) as f: + first = next(f) + + # decode JSON line into a Python dictionary object + article = json.loads(first) + title, section_titles, interlinks = article['title'], article['section_titles'], article['interlinks'] + + self.assertEqual(title, self.expected_title) + self.assertEqual(section_titles, self.expected_section_titles) + + # Check interlinks + self.assertTrue(interlinks['self-governance'] == 'self-governed') + self.assertTrue(interlinks['Hierarchy'] == 'hierarchical') + self.assertTrue(interlinks['Pierre-Joseph Proudhon'] == 'Proudhon') + + +class TestWord2Vec2Tensor(unittest.TestCase): + def setUp(self): + self.datapath = datapath('word2vec_pre_kv_c') + self.output_folder = get_tmpfile('w2v2t_test') + self.metadata_file = self.output_folder + '_metadata.tsv' + self.tensor_file = self.output_folder + '_tensor.tsv' + self.vector_file = self.output_folder + '_vector.tsv' + + def testConversion(self): + word2vec2tensor(word2vec_model_path=self.datapath, tensor_filename=self.output_folder) + + with smart_open(self.metadata_file, 'rb') as f: + metadata = f.readlines() + + with smart_open(self.tensor_file, 'rb') as f: + vectors = f.readlines() + + # check if number of words and vector size in tensor file line up with word2vec + with smart_open(self.datapath, 'rb') as f: + first_line = f.readline().strip() + + number_words, vector_size = map(int, first_line.split(b' ')) + self.assertTrue(len(metadata) == len(vectors) == number_words, + ('Metadata file %s and tensor file %s imply different number of rows.' + % (self.metadata_file, self.tensor_file))) + + # grab metadata and vectors from written file + metadata = [word.strip() for word in metadata] + vectors = [vector.replace(b'\t', b' ') for vector in vectors] + + # get the originaly vector KV model + orig_model = KeyedVectors.load_word2vec_format(self.datapath, binary=False) + + # check that the KV model and tensor files have the same values key-wise + for word, vector in zip(metadata, vectors): + word_string = word.decode("utf8") + vector_string = vector.decode("utf8") + vector_array = np.array(list(map(float, vector_string.split()))) + np.testing.assert_almost_equal(orig_model[word_string], vector_array, decimal=5) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.DEBUG) + unittest.main() From 46124f4895b4bec6eee58c23e1686e5c9efb84fd Mon Sep 17 00:00:00 2001 From: Kenji Otsuka Date: Mon, 20 Aug 2018 16:14:56 +0900 Subject: [PATCH 14/66] Fix typo in documentation (#2157) * fix spelling: practicitoners -> practitioners * commit for travis * add link from mmap * add one more link to mmap in into --- docs/src/changes_080.rst | 2 +- docs/src/intro.rst | 2 +- docs/src/tut3.rst | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/changes_080.rst b/docs/src/changes_080.rst index b038ccb930..e3c36b9db3 100644 --- a/docs/src/changes_080.rst +++ b/docs/src/changes_080.rst @@ -69,7 +69,7 @@ Other changes (that you're unlikely to notice unless you look) ---------------------------------------------------------------------- * Improved efficiency of ``lsi[corpus]`` transformations (documents are chunked internally for better performance). -* Large matrices (numpy/scipy.sparse, in `LsiModel`, `Similarity` etc.) are now mmapped to/from disk when doing `save/load`. The `cPickle` approach used previously was too `buggy `_ and `slow `_. +* Large matrices (numpy/scipy.sparse, in `LsiModel`, `Similarity` etc.) are now `mmapped `_ to/from disk when doing `save/load`. 
The `cPickle` approach used previously was too `buggy `_ and `slow `_. * Renamed `chunks` parameter to `chunksize` (i.e. `LsiModel(corpus, num_topics=100, chunksize=20000)`). This better reflects its purpose: size of a chunk=number of documents to be processed at once. * Also improved memory efficiency of LSI and LDA model generation (again). * Removed SciPy 0.6 from the list of supported SciPi versions (need >=0.7 now). diff --git a/docs/src/intro.rst b/docs/src/intro.rst index bcb60efa27..b686a23a49 100644 --- a/docs/src/intro.rst +++ b/docs/src/intro.rst @@ -30,7 +30,7 @@ Features * **Memory independence** -- there is no need for the whole training corpus to reside fully in RAM at any one time (can process large, web-scale corpora). -* **Memory sharing** -- trained models can be persisted to disk and loaded back via mmap. Multiple processes can share the same data, cutting down RAM footprint. +* **Memory sharing** -- trained models can be persisted to disk and loaded back via `mmap `_. Multiple processes can share the same data, cutting down RAM footprint. * Efficient implementations for several popular vector space algorithms, including :class:`~gensim.models.word2vec.Word2Vec`, :class:`~gensim.models.doc2vec.Doc2Vec`, :class:`~gensim.models.fasttext.FastText`, TF-IDF, Latent Semantic Analysis (LSI, LSA, see :class:`~gensim.models.lsimodel.LsiModel`), diff --git a/docs/src/tut3.rst b/docs/src/tut3.rst index f017edfc37..e2cf10a7b5 100644 --- a/docs/src/tut3.rst +++ b/docs/src/tut3.rst @@ -145,5 +145,5 @@ That doesn't mean it's perfect though: `user stories and general questions `_. Gensim has no ambition to become an all-encompassing framework, across all NLP (or even Machine Learning) subfields. -Its mission is to help NLP practicioners try out popular topic modelling algorithms +Its mission is to help NLP practitioners try out popular topic modelling algorithms on large datasets easily, and to facilitate prototyping of new algorithms for researchers. From 7fedf5addd3f75bcd2e1ab6ded89aa677611534d Mon Sep 17 00:00:00 2001 From: Kento Nozawa Date: Mon, 27 Aug 2018 11:32:00 +0900 Subject: [PATCH 15/66] Use heading instead of bold style in `gensim.models.translation_matrix` (#2164) --- gensim/models/translation_matrix.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/gensim/models/translation_matrix.py b/gensim/models/translation_matrix.py index d7340d760c..4bf638f3ce 100644 --- a/gensim/models/translation_matrix.py +++ b/gensim/models/translation_matrix.py @@ -10,7 +10,8 @@ Examples -------- -**How to make translation between two set of word-vectors** +How to make translation between two set of word-vectors +======================================================= Initialize a word-vector models @@ -47,7 +48,8 @@ ... 
loaded_trans_model = TranslationMatrix.load(fname) # load model -**How to make translation between two :class:`~gensim.models.doc2vec.Doc2Vec` models** +How to make translation between two :class:`~gensim.models.doc2vec.Doc2Vec` models +================================================================================== Prepare data and models From 3ccbb2e406cb65de25a53182718e19fc770ce8e9 Mon Sep 17 00:00:00 2001 From: Kento Nozawa Date: Mon, 27 Aug 2018 12:12:31 +0900 Subject: [PATCH 16/66] Fix quote of vocabulary from `gensim.models.Word2Vec` (#2161) --- gensim/models/word2vec.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gensim/models/word2vec.py b/gensim/models/word2vec.py index a84531b57c..ff8fe23199 100755 --- a/gensim/models/word2vec.py +++ b/gensim/models/word2vec.py @@ -617,7 +617,7 @@ class Word2Vec(BaseWordEmbeddingsModel): This object essentially contains the mapping between words and embeddings. After training, it can be used directly to query those embeddings in various ways. See the module level docstring for examples. - vocabulary : :class:'~gensim.models.word2vec.Word2VecVocab' + vocabulary : :class:`~gensim.models.word2vec.Word2VecVocab` This object represents the vocabulary (sometimes called Dictionary in gensim) of the model. Besides keeping track of all unique words, this object provides extra functionality, such as constructing a huffman tree (frequent words are closer to the root), or discarding extremely rare words. From e87aa850972a8d578f36b7e2e9a793d0fe40d5e7 Mon Sep 17 00:00:00 2001 From: Hong Xu Date: Sun, 26 Aug 2018 23:14:44 -0700 Subject: [PATCH 17/66] Replace deprecated parameters with new in docstring of `gensim.models.Doc2Vec` (#2165) --- gensim/models/doc2vec.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gensim/models/doc2vec.py b/gensim/models/doc2vec.py index 08cbf7f106..57693e0eed 100644 --- a/gensim/models/doc2vec.py +++ b/gensim/models/doc2vec.py @@ -454,7 +454,7 @@ def __init__(self, documents=None, input_streams=None, dm_mean=None, dm=1, dbow_ dm : {1,0}, optional Defines the training algorithm. If `dm=1`, 'distributed memory' (PV-DM) is used. Otherwise, `distributed bag of words` (PV-DBOW) is employed. - size : int, optional + vector_size : int, optional Dimensionality of the feature vectors. window : int, optional The maximum distance between the current and predicted word within a sentence. @@ -480,7 +480,7 @@ def __init__(self, documents=None, input_streams=None, dm_mean=None, dm=1, dbow_ useful range is (0, 1e-5). workers : int, optional Use these many worker threads to train the model (=faster training with multicore machines). - iter : int, optional + epochs : int, optional Number of iterations (epochs) over the corpus. hs : {1,0}, optional If 1, hierarchical softmax will be used for model training. 
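For illustration (a minimal sketch using gensim's bundled toy corpus; the hyperparameter
values are arbitrary), constructing the model with the current parameter names from the
change above looks like this:

>>> from gensim.test.utils import common_texts
>>> from gensim.models.doc2vec import Doc2Vec, TaggedDocument
>>>
>>> documents = [TaggedDocument(words, [i]) for i, words in enumerate(common_texts)]
>>> model = Doc2Vec(documents, vector_size=5, window=2, min_count=1, epochs=10, workers=4)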
From 3c3506d51a2caf6b890de3b1b32a8b85f7566ca5 Mon Sep 17 00:00:00 2001 From: Dmitry Persiyanov Date: Fri, 14 Sep 2018 14:05:03 +0300 Subject: [PATCH 18/66] File-based fast training for Any2Vec models (#2127) * CythonLineSentence * fix * fix setup.py * fixes * some refactoring * remove printf * compiled * second branch for pystreams * fix * learning rate decay in Cython + _do_train_epoch + _train_epoch_multistream methods * add train_epoch_sg function * call _train_epoch_multistream from train() * add word2vec_inner.cpp * remove pragma from .cpp * fix doc * fix pip * add __reduce__ to CythonLineSentence for proper pickling * remove printf * add 1 test for CythonLineSentence * no vocab copying * fixed * Revert "fixed" This reverts commit 2a44fbc0dbc44ee98ba0455d56f8f5d3e5fce000. * Revert "no vocab copying" This reverts commit 942a12f8f6248aaed724cfd457a7cac7ace2eea4. * remove input_streams, add corpus_file * fix * fix replacing input_streams -> corpus_file in Word2Vec class * upd .cpp * add C++11 compiler flags * pep8 * add link args too * upd FastLineSentence * fix signatures in doc2vec/fasttext + removed tests on multistream * fix flake * clean up base_any2vec.py * fix * fix CythonLineSentence ctor * fix py3 type error * fix again * try again * new error * fix test * add unordered_map wrapper * upd * fix cython compiling errors * upd word2vec_inner.cpp * add some tests * more tests for corpus_file * fix docstrings * addressing comments * fix tests skipIf * add persistence test * online learning tests * fix save_as_line_sentence * fix again * address new comments * fix test * move multistream functions from word2vec_inner to word2vec_multistream * fix tests * add .c file * fix test * fix tests skipIf and setup.py * fix mac os compatibility * add tutorial on w2v multistream * 300% -> 200% in notebook * add MULTISTREAM_VERSION global constant * first move towards multistream FastText * move MULTISTREAM_VERSION * fix error * fix CythonVocab * regenerated .c & .cpp files * resolve ambiguate fast_sentence_* declarations * add test_training_multistream for fasttext * add skipif * add more tests * fix flake8 * add short example * upd jupyter notebook * fix docstrings in doc2vec * add d2v_train_epoch_dbow for from-file training * add missing parts of from-file doc2vec * refactored a bit * add total_corpus_count calculation in doc2vec * add tests for doc2vec file-based + rename MULTISTREAM -> CORPUSFILE everywhere * regenerated .c + .cpp files * add Word2VecConfig in order to remove repeating parts of code * make shared initialization * use init_config from word2vec_corpusfile * add FastTextConfig * init_config -> init_w2v_config, init_ft_config * regenerated .c & .cpp files * using FastTextConfig in fasttext_corpusfile.pyx * fix * fix * fix next_random in w2v * introduce Doc2VecConfig * fix init_d2v_config * use Doc2VecConfig in doc2vec_corpusfile.pyx * removed unused vars * fix docstrings * fix more docstrings * test old model for doc2vec & fasttext * fix loading old models * fix fasttext model checking * merge fast_line_sentence.cpp and fast_line_sentence.h * fix word2vec test * fix syntax error * remove redundanta seekg call * fix example notebook * add initial doc_tags computation * fix test * fix test for windows * add one more test on offsets * get rid of subword_arrays in fasttext * make hanging indents everywhere * open file in byte mode * fix pep * fix tests * fix again * final fix? 
* regenerated .c & .cpp files * fix test_persistence_fromfile for FastText * add fasttext & doc2vec to notebook * add short examples * update file-based tutorial notebook * work credit + minor nb fixes * remove FIXMEs from file-based *2vec notebook * remove warnings in corpus_file mode * fix deprecation warning * regenerate .ipynb * upd plot * upd plot --- docs/notebooks/Any2Vec_Filebased.ipynb | 550 + docs/notebooks/word2vec_file_scaling.png | Bin 0 -> 86627 bytes gensim/models/base_any2vec.py | 231 +- gensim/models/deprecated/doc2vec.py | 1 + gensim/models/deprecated/fasttext.py | 1 + gensim/models/deprecated/word2vec.py | 3 + gensim/models/doc2vec.py | 281 +- gensim/models/doc2vec_corpusfile.cpp | 11480 +++++++++++++++ gensim/models/doc2vec_corpusfile.pyx | 520 + gensim/models/doc2vec_inner.c | 6985 ++++----- gensim/models/doc2vec_inner.pxd | 92 + gensim/models/doc2vec_inner.pyx | 502 +- gensim/models/fast_line_sentence.h | 45 + gensim/models/fasttext.py | 86 +- gensim/models/fasttext_corpusfile.cpp | 9370 ++++++++++++ gensim/models/fasttext_corpusfile.pyx | 283 + gensim/models/fasttext_inner.c | 4700 +++--- gensim/models/fasttext_inner.pxd | 88 + gensim/models/fasttext_inner.pyx | 292 +- gensim/models/word2vec.py | 112 +- gensim/models/word2vec_corpusfile.cpp | 15734 +++++++++++++++++++++ gensim/models/word2vec_corpusfile.pxd | 69 + gensim/models/word2vec_corpusfile.pyx | 441 + gensim/models/word2vec_inner.c | 5014 ++++--- gensim/models/word2vec_inner.pxd | 75 +- gensim/models/word2vec_inner.pyx | 299 +- gensim/test/test_doc2vec.py | 228 +- gensim/test/test_fasttext.py | 253 +- gensim/test/test_utils.py | 23 +- gensim/test/test_word2vec.py | 175 +- gensim/utils.py | 15 + setup.py | 72 +- 32 files changed, 47865 insertions(+), 10155 deletions(-) create mode 100644 docs/notebooks/Any2Vec_Filebased.ipynb create mode 100644 docs/notebooks/word2vec_file_scaling.png create mode 100644 gensim/models/doc2vec_corpusfile.cpp create mode 100644 gensim/models/doc2vec_corpusfile.pyx create mode 100644 gensim/models/doc2vec_inner.pxd create mode 100644 gensim/models/fast_line_sentence.h create mode 100644 gensim/models/fasttext_corpusfile.cpp create mode 100644 gensim/models/fasttext_corpusfile.pyx create mode 100644 gensim/models/fasttext_inner.pxd create mode 100644 gensim/models/word2vec_corpusfile.cpp create mode 100644 gensim/models/word2vec_corpusfile.pxd create mode 100644 gensim/models/word2vec_corpusfile.pyx diff --git a/docs/notebooks/Any2Vec_Filebased.ipynb b/docs/notebooks/Any2Vec_Filebased.ipynb new file mode 100644 index 0000000000..0ad4c2a282 --- /dev/null +++ b/docs/notebooks/Any2Vec_Filebased.ipynb @@ -0,0 +1,550 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# *2Vec File-based Training: API Tutorial\n", + "\n", + "This tutorial introduces a new file-based training mode for **`gensim.models.{Word2Vec, FastText, Doc2Vec}`** which leads to (much) faster training on machines with many cores. Below we demonstrate how to use this new mode, with Python examples." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## In this tutorial\n", + "\n", + "1. We will show how to use the new training mode on Word2Vec, FastText and Doc2Vec.\n", + "2. Evaluate the performance of file-based training on the English Wikipedia and compare it to the existing queue-based training.\n", + "3. Show that model quality (analogy accuracies on `question-words.txt`) are almost the same for both modes." 
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Motivation\n",
+    "\n",
+    "The original implementation of Word2Vec training in Gensim is already super fast (covered in [this blog series](https://rare-technologies.com/word2vec-in-python-part-two-optimizing/), see also [benchmarks against other implementations in Tensorflow, DL4J, and C](https://rare-technologies.com/machine-learning-hardware-benchmarks/)) and flexible, allowing you to train on arbitrary Python streams. We had to jump through [some serious hoops](https://www.youtube.com/watch?v=vU4TlwZzTfU) to make it so, avoiding the Global Interpreter Lock (the dreaded GIL, the main bottleneck for any serious high performance computation in Python).\n",
+    "\n",
+    "The end result worked great for modest machines (< 8 cores), but for higher-end servers, the GIL reared its ugly head again. Simply managing the input stream iterators and worker queues, which has to be done in Python while holding the GIL, was becoming the bottleneck. Simply put, the Python implementation didn't scale linearly with cores, as the original C implementation by Tomáš Mikolov did."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "![scaling of word2vec file-based training](word2vec_file_scaling.png)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We decided to change that. After [much](https://github.com/RaRe-Technologies/gensim/pull/2127) [experimentation](https://github.com/RaRe-Technologies/gensim/pull/2048#issuecomment-401494412) and [benchmarking](https://persiyanov.github.io/jekyll/update/2018/05/28/gsoc-first-weeks.html), including some pretty [hardcore outlandish ideas](https://github.com/RaRe-Technologies/gensim/pull/2127#issuecomment-405937741), we figured there's no way around the GIL limitations, at least not at the level of fine-tuned performance needed here. Remember, we're talking >500k words (training instances) per second, using highly optimized C code. Way past the naive \"vectorize with NumPy arrays\" territory.\n",
+    "\n",
+    "So we decided to introduce a new code path, which has *less flexibility* in favour of *more performance*. We call this code path **`file-based training`**, and it's realized by passing a new `corpus_file` parameter to training. The existing `sentences` parameter (queue-based training) is still available, and you can continue using it without any change: there's **full backward compatibility**."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## How it works\n",
+    "\n",
+    "| *code path* | *input parameter* | *advantages* | *disadvantages* |\n",
+    "| :-------- | :-------- | :--------- | :----------- |\n",
+    "| queue-based training (existing) | `sentences` (Python iterable) | Input can be generated dynamically from any storage, or even on-the-fly. | Scaling plateaus after 8 cores. |\n",
+    "| file-based training (new) | `corpus_file` (file on disk) | Scales linearly with CPU cores. | Training corpus must be serialized to disk in a specific format. |"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "When you specify `corpus_file`, the model will read and process different portions of the file with different workers. The entire bulk of the work is done outside of the GIL, using no Python structures at all. The workers update the same weight matrix, but otherwise there's no communication: each worker munches on its data portion completely independently. This is the same approach the original C tool uses.\n",
+    "\n",
+    "Training with `corpus_file` yields a **significant performance boost**: for example, in the experiment below, training is 3.7x faster with 32 workers compared to training with the `sentences` argument. It even outperforms the original Word2Vec C tool in terms of words/sec processing speed on high-core machines.\n",
+    "\n",
+    "The limitation of this approach is that the `corpus_file` argument accepts a path to your corpus file, which must be stored on disk in a specific format. The format is simply the well-known [gensim.models.word2vec.LineSentence](https://radimrehurek.com/gensim/models/word2vec.html#gensim.models.word2vec.LineSentence): one sentence per line, with words separated by spaces."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## How to use it\n",
+    "\n",
+    "You only need to:\n",
+    "\n",
+    "1. Save your corpus in the LineSentence format to disk (you may use [gensim.utils.save_as_line_sentence(your_corpus, your_corpus_file)](https://radimrehurek.com/gensim/utils.html#gensim.utils.save_as_line_sentence) for convenience).\n",
+    "2. Change the `sentences=your_corpus` argument to `corpus_file=your_corpus_file` in the `Word2Vec.__init__`, `Word2Vec.build_vocab` and `Word2Vec.train` calls.\n",
+    "\n",
+    "A short Word2Vec example:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1\n"
+     ]
+    }
+   ],
+   "source": [
+    "import gensim\n",
+    "import gensim.downloader as api\n",
+    "from gensim.utils import save_as_line_sentence\n",
+    "from gensim.models.word2vec import Word2Vec\n",
+    "\n",
+    "print(gensim.models.word2vec.CORPUSFILE_VERSION)  # must be >= 0, i.e. optimized compiled version\n",
+    "\n",
+    "corpus = api.load(\"text8\")\n",
+    "save_as_line_sentence(corpus, \"my_corpus.txt\")\n",
+    "\n",
+    "model = Word2Vec(corpus_file=\"my_corpus.txt\", iter=5, size=300, workers=14)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Let's prepare the full Wikipedia dataset as a training corpus\n",
+    "\n",
+    "We load the Wikipedia dump from `gensim-data`, perform text preprocessing with Gensim functions, and finally save the processed corpus in LineSentence format."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "CORPUS_FILE = 'wiki-en-20171001.txt'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import itertools\n",
+    "from gensim.parsing.preprocessing import preprocess_string\n",
+    "\n",
+    "def processed_corpus():\n",
+    "    raw_corpus = api.load('wiki-english-20171001')\n",
+    "    for article in raw_corpus:\n",
+    "        # concatenate all section titles and texts of each Wikipedia article into a single \"sentence\"\n",
+    "        doc = '\\n'.join(itertools.chain.from_iterable(zip(article['section_titles'], article['section_texts'])))\n",
+    "        yield preprocess_string(doc)\n",
+    "\n",
+    "# serialize the preprocessed corpus into a single file on disk, using memory-efficient streaming\n",
+    "save_as_line_sentence(processed_corpus(), CORPUS_FILE)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Word2Vec\n",
+    "\n",
+    "We train two models:\n",
+    "* With the `sentences` argument\n",
+    "* With the `corpus_file` argument\n",
+    "\n",
+    "Then, we compare the timings and accuracy on `question-words.txt`."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from gensim.models.word2vec import LineSentence\n", + "import time\n", + "\n", + "start_time = time.time()\n", + "model_sent = Word2Vec(sentences=LineSentence(CORPUS_FILE), iter=5, size=300, workers=32)\n", + "sent_time = time.time() - start_time\n", + "\n", + "start_time = time.time()\n", + "model_corp_file = Word2Vec(corpus_file=CORPUS_FILE, iter=5, size=300, workers=32)\n", + "file_time = time.time() - start_time" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training model with `sentences` took 9494.237 seconds\n", + "Training model with `corpus_file` took 2566.170 seconds\n" + ] + } + ], + "source": [ + "print(\"Training model with `sentences` took {:.3f} seconds\".format(sent_time))\n", + "print(\"Training model with `corpus_file` took {:.3f} seconds\".format(file_time))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Training with `corpus_file` took 3.7x less time!**" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, let's compare the accuracies:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "from gensim.test.utils import datapath" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/persiyanov/gensim/gensim/matutils.py:737: FutureWarning: Conversion of the second argument of issubdtype from `int` to `np.signedinteger` is deprecated. In future, it will be treated as `np.int64 == np.dtype(int).type`.\n", + " if np.issubdtype(vec.dtype, np.int):\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Word analogy accuracy with `sentences`: 75.4%\n", + "Word analogy accuracy with `corpus_file`: 74.8%\n" + ] + } + ], + "source": [ + "model_sent_accuracy = model_sent.wv.evaluate_word_analogies(datapath('questions-words.txt'))[0]\n", + "print(\"Word analogy accuracy with `sentences`: {:.1f}%\".format(100.0 * model_sent_accuracy))\n", + "\n", + "model_corp_file_accuracy = model_corp_file.wv.evaluate_word_analogies(datapath('questions-words.txt'))[0]\n", + "print(\"Word analogy accuracy with `corpus_file`: {:.1f}%\".format(100.0 * model_corp_file_accuracy))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The accuracies are approximately the same." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## FastText" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Short example:" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "import gensim.downloader as api\n", + "from gensim.utils import save_as_line_sentence\n", + "from gensim.models.fasttext import FastText\n", + "\n", + "corpus = api.load(\"text8\")\n", + "save_as_line_sentence(corpus, \"my_corpus.txt\")\n", + "\n", + "model = FastText(corpus_file=\"my_corpus.txt\", iter=5, size=300, workers=14)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Let's compare the timings" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from gensim.models.word2vec import LineSentence\n", + "from gensim.models.fasttext import FastText\n", + "import time\n", + "\n", + "start_time = time.time()\n", + "model_corp_file = FastText(corpus_file=CORPUS_FILE, iter=5, size=300, workers=32)\n", + "file_time = time.time() - start_time\n", + "\n", + "start_time = time.time()\n", + "model_sent = FastText(sentences=LineSentence(CORPUS_FILE), iter=5, size=300, workers=32)\n", + "sent_time = time.time() - start_time" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training model with `sentences` took 17963.283 seconds\n", + "Training model with `corpus_file` took 10725.931 seconds\n" + ] + } + ], + "source": [ + "print(\"Training model with `sentences` took {:.3f} seconds\".format(sent_time))\n", + "print(\"Training model with `corpus_file` took {:.3f} seconds\".format(file_time))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**We see a 1.67x performance boost!**" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Now, accuracies:" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/persiyanov/gensim/gensim/matutils.py:737: FutureWarning: Conversion of the second argument of issubdtype from `int` to `np.signedinteger` is deprecated. 
In future, it will be treated as `np.int64 == np.dtype(int).type`.\n", + " if np.issubdtype(vec.dtype, np.int):\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Word analogy accuracy with `sentences`: 64.2%\n", + "Word analogy accuracy with `corpus_file`: 66.2%\n" + ] + } + ], + "source": [ + "from gensim.test.utils import datapath\n", + "\n", + "model_sent_accuracy = model_sent.wv.evaluate_word_analogies(datapath('questions-words.txt'))[0]\n", + "print(\"Word analogy accuracy with `sentences`: {:.1f}%\".format(100.0 * model_sent_accuracy))\n", + "\n", + "model_corp_file_accuracy = model_corp_file.wv.evaluate_word_analogies(datapath('questions-words.txt'))[0]\n", + "print(\"Word analogy accuracy with `corpus_file`: {:.1f}%\".format(100.0 * model_corp_file_accuracy))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Doc2Vec" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Short example:" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "import gensim.downloader as api\n", + "from gensim.utils import save_as_line_sentence\n", + "from gensim.models.doc2vec import Doc2Vec\n", + "\n", + "corpus = api.load(\"text8\")\n", + "save_as_line_sentence(corpus, \"my_corpus.txt\")\n", + "\n", + "model = Doc2Vec(corpus_file=\"my_corpus.txt\", epochs=5, vector_size=300, workers=14)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Let's compare the timings" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from gensim.models.doc2vec import Doc2Vec, TaggedLineDocument\n", + "import time\n", + "\n", + "start_time = time.time()\n", + "model_corp_file = Doc2Vec(corpus_file=CORPUS_FILE, epochs=5, vector_size=300, workers=32)\n", + "file_time = time.time() - start_time\n", + "\n", + "start_time = time.time()\n", + "model_sent = Doc2Vec(documents=TaggedLineDocument(CORPUS_FILE), epochs=5, vector_size=300, workers=32)\n", + "sent_time = time.time() - start_time" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training model with `sentences` took 20427.949 seconds\n", + "Training model with `corpus_file` took 3085.256 seconds\n" + ] + } + ], + "source": [ + "print(\"Training model with `sentences` took {:.3f} seconds\".format(sent_time))\n", + "print(\"Training model with `corpus_file` took {:.3f} seconds\".format(file_time))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**A 6.6x speedup!**" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Accuracies:" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/persiyanov/gensim/gensim/matutils.py:737: FutureWarning: Conversion of the second argument of issubdtype from `int` to `np.signedinteger` is deprecated. 
In future, it will be treated as `np.int64 == np.dtype(int).type`.\n", + " if np.issubdtype(vec.dtype, np.int):\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Word analogy accuracy with `sentences`: 71.7%\n", + "Word analogy accuracy with `corpus_file`: 67.8%\n" + ] + } + ], + "source": [ + "from gensim.test.utils import datapath\n", + "\n", + "model_sent_accuracy = model_sent.wv.evaluate_word_analogies(datapath('questions-words.txt'))[0]\n", + "print(\"Word analogy accuracy with `sentences`: {:.1f}%\".format(100.0 * model_sent_accuracy))\n", + "\n", + "model_corp_file_accuracy = model_corp_file.wv.evaluate_word_analogies(datapath('questions-words.txt'))[0]\n", + "print(\"Word analogy accuracy with `corpus_file`: {:.1f}%\".format(100.0 * model_corp_file_accuracy))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## TL;DR: Conclusion\n", + "\n", + "In case your training corpus already lives on disk, you lose nothing by switching to the new `corpus_file` training mode. Training will be much faster.\n", + "\n", + "In case your corpus is generated dynamically, you can either serialize it to disk first with `gensim.utils.save_as_line_sentence` (and then use the fast `corpus_file`), or if that's not possible continue using the existing `sentences` training mode.\n", + "\n", + "------\n", + "\n", + "This new code branch was created by [@persiyanov](https://github.com/persiyanov) as a Google Summer of Code 2018 project in the [RARE Student Incubator](https://rare-technologies.com/incubator/).\n", + "\n", + "Questions, comments? Use our Gensim [mailing list](https://groups.google.com/forum/#!forum/gensim) and [twitter](https://twitter.com/gensim_py). Happy training!" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/notebooks/word2vec_file_scaling.png b/docs/notebooks/word2vec_file_scaling.png new file mode 100644 index 0000000000000000000000000000000000000000..e4f53117362f698180eddf5a9134c9947ae42717 GIT binary patch literal 86627 zcmeFZcRbhq`#$`ZG?i$eghV8Jlhu`Em60+sDqA*@t%XuVMnq(1WJa%dlVJqG)N?h zI1*_~(~fQUi78u$AO5q|@gGIa9e8-`F!smaciJiFIFd-aX^1~$EkC{1@I!H@bJ|Xq zY)ze9t~;2Jtgk!SS=l;SS=?ZDHgRyYu(e?p;K4s!%;ru`b`re2|N8_UTL&{<-`H=v zNhD^HqTCtH+tCw0U2Y#}+mM-FN@h=Fzt6gl#%<~-?~VY{^t?TVtT2Yg86^{Ue3NP?xFvGKdLzCd`|q&FIm#Bk)rG=QY$>E5YpLY_IkoYI1wJ)=X#QN&Oy^ASl|1SOs zDCYQbqpQHWv2JD6Vl|~Bz?;SK580MC2Bm4yrxquFUhV$kIu^(%^{B(p`{Q(9g>zVN7yty{D3GU9!x) znPtRD;(KT3!Rj?0gA)JEwP6|awzPva;oQ5=pFjVusOXi$V9m(SVo|TvQT9QG1(&eM z$k)+95j^_3-CvGr9H>{nI4{~iFmV6Gg{W)2WuD*j%z`@eeOtLN9HF7Pi`O13i~GWS z>eOy8FRwJ(KZ%K~6FsFzie08EOI+u&ZGK7!S$<2UQ&L>9-`w!bH*ek|YSYaz8NZL3 zx>>NlD){)0J$t_V{u+eEkdwPT{fn%}eM)L=ZLYDZrbZxYaiDs?gkp7>R>ZWq)nT!qwmCiOZcoZ&VX{^0}D(4fR8@BTRRs;vQ<|+x$E%d$BS6`*-a& zp&&-7Pcy&2l92)f1K-*I_BB;4!tOIjxqUP5X=I`Pi1qw6GU6}VbDR9sm5dKq#;4KI zj|I&de%+&z_Q}lTw)y$~!QP`1<~FA}#lv~@9~}|3`A}K8Er6C+UPXlx`yCt=^+?OW zHR)Zzc{w>bwx;1&@4&A?Ovjm-NvEABWN+MvZ}GCbb&GqlpSoE_!NWuHpR;FYDyU?L zMY}cLe(v^dv5T?NPnMT&cb9n{*}j)SR$H5nQ!~{kH1t4Sq+rUsfD7|M6^u=CoVtZJ zPv`|rNmZjo-RWAq&xE#9G0MryZ{yT@^LTc!*63^C(eI`1qO0?xMj?ySeHBVn!Diy5 z%+^sphXDl@oD-9pQ2wZ)!NCVQd8V(fWa`q1*)fNNgzy9SK8Qqm 
zeg)af!jBy=aN9l@D2`*qSUeJ`Im6k)xd3ZfUxH@vQhu;%jHI4iV!cm~4z(cFNLev2 zFMkB!8p@1{|6Z>UI+;M_8XOc<1zpQO84HGV?4sEf)zw=-ggKyg{6LZ1ALasrhGA_g zOfx7ht_}3MnaT`k@;LOxhiD#*6Z(+x<+JN}Lv3DiF(D4nF58CDcn?)7Sbjg}aa;|sdiw2K@p4g_nW)Pmbvo2YKv zzY!8cG=mByWw8Hwl{8&A;oJn@>=-*&m_INaE&ItfaS#5p?XPXQ6Wm9Hmg)+xlOz9@ z^sb&QgL+(ju}?h4nLS6#w%oCyyOu<*Bl*ikw!Q7TPaLlZkSE?z;84uWv7v=(@A=2) znUHYXGTM@}YbY{{zW1SohxK{u^JA}=s!wkB92Gu*J_fKuR$<(J9_oE`(smoE}cY79f>{n%NYUyzI9w?kv67&Tg24YMc^g4o)a z6NV4kL`2pR9978Rt_Z~~eJk(x>N(ZxOV)gveS)Zac_Skq;j#9qOW{r=zRf|8AqV){ z*gwfSVX{um%Ft03B5I|~Hg;4t|85eF6Uvx}a7w#?epz4aAE*mugR2|o%sk+_%RcsU zo(>3Naq}?nr!p7*gf?lbnXjK{c2iCxLnBo@)m!*C}&``!lw>5uwZwtJ%WYC)c?K*qh@Qlx;keUF3!1J`WA4 z#e)AT6q%3j-NVFyuBpdyf-#Uz_=BfmFg_&AGMg=9k75sRM5XPkUJ81BubiKU-Oq%H z)TbqRTty^7fT_CnNAg=(Ke?!qZY=-lQivT^ujcu}6Q2I$%5zZC$5A#~P z!E;cO4nfrB;O5pPa^K&Zn={VN$=QuN-lYHozY5DH#)9y0@H=*lJfzNpMclN{=Z$fG zEoa@k>!Aeb6#mW#n62L3rSAB*nxB1{9GN_n63H&f)~}Ks92t7upP1R3F#f(+QPVVB z1J*Wis!|XvBhL*M$o33DKRpCv`3lvT(#!XZ%}W-!Kr-s5O@thXj}J$W^+0uw{(=u= zd=TBD$e`+atoXm6XytZy3Z$>s4a<4pPa1Fo_ABoH+k&@GE~`G?LQvTyvqeor_??sO zI+Q&%9yJs5Q-g=E5j-^MgBCJMDB;^da9Depd+4_%0}R^*qtO-cb`6d)FZKevkF|_f zhfN6yKv?9@!D0{1_pf+m=T=8R$d&S*gz0)!6|tqrnePlxY^5cXzk`Ly_K3X!R%Hr< zH)b0AWTcQ7V4KPWY2b8ev=%Sh47T8@~neku0on)$B zYrmTL4|xQB+xrJ^5ev~N0NcTE_=$agv`UZ!z=4eE1pY<9TLF#$^qPjH zhTTUfy*hUe#?zNC#|#4E_2b7L)-xb$;6U=_tvwj4RBTH#s5QQ_;+W_UO;aKEOZ;7a4UlB4w6~Hf+4F* zpj4>V$;yPz4Ola;GL3uaAtItikA6#_If6^e;@yN?KQ?vz#KPKKpO^{5o|->+^vi(k z^o_pJR+q3^wpUfM&ZW6uWFT=+kWzr~I3=j^8{ln7;GLTL7`Q~=`vFX&TUF0OW#OrD zcoVG?a&$4E&qD)z9Dp8v3DFt-3iy$@zi!c=D4L1Y1^-x@M1U(=H@5svk*ntfyA#l2 zoDI3maCCzZL#V`;kBO`9RJZ>4d@ zZX9c*dPnmcu`N3A*ztJ7Alp2R;z^0;5v)ue$Bo&KLJ$BrAnoMhk|PHoM>IalM?cO4s6$n zZ({c!#m)C!_gl?>&P8UaRO)2?wjf?2hKKB-@5{lO=qa88wJsp{KpfZ*wo+fFHk*cJ zJfvq0kq*&;`aKeI==4c&0!T^ z`r-q?JDQCbP>h3rn8JQQsh|RJE>8;QluMbz!P=R03ZXLqS!HHoTx+H5Tr`d}=~F50 znt1*iRyH|1m$&b`fHSbq0Nu3tB8=Yf>=Sw60$otr!UP*ax5&urs0Y^mW=w2s>sYnN zE}*DCz|>nKV?X>+@>6^ zpS8-)FdhYtk1F_3VV{+SfBL@517du^I&~gecbf4R+uVO|SiBw%d$-C;VJKp&p??n} zDJ2DsGofZ=^aZ#f5I7T8S3xuq1AdL?T|eV^G$Qg+paqBUmE>T(Cf8D8|12ecyT`W~ zYBr}0;hcCcab{USL!WRhGYm`oTeAiK6De$^oLyo=PQ)Uc5_J-0qT^5PojZU2ycUcB z0WP1!4Z-q?=h6p1H zteAVp9$VwTi9|y|4>zCk-V^c+Q(iZ1B#Q9PkkA=lVS#&Uc$tutjVP%Y~_mby~I>lsm`P6IGeZulQI?xA)sErX4}Gl zs6k-Tc?pAvGn4sMAy-64;63yRt$iV*_|AJ@)CenrI!(`7<0Uyx^?E4<6i?);MpW;q z@h0PENKaO28LZL?@TsfQ$UhipQ~n34pV7(G1@PsqRtK! z9x5)#!f>k1b0VQ~ZN=cM^SD-^3iWD?tQ0%6FRwYJGJQQnp zfge)Y(5KOyDk$f7g#t1*k~K+oR*PvhU>wecXMJXUKJaOhd7~2_u>6|hH!T9)i_+4B2 zS76oN;>QGakBMF3j_QEmQMM&B2w4E#N!}MtHPwzs?ut)XOO8C6)@&gnGO;H%^(# z1}S0vyfrp4*@ey*c#9@-JS3v?#GUb9zM$s^vTknnWY30|^TDN8$em1R#pthRsxS_* z>Ji~4)Qp)FM8M;_tD-JRRw8Nd6wQ!X3f%&AZ{EYkE-dIIf-+nCUUlYw3Lw0|7bxc9 zzhC#d)(W=ahH^Z(WOqM;cRAbNXee!Clk4U6P^4A!zu)7nkk65(-tBwF5thF+TKc~* zNc??V;dEJYedAN%# zgEn#~n#}F6fw`}T0@n=m*n-W8`HlBJWf1yn##U}lcoZrLeh_a{7?7P~I;qXn^P~P# zn>_g(FODN=mto;|-jjt6(H0PMseuuag*)gx+#=}B{L>QS9HNO?nOcL%&-sGm^q%px z1aX-aP<|+fmiv5xu}`NccWV7z@7c)amYB9OG)@{0cW@PuT;WN`GsH0wc%r3dL24H@ z{H{&-@A8d8>?5l>pm3U9TdP%dWgL9!@6PS5Y?O-V`;!~map3VTl^rgxPKMGN*>Ihb zRaR=ij}z+9Zyjv{-ap}&OH_4zz|~k0&llqM^*}Pifk%F#T?#j@|9p|4tSER$H;wkj zzW?e2hmAFeq`7yWCVT@mMb=JJ?b6z=JZQqwNnH~GUmd$wouy(%lYkfq5MYR-1Ty>! 
z5H6kmYYuu%nAoSk+&@%tmb!xP;el4Q-yO5tQ=>)4)0#!cUy{Gi<+6h+MJ_mYZu&V2 zSCm%iCwix0CdQ0+^`irgfgi9;6))L8Cayb8XSp+wXZ$pQwt=xtPhpFGWbl|AiYkoK zR1F?AKdzj+FF&0;eP_ZGg@hUXT;$8WVTNA|w=r&5%4&ulaQqw4@Su+Ax)qnrWrIAonh0%eObElX!;pe2$K*sFH|+}< zN2m*{GftLigH{9 z`M0q%TChvZ$!*!$-CHZ0l3(DW1WY7h{SPh79-p;=K;tk8OF0IX@)nDQ+Tqpa8*PKrJ>c(x!csv`ghd8x<{!K1HNe>E!;pIQY~KIL#=SO@}0Nx*j@w z!j);0e7^PPwZ0hsY)Eocib`;Sdk=34UFw}PH`@6EOeWH&>UK!HU~}<)K6$;-%ZOqW z|JG-j^ycAs&dkjn28N|Ti95sOm8@rJ5a*7TkB^p9m+YakP=h4t%-ORY z;QIqCMLkGgJe#KOE*^j*F5su|cg+jO+?Pw2K|?VlKte|5qR9I&!K8VlHPXjDEDqErGG!iUJtSG4`B0Fq1szb=NiVCBTCy^$ZJkgUf#Ajs~W4sV*$0fICJzBBvcN)+aV2f9pJ}NseE*|d_2DbJr zm`+!}r9f{H)VvsdX?D|f&3dj?&Z6!<9REK24So7gPOqW;dmUISRzS$1_-TSkbkfhB z4d9knaF03%1JwyC+aI)X&i74C88D!QNEdV%rC{Atx>9wCL9K8lw}1keZ%voYYiJ-h zkQ??<6Ckht4N6QjJscRB)hlo78BCtF)Y8VdgCK)7d&fbek7 zxVTXx18_i)0R$ED2p%)c%ND@LtbzY4A|_#o(0KU~6i45(vI5~+^46=p8*s<1h*Jkb z+k$Ydl1%Kb{F$$v2%`n{_D%Xykz(by@vm+7e`V1-T|1YIg8{fmy^QBzE zA7mXCOH$(AWs#le04})c+DcPQ7^%t;Y+4tEe2?fZ+`9mW*Ut}4kEI84{;b{>@FMr` zl>3u)@&`(*Hx~_gZ-FgnWn)PDH|y~}ul_9`5^g`31P8rE&;fY?j6tY4I6lHPuKBPb zW0oQKUD}7@j3<7u;9Jg;SOU9E{X|xJ;}4LEruJF=Q|*={`-(s_xTa=ITm2|&YUuM! zU8~%nT2rKe35siP22Nq@_3f{8^~G7~R{3Jju{TV<6NrP$0?f+r>m zqgFA%By6#jRM5FkcdG->4{PAAKan;~p3Hz514aDM2cgC_f`RI~N zEA{g%Wqic~IDuJ10}C-^1+u_ZtsR zzA@nDgF-kqC1hk~;B@~Do?=Dyt6(h-=g55+r5L7W)wn&P306o5YK8IM0x+PFj6Ut* ze$4Y`$PKmcy+TNz#f71ubVac;L z%(DNXSS!GIoWeN{)ymMZhMf;|koSfpa|?|Groq(kJcZFfGf6$x~Zd1_91 z+?OvXp}BiAu%VwXs$TcMsSElV=@PmBEVlVBFTV2En{B$C407fhZ=C9e-ohb4Z*pxv zc?!rw z`wtLFyb%pWGym2g!-K|Ge_dOPVyv*FSYGapBe`P!d!Wx`78t?Q@RL-GQos+=)xh<~ z>cMXl=dD$;rf#@nc6>hKxBgGzFd4n6wr71(dnhu;#%^%re9_k~m<{9m`~Q!AJ@rgv zl0PU^sY=(x?x}3QTZ{2nD;+CzUJz)Ow4mcS8jxi{#+{Gd2l9|eOY=?G@D@={e=rA} z1pQbop~Az8JSoX!de%NiIVT>&t>&JK%39ZRQ8BCt2q;5Od*mMu1afdqzR+swVC3M9 zDz_G-6yCr7H`(}GcFeF;vmlOCCavj9YC16m^Bvq~p9#N4B+CJ+*h(7G6}^eizNQVM zm0MpHd9Xe9`~0h3i9C|ONWgHQSFg#RKem#$n+-B-eUTF^3Xj(y$&CPyR}57KUIq49 ztp7)4uzS(?l6Z>@^`2dD@h z-akL8E)lYUhyPyr~R6Fqb|)*qtdhz{6o+paigKl+=?wQ_H%7d6}+T`>2KMq zE257`9u3!59aa^Y{?UVvW3j7LUQj$NIk`BzS!r~2j!DHd=YSs11_?8fuuYOso7zdZ zgW`r&Jw85=QLG@q@pedy2e;g?+M*yqKW)&*&vfu%0i>~@?%+h~YxoggM$0+j+@t>z zw_fvo!=4GF%L1kteVUY+HDers^H2AK%g)bM=ztm8&VpK8!poR3Mhwfp`vULZ4A1dA zDuxBbU#hcLIB|P(@?FFbC+!aN`1+8Ik+%+B!E};gs@D2giz!d;Yi2*!d)_IvE)1+3 zfgh8uGLbkEv)w?}Yr-X}&Kv)Aw=j@@mKVL~YHDyQoWZROvCVNw0?fief}>l;EP@Hp-&K{jFbaIi0+;~*Orq%7?Fqw$jKgu`; z?4~Z?H~stXrmYu>x$zG`o7Z2xwDFVeG6$}GNpL~{uDx%&2~Ta z_6y9N4i>)`2(cP$T2U1IQW7rP2GN}EO{B7;q$K--Hj_7$8~_1o)d#i*gJk^gwUDRZ)J0d1jp=maCMw)*V73>E-I}Xbf=kUs7D)OC!?0mm#CF~wz|hQE+dS&@9+oT@5#aV*~PZ{7NZtj9@WuezY2lX z`oklyiFCW^n6xR!sNIR)IJ={r-X9kEN0xau!@Y?DjF$s;9SdId9W9a>7L?|@q^vA8 z40)Y8ENNQFw!;{zX#QfgK3L2aJ&dK=B0Mm{8}@90vacTG`)P( zp*omA9(WR!iJg(}MjZIq<>`P7GhWs}n(}b){Z@fe%yE0qY|JKWsr2zO8rLn- zDv1u(V&VVo0HYR?(zkgY(AYTXv-vJKomh0<4wwASJCnh59Kpq@;(S%<=)2nj)Wc4g zNBqPD_v3pm3V$B<*7P{LzCC@K)R6y8wa)8_T5Bs^eD=Bd2)O%?)g8~+`cL0zKRy>) zQ$Hgk;5zPVI$Ubo;xp>7^y>d<@2#V%T)Xzsg(xa1q9Uz;gh)%LinJo4fHcw|Al;!T zpwbA^pp=4iHwc0>Qj6}8?ylc_#O)j38RvZCjPvJt#=BqLTU_h8Yu+=id0n_#TP9^g z&lH)o5*sVhA~%|5DzL0V!?I6XOUWblCq}7QE%`#1fr6*oUVifX0aG)lB&zDsot1L) zmPx~dQ|{GgZ`$smi{_Mz3nvyUxmlCBwy^V}DoA`Ys6wcLxmHZ_vp*b@ZETt##EB6H#jiEe7iopO^TemL}~k@vN^kZjj_(atuy-_idEw^|zrH zO|1R%xUnW(VY2fpE44mvwp(1JX4v1p&$M`D`xdyfdphtaulJ}KyH%(kXuo30d##C? 
zL((Md?w`tJBtE(@Src++V&&t9+*A}--{ZuUCDDsYh zD5}@{ypratgg2UPni?nlf_7H&h%3Y~(bpeUaM*2Us)!A|>pm68e*t{bo55op7X|lv zw{8qSIZ@eDuI@0?{ZY&g=edQRWfwC}k>dGt&U1y~?!88vn@dze14iSdZFE89;yN7_ z{6yt}1lP?ENv_JwPcQpX0S}> z^{+`#zhf%#n3252W9;68T*9|JcV-tj2$4T2C{OGT28Qayi#1yI6o>M#%E1fFy z#MO{*n0()J3E$1DD^tUAS~bk>c^!7_-2{hlp%yRq^7QnCC**m`XBK{rf1(&hTTVGB z*d}Y5p?CZ<%HCS!NmTYc^Ap+Y!&6v0HB<6|dtZCwgTH7MX~c!nDz4L>RUsiG;~h;~ z67t?;%Mwbp-Zyz6|#Yb(a&9*QrG0S+WrkLtsQuX6cW zUq&vVKvQ7#TW;0*uL0r1TZ3l3Ums*2IQNv+uMF!5d^z&RRKPS96}2l|^Vk)5$C?${ z6CHwnAFLkM-0mUs)f%F{s!+6Bhc=1)rPQtY}a3hGWE&RoMu%vB-q4<9hTWI^VsvNES9=FX^^diwC3evjRy;GpO2s@%_GRJ&PIK2N zAGjoa9<>d`7e&VS_;Y^WllNu*r}_=nK~QRBDA!zvtvL#-~|#=IR)A`rY@K9lZ@10V=eS-QG( zO#->PSht1OgWa-qrOKmo^l@Xl2k7Ow6GX8ecW&o1z0#TPmx~s@17xG=V;hEXX+^io z8VB}LH2r0Ai+0NTd&FOx*nIkU8q4nFsZ@$Ovwh_UVsmKfF}mO9^XJr^@;3>ZEEnk8<4*Dttfb6{=vyY`5wAOZX+DOA;HS@%h1n5_Ac7nK}QAOGlP~YbU{~x>sSU&U?u<3(hb+YxlY48o*2L@Ft4Fi zw6dC26R79(rF|92j!8WIA;^ItxPA7a!SG1BNBDW6*Y!Vx&*F5pxIpGF_2Hh~E8Au5 zJnx%CX*kpy3}F6^UPTSoZ7(GEGYG4{@anBGr(2&4?8Ml1Z^{ORcn=Ba);d&Xv3ZCxfDvFR-5h7$g5~E`XlgH z%TL(_&Jf0*3ctt((61jK2gi~U2`C8AC4?K9roPA(zk- zjbp#4p(pueDRj3Aen(E;g;dDuiExz)3v zQe96KwQx)LR>J0eJ(!}J4WH^eSIhoP5VF17M;8PXYiX#aa?-YO^@$SjW^XOVj6dq` ziPqUK6|{f4y20|b9S^as@r!p($he|>ipQP#HG6v1$aw8$XM%zT2Y*^g?BtKAhEggw zbdRe>FTBurAv-S=2y=G1!vt4j%5Nj!F5(#x5*n4jsh9N9yNpHkm33lEb208yt+;(A zmH7bQ%k5w1kS!6SSHkOD{8FRJRgith(TtL0aW+W3j*Xn))QutQ(YjIR%)?*G=X2;^ z;ir*t+bt02Q{Y)J(qF$HARkc0bGrO$1}(w3v-{HYjKo8+^9sANqqWOqD=W(u7Kw=8 z49~q}SU3IzBE82=ht^~y0)8i+H~x6;%45nF?Lr|SV@^pjl!ZC{R>6;=`FkWQd9uZ# z@_~i=uA055mVFeB^X97Qfn$Fd=VOB(H-6~OR3&~b_*!SiYuoP>Sr&ed{YAYC9p_c& z6NegyEa}JD1+j@fIM9Lk^afk(l$zPEbrK(2uWn2b@DOPCdE=9FtEP_p+VWeAF!Ar; z%_=zLtq8y&ID<P?XCXZsKrV!Wj^>vW3%eLRRg%aO7JDN9B{pMHu3)^r~tf&FELyQ;Gc3?SP zrWG&p5>TBXbCn+o@MxV$(NxjaEcC)&Aparwbs*W`Z*(ypg=_2X< z8`E}Nb^Ir$DLtw>N;c+9Pp3S4{PnuV_jXK~dd7y2WBYu)bh5BVb)?rX!0j|Mi|+a! 
za%?uda|Myzn%=vB^$tz0JJcTiNOfU-=2GRpfCSqO>YlDdu8Ih3-t0uP9_Qk?!@|m# zX{vMf!p|1D&O7H}ZmD$k7%MyJGRqL$(7QW|-C*1h=39OP{qA1O5r3sN=m;e5pSIMBI7*YYU(YWx+C;1K%g@!iu~$RLx} z!*4&DW*}DljisTc9us~q?JZgIP{VW*j^T*M#%mNU?yuTZJKSUm15IM3?vs!8p7XYn z@Y;#wVzfM&^`9fhAcSq>RmVq3`irOsHl~i|!qHnklc@(rsV7H%6w#FCY)}}u!Iwz< zA{TmnsJ>g0B71!72@4Y8%=P&)zfN|TSlS`yb;UQAt8Q^6)5!^PCJO^bS_tRS3Yc}OYv`?;BPq%1!%JW4eY11K{W7erC7mUWrP`hIH8JHm<omsT2fVZYqPktRK~M8TF;1v-iXn$2KToe&+R!cm=>So zjb-{Gk}eZ_e6Y{Nma&kNJk3$QAY6c|X6tb==3&BC3Q!2E-l8wh=n%>6e}EsuCuwxg zsLbRJX6~Na@m@(_NA_jpM(h65n4NhiX{I#mrHN`-F@tY5gX#)Cd8B@3R^%V$OR9Hj zVs^*(Z?{mzTL?d2yoiILu39mKpVFt;47)NLC#RmJq1N(eZ(liOxz7xjXM#5^otSJ~ ziQAT=fhgK9l0=~6;^gmakGFR&ANU6e&3vj)s;y#OS`Bu*FeJ%3gB=R+XlN@-b~JS7 zBmy>0L@~aMJ0t0U?|IK|odmHl+DcYG#WSCsQneX$vft*V#QlXECBbJaFxNDqXp^#f zf$5!65P0IVV_ol68>3wY4~jXjK0Q$%Snq>R<{O)=8Ew+BmxRgYw&r|e`o=4)R~($S zNjb-5S}`#eIdGG68RrP1CVo~-D>xUQ5Md>VDKR={S1ZB2+R zL)#U_)cg<&q6sGHv^l5WG3n1EwG^3Nc?`r5o47vnK%U0}>PuQ2;Ve5ebaY8obHG#r z0%=!RSo{DS4ca4;l9HedAY3?RQVu#Zw6wJ6E?f}P(z*hT;!vSPl?=LmH(}KS)d1Ou zn{NUF&g~x@kZ|ap0b&5^$B*$uno>7iTEjV$s}6y}nay?IUY3psWF`|nfBuzX5}cn_ zt3U1EXlpSoV8SS@UX|&6rQFe8@QAzEZq2iGq-RHME&eoSUyAnhhfJOhLgCZv?sIP7 zvcEoBgOK|qftkDNrK(@Uq8WkMv8sVxW_liam>JLPUq=d;yfKQsH=cJd192hsgTpP# zYFGNnTpP3dLswQRT$}FDP(76_xT=I3dz>GI+s*KFZe$TqRLl%lxRq^YW@b8?0}fanO9fS~2;LBw>REAS zwA;@Z=k<*{LyTB0t11(%v(ISw*X@f4t zD^Jg$$jC-$EJh*CAfPt04?3qK0HM*JYN{2{H31MT0&|CE;80f07HFzMi9UHk0uApI zz|jF{(lx*Z^US(}C^pm=vw&{#{Q2|StBtG(|7iSN;AR!H>-Q=DSSs(i<&fvQtR|b@2;`K4j-i$Lt2AbPp_>r8Q*I@#HkwUaEf+; zU0xVt0V^DMIZzUSmvh+1MG3+{EJk;;_&+EI7&suyKWZFN*bP*fyRHFDsRyZc1I;?p zmY)S$o~31FMZkhNtNtM@Ow!(-7vwFFo^2rTP|pg5W`**dDKeDnW=?F^+){r5D-eF@ z0j$jVU;~XHCO(v+VatXCL>@$Q^$?&G6w>V-xU5DP%*-cabObklUXqqh{WUROzhqAN zt(m4ea_jEAZBPD5#eIzXRF=7SeLOq)&bh>o-+J|gwKAN#&Vr1wwJLL_S=Hm_XK?Fm zB)S6?-ru>ejZc4Fj(6`w>50o!!Z9`8V&F`k*(}>Xt#W*`QIDx$#|<{Uy~xy4Y{LZN zjO+8g!O$}Z4K9l2^p+YLlql$n#~Z2}9*zYuui)5NIak+mPWSXOv*nSh4+D?Msi@>& zWPzvsn*H4t`dPt*mqo}%kgx6 zE3a z({r=C*CwyE#B-GktWrGA$0=#8#;lg|k)&nhcmu0z@9^sm&{QH%VE%24+{m3d%_Efz zQ(#?o-X8VBrQq-EwHsjwA8M zQJQ3A0;=zx&t>+P^6}zqyuM2(ETwzP}a1P97&ElGSpqjo6|Dd$8lmU#PR5;dv$jLQt1uzL%q(+dmD3UWf<0Rp-W);M|r-tYv=ys(+1 zspditG(B2IjK=_<-PlSU)iexd0xp7Yx*Q1Ul6cvn%hq5W{Bv3qnpZx4{ODV}C>=2; z+@9$}ArZb&fB$|=DqYX;yP^VoorVDYT`pFUvrL+=H{7sPFNO(ece1k63rlGwKR6LN z{aEF(YPdg;8jLhyWWUR)V?d1hlo@IT(8BQh^t5s#lew^fk#3zzdK zrtVN@dU=y=ZQ}i0(N?W+SjNUJk9^Xeb(V=z+-YfY(*wHO4Qp*CHC$-sjSxkkowVly zw>>k({^IWl@1@I2B>qqML0h{)D&dY!Z1p-WND~Hb-ovmaTmC6Qg>jCGYUvr7fb9(- z$Mv|+pS_`1olZ9F!F0qmna~Fc3U{2HOV_GiBR%UiwYAyMc?L+!Z_q)##*dzym_V2? 
z4GkIK-Y|XS0t71vi3~#FhOov7ZVg!3>~1X$w?eb+%H}4WkkG-~trFf>>4O+)=ZZ~p zdhrgK?5HwaO2|?NM~Y)!G*wv9VuFNlzwd%=&mrTDBg`oN5yQEQIA5&d%*XOVnMEEd zt4P9S(R;V|S@yGU#>&wyE-!ZjU^78x1!CvA>iN-c(to_b8ueex_jm$;O#*lw-Zbl{ zc9`X|6*ClF5nq+igz z8V?_S^C96xg$X%v>st-ZtN`Z-5fPCBYi>G)IY)RH2y$5!aPqdemct#1kfwyOdyI^X zi0{8uJM*#Zm^LSRi^WuCWd?h3ca2{|qZkK<9b@s+`EX?01Sb{`b5p-c;LUP<2KlHJ z`jG7)V>xmXJ2vvoU}g^FF{jL9TA7bZU&lJ{5>1(g)!9juS65yUz8OGOE|PfSgHzYk*p3!hT(2as;umXtgLuwDw-#qUEyvw^@1pGw%L_nH1s zse^2cU{Q+=5b!Yo8Lu2kvCtOAZvE>4z`a*iSE-qp-sKpyT7nqK8U!5OC5ziD8f!lJRKZb?xKUz= z88Fta_-WIs50CemspOvJi1qSdqR_O%_DJYu8#7yO3$jgXFll%KnHBJ;CN4}q&t6T) zVQpgySQ#Cc@{y&zD)<6P7%Bcq7*U(PXeZ@i12!>GRjdb4I>_@~VPz!dfuzUY-kuFuj)4lQO5JYIrXH;Ze+Ily27t|m zQ2`*>`^}qCpevg^+*`^h*eha>abA@IO72>O2?c)r{Pk-Bbk+;*FINj5ZjWzoZ{OUB z!T#8v)ZRcEb2M!$+-wC@5H?x_Q-j<-=`ZfZV&1RY679xWuAcVGaPiREk7wH*uIjHK zNm(rPixH5%FOT2*y4TrmLLgvt-;?k8!Wl+P?XHY)+^zm3`#sI#iO?t0(+P*}*z zZ8l)vyakM&c#G?yO#SxDwEnK|~fzAgA^fzx^B;pQjf}l zZ6!>OFD?2A|1p-E#NVS0h4WdPLkcA85-1>7a>MwU?NCEsbwAVQQywizdxN3v8OhzP zGo{63icv2gXH?rAo$8ima}b?pC9gBJUhFFUZinj1H||#L4p#qlCfYxg%4T!NVV4~m zw!jmh6{Bry&WOZe`p19X6oW`zEyts-b>+wKT0N_vZ~e(F`i=3QopZ~ZrEjOBx*t{A zxmmty4KW}$^+>9!G5FZ;KE+%bE%3x42(pmi8Y#Qjb6xDlHqa!fprjS23%U?q0~Cha zj=RPGx*;+kG4kUvC$7h}$hTo@JGKp}rt8?NlU*}3m-{nJI?VNRZXY`NjdM%;^Y-n^ zx`UIfafECArIigAKZ>vN?YE1taRW4i{HtCogg(9flzf^`f|fmVPP8d0b=&a)}wW-+qhlWVALf<&CfnC*G{OpO^TX2nDwy&+xl# zXB*FRO;9vj#Z0RytNfZ@<8VSKXyKBh6{kIa>GITH3z|Ul5OTNo@*S5fDEAC;LUTe+ z?%E^u%>fRM(iwu({#eX#tC3~>I<)$3WsQN>a++56;(!dFs!M3s_tYoyg)bj}Pi07B zvDrkhK!36+3tXY74Aj3iB1JTm6pV7Or}~Pp^nZ_4qC{yudPEGXeunzaexz+YjVgK7 zTKS}32BxEE`;EFgSV9qn9>pFEH<)RUGbG#;gRi|=kER|o>_v&X)^&3$7v>VkO@n;1>BLvrek zK!H-xD9|9`5hq~v-tLum+h$jB7iWsQ`vW45T+ONm#+v70V zQebL1s-0{dycs4ME6zd@VxYAVnQ3;l^SwtsF3qB6Lhm{9-j^W-6cViz*OGr&9@WAhTyH;YKA?Nkp;A357C{r;W5Q0S0+aXyr*ueYGS5F(no+W~7nH~Zj z$ms2!+N##&(9VJ+5=f|s&3^uzzcPjvus0vPimKh1Ye?S ztNq!QOMHEf{vMrtZF`=VQhNs<_ogVj>6Td~ul-zwdt5|L1gZho;9FjKpDf<7vC{sh zvO?V~19QdeqUr1zPBoRa)WcUQ)Pz)VT)D~#K*VB|m=tS%mo!_$T}ObzRLD!TVU6ty zo~%+~IR*dmd55)sVr7r_|8U*|Ly;NF?8h5=QWUFHF?iRvInK>rbZVIIWmOsJN?gfH zt^mH%*cDdJ6a!>Iu`fQ_x%UoyvMC{gT&tVD2v}@gVtwS6$Hg@`stkDXJMs_z5f^YP zj9-i47%2u=G5-^Wf%uEdGuX&+G}($pjS%a^+t1Qtu57Fp?!OLQq7aDLx?$zKFpPIT z_LcmWd$L^Pr3Jd<^R9ygc5f}=;8<@ugqK|gPO%AZ-Sad=XYtP1{A5vH0nU_Z>&WuyO%p!1?OYUYUdF-5b!~`yi z|76tbHunemwJo$z;|4D@JH_xKxNA^Sxoc$eWi5h5SUv2IPr6AeH8rgVr zkiMRAZ%Fwnc+|S!oZY(;SNVor$bM&2+Zv~|sbw--Cv(+5bCgvp10#pH<_P;drB~%* z+9QX#-}QzkoL|3A6YK2rwDDW-YyKYlRb zj3}5;Yqm)2`zybO*Z8cn$u(Iy<&p`;7RekncV)*?9xAVKe{~^CX+%L4fG8;TCCksW zCyuG%6%OT`&gpu2s=ao$R8Wor00Z6eHm4SlDE?o-n3E|GS;`fay3TNY9J}(_>5`zl5=CAl>`eP z%XBi|$(@^HqdtE!BGjMYgK2oJKSfC##?%35fjxX=m}qZRxo^YXDPoQwzhArmyzisXh!ZdB0QphiCeoHGi4J;wHvkY4X&uV&lU zwPJ5QT61c7+5yhqpzvtt z*^-=cpKs6c)H*fS^^0zDZbO5IX3;vl5S;^*qvPUlqA_H(bDtnz^o4VhYk!lff7>{6 zocGQ8NA3M-<~wIsw^Bq3p9|><5_om?Jd`LlOKZb>%qvp zSNy=g_laL3uumW)_w!9G8;S`*907iYh@Hr39X^ArH?O=8&*Fqc`C>`|qRoIj?R2os zjJ&$%1zL2dOhQ#*c;6WyT`_9gCd~Ys_Pj4bQ&)- z_Ekv6w_OpBH0-9Bb9St<_UHLGtkqZ0BY+uGypVncYH^Tzlh?_x*u+~Bj)J*+=$L%& zC*!w&FcHvCEQ`E9FLVqnmE0-a?!v!rGQW<$GQAP&2RHNw9FM^U^WiMgnGf_zt{KydGRK)Nwj9K|C?2T>vo;f2+0B?ODiTRQr8+%sw}srn_(h zV`NjFuxSTfb#gcANV8uNGjiy4)Q8b19o#>`?kkhG<k4S>98FvvSU*ugCSetT#6%5>>qdg>N+dG%Bdx zl+)g!8tKH4P|Z-5jK*{!Qh#h2d@=M=(31cu{abXZ{^mCMPT^%yAFUu`aZA{AYXD)} zMV`P-S>&S?p6HB=R2;sjNM}ZA$qt`Ga)SUC(7K#y&Zpag(YRBYZ{h|A8|#oMOg5dV zG;lMvR}MZIcGA3HQAh9M+^EcLu@hg)6p(0LbV>AY-4~EPh&9IRb=}7GcMfZf3F0Uf zlob@7adZuCBhKkPc{lRPngefPJA0wRW!h#>LOTzyy~7MMNaV>a(82sSy_7`&9rHNL z*jl%1PPTt)FU&&wZaeLi{B>0cjjnIqOBAcAH{;oEOu;P8en#BIEpSkLn(e415ce#> 
z%Wo?ODW)CJ3Rl+_c$Tl!e(bk~f0t9TpV< z)xZ2ZYBc0Fy(4p?zD#6Y+xa@=bi)0JlkV;Cq3#ve5Q)(>gR4tIY|ln!aCr7l`?#Il zFD@*c-JSQ0-Vbu-z~=bD|NcAJzbKch!e@caTEUxmlgAoYuk`76ZNL>WtHHnqi}!@29y$kmL0?ZXgT{1s}4;*Ev+F34HDNPqkQ;^{b>P zEKl393=t~nEsCXYBAQ+eoPIc1_dMRT-Y&pkNhvHRRmGe0`%1I+fQ|`2ZRAlGqk*Qq z7VnobijQlVi`8tAP5(+!im60^Bh0hfkgeXbToihm`+Q@0(%#r!-1>gjMHf2JS@JVf zU9()=3GFKN5!EJOA7~-UYL9qmplLmA2rG+heyty(vUI`!_SW6tVAc4NLp`n{LTM#o zT4J4pV){aKCaJ!oN~3%(^3lk1sVuH{?_hA0P0+b!i7>R18Y{e=8m1dADM zf9i~42_6s=jETDv7RJz<9Fn;yaG}&0PZ34MajKg;QD6;Y>!T@D2&%LY$ftoqWR0L747 zfK`C`t2Qp+-xIf{8g&lPvezn63@oRjym6RhohaUPVk<+JMdFOM7mOCjRYOBWosw+y?Aj|JeG(>ILR`$zDPZ0zKr+ zYe-RIVWDE^wb@!i7XfFq=XF*?`Bybpsl&US~o$os= z-ge^bv2&y%Ipx>pfc~Q1VvG2-UjFpV*L}|t5}jGbHq1x=X0kv`kDnBrsuB|JvL~1Z z;6&OOf8ewpa?`KKVXb6xhXM-S-Fr{fwt4pj4bfaB=X095@{H3_s#N zfBn%*JVAtHW0BK)mdvBTtKi=@&~FnbpqHup^%iX^tC*@>oiV2HUFNg6Z;)%w%0!D zPkCaeOMz@0D22_Wbo|a>Ae;N0v<}pG!|v(-n`QYOI`XV_9cKI|T=e}A<5w1qd$k{N zAzEihWwi2JBI@+&9Qu-QH<6I&yhpvRrS`@@Ye7v6VVecbe<4TqZM)1@Dm_T!Yxx!d zsu)Nb(va|3Y8_{BI>qwuY_cH11=&BYAP=a%aVE^2dl*kH5bb2QxbO-4?@y8<_y97P zxSKs@-)c@Dt=?i%qWR|yEKS#6aS-yHCq37fVEg-GLH^G!38j+W0J$}}+qX~Lx^?R$ z7S?%U;yNHi&+K1NvZ(;_N)9Mu14Hrh)|SDKfU8JX=cC7uxvVA^P4(wx!QYz8-ErJT za0>6PgYM7qQ{;#pmsn3}KZ1c&{jw~^#i2wi2~0zEy4W)f-oEiRiwa_!h=#h7Y(_ zb@d3u!9yO&;2B&)qaZoqomW8bd!Ce3+RTgv6AKqL0x|<29kb@|w(o%uLWFaY73kRg zZ|chhJx-Q9@GTd4|3r}h8j7Q6Yo|qcC{R#?vhr+22{~=*{`_Ubtdb_C2gwA1UerFJI(}`tjQ$FKqGv#q6v2=ur(YFf4CwPJyn#XLRgw z^%S&MfR$2x#T7W;p(4bC+MhQNNktEXvKUZJ$k^C$X}TS3%! zZR@Y;Biw=jfHV!rxZUBA@jHu)donIJH@EBGKk0i%*D1+<5KTkT1=pu7ZvAHg3$4}< zJWT03l+?|4*W?FYPks7bPWrdZ+*?WJQ*G^r$0a1S;}TML;qZS|iIkidY7Nu(yl%A>$Q@ddIX@`M2j)phkEC}pjK*_vtA`BX$`E|&wDi9jL#)E&1lUPj|gL|kr z8K`)m`U}jHL4>c09ctR2VYZ$=eY(81HV$N&^Zf-GRqS)<4D{+sW7d}z73k+L?_ zC>~~r(9?TC^YQIFAP8nVe{l(^i~+MH#&-Ykp<;#`w55MlFS%rHZVr1p;{fkpTObv# zxO=Oe3UU~1CfzqS7YFB_u`)7h1EUEFK|kc|eCu=s0i-vO*@c#OhqAq9_^-kBH6Sn% zRT5ogvAG6{$nOTt4B&5^Jbk*LQ}yrh3%mrY7BK;}gK0qMu=WO2Qf_8h5ul8g9ZkPXgCmS zI%nk#0g2t8=O&Gu67dz&R=~25h`0(OI3RlZ(~-^Mva+~PpkReMm0R>+t1qGn$N~IV z8ZH};5%>To?YQi1P6DC)6ch;S>gtkT554*4L%nlpgS9VvroKC8!~&q9X9I;R5lg^{ zbp{X5)bp)adNdRyahmU1AUg;OlHb)0S&)j}GtCGlbGWf2p8x$IZ^?IX&i=80)YQy~ zllb?^Jp-`0zfU_gTw8zt?7#owH{HvgqqEdv5i$j4gWi6AFRLxM=+|84}|bXtIX zQceApw*ZLzzE71oXVLzzV&;E*&x? 
zH@dGRK$RyfEn^SdD##W?0A42ObbAMfpJh&VP~UM_D%&dT}u61e#cOm|bPNJTx`Xz=}%ZJrfjuDT{Es|TmVl@N(X+w8zyRq4 zMEwR(UE848fiU2)N3zb_Es_jz6%HiDC!Wy_$Lu&<#+0EsamMQGp`Me>o@(tKjePIZW3!r0B_7gM)Fxd{L-42uQ4+Axi z1ST669;q%6s2J=a?}v5<+I41)67RxLzmWj^rNj>!C&*)`@GrAf_71_{y~M0?vJwDV z8>Ye3?JNffdJMVy0qz6vm<)Ne;o$qw=Ui6u3D;zSqZy=1!(P35<>-PiP69)*6UrWT z#O%6}Dg0F48vGPOlLS|i;{r$wOuaZwiMm2VQ{8Kp(`&yo`2nNOj(=mmx1*N_Kve+6 zI{mxi7o_*jn+yyBg^Ij9A;`M}IZC9!Vqv_78>rm@oC(GPK&0uH;88&<-95sYU%*v# zmbbfFMTPA9W6}tJ>hzJB8Q#$@G#cHT4;0c+&IOC1_3hQ!jdTP6wY<9e6l(Mrh=~hS z(x}^at+8^g4FU7xK2D@6B2s}>2X#pWH8oj45`hsgX@NE-zyr~Mf@w-X~tk!F_p=yYEM znWQnjwA2bTf%UDe^dcg$u&kl1iehSD^{fp~v$ z@EQ(YuuI@lPyt!3(LuZ6QzO9KVU=%UdO@kM)44RPTf*op>^M5O5P*UA?c2AAS{zV} zB*!?uRW&R6&X=BLq5}w`Zt+0d`&3TSUWx$CX&d2F%sYXJi2(vjrXdjtI%MomPlB=q z9dZG~$g8~w)?hPmKzS5|4{sS8r<4obQbTqlNHyO1IqUngk0C9QeQiZL7YyjN})W7o>k638*Bfp{rPr>;2e zb|8-M?k^e?5Z9@zy|LC+;Ymzui(o_>1H__qwSmccrv0^#4=(Uqy4Gi@kR>K^=?QDR zLAEhq5a2ytdU(uqr;(umT#^X*LqIh0Q7PJI{l%pSf?-Hg&-Tux1|M~_E9M3Fe|c-} zO$PpqIFUWFvo!H-~ihSe=BMMw1HBs zqPy=Inca&eRa=Im2N=oV@D9SW7$zEq- zLL}s(sNOHxtzVdy$~{LTBzM|Q*SF~Z2t1aO5P+X_kCZ$q%H_K z7!W)Mc*B4299<1dVq$8y{?j#eIr&dL4m{Mqe(V0x=Av*?!|u`e_8kB707YuH)bU5l zbPbZ#57s=@e&tG%Mp#9RfAhlfj@qM1W{Z+ZiEX5C#T+J&+_dt94WBoWx6eH_hn>C7F__ahkly8362u{@w{!x zA)qV8oxoWM(!NY8{jRPeWp2DKnZMv zsby&6B!^pp$l>-K5)zWxp|X+_Cr-Q%3VNzh05TcZ=;`Oc*9sUkQ!+IC3=C(95Zccs z$GRHl>BFoHxSWZD>1?BO9!=N2Y>ZBiW>GIr0CgxUkP0vhN`NMT()vE%Wydl{0Dp0T z&M`8WN_PxtDXBp8++#(>(A~|2R4Ui)s?u`6D@otG_qa7i=$aH9W#sqZ7{oZvewiDZ z19`az@CGnj$!pWCJbi2SfBVfv&+daiX&Qo6vAZnXh6B85pEhoGo z-IEG;gp^W=PHIXDm|Qv$5fQ0}V1CWF_kk3Wk)B=xkq;51^rv*1XP0UR4a)V|)C^;g zRD_-X zFjm;b`d8akK$Oma-_2QpRztafte>8)u4E8!kaGd$n$^-=MnyMd2W~oTK7;e9T;{Z8 z_D}{^4`f@@*4Nh+=h8DW(7=oM=^~RN=rKNo{reD%Uc3n;K|p?m1y;ia`Wdj#o@IBd zgV@a&GG*|3Cg;;-+9C4*J2(N%bSmTsd^ZujXQ_7RrXg!S2u%Pp@&y#dx^|IC+q&wG zg+hr&B5=8;R#@0#NIHZB2gkd+qof`}n1`UMt!QgSHXscm6Ex&F$IR&*N(+++sS_tl zumoSfb^;iW%cwm;)jlW{l9cabV>2L~!|6VhZyH&ukpU!>@o?9`bcCa01?=vN9PQ_j z<4eTv3hyu9fdriqm^a^V22v4h+AqQRdLJ2?3hb{u{bz-ox0lJVV@*ss_#g+rL<9`-W7- z(P0VrDUq*QgF0b%D$boop(+R85R$`=F;w8<;_550%Xq4z)4aR`C{!?2Az%^BXPc18 z`t<2GcjP_TpJv%q=gw()wlv!;4*As7h)EsI8JlebR#+bR68W9CGa<1+GvhY8o;S_| zuW(h0*;Z37nt!9%24vHcx>Q=D1(HGk0Y-8Uga+U|o1PbzX@}tIniL4sJ-Z;h*Yk|q zO0XU_QpCK3ghT-^_hdbNZ0F9}G;-1xhAT8RG77qie=i)(@xonaVF7`*Xp#|WrRpbA zJQSj^`&5D;eMWx@3ya&y*StGtsZSM9bsy8!OJ*UNI~7jpF9C3R1%FDRrs*}CTLm|d zdbb9_pB;WQi1uqUY=Nzro}JAB=aUIe2ee$7Y!<(Li!Id-6po#S`gYZB&^i^=Ip26D zLkZck#db^STqfNN{QN4=ssRVS9dZWia5v@c;==S60_PQS{e>d0s-~tEyaBf$YCD92Q}Mn$M9O*4(bH^73t_V1=6Ptla^mT zf0q2fs>zLZ<`RPNX4PI0x!k~jfJ1WO(xvWSK5!;eOH0+@Z0HxPEu;%DGD<( zw@HS%9vSxdy|XhPWoVR5SQf$ma-a2bbai)MXJo8b1Lbh^#D*i6_P3K(&6Ac~WC8gJ zphY=Tk-LRSNJt$xkPR)Hfh3tp3NaX|sU?Yf zgWk}$$owQ*3{Dom;R1H)v@^h7 z+6KJQ%^N@*YY3S%WIpZf3#0B&GY=+Tg++s?=dqGfYFwNs+|7juVBBgLjkkakN7hod zS^wilKt)9bWGIrsWfFISyh1~Ba|m3;;^MW;XaU_H=U{}dJw30}(A)t>(=3;Yf+7vj z)f6JGY5*2S!&My-Wii!6!mOGv_jQPy`lN@~`DC4LUm_fM<5tbd$po5XWYwF)IVE&zeSU&(jyOli ztAZ09(yZSB$)~Plx!C@P<`u37kJQw@g2`f5&J=~WexR?PRCELh1LMxm^4A#{+Q10G zmnTA^udB23b4rRda3jmRxRil|ebw!VY;UOstmaMgA;IQY5lzrxkrxwt1!|fNjg1=+ z2T2DZYTCUEoBgXkF!1fIILzPcs1JcXcv-VusUH$j*J2^L*jeG`29q?{-s`Dt2&Sc> zrKKC*9Q>9fJTmUj<>eX>#dkt$7?)P1d;iL1-DFehyKWAn>fHWzOa|Og!a;b`GK^-%XOik0Uad0*u zl`{hry%D??pBfq(Zo-`R@BSXG9b2hlDddKZpXFGI)Bh{K{`ZgnZ-apU|A&uf_y0xw e^#9$|?Njl89*AKrllY8$rqtd0V(GWFUj84bU2aYQ literal 0 HcmV?d00001 diff --git a/gensim/models/base_any2vec.py b/gensim/models/base_any2vec.py index 8f7123e1d7..bd74a5a6bd 100644 --- a/gensim/models/base_any2vec.py +++ b/gensim/models/base_any2vec.py @@ -37,13 +37,15 @@ from timeit import 
default_timer import threading from six.moves import xrange -from six import itervalues +from six import itervalues, string_types from gensim import matutils from numpy import float32 as REAL, ones, random, dtype, zeros from types import GeneratorType from gensim.utils import deprecated import warnings -import itertools +import os +import copy + try: from queue import Queue @@ -123,6 +125,10 @@ def _clear_post_train(self): """Resets certain properties of the model post training. eg. `keyedvectors.vectors_norm`.""" raise NotImplementedError() + def _do_train_epoch(self, corpus_file, thread_id, offset, cython_vocab, thread_private_mem, cur_epoch, + total_examples=None, total_words=None, **kwargs): + raise NotImplementedError() + def _do_train_job(self, data_iterable, job_parameters, thread_private_mem): """Train a single batch. Return 2-tuple `(effective word count, total word count)`.""" raise NotImplementedError() @@ -131,10 +137,45 @@ def _check_training_sanity(self, epochs=None, total_examples=None, total_words=N """Check that the training parameters provided make sense. e.g. raise error if `epochs` not provided.""" raise NotImplementedError() - def _check_input_data_sanity(self, data_iterable=None, data_iterables=None): - """Check that only one argument is not None.""" - if not ((data_iterable is not None) ^ (data_iterables is not None)): - raise ValueError("You must provide only one of singlestream or multistream arguments.") + def _check_input_data_sanity(self, data_iterable=None, corpus_file=None): + """Check that only one argument is None.""" + if not (data_iterable is None) ^ (corpus_file is None): + raise ValueError("You must provide only one of singlestream or corpus_file arguments.") + + def _worker_loop_corpusfile(self, corpus_file, thread_id, offset, cython_vocab, progress_queue, cur_epoch=0, + total_examples=None, total_words=None, **kwargs): + """Train the model on a `corpus_file` in LineSentence format. + + This function will be called in parallel by multiple workers (threads or processes) to make + optimal use of multicore machines. + + Parameters + ---------- + corpus_file : str + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + thread_id : int + Thread index starting from 0 to `number of workers - 1`. + offset : int + Offset (in bytes) in the `corpus_file` for particular worker. + cython_vocab : :class:`~gensim.models.word2vec_inner.CythonVocab` + Copy of the vocabulary in order to access it without GIL. + progress_queue : Queue of (int, int, int) + A queue of progress reports. Each report is represented as a tuple of these 3 elements: + * Size of data chunk processed, for example number of sentences in the corpus chunk. + * Effective word count used in training (after ignoring unknown words and trimming the sentence length). + * Total word count used in training. + **kwargs : object + Additional key word parameters for the specific model inheriting from this class. + + """ + thread_private_mem = self._get_thread_working_mem() + + examples, tally, raw_tally = self._do_train_epoch( + corpus_file, thread_id, offset, cython_vocab, thread_private_mem, cur_epoch, + total_examples=total_examples, total_words=total_words, **kwargs) + + progress_queue.put((examples, tally, raw_tally)) + progress_queue.put(None) def _worker_loop(self, job_queue, progress_queue): """Train the model, lifting batches of data from the queue. 
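
As a quick aside on the reworked sanity check above: exactly one data source must be supplied. A standalone sketch of that XOR test, outside the patch itself (the 'corpus.txt' path is hypothetical; inside the model, a False result raises ValueError):

>>> def exactly_one_source(data_iterable=None, corpus_file=None):
...     # same test the patch applies before raising ValueError
...     return (data_iterable is None) ^ (corpus_file is None)
>>> exactly_one_source(data_iterable=[['first', 'sentence']])  # sentences only: accepted
True
>>> exactly_one_source(corpus_file='corpus.txt')  # corpus_file only: accepted
True
>>> exactly_one_source()  # neither supplied: rejected
False
>>> exactly_one_source([['first', 'sentence']], 'corpus.txt')  # both supplied: rejected
False
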
@@ -252,14 +293,14 @@ def _log_progress(self, job_queue, progress_queue, cur_epoch, example_count, tot raise NotImplementedError() def _log_epoch_end(self, cur_epoch, example_count, total_examples, raw_word_count, total_words, - trained_word_count, elapsed): + trained_word_count, elapsed, is_corpus_file_mode): raise NotImplementedError() def _log_train_end(self, raw_word_count, trained_word_count, total_elapsed, job_tally): raise NotImplementedError() - def _log_epoch_progress(self, progress_queue, job_queue, cur_epoch=0, total_examples=None, total_words=None, - report_delay=1.0): + def _log_epoch_progress(self, progress_queue=None, job_queue=None, cur_epoch=0, total_examples=None, + total_words=None, report_delay=1.0, is_corpus_file_mode=None): """Get the progress report for a single training epoch. Parameters @@ -284,6 +325,8 @@ def _log_epoch_progress(self, progress_queue, job_queue, cur_epoch=0, total_exam words in a corpus. Used to log progress. report_delay : float, optional Number of seconds between two consecutive progress report messages in the logger. + is_corpus_file_mode : bool, optional + Whether training is file-based (corpus_file argument) or not. Returns ------- @@ -324,20 +367,81 @@ def _log_epoch_progress(self, progress_queue, job_queue, cur_epoch=0, total_exam elapsed = default_timer() - start self._log_epoch_end( cur_epoch, example_count, total_examples, raw_word_count, total_words, - trained_word_count, elapsed) + trained_word_count, elapsed, is_corpus_file_mode) self.total_train_time += elapsed return trained_word_count, raw_word_count, job_tally - def _train_epoch(self, data_iterable=None, data_iterables=None, cur_epoch=0, total_examples=None, - total_words=None, queue_factor=2, report_delay=1.0): + def _train_epoch_corpusfile(self, corpus_file, cur_epoch=0, total_examples=None, total_words=None, **kwargs): + """Train the model for a single epoch. + + Parameters + ---------- + corpus_file : str + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + cur_epoch : int, optional + The current training epoch, needed to compute the training parameters for each job. + For example in many implementations the learning rate would be dropping with the number of epochs. + total_examples : int, optional + Count of objects in the `data_iterator`. In the usual case this would correspond to the number of sentences + in a corpus, used to log progress. + total_words : int + Count of total objects in `data_iterator`. In the usual case this would correspond to the number of raw + words in a corpus, used to log progress. Must be provided in order to seek in `corpus_file`. + **kwargs : object + Additional key word parameters for the specific model inheriting from this class. + + Returns + ------- + (int, int, int) + The training report for this epoch consisting of three elements: + * Size of data chunk processed, for example number of sentences in the corpus chunk. + * Effective word count used in training (after ignoring unknown words and trimming the sentence length). + * Total word count used in training. 
+ + """ + if not total_words: + raise ValueError("total_words must be provided alongside corpus_file argument.") + + from gensim.models.word2vec_corpusfile import CythonVocab + from gensim.models.fasttext import FastText + cython_vocab = CythonVocab(self.wv, hs=self.hs, fasttext=isinstance(self, FastText)) + + progress_queue = Queue() + + corpus_file_size = os.path.getsize(corpus_file) + + thread_kwargs = copy.copy(kwargs) + thread_kwargs['cur_epoch'] = cur_epoch + thread_kwargs['total_examples'] = total_examples + thread_kwargs['total_words'] = total_words + workers = [ + threading.Thread( + target=self._worker_loop_corpusfile, + args=( + corpus_file, thread_id, corpus_file_size / self.workers * thread_id, cython_vocab, progress_queue + ), + kwargs=thread_kwargs + ) for thread_id in range(self.workers) + ] + + for thread in workers: + thread.daemon = True + thread.start() + + trained_word_count, raw_word_count, job_tally = self._log_epoch_progress( + progress_queue=progress_queue, job_queue=None, cur_epoch=cur_epoch, + total_examples=total_examples, total_words=total_words, is_corpus_file_mode=True) + + return trained_word_count, raw_word_count, job_tally + + def _train_epoch(self, data_iterable, cur_epoch=0, total_examples=None, total_words=None, + queue_factor=2, report_delay=1.0): """Train the model for a single epoch. Parameters ---------- data_iterable : iterable of list of object The input corpus. This will be split in chunks and these chunks will be pushed to the queue. - data_iterables : iterable of iterables of list of object - The iterable of input streams like `data_iterable`. Use this parameter in multistream mode. cur_epoch : int, optional The current training epoch, needed to compute the training parameters for each job. For example in many implementations the learning rate would be dropping with the number of epochs. @@ -361,7 +465,6 @@ def _train_epoch(self, data_iterable=None, data_iterables=None, cur_epoch=0, tot * Total word count used in training. """ - self._check_input_data_sanity(data_iterable, data_iterables) job_queue = Queue(maxsize=queue_factor * self.workers) progress_queue = Queue(maxsize=(queue_factor + 1) * self.workers) @@ -372,9 +475,6 @@ def _train_epoch(self, data_iterable=None, data_iterables=None, cur_epoch=0, tot for _ in xrange(self.workers) ] - # Chain all input streams into one, because multistream training is not supported yet. - if data_iterables is not None: - data_iterable = itertools.chain(*data_iterables) workers.append(threading.Thread( target=self._job_producer, args=(data_iterable, job_queue), @@ -386,11 +486,11 @@ def _train_epoch(self, data_iterable=None, data_iterables=None, cur_epoch=0, tot trained_word_count, raw_word_count, job_tally = self._log_epoch_progress( progress_queue, job_queue, cur_epoch=cur_epoch, total_examples=total_examples, total_words=total_words, - report_delay=report_delay) + report_delay=report_delay, is_corpus_file_mode=False) return trained_word_count, raw_word_count, job_tally - def train(self, data_iterable=None, data_iterables=None, epochs=None, total_examples=None, + def train(self, data_iterable=None, corpus_file=None, epochs=None, total_examples=None, total_words=None, queue_factor=2, report_delay=1.0, callbacks=(), **kwargs): """Train the model for multiple epochs using multiple workers. @@ -398,6 +498,9 @@ def train(self, data_iterable=None, data_iterables=None, epochs=None, total_exam ---------- data_iterable : iterable of list of object The input corpus. 
This will be split in chunks and these chunks will be pushed to the queue. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + If you use this argument instead of `data_iterable`, you must provide `total_words` argument as well. epochs : int, optional Number of epochs (training iterations over the whole input) of training. total_examples : int, optional @@ -444,10 +547,14 @@ def train(self, data_iterable=None, data_iterables=None, epochs=None, total_exam for callback in self.callbacks: callback.on_epoch_begin(self) - trained_word_count_epoch, raw_word_count_epoch, job_tally_epoch = self._train_epoch( - data_iterable=data_iterable, data_iterables=data_iterables, cur_epoch=cur_epoch, - total_examples=total_examples, total_words=total_words, queue_factor=queue_factor, - report_delay=report_delay) + if data_iterable is not None: + trained_word_count_epoch, raw_word_count_epoch, job_tally_epoch = self._train_epoch( + data_iterable, cur_epoch=cur_epoch, total_examples=total_examples, + total_words=total_words, queue_factor=queue_factor, report_delay=report_delay) + else: + trained_word_count_epoch, raw_word_count_epoch, job_tally_epoch = self._train_epoch_corpusfile( + corpus_file, cur_epoch=cur_epoch, total_examples=total_examples, total_words=total_words, **kwargs) + trained_word_count += trained_word_count_epoch raw_word_count += raw_word_count_epoch job_tally += job_tally_epoch @@ -538,7 +645,7 @@ def _do_train_job(self, data_iterable, job_parameters, thread_private_mem): def _set_train_params(self, **kwargs): raise NotImplementedError() - def __init__(self, sentences=None, input_streams=None, workers=3, vector_size=100, epochs=5, callbacks=(), + def __init__(self, sentences=None, corpus_file=None, workers=3, vector_size=100, epochs=5, callbacks=(), batch_words=10000, trim_rule=None, sg=0, alpha=0.025, window=5, seed=1, hs=0, negative=5, ns_exponent=0.75, cbow_mean=1, min_alpha=0.0001, compute_loss=False, fast_version=0, **kwargs): """ @@ -550,6 +657,10 @@ def __init__(self, sentences=None, input_streams=None, workers=3, vector_size=10 consider an iterable that streams the sentences directly from disk/network. See :class:`~gensim.models.word2vec.BrownCorpus`, :class:`~gensim.models.word2vec.Text8Corpus` or :class:`~gensim.models.word2vec.LineSentence` for such examples. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (or none of them). workers : int, optional Number of working threads, used for multiprocessing. vector_size : int, optional @@ -622,6 +733,7 @@ def __init__(self, sentences=None, input_streams=None, workers=3, vector_size=10 self.running_training_loss = 0 self.min_alpha_yet_reached = float(alpha) self.corpus_count = 0 + self.corpus_total_words = 0 super(BaseWordEmbeddingsModel, self).__init__( workers=workers, vector_size=vector_size, epochs=epochs, callbacks=callbacks, batch_words=batch_words) @@ -637,20 +749,18 @@ def __init__(self, sentences=None, input_streams=None, workers=3, vector_size=10 self.neg_labels = zeros(self.negative + 1) self.neg_labels[0] = 1. 
- if sentences is not None or input_streams is not None: - self._check_input_data_sanity(data_iterable=sentences, data_iterables=input_streams) - if input_streams is not None: - if not isinstance(input_streams, (tuple, list)): - raise TypeError("You must pass tuple or list as the input_streams argument.") - if any(isinstance(stream, GeneratorType) for stream in input_streams): - raise TypeError("You can't pass a generator as any of input streams. Try an iterator.") + if sentences is not None or corpus_file is not None: + self._check_input_data_sanity(data_iterable=sentences, corpus_file=corpus_file) + if corpus_file is not None and not isinstance(corpus_file, string_types): + raise TypeError("You must pass string as the corpus_file argument.") elif isinstance(sentences, GeneratorType): raise TypeError("You can't pass a generator as the sentences argument. Try an iterator.") - self.build_vocab(sentences=sentences, input_streams=input_streams, trim_rule=trim_rule) + self.build_vocab(sentences=sentences, corpus_file=corpus_file, trim_rule=trim_rule) self.train( - sentences=sentences, input_streams=input_streams, total_examples=self.corpus_count, epochs=self.epochs, - start_alpha=self.alpha, end_alpha=self.min_alpha, compute_loss=compute_loss) + sentences=sentences, corpus_file=corpus_file, total_examples=self.corpus_count, + total_words=self.corpus_total_words, epochs=self.epochs, start_alpha=self.alpha, + end_alpha=self.min_alpha, compute_loss=compute_loss) else: if trim_rule is not None: logger.warning( @@ -783,7 +893,7 @@ def __str__(self): self.__class__.__name__, len(self.wv.index2word), self.vector_size, self.alpha ) - def build_vocab(self, sentences=None, input_streams=None, workers=None, update=False, progress_per=10000, + def build_vocab(self, sentences=None, corpus_file=None, update=False, progress_per=10000, keep_raw_vocab=False, trim_rule=None, **kwargs): """Build vocabulary from a sequence of sentences (can be a once-only generator stream). @@ -794,12 +904,10 @@ def build_vocab(self, sentences=None, input_streams=None, workers=None, update=F consider an iterable that streams the sentences directly from disk/network. See :class:`~gensim.models.word2vec.BrownCorpus`, :class:`~gensim.models.word2vec.Text8Corpus` or :class:`~gensim.models.word2vec.LineSentence` module for such examples. - input_streams : list or tuple of iterable of iterables - The tuple or list of `sentences`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. - workers : int - Used if `input_streams` is passed. Determines how many processes to use for vocab building. - Actual number of workers is determined by `min(len(input_streams), workers)`. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (not both of them). update : bool If true, the new words in `sentences` will be added to model's vocab. 
progress_per : int, optional @@ -824,11 +932,10 @@ def build_vocab(self, sentences=None, input_streams=None, workers=None, update=F Key word arguments propagated to `self.vocabulary.prepare_vocab` """ - workers = workers or self.workers total_words, corpus_count = self.vocabulary.scan_vocab( - sentences=sentences, input_streams=input_streams, progress_per=progress_per, trim_rule=trim_rule, - workers=workers) + sentences=sentences, corpus_file=corpus_file, progress_per=progress_per, trim_rule=trim_rule) self.corpus_count = corpus_count + self.corpus_total_words = total_words report_values = self.vocabulary.prepare_vocab( self.hs, self.negative, self.wv, update=update, keep_raw_vocab=keep_raw_vocab, trim_rule=trim_rule, **kwargs) @@ -916,9 +1023,9 @@ def estimate_memory(self, vocab_size=None, report=None): ) return report - def train(self, sentences=None, input_streams=None, total_examples=None, total_words=None, + def train(self, sentences=None, corpus_file=None, total_examples=None, total_words=None, epochs=None, start_alpha=None, end_alpha=None, word_count=0, - queue_factor=2, report_delay=1.0, compute_loss=False, callbacks=()): + queue_factor=2, report_delay=1.0, compute_loss=False, callbacks=(), **kwargs): """Train the model. If the hyper-parameters are passed, they override the ones set in the constructor. Parameters @@ -928,6 +1035,10 @@ def train(self, sentences=None, input_streams=None, total_examples=None, total_w consider an iterable that streams the sentences directly from disk/network. See :class:`~gensim.models.word2vec.BrownCorpus`, :class:`~gensim.models.word2vec.Text8Corpus` or :class:`~gensim.models.word2vec.LineSentence` module for such examples. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (not both of them). total_examples : int, optional Count of sentences. total_words : int, optional @@ -949,6 +1060,8 @@ def train(self, sentences=None, input_streams=None, total_examples=None, total_w :attr:`~gensim.models.base_any2vec.BaseWordEmbeddingsModel.running_training_loss`. callbacks : list of :class:`~gensim.models.callbacks.CallbackAny2Vec`, optional List of callbacks that need to be executed/run at specific stages during training. + **kwargs : object + Additional key word parameters for the specific model inheriting from this class. Returns ------- @@ -962,9 +1075,10 @@ def train(self, sentences=None, input_streams=None, total_examples=None, total_w self.compute_loss = compute_loss self.running_training_loss = 0.0 return super(BaseWordEmbeddingsModel, self).train( - data_iterable=sentences, data_iterables=input_streams, total_examples=total_examples, + data_iterable=sentences, corpus_file=corpus_file, total_examples=total_examples, total_words=total_words, epochs=epochs, start_alpha=start_alpha, end_alpha=end_alpha, word_count=word_count, - queue_factor=queue_factor, report_delay=report_delay, compute_loss=compute_loss, callbacks=callbacks) + queue_factor=queue_factor, report_delay=report_delay, compute_loss=compute_loss, callbacks=callbacks, + **kwargs) def _get_job_params(self, cur_epoch): """Get the learning rate used in the current epoch. 
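
To make the file-based training path above concrete, end-to-end usage might look roughly like this. This is a minimal sketch, not part of the patch: it assumes the matching word2vec.py changes elsewhere in this series, the compiled *_corpusfile Cython extensions, and a hypothetical corpus.txt in LineSentence format (one whitespace-separated sentence per line):

>>> from gensim.models import Word2Vec
>>>
>>> model = Word2Vec(workers=4)
>>> model.build_vocab(corpus_file='corpus.txt')  # also sets corpus_count and corpus_total_words
>>> training_report = model.train(
...     corpus_file='corpus.txt',
...     total_examples=model.corpus_count,
...     total_words=model.corpus_total_words,  # required: workers seek into the file by byte offset
...     epochs=model.epochs,
... )

Note that `total_words` must be passed explicitly in corpus_file mode, because the workers partition the file by byte offset instead of counting examples from an iterator.
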
@@ -1136,6 +1250,8 @@ def load(cls, *args, **kwargs): model.vocabulary.make_cum_table(model.wv) # rebuild cum_table from vocabulary if not hasattr(model, 'corpus_count'): model.corpus_count = None + if not hasattr(model, 'corpus_total_words'): + model.corpus_total_words = None if not hasattr(model.trainables, 'vectors_lockf') and hasattr(model.wv, 'vectors'): model.trainables.vectors_lockf = ones(len(model.wv.vectors), dtype=REAL) if not hasattr(model, 'random'): @@ -1175,24 +1291,29 @@ def _log_progress(self, job_queue, progress_queue, cur_epoch, example_count, tot elapsed : int Elapsed time since the beginning of training in seconds. + Notes + ----- + If you train the model via `corpus_file` argument, there is no job_queue, so reported job_queue size will + always be equal to -1. + """ if total_examples: # examples-based progress % logger.info( "EPOCH %i - PROGRESS: at %.2f%% examples, %.0f words/s, in_qsize %i, out_qsize %i", cur_epoch + 1, 100.0 * example_count / total_examples, trained_word_count / elapsed, - utils.qsize(job_queue), utils.qsize(progress_queue) + -1 if job_queue is None else utils.qsize(job_queue), utils.qsize(progress_queue) ) else: # words-based progress % logger.info( "EPOCH %i - PROGRESS: at %.2f%% words, %.0f words/s, in_qsize %i, out_qsize %i", cur_epoch + 1, 100.0 * raw_word_count / total_words, trained_word_count / elapsed, - utils.qsize(job_queue), utils.qsize(progress_queue) + -1 if job_queue is None else utils.qsize(job_queue), utils.qsize(progress_queue) ) def _log_epoch_end(self, cur_epoch, example_count, total_examples, raw_word_count, total_words, - trained_word_count, elapsed): + trained_word_count, elapsed, is_corpus_file_mode): """Callback used to log the end of a training epoch. Parameters @@ -1212,6 +1333,8 @@ def _log_epoch_end(self, cur_epoch, example_count, total_examples, raw_word_coun the sentence length). elapsed : int Elapsed time since the beginning of training in seconds. + is_corpus_file_mode : bool + Whether training is file-based (corpus_file argument) or not. 
Warnings -------- @@ -1223,6 +1346,10 @@ def _log_epoch_end(self, cur_epoch, example_count, total_examples, raw_word_coun cur_epoch + 1, raw_word_count, trained_word_count, elapsed, trained_word_count / elapsed ) + # don't warn if training in file-based mode, because it's expected behavior + if is_corpus_file_mode: + return + # check that the input corpus hasn't changed during iteration if total_examples and total_examples != example_count: logger.warning( diff --git a/gensim/models/deprecated/doc2vec.py b/gensim/models/deprecated/doc2vec.py index 33d442904a..96ba7cd1fe 100644 --- a/gensim/models/deprecated/doc2vec.py +++ b/gensim/models/deprecated/doc2vec.py @@ -153,6 +153,7 @@ def load_old_doc2vec(*args, **kwargs): new_model.train_count = old_model.__dict__.get('train_count', None) new_model.corpus_count = old_model.__dict__.get('corpus_count', None) + new_model.corpus_total_words = old_model.__dict__.get('corpus_total_words', None) new_model.running_training_loss = old_model.__dict__.get('running_training_loss', 0) new_model.total_train_time = old_model.__dict__.get('total_train_time', None) new_model.min_alpha_yet_reached = old_model.__dict__.get('min_alpha_yet_reached', old_model.alpha) diff --git a/gensim/models/deprecated/fasttext.py b/gensim/models/deprecated/fasttext.py index 594c310b9a..1ba0d9b155 100644 --- a/gensim/models/deprecated/fasttext.py +++ b/gensim/models/deprecated/fasttext.py @@ -107,6 +107,7 @@ def load_old_fasttext(*args, **kwargs): new_model.train_count = old_model.train_count new_model.corpus_count = old_model.corpus_count + new_model.corpus_total_words = old_model.corpus_total_words new_model.running_training_loss = old_model.running_training_loss new_model.total_train_time = old_model.total_train_time new_model.min_alpha_yet_reached = old_model.min_alpha_yet_reached diff --git a/gensim/models/deprecated/word2vec.py b/gensim/models/deprecated/word2vec.py index 5ac913dbb9..8a9dcd960c 100644 --- a/gensim/models/deprecated/word2vec.py +++ b/gensim/models/deprecated/word2vec.py @@ -191,6 +191,7 @@ def load_old_word2vec(*args, **kwargs): new_model.train_count = old_model.__dict__.get('train_count', None) new_model.corpus_count = old_model.__dict__.get('corpus_count', None) + new_model.corpus_total_words = old_model.__dict__.get('corpus_total_words', None) new_model.running_training_loss = old_model.__dict__.get('running_training_loss', 0) new_model.total_train_time = old_model.__dict__.get('total_train_time', None) new_model.min_alpha_yet_reached = old_model.__dict__.get('min_alpha_yet_reached', old_model.alpha) @@ -1622,6 +1623,8 @@ def load(cls, *args, **kwargs): model.make_cum_table() # rebuild cum_table from vocabulary if not hasattr(model, 'corpus_count'): model.corpus_count = None + if not hasattr(model, 'corpus_total_words'): + model.corpus_total_words = None for v in model.wv.vocab.values(): if hasattr(v, 'sample_int'): break # already 0.12.0+ style int probabilities diff --git a/gensim/models/doc2vec.py b/gensim/models/doc2vec.py index 57693e0eed..135aa4ac5b 100644 --- a/gensim/models/doc2vec.py +++ b/gensim/models/doc2vec.py @@ -56,7 +56,6 @@ import logging import os import warnings -import multiprocessing try: from queue import Queue @@ -65,7 +64,6 @@ from collections import namedtuple, defaultdict from timeit import default_timer -from functools import reduce from numpy import zeros, float32 as REAL, empty, ones, \ memmap as np_memmap, vstack, integer, dtype, sum as np_sum, add as np_add, repeat as np_repeat, concatenate @@ -76,11 +74,11 @@ from 
gensim.models.word2vec import Word2VecKeyedVectors, Word2VecVocab, Word2VecTrainables, train_cbow_pair,\ train_sg_pair, train_batch_sg from six.moves import xrange -from six import string_types, integer_types, itervalues, iteritems +from six import string_types, integer_types, itervalues from gensim.models.base_any2vec import BaseWordEmbeddingsModel from gensim.models.keyedvectors import Doc2VecKeyedVectors from types import GeneratorType -from gensim.utils import deprecated +from gensim.utils import deprecated, smart_open logger = logging.getLogger(__name__) @@ -347,6 +345,36 @@ def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, return len(padded_document_indexes) - pre_pad_count - post_pad_count +try: + from gensim.models.doc2vec_corpusfile import ( + d2v_train_epoch_dbow, + d2v_train_epoch_dm_concat, + d2v_train_epoch_dm, + CORPUSFILE_VERSION + ) +except ImportError: + # corpusfile doc2vec is not supported + CORPUSFILE_VERSION = -1 + + def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + _expected_words, work, _neu1, docvecs_count, word_vectors=None, word_locks=None, + train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + doctag_vectors=None, doctag_locks=None): + raise NotImplementedError("Training with corpus_file argument is not supported.") + + def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, + _expected_examples, _expected_words, work, _neu1, docvecs_count, word_vectors=None, + word_locks=None, learn_doctags=True, learn_words=True, learn_hidden=True, + doctag_vectors=None, doctag_locks=None): + raise NotImplementedError("Training with corpus_file argument is not supported.") + + def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + _expected_words, work, _neu1, docvecs_count, word_vectors=None, word_locks=None, + learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, + doctag_locks=None): + raise NotImplementedError("Training with corpus_file argument is not supported.") + + class TaggedDocument(namedtuple('TaggedDocument', 'words tags')): """Represents a document along with a tag, input document format for :class:`~gensim.models.doc2vec.Doc2Vec`. @@ -437,7 +465,7 @@ class Doc2Vec(BaseWordEmbeddingsModel): includes not only the word vectors of each word in the context, but also the paragraph vector. """ - def __init__(self, documents=None, input_streams=None, dm_mean=None, dm=1, dbow_words=0, dm_concat=0, + def __init__(self, documents=None, corpus_file=None, dm_mean=None, dm=1, dbow_words=0, dm_concat=0, dm_tag_count=1, docvecs=None, docvecs_mapfile=None, comment=None, trim_rule=None, callbacks=(), **kwargs): """ @@ -448,9 +476,10 @@ def __init__(self, documents=None, input_streams=None, dm_mean=None, dm=1, dbow_ Input corpus, can be simply a list of elements, but for larger corpora,consider an iterable that streams the documents directly from disk/network. If you don't supply `documents`, the model is left uninitialized -- use if you plan to initialize it in some other way. - input_streams : list or tuple of iterable of iterables - The tuple or list of `documents`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. 
+ You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (or none of them). dm : {1,0}, optional Defines the training algorithm. If `dm=1`, 'distributed memory' (PV-DM) is used. Otherwise, `distributed bag of words` (PV-DBOW) is employed. @@ -572,23 +601,18 @@ def __init__(self, documents=None, input_streams=None, dm_mean=None, dm=1, dbow_ self.docvecs = docvecs or Doc2VecKeyedVectors(self.vector_size, docvecs_mapfile) self.comment = comment - if documents is not None or input_streams is not None: - self._check_input_data_sanity(data_iterable=documents, data_iterables=input_streams) - if input_streams is not None: - if not isinstance(input_streams, (tuple, list)): - raise TypeError("You must pass tuple or list as the input_streams argument.") - if any(isinstance(stream, GeneratorType) for stream in input_streams): - raise TypeError("You can't pass a generator as any of input streams. Try an iterator.") - if any(isinstance(stream, TaggedLineDocument) for stream in input_streams): - warnings.warn("Using TaggedLineDocument in multistream mode can lead to incorrect results " - "because of tags collision.") + + if documents is not None or corpus_file is not None: + self._check_input_data_sanity(data_iterable=documents, corpus_file=corpus_file) + if corpus_file is not None and not isinstance(corpus_file, string_types): + raise TypeError("You must pass string as the corpus_file argument.") elif isinstance(documents, GeneratorType): raise TypeError("You can't pass a generator as the documents argument. Try an iterator.") - self.build_vocab(documents=documents, input_streams=input_streams, - trim_rule=trim_rule, workers=self.workers) + self.build_vocab(documents=documents, corpus_file=corpus_file, trim_rule=trim_rule) self.train( - documents=documents, input_streams=input_streams, total_examples=self.corpus_count, epochs=self.epochs, - start_alpha=self.alpha, end_alpha=self.min_alpha, callbacks=callbacks) + documents=documents, corpus_file=corpus_file, total_examples=self.corpus_count, + total_words=self.corpus_total_words, epochs=self.epochs, start_alpha=self.alpha, + end_alpha=self.min_alpha, callbacks=callbacks) @property def dm(self): @@ -636,6 +660,33 @@ def reset_from(self, other_model): self.docvecs.offset2doctag = other_model.docvecs.offset2doctag self.trainables.reset_weights(self.hs, self.negative, self.wv, self.docvecs) + def _do_train_epoch(self, corpus_file, thread_id, offset, cython_vocab, thread_private_mem, cur_epoch, + total_examples=None, total_words=None, offsets=None, start_doctags=None, **kwargs): + work, neu1 = thread_private_mem + doctag_vectors = self.docvecs.vectors_docs + doctag_locks = self.trainables.vectors_docs_lockf + + offset = offsets[thread_id] + start_doctag = start_doctags[thread_id] + + if self.sg: + examples, tally, raw_tally = d2v_train_epoch_dbow( + self, corpus_file, offset, start_doctag, cython_vocab, cur_epoch, + total_examples, total_words, work, neu1, self.docvecs.count, + doctag_vectors=doctag_vectors, doctag_locks=doctag_locks, train_words=self.dbow_words) + elif self.dm_concat: + examples, tally, raw_tally = d2v_train_epoch_dm_concat( + self, corpus_file, offset, start_doctag, cython_vocab, cur_epoch, + total_examples, total_words, work, neu1, self.docvecs.count, + doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) + else: + examples, tally, raw_tally = d2v_train_epoch_dm( + self, corpus_file, offset, start_doctag, cython_vocab, cur_epoch, + 
total_examples, total_words, work, neu1, self.docvecs.count, + doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) + + return examples, tally, raw_tally + def _do_train_job(self, job, alpha, inits): """Train model using `job` data. @@ -677,7 +728,7 @@ def _do_train_job(self, job, alpha, inits): ) return tally, self._raw_word_count(job) - def train(self, documents=None, input_streams=None, total_examples=None, total_words=None, + def train(self, documents=None, corpus_file=None, total_examples=None, total_words=None, epochs=None, start_alpha=None, end_alpha=None, word_count=0, queue_factor=2, report_delay=1.0, callbacks=()): """Update the model's neural weights. @@ -695,13 +746,14 @@ def train(self, documents=None, input_streams=None, total_examples=None, total_w Parameters ---------- - documents : iterable of list of :class:`~gensim.models.doc2vec.TaggedDocument` + documents : iterable of list of :class:`~gensim.models.doc2vec.TaggedDocument`, optional Can be simply a list of elements, but for larger corpora,consider an iterable that streams the documents directly from disk/network. If you don't supply `documents`, the model is left uninitialized -- use if you plan to initialize it in some other way. - input_streams : list or tuple of iterable of iterables - The tuple or list of `documents`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (not both of them). total_examples : int, optional Count of sentences. total_words : int, optional @@ -730,10 +782,61 @@ def train(self, documents=None, input_streams=None, total_examples=None, total_w List of callbacks that need to be executed/run at specific stages during training. """ + kwargs = {} + if corpus_file is not None: + # Calculate offsets for each worker along with initial doctags (doctag ~ document/line number in a file) + offsets, start_doctags = self._get_offsets_and_start_doctags_for_corpusfile(corpus_file, self.workers) + kwargs['offsets'] = offsets + kwargs['start_doctags'] = start_doctags + super(Doc2Vec, self).train( - sentences=documents, input_streams=input_streams, total_examples=total_examples, total_words=total_words, + sentences=documents, corpus_file=corpus_file, total_examples=total_examples, total_words=total_words, epochs=epochs, start_alpha=start_alpha, end_alpha=end_alpha, word_count=word_count, - queue_factor=queue_factor, report_delay=report_delay, callbacks=callbacks) + queue_factor=queue_factor, report_delay=report_delay, callbacks=callbacks, **kwargs) + + @classmethod + def _get_offsets_and_start_doctags_for_corpusfile(cls, corpus_file, workers): + """Get offset and initial document tag in a corpus_file for each worker. + + Firstly, approximate offsets are calculated based on number of workers and corpus_file size. + Secondly, for each approximate offset we find the maximum offset which points to the beginning of line and + less than approximate offset. + + Parameters + ---------- + corpus_file : str + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + workers : int + Number of workers. + + Returns + ------- + list of int, list of int + Lists with offsets and document tags with length = number of workers. 
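        For illustration (hypothetical numbers): a 1,000-byte `corpus_file` of 10 lines, 100 bytes each, split across
        `workers=4` yields approximate offsets [0, 250, 500, 750]; snapping each of these to the start of the line that
        contains it gives offsets = [0, 200, 500, 700] and start_doctags = [0, 2, 5, 7].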
+ """ + corpus_file_size = os.path.getsize(corpus_file) + approx_offsets = [int(corpus_file_size // workers * i) for i in range(workers)] + offsets = [] + start_doctags = [] + + with smart_open(corpus_file, mode='rb') as fin: + curr_offset_idx = 0 + prev_filepos = 0 + + for line_no, line in enumerate(fin): + if curr_offset_idx == len(approx_offsets): + break + + curr_filepos = prev_filepos + len(line) + while curr_offset_idx != len(approx_offsets) and approx_offsets[curr_offset_idx] < curr_filepos: + offsets.append(prev_filepos) + start_doctags.append(line_no) + + curr_offset_idx += 1 + + prev_filepos = curr_filepos + + return offsets, start_doctags def _raw_word_count(self, job): """Get the number of words in a given job. @@ -1016,19 +1119,20 @@ def estimate_memory(self, vocab_size=None, report=None): report['doctag_syn0'] = self.docvecs.count * self.vector_size * dtype(REAL).itemsize return super(Doc2Vec, self).estimate_memory(vocab_size, report=report) - def build_vocab(self, documents=None, input_streams=None, update=False, progress_per=10000, keep_raw_vocab=False, - trim_rule=None, workers=None, **kwargs): + def build_vocab(self, documents=None, corpus_file=None, update=False, progress_per=10000, keep_raw_vocab=False, + trim_rule=None, **kwargs): """Build vocabulary from a sequence of sentences (can be a once-only generator stream). Parameters ---------- - documents : iterable of list of :class:`~gensim.models.doc2vec.TaggedDocument` + documents : iterable of list of :class:`~gensim.models.doc2vec.TaggedDocument`, optional Can be simply a list of :class:`~gensim.models.doc2vec.TaggedDocument` elements, but for larger corpora, consider an iterable that streams the documents directly from disk/network. See :class:`~gensim.models.doc2vec.TaggedBrownCorpus` or :class:`~gensim.models.doc2vec.TaggedLineDocument` - input_streams : list or tuple of iterable of iterables - The tuple or list of `documents`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (not both of them). update : bool If true, the new words in `sentences` will be added to model's vocab. progress_per : int @@ -1049,20 +1153,16 @@ def build_vocab(self, documents=None, input_streams=None, update=False, progress * `count` (int) - the word's frequency count in the corpus * `min_count` (int) - the minimum count threshold. - workers : int - Used if `input_streams` is passed. Determines how many processes to use for vocab building. - Actual number of workers is determined by `min(len(input_streams), workers)`. - **kwargs Additional key word arguments passed to the internal vocabulary construction. 
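        Example (a minimal sketch; the file name and hyperparameters below are placeholders, not part of this patch):

        >>> from gensim.models.doc2vec import Doc2Vec
        >>> model = Doc2Vec(vector_size=100, min_count=2, epochs=10)  # uninitialized model, no corpus passed yet
        >>> model.build_vocab(corpus_file='my_corpus.txt')  # single pass over the corpus to collect the vocabulary
        >>> model.train(
        ...     corpus_file='my_corpus.txt', total_examples=model.corpus_count,
        ...     total_words=model.corpus_total_words, epochs=model.epochs)

        This is the same two-step sequence the constructor runs internally when `corpus_file` (or `documents`) is
        supplied, so ``Doc2Vec(corpus_file='my_corpus.txt', vector_size=100, min_count=2, epochs=10)`` is the
        equivalent one-liner.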
""" - workers = workers or self.workers total_words, corpus_count = self.vocabulary.scan_vocab( - documents=documents, input_streams=input_streams, docvecs=self.docvecs, - progress_per=progress_per, trim_rule=trim_rule, workers=workers + documents=documents, corpus_file=corpus_file, docvecs=self.docvecs, + progress_per=progress_per, trim_rule=trim_rule ) self.corpus_count = corpus_count + self.corpus_total_words = total_words report_values = self.vocabulary.prepare_vocab( self.hs, self.negative, self.wv, update=update, keep_raw_vocab=keep_raw_vocab, trim_rule=trim_rule, **kwargs) @@ -1138,40 +1238,6 @@ def _note_doctag(key, document_length, docvecs): docvecs.count = docvecs.max_rawint + 1 + len(docvecs.offset2doctag) -def _scan_vocab_worker(stream, progress_queue, max_vocab_size, trim_rule): - min_reduce = 1 - vocab = defaultdict(int) - doclen2tags = defaultdict(list) - checked_string_types = 0 - document_no = -1 - total_words = 0 - for document_no, document in enumerate(stream): - if not checked_string_types: - if isinstance(document.words, string_types): - log_msg = "Each 'words' should be a list of words (usually unicode strings). " \ - "First 'words' here is instead plain %s." % type(document.words) - progress_queue.put(log_msg) - - checked_string_types += 1 - - document_length = len(document.words) - - for tag in document.tags: - doclen2tags[document_length].append(tag) - - for word in document.words: - vocab[word] += 1 - total_words += len(document.words) - - if max_vocab_size and len(vocab) > max_vocab_size: - utils.prune_vocab(vocab, min_reduce, trim_rule=trim_rule) - min_reduce += 1 - - progress_queue.put((total_words, document_no + 1)) - progress_queue.put(None) - return vocab, doclen2tags - - class Doc2VecVocab(Word2VecVocab): """Vocabulary used by :class:`~gensim.models.doc2vec.Doc2Vec`. @@ -1209,51 +1275,7 @@ def __init__(self, max_vocab_size=None, min_count=5, sample=1e-3, sorted_vocab=T max_vocab_size=max_vocab_size, min_count=min_count, sample=sample, sorted_vocab=sorted_vocab, null_word=null_word, ns_exponent=ns_exponent) - def _scan_vocab_multistream(self, input_streams, docvecs, workers, trim_rule): - manager = multiprocessing.Manager() - progress_queue = manager.Queue() - - workers = min(workers, len(input_streams)) - logger.info("Scanning vocab in %i processes.", workers) - pool = multiprocessing.Pool(processes=workers) - - worker_max_vocab_size = self.max_vocab_size // workers if self.max_vocab_size else None - results = [ - pool.apply_async(_scan_vocab_worker, - (stream, progress_queue, worker_max_vocab_size, trim_rule) - ) for stream in input_streams - ] - pool.close() - - unfinished_tasks = len(results) - total_words = 0 - total_documents = 0 - while unfinished_tasks > 0: - report = progress_queue.get() - if report is None: - unfinished_tasks -= 1 - logger.info("scan vocab task finished, processed %i documents and %i words;" - " awaiting finish of %i more tasks", total_documents, total_words, unfinished_tasks) - elif isinstance(report, string_types): - logger.warning(report) - else: - num_words, num_documents = report - total_words += num_words - total_documents += num_documents - - results = [res.get() for res in results] # pairs (vocab, doclen2tags) - self.raw_vocab = reduce(utils.merge_counts, [r[0] for r in results]) - if self.max_vocab_size: - utils.trim_vocab_by_freq(self.raw_vocab, self.max_vocab_size, trim_rule=trim_rule) - - # Update `docvecs` with document tags information. 
- for (_, doclen2tags) in results: - for document_length, tags in iteritems(doclen2tags): - for tag in tags: - _note_doctag(tag, document_length, docvecs) - return total_words, total_documents - - def _scan_vocab_singlestream(self, documents, docvecs, progress_per, trim_rule): + def _scan_vocab(self, documents, docvecs, progress_per, trim_rule): document_no = -1 total_words = 0 min_reduce = 1 @@ -1295,14 +1317,17 @@ def _scan_vocab_singlestream(self, documents, docvecs, progress_per, trim_rule): self.raw_vocab = vocab return total_words, corpus_count - def scan_vocab(self, documents=None, input_streams=None, docvecs=None, progress_per=10000, workers=None, - trim_rule=None): + def scan_vocab(self, documents=None, corpus_file=None, docvecs=None, progress_per=10000, trim_rule=None): """Create the models Vocabulary: A mapping from unique words in the corpus to their frequency count. Parameters ---------- - documents : iterable of :class:`~gensim.models.doc2vec.TaggedDocument` + documents : iterable of :class:`~gensim.models.doc2vec.TaggedDocument`, optional The tagged documents used to create the vocabulary. Their tags can be either str tokens or ints (faster). + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (not both of them). docvecs : list of :class:`~gensim.models.keyedvectors.Doc2VecKeyedVectors` The vector representations of the documents in our corpus. Each of them has a size == `vector_size`. progress_per : int @@ -1328,10 +1353,10 @@ def scan_vocab(self, documents=None, input_streams=None, docvecs=None, progress_ """ logger.info("collecting all words and their counts") - if input_streams is None: - total_words, corpus_count = self._scan_vocab_singlestream(documents, docvecs, progress_per, trim_rule) - else: - total_words, corpus_count = self._scan_vocab_multistream(input_streams, docvecs, workers, trim_rule) + if corpus_file is not None: + documents = TaggedLineDocument(corpus_file) + + total_words, corpus_count = self._scan_vocab(documents, docvecs, progress_per, trim_rule) logger.info( "collected %i word types and %i unique tags from a corpus of %i examples and %i words", diff --git a/gensim/models/doc2vec_corpusfile.cpp b/gensim/models/doc2vec_corpusfile.cpp new file mode 100644 index 0000000000..ad7d0c9e07 --- /dev/null +++ b/gensim/models/doc2vec_corpusfile.cpp @@ -0,0 +1,11480 @@ +/* Generated by Cython 0.28.2 */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. 
+#else +#define CYTHON_ABI "0_28_2" +#define CYTHON_FUTURE_DIVISION 0 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define 
CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (0 && PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define 
CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef __cplusplus + #error "Cython files generated with the C++ option must be compiled with a C++ compiler." +#endif +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #else + #define CYTHON_INLINE inline + #endif +#endif +template +void __Pyx_call_destructor(T& x) { + x.~T(); +} +template +class __Pyx_FakeReference { + public: + __Pyx_FakeReference() : ptr(NULL) { } + __Pyx_FakeReference(const T& ref) : ptr(const_cast(&ref)) { } + T *operator->() { return ptr; } + T *operator&() { return ptr; } + operator T&() { return *ptr; } + template bool operator ==(U other) { return *ptr == other; } + template bool operator !=(U other) { return *ptr != other; } + private: + T *ptr; +}; + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) 
+#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; // PyThread_create_key reports success always +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif // TSS (Thread Specific Storage) API +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 
0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__gensim__models__doc2vec_corpusfile +#define __PYX_HAVE_API__gensim__models__doc2vec_corpusfile +/* Early includes */ +#include +#include +#include "numpy/arrayobject.h" +#include "numpy/ufuncobject.h" +#include "ios" +#include "new" +#include "stdexcept" +#include "typeinfo" +#include +#include +#include "voidptr.h" +#include +#include +#include "fast_line_sentence.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) 
((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ 
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + +/* Header.proto */ +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include + #else + #include + #endif +#endif +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +static const char *__pyx_f[] = { + "gensim/models/doc2vec_corpusfile.pyx", + "__init__.pxd", + "type.pxd", + "gensim/models/word2vec_corpusfile.pxd", +}; +/* NoFastGil.proto */ +#define __Pyx_PyGILState_Ensure PyGILState_Ensure +#define __Pyx_PyGILState_Release PyGILState_Release +#define __Pyx_FastGIL_Remember() +#define __Pyx_FastGIL_Forget() +#define __Pyx_FastGilFuncInit() + +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 + * # in Cython to enable them only on the right systems. + * + * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + */ +typedef npy_int8 __pyx_t_5numpy_int8_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 + * + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t + */ +typedef npy_int16 __pyx_t_5numpy_int16_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< + * ctypedef npy_int64 int64_t + * #ctypedef npy_int96 int96_t + */ +typedef npy_int32 __pyx_t_5numpy_int32_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< + * #ctypedef npy_int96 int96_t + * #ctypedef npy_int128 int128_t + */ +typedef npy_int64 __pyx_t_5numpy_int64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 + * #ctypedef npy_int128 int128_t + * + * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + */ +typedef npy_uint8 __pyx_t_5numpy_uint8_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 + * + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t + */ +typedef npy_uint16 __pyx_t_5numpy_uint16_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< + * ctypedef npy_uint64 uint64_t + * #ctypedef npy_uint96 uint96_t + */ +typedef npy_uint32 __pyx_t_5numpy_uint32_t; + +/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< + * #ctypedef npy_uint96 uint96_t + * #ctypedef npy_uint128 uint128_t + */ +typedef npy_uint64 __pyx_t_5numpy_uint64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 + * #ctypedef npy_uint128 uint128_t + * + * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< + * ctypedef npy_float64 float64_t + * #ctypedef npy_float80 float80_t + */ +typedef npy_float32 __pyx_t_5numpy_float32_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 + * + * ctypedef npy_float32 float32_t + * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< + * #ctypedef npy_float80 float80_t + * #ctypedef npy_float128 float128_t + */ +typedef npy_float64 __pyx_t_5numpy_float64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 + * # The int types are mapped a bit surprising -- + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t + */ +typedef npy_long __pyx_t_5numpy_int_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong longlong_t + * + */ +typedef npy_longlong __pyx_t_5numpy_long_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_ulong uint_t + */ +typedef npy_longlong __pyx_t_5numpy_longlong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 + * ctypedef npy_longlong longlong_t + * + * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t + */ +typedef npy_ulong __pyx_t_5numpy_uint_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 + * + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulonglong_t + * + */ +typedef npy_ulonglong __pyx_t_5numpy_ulong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_intp intp_t + */ +typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 + * ctypedef npy_ulonglong ulonglong_t + * + * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< + * ctypedef npy_uintp uintp_t + * + */ +typedef npy_intp __pyx_t_5numpy_intp_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 + * + * ctypedef npy_intp intp_t + * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< + * + * ctypedef npy_double float_t + */ +typedef npy_uintp __pyx_t_5numpy_uintp_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 + * ctypedef npy_uintp uintp_t + * + * ctypedef npy_double float_t # <<<<<<<<<<<<<< + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t + */ +typedef npy_double 
__pyx_t_5numpy_float_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 + * + * ctypedef npy_double float_t + * ctypedef npy_double double_t # <<<<<<<<<<<<<< + * ctypedef npy_longdouble longdouble_t + * + */ +typedef npy_double __pyx_t_5numpy_double_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 + * ctypedef npy_double float_t + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cfloat cfloat_t + */ +typedef npy_longdouble __pyx_t_5numpy_longdouble_t; + +/* "word2vec_inner.pxd":19 + * void* PyCObject_AsVoidPtr(object obj) + * + * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< + * + * # BLAS routine signatures + */ +typedef __pyx_t_5numpy_float32_t __pyx_t_6gensim_6models_14word2vec_inner_REAL_t; + +/* "gensim/models/word2vec_corpusfile.pxd":21 + * cimport numpy as np + * + * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< + * + * + */ +typedef __pyx_t_5numpy_float32_t __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t; +/* Declarations.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< float > __pyx_t_float_complex; + #else + typedef float _Complex __pyx_t_float_complex; + #endif +#else + typedef struct { float real, imag; } __pyx_t_float_complex; +#endif +static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float); + +/* Declarations.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< double > __pyx_t_double_complex; + #else + typedef double _Complex __pyx_t_double_complex; + #endif +#else + typedef struct { double real, imag; } __pyx_t_double_complex; +#endif +static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double); + + +/*--- Type declarations ---*/ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 + * ctypedef npy_longdouble longdouble_t + * + * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t + */ +typedef npy_cfloat __pyx_t_5numpy_cfloat_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 + * + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< + * ctypedef npy_clongdouble clongdouble_t + * + */ +typedef npy_cdouble __pyx_t_5numpy_cdouble_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cdouble complex_t + */ +typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 + * ctypedef npy_clongdouble clongdouble_t + * + * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew1(a): + */ +typedef npy_cdouble __pyx_t_5numpy_complex_t; +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig; +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config; + +/* "word2vec_inner.pxd":22 + * + * # BLAS routine signatures + * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef void (*saxpy_ptr) (const int 
*N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const *, float const *, int const *, float *, int const *); + +/* "word2vec_inner.pxd":23 + * # BLAS routine signatures + * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); + +/* "word2vec_inner.pxd":24 + * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil + */ +typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "word2vec_inner.pxd":25 + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil + */ +typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "word2vec_inner.pxd":26 + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil + * + */ +typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const *, float const *, int const *); + +/* "word2vec_inner.pxd":27 + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< + * + * cdef scopy_ptr scopy 
+ */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, float const *, float const *, int const *); + +/* "word2vec_inner.pxd":44 + * + * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() + * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * + */ +typedef __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "word2vec_inner.pxd":45 + * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() + * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * + * cdef our_dot_ptr our_dot + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); + +/* "word2vec_inner.pxd":51 + * + * + * cdef struct Word2VecConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, compute_loss, size, window, cbow_mean, workers + * REAL_t running_training_loss, alpha + */ +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig { + int hs; + int negative; + int sample; + int compute_loss; + int size; + int window; + int cbow_mean; + int workers; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t running_training_loss; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + int sentence_idx[(0x2710 + 1)]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; +}; + +/* "word2vec_inner.pxd":125 + * + * + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=*) # <<<<<<<<<<<<<< + */ +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config { + int __pyx_n; + PyObject *_neu1; +}; +struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig; +struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config; + +/* "gensim/models/doc2vec_inner.pxd":23 + * + * + * cdef struct Doc2VecConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, learn_doctags, learn_words, learn_hidden, train_words, cbow_mean + * int document_len, doctag_len, window, expected_doctag_len, null_word_index, workers, docvecs_count + */ +struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig { + int hs; + int negative; + int sample; + int learn_doctags; + int learn_words; + int learn_hidden; + int train_words; + int cbow_mean; + int document_len; + int doctag_len; + int window; + int expected_doctag_len; + int null_word_index; + int workers; + 
int docvecs_count; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_vectors; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *doctag_vectors; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *doctag_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + int layer1_size; + int vector_size; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t doctag_indexes[0x2710]; + __pyx_t_5numpy_uint32_t window_indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; +}; + +/* "gensim/models/doc2vec_inner.pxd":91 + * + * + * cdef init_d2v_config(Doc2VecConfig *c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=*, work=*, # <<<<<<<<<<<<<< + * neu1=*, word_vectors=*, word_locks=*, doctag_vectors=*, doctag_locks=*, docvecs_count=*) + */ +struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config { + int __pyx_n; + PyObject *train_words; + PyObject *work; + PyObject *neu1; + PyObject *word_vectors; + PyObject *word_locks; + PyObject *doctag_vectors; + PyObject *doctag_locks; + PyObject *docvecs_count; +}; +struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem; + +/* "gensim/models/word2vec_corpusfile.pxd":47 + * + * + * cdef struct VocabItem: # <<<<<<<<<<<<<< + * long long sample_int + * np.uint32_t index + */ +struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem { + PY_LONG_LONG sample_int; + __pyx_t_5numpy_uint32_t index; + __pyx_t_5numpy_uint8_t *code; + int code_len; + __pyx_t_5numpy_uint32_t *point; + int subword_idx_len; + __pyx_t_5numpy_uint32_t *subword_idx; +}; + +/* "gensim/models/word2vec_corpusfile.pxd":59 + * + * + * ctypedef unordered_map[string, VocabItem] cvocab_t # <<<<<<<<<<<<<< + * + * cdef class CythonVocab: + */ +typedef std::unordered_map<std::string, struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem> __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t; + +/* "gensim/models/word2vec_corpusfile.pxd":33 + * + * + * cdef class CythonLineSentence: # <<<<<<<<<<<<<< + * cdef FastLineSentence* _thisptr + * cdef public bytes source + */ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence { + PyObject_HEAD + struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_vtab; + FastLineSentence *_thisptr; + PyObject *source; + size_t max_sentence_length; + size_t max_words_in_batch; + size_t offset; + std::vector<std::vector<std::string> > buf_data; +}; + + +/* "gensim/models/word2vec_corpusfile.pxd":61 + * ctypedef unordered_map[string, VocabItem] cvocab_t + * + * cdef class CythonVocab: # <<<<<<<<<<<<<< + * cdef cvocab_t vocab + * cdef subword_arrays + */ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab { + PyObject_HEAD + struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_vtab; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t vocab; + PyObject *subword_arrays; +}; + + + +/* "gensim/models/word2vec_corpusfile.pxd":33 + * + * + * cdef class CythonLineSentence: # <<<<<<<<<<<<<< + * cdef FastLineSentence* _thisptr + * cdef public bytes source + */ + +struct 
__pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence { + bool (*is_eof)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector<std::string> (*read_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector<std::vector<std::string> > (*_read_chunked_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector<std::vector<std::string> > (*_chunk_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, std::vector<std::string> , int __pyx_skip_dispatch); + void (*reset)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector<std::vector<std::string> > (*next_batch)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); +}; +static struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; + + +/* "gensim/models/word2vec_corpusfile.pxd":61 + * ctypedef unordered_map[string, VocabItem] cvocab_t + * + * cdef class CythonVocab: # <<<<<<<<<<<<<< + * cdef cvocab_t vocab + * cdef subword_arrays + */ + +struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab { + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *(*get_vocab_ptr)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *); +}; +static struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) 
{__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) 
PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* RaiseNoneIterError.proto */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define 
__Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* GetVTable.proto */ +static void* __Pyx_GetVtable(PyObject *dict); + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* None.proto */ +static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void); /* proto */ + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* RealImag.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #define __Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif +#if defined(__cplusplus) && CYTHON_CCOMPLEX\ + && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103) + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + +/* Arithmetic.proto */ +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq_float(a, b) ((a)==(b)) + #define __Pyx_c_sum_float(a, b) ((a)+(b)) + #define __Pyx_c_diff_float(a, b) ((a)-(b)) + #define __Pyx_c_prod_float(a, b) ((a)*(b)) + #define __Pyx_c_quot_float(a, b) ((a)/(b)) + #define __Pyx_c_neg_float(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero_float(z) ((z)==(float)0) + #define __Pyx_c_conj_float(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs_float(z) (::std::abs(z)) + #define __Pyx_c_pow_float(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero_float(z) ((z)==0) + #define __Pyx_c_conj_float(z) (conjf(z)) + #if 1 + #define __Pyx_c_abs_float(z) (cabsf(z)) + #define __Pyx_c_pow_float(a, b) (cpowf(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex, 
__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex); + static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex); + #if 1 + static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex, __pyx_t_float_complex); + #endif +#endif + +/* Arithmetic.proto */ +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq_double(a, b) ((a)==(b)) + #define __Pyx_c_sum_double(a, b) ((a)+(b)) + #define __Pyx_c_diff_double(a, b) ((a)-(b)) + #define __Pyx_c_prod_double(a, b) ((a)*(b)) + #define __Pyx_c_quot_double(a, b) ((a)/(b)) + #define __Pyx_c_neg_double(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero_double(z) ((z)==(double)0) + #define __Pyx_c_conj_double(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs_double(z) (::std::abs(z)) + #define __Pyx_c_pow_double(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero_double(z) ((z)==0) + #define __Pyx_c_conj_double(z) (conj(z)) + #if 1 + #define __Pyx_c_abs_double(z) (cabs(z)) + #define __Pyx_c_pow_double(a, b) (cpow(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex); + static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex); + #if 1 + static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex, __pyx_t_double_complex); + #endif +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value); + +/* None.proto */ +#include + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || 
PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* PyIdentifierFromString.proto */ +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +/* ModuleImport.proto */ +static PyObject *__Pyx_ImportModule(const char *name); + +/* TypeImport.proto */ +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); + +/* VoidPtrImport.proto */ +static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig); + +/* FunctionImport.proto */ +static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + + +/* Module declarations from 'cython' */ + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from 'numpy' */ + +/* Module declarations from 'numpy' */ +static PyTypeObject *__pyx_ptype_5numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0; +static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ + +/* Module declarations from 'libcpp.string' */ + +/* Module declarations from 'libcpp.vector' */ + +/* Module declarations from 'gensim.models.word2vec_inner' */ +static __pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_scopy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_scopy (*__pyx_vp_6gensim_6models_14word2vec_inner_scopy) +static __pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_saxpy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_saxpy (*__pyx_vp_6gensim_6models_14word2vec_inner_saxpy) +static __pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_sdot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_sdot (*__pyx_vp_6gensim_6models_14word2vec_inner_sdot) +static __pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_dsdot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_dsdot (*__pyx_vp_6gensim_6models_14word2vec_inner_dsdot) +static __pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_snrm2 = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_snrm2 (*__pyx_vp_6gensim_6models_14word2vec_inner_snrm2) +static __pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_sscal = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_sscal (*__pyx_vp_6gensim_6models_14word2vec_inner_sscal) +static 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE)[0x3E8] = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE (*__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE) +static __pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_our_dot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_our_dot (*__pyx_vp_6gensim_6models_14word2vec_inner_our_dot) +static __pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy (*__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy) +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_random_int32)(unsigned PY_LONG_LONG *); /*proto*/ + +/* Module declarations from 'gensim.models.doc2vec_inner' */ +static void (*__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs)(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg)(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static void (*__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_hs)(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int); /*proto*/ +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_neg)(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int); /*proto*/ +static void (*__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_hs)(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int const , int); /*proto*/ +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_neg)(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int const , int); /*proto*/ +static 
PyObject *(*__pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config)(struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config *__pyx_optional_args); /*proto*/ + +/* Module declarations from 'libcpp.utility' */ + +/* Module declarations from 'libcpp.unordered_map' */ + +/* Module declarations from 'libcpp' */ + +/* Module declarations from 'gensim.models.word2vec_corpusfile' */ +static PyTypeObject *__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = 0; +static PyTypeObject *__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab = 0; +static __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (*__pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha)(__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int); /*proto*/ +static __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (*__pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha)(__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int, int, int, int, int); /*proto*/ + +/* Module declarations from 'gensim.models.doc2vec_corpusfile' */ +static int __pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE; +static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_6gensim_6models_18doc2vec_corpusfile_ONEF; +static void __pyx_f_6gensim_6models_18doc2vec_corpusfile_prepare_c_structures_for_batch(std::vector<std::string> &, int, int, int, int *, int *, unsigned PY_LONG_LONG *, __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *, __pyx_t_5numpy_uint32_t *, int *, __pyx_t_5numpy_uint8_t **, __pyx_t_5numpy_uint32_t **, __pyx_t_5numpy_uint32_t *, int *, int, int, int); /*proto*/ +#define __Pyx_MODULE_NAME "gensim.models.doc2vec_corpusfile" +extern int __pyx_module_is_main_gensim__models__doc2vec_corpusfile; +int __pyx_module_is_main_gensim__models__doc2vec_corpusfile = 0; + +/* Implementation of 'gensim.models.doc2vec_corpusfile' */ +static PyObject *__pyx_builtin_ImportError; +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_ValueError; +static PyObject *__pyx_builtin_RuntimeError; +static const char __pyx_k_c[] = "c"; +static const char __pyx_k_i[] = "i"; +static const char __pyx_k_j[] = "j"; +static const char __pyx_k_k[] = "k"; +static const char __pyx_k_m[] = "m"; +static const char __pyx_k_n[] = "n"; +static const char __pyx_k_np[] = "np"; +static const char __pyx_k__10[] = "*"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_neu1[] = "neu1"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_work[] = "work"; +static const char __pyx_k_alpha[] = "alpha"; +static const char __pyx_k_count[] = "count"; +static const char __pyx_k_fblas[] = "fblas"; +static const char __pyx_k_model[] = "model"; +static const char __pyx_k_numpy[] = "numpy"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_vocab[] = "vocab"; +static const char __pyx_k_epochs[] = "epochs"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_offset[] = "offset"; +static const char __pyx_k_alpha_2[] = "_alpha"; +static const char __pyx_k_doc_tag[] = "_doc_tag"; +static const char __pyx_k_idx_end[] = "idx_end"; +static const char __pyx_k_sent_idx[] = "sent_idx"; +static const char __pyx_k_cur_epoch[] = "_cur_epoch"; +static const char __pyx_k_doc_words[] = "doc_words"; +static const char 
__pyx_k_end_alpha[] = "end_alpha"; +static const char __pyx_k_idx_start[] = "idx_start"; +static const char __pyx_k_inv_count[] = "inv_count"; +static const char __pyx_k_min_alpha[] = "min_alpha"; +static const char __pyx_k_ValueError[] = "ValueError"; +static const char __pyx_k_num_epochs[] = "num_epochs"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_word_locks[] = "word_locks"; +static const char __pyx_k_ImportError[] = "ImportError"; +static const char __pyx_k_corpus_file[] = "corpus_file"; +static const char __pyx_k_cur_epoch_2[] = "cur_epoch"; +static const char __pyx_k_learn_words[] = "learn_words"; +static const char __pyx_k_start_alpha[] = "start_alpha"; +static const char __pyx_k_total_words[] = "total_words"; +static const char __pyx_k_train_words[] = "train_words"; +static const char __pyx_k_RuntimeError[] = "RuntimeError"; +static const char __pyx_k_cython_vocab[] = "_cython_vocab"; +static const char __pyx_k_doctag_locks[] = "doctag_locks"; +static const char __pyx_k_document_len[] = "document_len"; +static const char __pyx_k_input_stream[] = "input_stream"; +static const char __pyx_k_learn_hidden[] = "learn_hidden"; +static const char __pyx_k_start_doctag[] = "start_doctag"; +static const char __pyx_k_word_vectors[] = "word_vectors"; +static const char __pyx_k_docvecs_count[] = "docvecs_count"; +static const char __pyx_k_learn_doctags[] = "learn_doctags"; +static const char __pyx_k_doctag_vectors[] = "doctag_vectors"; +static const char __pyx_k_expected_words[] = "_expected_words"; +static const char __pyx_k_effective_words[] = "effective_words"; +static const char __pyx_k_total_documents[] = "total_documents"; +static const char __pyx_k_expected_words_2[] = "expected_words"; +static const char __pyx_k_expected_examples[] = "_expected_examples"; +static const char __pyx_k_scipy_linalg_blas[] = "scipy.linalg.blas"; +static const char __pyx_k_CORPUSFILE_VERSION[] = "CORPUSFILE_VERSION"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_d2v_train_epoch_dm[] = "d2v_train_epoch_dm"; +static const char __pyx_k_expected_examples_2[] = "expected_examples"; +static const char __pyx_k_d2v_train_epoch_dbow[] = "d2v_train_epoch_dbow"; +static const char __pyx_k_total_effective_words[] = "total_effective_words"; +static const char __pyx_k_d2v_train_epoch_dm_concat[] = "d2v_train_epoch_dm_concat"; +static const char __pyx_k_ndarray_is_not_C_contiguous[] = "ndarray is not C contiguous"; +static const char __pyx_k_numpy_core_multiarray_failed_to[] = "numpy.core.multiarray failed to import"; +static const char __pyx_k_unknown_dtype_code_in_numpy_pxd[] = "unknown dtype code in numpy.pxd (%d)"; +static const char __pyx_k_Format_string_allocated_too_shor[] = "Format string allocated too short, see comment in numpy.pxd"; +static const char __pyx_k_Non_native_byte_order_not_suppor[] = "Non-native byte order not supported"; +static const char __pyx_k_Optimized_cython_functions_for_f[] = "Optimized cython functions for file-based training :class:`~gensim.models.doc2vec.Doc2Vec` model."; +static const char __pyx_k_gensim_models_doc2vec_corpusfile[] = "gensim/models/doc2vec_corpusfile.pyx"; +static const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = "ndarray is not Fortran contiguous"; +static const char __pyx_k_numpy_core_umath_failed_to_impor[] = "numpy.core.umath failed to import"; +static const char __pyx_k_Format_string_allocated_too_shor_2[] = "Format string allocated too short."; +static const char 
__pyx_k_gensim_models_doc2vec_corpusfile_2[] = "gensim.models.doc2vec_corpusfile"; +static PyObject *__pyx_n_s_CORPUSFILE_VERSION; +static PyObject *__pyx_kp_u_Format_string_allocated_too_shor; +static PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2; +static PyObject *__pyx_n_s_ImportError; +static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor; +static PyObject *__pyx_n_s_RuntimeError; +static PyObject *__pyx_n_s_ValueError; +static PyObject *__pyx_n_s__10; +static PyObject *__pyx_n_s_alpha; +static PyObject *__pyx_n_s_alpha_2; +static PyObject *__pyx_n_s_c; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_corpus_file; +static PyObject *__pyx_n_s_count; +static PyObject *__pyx_n_s_cur_epoch; +static PyObject *__pyx_n_s_cur_epoch_2; +static PyObject *__pyx_n_s_cython_vocab; +static PyObject *__pyx_n_s_d2v_train_epoch_dbow; +static PyObject *__pyx_n_s_d2v_train_epoch_dm; +static PyObject *__pyx_n_s_d2v_train_epoch_dm_concat; +static PyObject *__pyx_n_s_doc_tag; +static PyObject *__pyx_n_s_doc_words; +static PyObject *__pyx_n_s_doctag_locks; +static PyObject *__pyx_n_s_doctag_vectors; +static PyObject *__pyx_n_s_document_len; +static PyObject *__pyx_n_s_docvecs_count; +static PyObject *__pyx_n_s_effective_words; +static PyObject *__pyx_n_s_end_alpha; +static PyObject *__pyx_n_s_epochs; +static PyObject *__pyx_n_s_expected_examples; +static PyObject *__pyx_n_s_expected_examples_2; +static PyObject *__pyx_n_s_expected_words; +static PyObject *__pyx_n_s_expected_words_2; +static PyObject *__pyx_n_s_fblas; +static PyObject *__pyx_kp_s_gensim_models_doc2vec_corpusfile; +static PyObject *__pyx_n_s_gensim_models_doc2vec_corpusfile_2; +static PyObject *__pyx_n_s_i; +static PyObject *__pyx_n_s_idx_end; +static PyObject *__pyx_n_s_idx_start; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_input_stream; +static PyObject *__pyx_n_s_inv_count; +static PyObject *__pyx_n_s_j; +static PyObject *__pyx_n_s_k; +static PyObject *__pyx_n_s_learn_doctags; +static PyObject *__pyx_n_s_learn_hidden; +static PyObject *__pyx_n_s_learn_words; +static PyObject *__pyx_n_s_m; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_min_alpha; +static PyObject *__pyx_n_s_model; +static PyObject *__pyx_n_s_n; +static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous; +static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou; +static PyObject *__pyx_n_s_neu1; +static PyObject *__pyx_n_s_np; +static PyObject *__pyx_n_s_num_epochs; +static PyObject *__pyx_n_s_numpy; +static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to; +static PyObject *__pyx_kp_s_numpy_core_umath_failed_to_impor; +static PyObject *__pyx_n_s_offset; +static PyObject *__pyx_n_s_pyx_vtable; +static PyObject *__pyx_n_s_range; +static PyObject *__pyx_n_s_scipy_linalg_blas; +static PyObject *__pyx_n_s_sent_idx; +static PyObject *__pyx_n_s_start_alpha; +static PyObject *__pyx_n_s_start_doctag; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_total_documents; +static PyObject *__pyx_n_s_total_effective_words; +static PyObject *__pyx_n_s_total_words; +static PyObject *__pyx_n_s_train_words; +static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd; +static PyObject *__pyx_n_s_vocab; +static PyObject *__pyx_n_s_word_locks; +static PyObject *__pyx_n_s_word_vectors; +static PyObject *__pyx_n_s_work; +static PyObject *__pyx_pf_6gensim_6models_18doc2vec_corpusfile_d2v_train_epoch_dbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject 
*__pyx_v_offset, PyObject *__pyx_v_start_doctag, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_docvecs_count, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_train_words, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_18doc2vec_corpusfile_2d2v_train_epoch_dm(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v_start_doctag, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_docvecs_count, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_18doc2vec_corpusfile_4d2v_train_epoch_dm_concat(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v_start_doctag, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_docvecs_count, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks); /* proto */ +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */ +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ +static PyObject *__pyx_int_1; +static PyObject *__pyx_tuple_; +static PyObject *__pyx_tuple__2; +static PyObject *__pyx_tuple__3; +static PyObject *__pyx_tuple__4; +static PyObject *__pyx_tuple__5; +static PyObject *__pyx_tuple__6; +static PyObject *__pyx_tuple__7; +static PyObject *__pyx_tuple__8; +static PyObject *__pyx_tuple__9; +static PyObject *__pyx_tuple__11; +static PyObject *__pyx_tuple__13; +static PyObject *__pyx_tuple__15; +static PyObject *__pyx_codeobj__12; +static PyObject *__pyx_codeobj__14; +static PyObject *__pyx_codeobj__16; +/* Late includes */ + +/* "gensim/models/doc2vec_corpusfile.pyx":57 + * + * + * cdef void prepare_c_structures_for_batch(vector[string] &doc_words, int sample, int hs, int window, int *total_words, # <<<<<<<<<<<<<< + * int *effective_words, unsigned long long *next_random, cvocab_t *vocab, + * np.uint32_t *indexes, int *codelens, np.uint8_t **codes, np.uint32_t **points, + */ + +static void __pyx_f_6gensim_6models_18doc2vec_corpusfile_prepare_c_structures_for_batch(std::vector<std::string> &__pyx_v_doc_words, int __pyx_v_sample, int __pyx_v_hs, int __pyx_v_window, int *__pyx_v_total_words, int *__pyx_v_effective_words, unsigned PY_LONG_LONG *__pyx_v_next_random, __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_v_vocab, __pyx_t_5numpy_uint32_t *__pyx_v_indexes, int *__pyx_v_codelens, __pyx_t_5numpy_uint8_t 
**__pyx_v_codes, __pyx_t_5numpy_uint32_t **__pyx_v_points, __pyx_t_5numpy_uint32_t *__pyx_v_reduced_windows, int *__pyx_v_document_len, int __pyx_v_train_words, int __pyx_v_docvecs_count, int __pyx_v_doc_tag) { + struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem __pyx_v_predict_word; + std::string __pyx_v_token; + int __pyx_v_i; + long __pyx_t_1; + std::vector<std::string> ::iterator __pyx_t_2; + std::string __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + __pyx_t_5numpy_uint32_t __pyx_t_6; + int __pyx_t_7; + __pyx_t_5numpy_uint8_t *__pyx_t_8; + __pyx_t_5numpy_uint32_t *__pyx_t_9; + int __pyx_t_10; + int __pyx_t_11; + + /* "gensim/models/doc2vec_corpusfile.pyx":64 + * cdef VocabItem predict_word + * cdef string token + * cdef int i = 0 # <<<<<<<<<<<<<< + * + * total_words[0] += doc_words.size() + */ + __pyx_v_i = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":66 + * cdef int i = 0 + * + * total_words[0] += doc_words.size() # <<<<<<<<<<<<<< + * + * for token in doc_words: + */ + __pyx_t_1 = 0; + (__pyx_v_total_words[__pyx_t_1]) = ((__pyx_v_total_words[__pyx_t_1]) + __pyx_v_doc_words.size()); + + /* "gensim/models/doc2vec_corpusfile.pyx":68 + * total_words[0] += doc_words.size() + * + * for token in doc_words: # <<<<<<<<<<<<<< + * if vocab[0].find(token) == vocab[0].end(): # shrink document to leave out word + * continue # leaving i unchanged + */ + __pyx_t_2 = __pyx_v_doc_words.begin(); + for (;;) { + if (!(__pyx_t_2 != __pyx_v_doc_words.end())) break; + __pyx_t_3 = *__pyx_t_2; + ++__pyx_t_2; + __pyx_v_token = __pyx_t_3; + + /* "gensim/models/doc2vec_corpusfile.pyx":69 + * + * for token in doc_words: + * if vocab[0].find(token) == vocab[0].end(): # shrink document to leave out word # <<<<<<<<<<<<<< + * continue # leaving i unchanged + * + */ + __pyx_t_4 = (((__pyx_v_vocab[0]).find(__pyx_v_token) == (__pyx_v_vocab[0]).end()) != 0); + if (__pyx_t_4) { + + /* "gensim/models/doc2vec_corpusfile.pyx":70 + * for token in doc_words: + * if vocab[0].find(token) == vocab[0].end(): # shrink document to leave out word + * continue # leaving i unchanged # <<<<<<<<<<<<<< + * + * predict_word = vocab[0][token] + */ + goto __pyx_L3_continue; + + /* "gensim/models/doc2vec_corpusfile.pyx":69 + * + * for token in doc_words: + * if vocab[0].find(token) == vocab[0].end(): # shrink document to leave out word # <<<<<<<<<<<<<< + * continue # leaving i unchanged + * + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":72 + * continue # leaving i unchanged + * + * predict_word = vocab[0][token] # <<<<<<<<<<<<<< + * if sample and predict_word.sample_int < random_int32(next_random): + * continue + */ + __pyx_v_predict_word = ((__pyx_v_vocab[0])[__pyx_v_token]); + + /* "gensim/models/doc2vec_corpusfile.pyx":73 + * + * predict_word = vocab[0][token] + * if sample and predict_word.sample_int < random_int32(next_random): # <<<<<<<<<<<<<< + * continue + * indexes[i] = predict_word.index + */ + __pyx_t_5 = (__pyx_v_sample != 0); + if (__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_5 = ((__pyx_v_predict_word.sample_int < __pyx_f_6gensim_6models_14word2vec_inner_random_int32(__pyx_v_next_random)) != 0); + __pyx_t_4 = __pyx_t_5; + __pyx_L7_bool_binop_done:; + if (__pyx_t_4) { + + /* "gensim/models/doc2vec_corpusfile.pyx":74 + * predict_word = vocab[0][token] + * if sample and predict_word.sample_int < random_int32(next_random): + * continue # <<<<<<<<<<<<<< + * indexes[i] = predict_word.index + * if hs: + */ + goto __pyx_L3_continue; + + /* 
"gensim/models/doc2vec_corpusfile.pyx":73 + * + * predict_word = vocab[0][token] + * if sample and predict_word.sample_int < random_int32(next_random): # <<<<<<<<<<<<<< + * continue + * indexes[i] = predict_word.index + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":75 + * if sample and predict_word.sample_int < random_int32(next_random): + * continue + * indexes[i] = predict_word.index # <<<<<<<<<<<<<< + * if hs: + * codelens[i] = predict_word.code_len + */ + __pyx_t_6 = __pyx_v_predict_word.index; + (__pyx_v_indexes[__pyx_v_i]) = __pyx_t_6; + + /* "gensim/models/doc2vec_corpusfile.pyx":76 + * continue + * indexes[i] = predict_word.index + * if hs: # <<<<<<<<<<<<<< + * codelens[i] = predict_word.code_len + * codes[i] = predict_word.code + */ + __pyx_t_4 = (__pyx_v_hs != 0); + if (__pyx_t_4) { + + /* "gensim/models/doc2vec_corpusfile.pyx":77 + * indexes[i] = predict_word.index + * if hs: + * codelens[i] = predict_word.code_len # <<<<<<<<<<<<<< + * codes[i] = predict_word.code + * points[i] = predict_word.point + */ + __pyx_t_7 = __pyx_v_predict_word.code_len; + (__pyx_v_codelens[__pyx_v_i]) = __pyx_t_7; + + /* "gensim/models/doc2vec_corpusfile.pyx":78 + * if hs: + * codelens[i] = predict_word.code_len + * codes[i] = predict_word.code # <<<<<<<<<<<<<< + * points[i] = predict_word.point + * + */ + __pyx_t_8 = __pyx_v_predict_word.code; + (__pyx_v_codes[__pyx_v_i]) = __pyx_t_8; + + /* "gensim/models/doc2vec_corpusfile.pyx":79 + * codelens[i] = predict_word.code_len + * codes[i] = predict_word.code + * points[i] = predict_word.point # <<<<<<<<<<<<<< + * + * effective_words[0] += 1 + */ + __pyx_t_9 = __pyx_v_predict_word.point; + (__pyx_v_points[__pyx_v_i]) = __pyx_t_9; + + /* "gensim/models/doc2vec_corpusfile.pyx":76 + * continue + * indexes[i] = predict_word.index + * if hs: # <<<<<<<<<<<<<< + * codelens[i] = predict_word.code_len + * codes[i] = predict_word.code + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":81 + * points[i] = predict_word.point + * + * effective_words[0] += 1 # <<<<<<<<<<<<<< + * i += 1 + * if i == MAX_DOCUMENT_LEN: + */ + __pyx_t_1 = 0; + (__pyx_v_effective_words[__pyx_t_1]) = ((__pyx_v_effective_words[__pyx_t_1]) + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":82 + * + * effective_words[0] += 1 + * i += 1 # <<<<<<<<<<<<<< + * if i == MAX_DOCUMENT_LEN: + * break # TODO: log warning, tally overflow? + */ + __pyx_v_i = (__pyx_v_i + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":83 + * effective_words[0] += 1 + * i += 1 + * if i == MAX_DOCUMENT_LEN: # <<<<<<<<<<<<<< + * break # TODO: log warning, tally overflow? + * document_len[0] = i + */ + __pyx_t_4 = ((__pyx_v_i == 0x2710) != 0); + if (__pyx_t_4) { + + /* "gensim/models/doc2vec_corpusfile.pyx":84 + * i += 1 + * if i == MAX_DOCUMENT_LEN: + * break # TODO: log warning, tally overflow? # <<<<<<<<<<<<<< + * document_len[0] = i + * + */ + goto __pyx_L4_break; + + /* "gensim/models/doc2vec_corpusfile.pyx":83 + * effective_words[0] += 1 + * i += 1 + * if i == MAX_DOCUMENT_LEN: # <<<<<<<<<<<<<< + * break # TODO: log warning, tally overflow? + * document_len[0] = i + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":68 + * total_words[0] += doc_words.size() + * + * for token in doc_words: # <<<<<<<<<<<<<< + * if vocab[0].find(token) == vocab[0].end(): # shrink document to leave out word + * continue # leaving i unchanged + */ + __pyx_L3_continue:; + } + __pyx_L4_break:; + + /* "gensim/models/doc2vec_corpusfile.pyx":85 + * if i == MAX_DOCUMENT_LEN: + * break # TODO: log warning, tally overflow? 
+ * document_len[0] = i # <<<<<<<<<<<<<< + * + * if train_words and reduced_windows != NULL: + */ + (__pyx_v_document_len[0]) = __pyx_v_i; + + /* "gensim/models/doc2vec_corpusfile.pyx":87 + * document_len[0] = i + * + * if train_words and reduced_windows != NULL: # <<<<<<<<<<<<<< + * for i in range(document_len[0]): + * reduced_windows[i] = random_int32(next_random) % window + */ + __pyx_t_5 = (__pyx_v_train_words != 0); + if (__pyx_t_5) { + } else { + __pyx_t_4 = __pyx_t_5; + goto __pyx_L12_bool_binop_done; + } + __pyx_t_5 = ((__pyx_v_reduced_windows != NULL) != 0); + __pyx_t_4 = __pyx_t_5; + __pyx_L12_bool_binop_done:; + if (__pyx_t_4) { + + /* "gensim/models/doc2vec_corpusfile.pyx":88 + * + * if train_words and reduced_windows != NULL: + * for i in range(document_len[0]): # <<<<<<<<<<<<<< + * reduced_windows[i] = random_int32(next_random) % window + * + */ + __pyx_t_7 = (__pyx_v_document_len[0]); + __pyx_t_10 = __pyx_t_7; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "gensim/models/doc2vec_corpusfile.pyx":89 + * if train_words and reduced_windows != NULL: + * for i in range(document_len[0]): + * reduced_windows[i] = random_int32(next_random) % window # <<<<<<<<<<<<<< + * + * if doc_tag < docvecs_count: + */ + (__pyx_v_reduced_windows[__pyx_v_i]) = (__pyx_f_6gensim_6models_14word2vec_inner_random_int32(__pyx_v_next_random) % __pyx_v_window); + } + + /* "gensim/models/doc2vec_corpusfile.pyx":87 + * document_len[0] = i + * + * if train_words and reduced_windows != NULL: # <<<<<<<<<<<<<< + * for i in range(document_len[0]): + * reduced_windows[i] = random_int32(next_random) % window + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":91 + * reduced_windows[i] = random_int32(next_random) % window + * + * if doc_tag < docvecs_count: # <<<<<<<<<<<<<< + * effective_words[0] += 1 + * + */ + __pyx_t_4 = ((__pyx_v_doc_tag < __pyx_v_docvecs_count) != 0); + if (__pyx_t_4) { + + /* "gensim/models/doc2vec_corpusfile.pyx":92 + * + * if doc_tag < docvecs_count: + * effective_words[0] += 1 # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = 0; + (__pyx_v_effective_words[__pyx_t_1]) = ((__pyx_v_effective_words[__pyx_t_1]) + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":91 + * reduced_windows[i] = random_int32(next_random) % window + * + * if doc_tag < docvecs_count: # <<<<<<<<<<<<<< + * effective_words[0] += 1 + * + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":57 + * + * + * cdef void prepare_c_structures_for_batch(vector[string] &doc_words, int sample, int hs, int window, int *total_words, # <<<<<<<<<<<<<< + * int *effective_words, unsigned long long *next_random, cvocab_t *vocab, + * np.uint32_t *indexes, int *codelens, np.uint8_t **codes, np.uint32_t **points, + */ + + /* function exit code */ +} + +/* "gensim/models/doc2vec_corpusfile.pyx":95 + * + * + * def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_18doc2vec_corpusfile_1d2v_train_epoch_dbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_18doc2vec_corpusfile_d2v_train_epoch_dbow[] = "d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, 
work, neu1, docvecs_count, word_vectors=None, word_locks=None, train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None)\nTrain distributed bag of words model (\"PV-DBOW\") by training on a corpus file.\n\n Called internally from :meth:`~gensim.models.doc2vec.Doc2Vec.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.doc2vec.Doc2Vec`\n The Doc2Vec model instance to train.\n corpus_file : str\n Path to corpus file.\n _cur_epoch : int\n Current epoch number. Used for calculating and decaying learning rate.\n work : np.ndarray\n Private working memory for each worker.\n neu1 : np.ndarray\n Private working memory for each worker.\n train_words : bool, optional\n Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both** `learn_words`\n and `train_words` are set to True.\n learn_doctags : bool, optional\n Whether the tag vectors should be updated.\n learn_words : bool, optional\n Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both**\n `learn_words` and `train_words` are set to True.\n learn_hidden : bool, optional\n Whether or not the weights of the hidden layer will be updated.\n word_vectors : numpy.ndarray, optional\n The vector representation for each word in the vocabulary. If None, these will be retrieved from the model.\n word_locks : numpy.ndarray, optional\n A learning lock factor for each weight in the hidden layer for words: a value of 0 completely blocks updates,\n a value of 1 allows normal updates to word-vectors.\n doctag_vectors : numpy.ndarray, optional\n Vector representations of the tags. If None, these will be retrieved from the model.\n doctag_locks"" : numpy.ndarray, optional\n The lock factors for each tag, same as `word_locks`, but for document-vectors.\n\n Returns\n -------\n int\n Number of words in the input document that were actually used for training.\n\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_18doc2vec_corpusfile_1d2v_train_epoch_dbow = {"d2v_train_epoch_dbow", (PyCFunction)__pyx_pw_6gensim_6models_18doc2vec_corpusfile_1d2v_train_epoch_dbow, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_18doc2vec_corpusfile_d2v_train_epoch_dbow}; +static PyObject *__pyx_pw_6gensim_6models_18doc2vec_corpusfile_1d2v_train_epoch_dbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_corpus_file = 0; + PyObject *__pyx_v_offset = 0; + PyObject *__pyx_v_start_doctag = 0; + PyObject *__pyx_v__cython_vocab = 0; + PyObject *__pyx_v__cur_epoch = 0; + PyObject *__pyx_v__expected_examples = 0; + PyObject *__pyx_v__expected_words = 0; + PyObject *__pyx_v_work = 0; + PyObject *__pyx_v_neu1 = 0; + PyObject *__pyx_v_docvecs_count = 0; + PyObject *__pyx_v_word_vectors = 0; + PyObject *__pyx_v_word_locks = 0; + PyObject *__pyx_v_train_words = 0; + PyObject *__pyx_v_learn_doctags = 0; + PyObject *__pyx_v_learn_words = 0; + PyObject *__pyx_v_learn_hidden = 0; + PyObject *__pyx_v_doctag_vectors = 0; + PyObject *__pyx_v_doctag_locks = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("d2v_train_epoch_dbow (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = 
{&__pyx_n_s_model,&__pyx_n_s_corpus_file,&__pyx_n_s_offset,&__pyx_n_s_start_doctag,&__pyx_n_s_cython_vocab,&__pyx_n_s_cur_epoch,&__pyx_n_s_expected_examples,&__pyx_n_s_expected_words,&__pyx_n_s_work,&__pyx_n_s_neu1,&__pyx_n_s_docvecs_count,&__pyx_n_s_word_vectors,&__pyx_n_s_word_locks,&__pyx_n_s_train_words,&__pyx_n_s_learn_doctags,&__pyx_n_s_learn_words,&__pyx_n_s_learn_hidden,&__pyx_n_s_doctag_vectors,&__pyx_n_s_doctag_locks,0}; + PyObject* values[19] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; + + /* "gensim/models/doc2vec_corpusfile.pyx":96 + * + * def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, # <<<<<<<<<<<<<< + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + * doctag_vectors=None, doctag_locks=None): + */ + values[11] = ((PyObject *)Py_None); + values[12] = ((PyObject *)Py_None); + + /* "gensim/models/doc2vec_corpusfile.pyx":97 + * def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, # <<<<<<<<<<<<<< + * doctag_vectors=None, doctag_locks=None): + * """Train distributed bag of words model ("PV-DBOW") by training on a corpus file. + */ + values[13] = ((PyObject *)Py_False); + values[14] = ((PyObject *)Py_True); + values[15] = ((PyObject *)Py_True); + values[16] = ((PyObject *)Py_True); + + /* "gensim/models/doc2vec_corpusfile.pyx":98 + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + * doctag_vectors=None, doctag_locks=None): # <<<<<<<<<<<<<< + * """Train distributed bag of words model ("PV-DBOW") by training on a corpus file. 
+ * + */ + values[17] = ((PyObject *)Py_None); + values[18] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 19: values[18] = PyTuple_GET_ITEM(__pyx_args, 18); + CYTHON_FALLTHROUGH; + case 18: values[17] = PyTuple_GET_ITEM(__pyx_args, 17); + CYTHON_FALLTHROUGH; + case 17: values[16] = PyTuple_GET_ITEM(__pyx_args, 16); + CYTHON_FALLTHROUGH; + case 16: values[15] = PyTuple_GET_ITEM(__pyx_args, 15); + CYTHON_FALLTHROUGH; + case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); + CYTHON_FALLTHROUGH; + case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); + CYTHON_FALLTHROUGH; + case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); + CYTHON_FALLTHROUGH; + case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); + CYTHON_FALLTHROUGH; + case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); + CYTHON_FALLTHROUGH; + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_corpus_file)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 1); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 2); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start_doctag)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 3); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cython_vocab)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 4); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cur_epoch)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 5); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_examples)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 6); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = 
__Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_words)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 7); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 8); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 9: + if (likely((values[9] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 9); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 10: + if (likely((values[10] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_docvecs_count)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, 10); __PYX_ERR(0, 95, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 11: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_vectors); + if (value) { values[11] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 12: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_locks); + if (value) { values[12] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 13: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_train_words); + if (value) { values[13] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 14: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_doctags); + if (value) { values[14] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 15: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_words); + if (value) { values[15] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 16: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_hidden); + if (value) { values[16] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 17: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_vectors); + if (value) { values[17] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 18: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_locks); + if (value) { values[18] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "d2v_train_epoch_dbow") < 0)) __PYX_ERR(0, 95, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 19: values[18] = PyTuple_GET_ITEM(__pyx_args, 18); + CYTHON_FALLTHROUGH; + case 18: values[17] = PyTuple_GET_ITEM(__pyx_args, 17); + CYTHON_FALLTHROUGH; + case 17: values[16] = PyTuple_GET_ITEM(__pyx_args, 16); + CYTHON_FALLTHROUGH; + case 16: values[15] = PyTuple_GET_ITEM(__pyx_args, 15); + CYTHON_FALLTHROUGH; + case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); + CYTHON_FALLTHROUGH; + case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); + CYTHON_FALLTHROUGH; + case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); + CYTHON_FALLTHROUGH; + case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); + CYTHON_FALLTHROUGH; + case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); + values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[6] = 
PyTuple_GET_ITEM(__pyx_args, 6); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_model = values[0]; + __pyx_v_corpus_file = values[1]; + __pyx_v_offset = values[2]; + __pyx_v_start_doctag = values[3]; + __pyx_v__cython_vocab = values[4]; + __pyx_v__cur_epoch = values[5]; + __pyx_v__expected_examples = values[6]; + __pyx_v__expected_words = values[7]; + __pyx_v_work = values[8]; + __pyx_v_neu1 = values[9]; + __pyx_v_docvecs_count = values[10]; + __pyx_v_word_vectors = values[11]; + __pyx_v_word_locks = values[12]; + __pyx_v_train_words = values[13]; + __pyx_v_learn_doctags = values[14]; + __pyx_v_learn_words = values[15]; + __pyx_v_learn_hidden = values[16]; + __pyx_v_doctag_vectors = values[17]; + __pyx_v_doctag_locks = values[18]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dbow", 0, 11, 19, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 95, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.doc2vec_corpusfile.d2v_train_epoch_dbow", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_18doc2vec_corpusfile_d2v_train_epoch_dbow(__pyx_self, __pyx_v_model, __pyx_v_corpus_file, __pyx_v_offset, __pyx_v_start_doctag, __pyx_v__cython_vocab, __pyx_v__cur_epoch, __pyx_v__expected_examples, __pyx_v__expected_words, __pyx_v_work, __pyx_v_neu1, __pyx_v_docvecs_count, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_train_words, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); + + /* "gensim/models/doc2vec_corpusfile.pyx":95 + * + * + * def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_18doc2vec_corpusfile_d2v_train_epoch_dbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v_start_doctag, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_docvecs_count, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_train_words, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { + struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig __pyx_v_c; + int __pyx_v_cur_epoch; + int __pyx_v_num_epochs; + int __pyx_v_expected_examples; + int __pyx_v_expected_words; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_start_alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_end_alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; + struct 
__pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_input_stream = 0; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_vocab = 0; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_document_len; + int __pyx_v_effective_words; + int __pyx_v_total_effective_words; + int __pyx_v_total_documents; + int __pyx_v_total_words; + std::vector __pyx_v_doc_words; + int __pyx_v__doc_tag; + long __pyx_v_k; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_5; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config __pyx_t_8; + int __pyx_t_9; + std::vector __pyx_t_10; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_t_11; + int __pyx_t_12; + long __pyx_t_13; + long __pyx_t_14; + int __pyx_t_15; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + __Pyx_RefNannySetupContext("d2v_train_epoch_dbow", 0); + + /* "gensim/models/doc2vec_corpusfile.pyx":143 + * cdef Doc2VecConfig c + * + * cdef int cur_epoch = _cur_epoch # <<<<<<<<<<<<<< + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v__cur_epoch); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 143, __pyx_L1_error) + __pyx_v_cur_epoch = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":144 + * + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs # <<<<<<<<<<<<<< + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_epochs); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 144, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 144, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_num_epochs = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":145 + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) # <<<<<<<<<<<<<< + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + */ + __pyx_t_3 = (__pyx_v__expected_examples == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_examples); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 145, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_examples = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":146 + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) # <<<<<<<<<<<<<< + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + */ + __pyx_t_3 = (__pyx_v__expected_words == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_words); if (unlikely((__pyx_t_4 == (int)-1) && 
PyErr_Occurred())) __PYX_ERR(0, 146, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_words = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":147 + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha # <<<<<<<<<<<<<< + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 147, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 147, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_start_alpha = __pyx_t_5; + + /* "gensim/models/doc2vec_corpusfile.pyx":148 + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha # <<<<<<<<<<<<<< + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_min_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 148, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_end_alpha = __pyx_t_5; + + /* "gensim/models/doc2vec_corpusfile.pyx":149 + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) # <<<<<<<<<<<<<< + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_6 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v__alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_6, __pyx_v_end_alpha, __pyx_v_cur_epoch, __pyx_v_num_epochs); + + /* "gensim/models/doc2vec_corpusfile.pyx":151 + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) # <<<<<<<<<<<<<< + * cdef CythonVocab vocab = _cython_vocab + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_corpus_file); + __Pyx_GIVEREF(__pyx_v_corpus_file); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_corpus_file); + __Pyx_INCREF(__pyx_v_offset); + __Pyx_GIVEREF(__pyx_v_offset); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_offset); + __pyx_t_7 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), __pyx_t_2, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_input_stream = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_t_7); + __pyx_t_7 = 0; + + /* 
"gensim/models/doc2vec_corpusfile.pyx":152 + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + * cdef CythonVocab vocab = _cython_vocab # <<<<<<<<<<<<<< + * + * cdef int i, j, document_len + */ + if (!(likely(((__pyx_v__cython_vocab) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__cython_vocab, __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab))))) __PYX_ERR(0, 152, __pyx_L1_error) + __pyx_t_7 = __pyx_v__cython_vocab; + __Pyx_INCREF(__pyx_t_7); + __pyx_v_vocab = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":155 + * + * cdef int i, j, document_len + * cdef int effective_words = 0 # <<<<<<<<<<<<<< + * cdef int total_effective_words = 0, total_documents = 0, total_words = 0 + * cdef int sent_idx, idx_start, idx_end + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":156 + * cdef int i, j, document_len + * cdef int effective_words = 0 + * cdef int total_effective_words = 0, total_documents = 0, total_words = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * + */ + __pyx_v_total_effective_words = 0; + __pyx_v_total_documents = 0; + __pyx_v_total_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":160 + * + * cdef vector[string] doc_words + * cdef int _doc_tag = start_doctag # <<<<<<<<<<<<<< + * + * init_d2v_config( + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_start_doctag); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 160, __pyx_L1_error) + __pyx_v__doc_tag = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":163 + * + * init_d2v_config( + * &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=train_words, # <<<<<<<<<<<<<< + * work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + * doctag_vectors=doctag_vectors, doctag_locks=doctag_locks, docvecs_count=docvecs_count) + */ + __pyx_t_7 = PyFloat_FromDouble(__pyx_v__alpha); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 163, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + + /* "gensim/models/doc2vec_corpusfile.pyx":162 + * cdef int _doc_tag = start_doctag + * + * init_d2v_config( # <<<<<<<<<<<<<< + * &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=train_words, + * work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + */ + __pyx_t_8.__pyx_n = 8; + __pyx_t_8.train_words = __pyx_v_train_words; + __pyx_t_8.work = __pyx_v_work; + __pyx_t_8.neu1 = __pyx_v_neu1; + __pyx_t_8.word_vectors = __pyx_v_word_vectors; + __pyx_t_8.word_locks = __pyx_v_word_locks; + __pyx_t_8.doctag_vectors = __pyx_v_doctag_vectors; + __pyx_t_8.doctag_locks = __pyx_v_doctag_locks; + __pyx_t_8.docvecs_count = __pyx_v_docvecs_count; + __pyx_t_2 = __pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config((&__pyx_v_c), __pyx_v_model, __pyx_t_7, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, &__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 162, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":168 + * + * # release GIL & train on the full corpus, document by document + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + { + #ifdef WITH_THREAD + PyThreadState *_save; + Py_UNBLOCK_THREADS + __Pyx_FastGIL_Remember(); + #endif + /*try:*/ { + + /* 
"gensim/models/doc2vec_corpusfile.pyx":169 + * # release GIL & train on the full corpus, document by document + * with nogil: + * input_stream.reset() # <<<<<<<<<<<<<< + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_words = 0 + */ + ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->reset(__pyx_v_input_stream, 0); + + /* "gensim/models/doc2vec_corpusfile.pyx":170 + * with nogil: + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): # <<<<<<<<<<<<<< + * effective_words = 0 + * + */ + while (1) { + __pyx_t_9 = (((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->is_eof(__pyx_v_input_stream, 0) != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_3 = __pyx_t_9; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_9 = ((__pyx_v_total_words > (__pyx_v_expected_words / __pyx_v_c.workers)) != 0); + __pyx_t_3 = __pyx_t_9; + __pyx_L8_bool_binop_done:; + __pyx_t_9 = ((!__pyx_t_3) != 0); + if (!__pyx_t_9) break; + + /* "gensim/models/doc2vec_corpusfile.pyx":171 + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_words = 0 # <<<<<<<<<<<<<< + * + * doc_words = input_stream.read_sentence() + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":173 + * effective_words = 0 + * + * doc_words = input_stream.read_sentence() # <<<<<<<<<<<<<< + * + * if doc_words.empty(): + */ + __pyx_t_10 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->read_sentence(__pyx_v_input_stream, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 173, __pyx_L4_error) + __pyx_v_doc_words = __pyx_t_10; + + /* "gensim/models/doc2vec_corpusfile.pyx":175 + * doc_words = input_stream.read_sentence() + * + * if doc_words.empty(): # <<<<<<<<<<<<<< + * continue + * + */ + __pyx_t_9 = (__pyx_v_doc_words.empty() != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":176 + * + * if doc_words.empty(): + * continue # <<<<<<<<<<<<<< + * + * prepare_c_structures_for_batch( + */ + goto __pyx_L6_continue; + + /* "gensim/models/doc2vec_corpusfile.pyx":175 + * doc_words = input_stream.read_sentence() + * + * if doc_words.empty(): # <<<<<<<<<<<<<< + * continue + * + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":180 + * prepare_c_structures_for_batch( + * doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, + * &c.next_random, vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, c.points, # <<<<<<<<<<<<<< + * c.reduced_windows, &document_len, c.train_words, c.docvecs_count, _doc_tag) + * + */ + __pyx_t_11 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_v_vocab->__pyx_vtab)->get_vocab_ptr(__pyx_v_vocab); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 180, __pyx_L4_error) + + /* "gensim/models/doc2vec_corpusfile.pyx":178 + * continue + * + * prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, + * &c.next_random, vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, c.points, + */ + __pyx_f_6gensim_6models_18doc2vec_corpusfile_prepare_c_structures_for_batch(__pyx_v_doc_words, __pyx_v_c.sample, __pyx_v_c.hs, __pyx_v_c.window, (&__pyx_v_total_words), (&__pyx_v_effective_words), (&__pyx_v_c.next_random), 
__pyx_t_11, __pyx_v_c.indexes, __pyx_v_c.codelens, __pyx_v_c.codes, __pyx_v_c.points, __pyx_v_c.reduced_windows, (&__pyx_v_document_len), __pyx_v_c.train_words, __pyx_v_c.docvecs_count, __pyx_v__doc_tag); + + /* "gensim/models/doc2vec_corpusfile.pyx":183 + * c.reduced_windows, &document_len, c.train_words, c.docvecs_count, _doc_tag) + * + * for i in range(document_len): # <<<<<<<<<<<<<< + * if c.train_words: # simultaneous skip-gram wordvec-training + * j = i - c.window + c.reduced_windows[i] + */ + __pyx_t_1 = __pyx_v_document_len; + __pyx_t_4 = __pyx_t_1; + for (__pyx_t_12 = 0; __pyx_t_12 < __pyx_t_4; __pyx_t_12+=1) { + __pyx_v_i = __pyx_t_12; + + /* "gensim/models/doc2vec_corpusfile.pyx":184 + * + * for i in range(document_len): + * if c.train_words: # simultaneous skip-gram wordvec-training # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] + * if j < 0: + */ + __pyx_t_9 = (__pyx_v_c.train_words != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":185 + * for i in range(document_len): + * if c.train_words: # simultaneous skip-gram wordvec-training + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< + * if j < 0: + * j = 0 + */ + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/doc2vec_corpusfile.pyx":186 + * if c.train_words: # simultaneous skip-gram wordvec-training + * j = i - c.window + c.reduced_windows[i] + * if j < 0: # <<<<<<<<<<<<<< + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] + */ + __pyx_t_9 = ((__pyx_v_j < 0) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":187 + * j = i - c.window + c.reduced_windows[i] + * if j < 0: + * j = 0 # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > document_len: + */ + __pyx_v_j = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":186 + * if c.train_words: # simultaneous skip-gram wordvec-training + * j = i - c.window + c.reduced_windows[i] + * if j < 0: # <<<<<<<<<<<<<< + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":188 + * if j < 0: + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< + * if k > document_len: + * k = document_len + */ + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/doc2vec_corpusfile.pyx":189 + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > document_len: # <<<<<<<<<<<<<< + * k = document_len + * for j in range(j, k): + */ + __pyx_t_9 = ((__pyx_v_k > __pyx_v_document_len) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":190 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > document_len: + * k = document_len # <<<<<<<<<<<<<< + * for j in range(j, k): + * if j == i: + */ + __pyx_v_k = __pyx_v_document_len; + + /* "gensim/models/doc2vec_corpusfile.pyx":189 + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > document_len: # <<<<<<<<<<<<<< + * k = document_len + * for j in range(j, k): + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":191 + * if k > document_len: + * k = document_len + * for j in range(j, k): # <<<<<<<<<<<<<< + * if j == i: + * continue + */ + __pyx_t_13 = __pyx_v_k; + __pyx_t_14 = __pyx_t_13; + for (__pyx_t_15 = __pyx_v_j; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* "gensim/models/doc2vec_corpusfile.pyx":192 + * k = document_len + * for j in range(j, k): + * if j == i: # <<<<<<<<<<<<<< + * continue + * 
if c.hs: + */ + __pyx_t_9 = ((__pyx_v_j == __pyx_v_i) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":193 + * for j in range(j, k): + * if j == i: + * continue # <<<<<<<<<<<<<< + * if c.hs: + * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + */ + goto __pyx_L16_continue; + + /* "gensim/models/doc2vec_corpusfile.pyx":192 + * k = document_len + * for j in range(j, k): + * if j == i: # <<<<<<<<<<<<<< + * continue + * if c.hs: + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":194 + * if j == i: + * continue + * if c.hs: # <<<<<<<<<<<<<< + * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + * fast_document_dbow_hs( + */ + __pyx_t_9 = (__pyx_v_c.hs != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":196 + * if c.hs: + * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + * fast_document_dbow_hs( # <<<<<<<<<<<<<< + * c.points[i], c.codes[i], c.codelens[i], c.word_vectors, c.syn1, c.layer1_size, + * c.indexes[j], c.alpha, c.work, c.learn_words, c.learn_hidden, c.word_locks) + */ + __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.word_vectors, __pyx_v_c.syn1, __pyx_v_c.layer1_size, (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.learn_words, __pyx_v_c.learn_hidden, __pyx_v_c.word_locks); + + /* "gensim/models/doc2vec_corpusfile.pyx":194 + * if j == i: + * continue + * if c.hs: # <<<<<<<<<<<<<< + * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + * fast_document_dbow_hs( + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":200 + * c.indexes[j], c.alpha, c.work, c.learn_words, c.learn_hidden, c.word_locks) + * + * if c.negative: # <<<<<<<<<<<<<< + * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + * c.next_random = fast_document_dbow_neg( + */ + __pyx_t_9 = (__pyx_v_c.negative != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":202 + * if c.negative: + * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + * c.next_random = fast_document_dbow_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.word_vectors, c.syn1neg, + * c.layer1_size, c.indexes[i], c.indexes[j], c.alpha, c.work, + */ + __pyx_v_c.next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.word_vectors, __pyx_v_c.syn1neg, __pyx_v_c.layer1_size, (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.next_random, __pyx_v_c.learn_words, __pyx_v_c.learn_hidden, __pyx_v_c.word_locks); + + /* "gensim/models/doc2vec_corpusfile.pyx":200 + * c.indexes[j], c.alpha, c.work, c.learn_words, c.learn_hidden, c.word_locks) + * + * if c.negative: # <<<<<<<<<<<<<< + * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + * c.next_random = fast_document_dbow_neg( + */ + } + __pyx_L16_continue:; + } + + /* "gensim/models/doc2vec_corpusfile.pyx":184 + * + * for i in range(document_len): + * if c.train_words: # simultaneous skip-gram wordvec-training # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] + * if j < 0: + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":208 + * + * # docvec-training + * if _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * if 
c.hs: + * fast_document_dbow_hs( + */ + __pyx_t_9 = ((__pyx_v__doc_tag < __pyx_v_c.docvecs_count) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":209 + * # docvec-training + * if _doc_tag < c.docvecs_count: + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dbow_hs( + * c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + */ + __pyx_t_9 = (__pyx_v_c.hs != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":210 + * if _doc_tag < c.docvecs_count: + * if c.hs: + * fast_document_dbow_hs( # <<<<<<<<<<<<<< + * c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + * _doc_tag, c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + */ + __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.doctag_vectors, __pyx_v_c.syn1, __pyx_v_c.layer1_size, __pyx_v__doc_tag, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.learn_doctags, __pyx_v_c.learn_hidden, __pyx_v_c.doctag_locks); + + /* "gensim/models/doc2vec_corpusfile.pyx":209 + * # docvec-training + * if _doc_tag < c.docvecs_count: + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dbow_hs( + * c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":214 + * _doc_tag, c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + * + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dbow_neg( + * c.negative, c.cum_table, c.cum_table_len, c.doctag_vectors, c.syn1neg, + */ + __pyx_t_9 = (__pyx_v_c.negative != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":215 + * + * if c.negative: + * c.next_random = fast_document_dbow_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.doctag_vectors, c.syn1neg, + * c.layer1_size, c.indexes[i], _doc_tag, c.alpha, c.work, c.next_random, + */ + __pyx_v_c.next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.doctag_vectors, __pyx_v_c.syn1neg, __pyx_v_c.layer1_size, (__pyx_v_c.indexes[__pyx_v_i]), __pyx_v__doc_tag, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.next_random, __pyx_v_c.learn_doctags, __pyx_v_c.learn_hidden, __pyx_v_c.doctag_locks); + + /* "gensim/models/doc2vec_corpusfile.pyx":214 + * _doc_tag, c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + * + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dbow_neg( + * c.negative, c.cum_table, c.cum_table_len, c.doctag_vectors, c.syn1neg, + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":208 + * + * # docvec-training + * if _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * if c.hs: + * fast_document_dbow_hs( + */ + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":220 + * c.learn_doctags, c.learn_hidden, c.doctag_locks) + * + * total_documents += 1 # <<<<<<<<<<<<<< + * total_effective_words += effective_words + * _doc_tag += 1 + */ + __pyx_v_total_documents = (__pyx_v_total_documents + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":221 + * + * total_documents += 1 + * total_effective_words += effective_words # <<<<<<<<<<<<<< + * _doc_tag += 1 + * + */ + __pyx_v_total_effective_words = (__pyx_v_total_effective_words + __pyx_v_effective_words); + + /* "gensim/models/doc2vec_corpusfile.pyx":222 + * total_documents += 1 + * total_effective_words += effective_words + * _doc_tag += 1 # 
<<<<<<<<<<<<<< + * + * c.alpha = get_next_alpha( + */ + __pyx_v__doc_tag = (__pyx_v__doc_tag + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":224 + * _doc_tag += 1 + * + * c.alpha = get_next_alpha( # <<<<<<<<<<<<<< + * start_alpha, end_alpha, total_documents, total_words, + * expected_examples, expected_words, cur_epoch, num_epochs) + */ + __pyx_v_c.alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_v_start_alpha, __pyx_v_end_alpha, __pyx_v_total_documents, __pyx_v_total_words, __pyx_v_expected_examples, __pyx_v_expected_words, __pyx_v_cur_epoch, __pyx_v_num_epochs); + __pyx_L6_continue:; + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":168 + * + * # release GIL & train on the full corpus, document by document + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + /*finally:*/ { + /*normal exit:*/{ + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L5; + } + __pyx_L4_error: { + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L1_error; + } + __pyx_L5:; + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":228 + * expected_examples, expected_words, cur_epoch, num_epochs) + * + * return total_documents, total_effective_words, total_words # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_total_documents); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_total_effective_words); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_16 = __Pyx_PyInt_From_int(__pyx_v_total_words); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __pyx_t_17 = PyTuple_New(3); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 228, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_17, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_17, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_16); + PyTuple_SET_ITEM(__pyx_t_17, 2, __pyx_t_16); + __pyx_t_2 = 0; + __pyx_t_7 = 0; + __pyx_t_16 = 0; + __pyx_r = __pyx_t_17; + __pyx_t_17 = 0; + goto __pyx_L0; + + /* "gensim/models/doc2vec_corpusfile.pyx":95 + * + * + * def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_XDECREF(__pyx_t_17); + __Pyx_AddTraceback("gensim.models.doc2vec_corpusfile.d2v_train_epoch_dbow", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_input_stream); + __Pyx_XDECREF((PyObject *)__pyx_v_vocab); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/doc2vec_corpusfile.pyx":231 + * + * + * def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None): + */ + +/* 
Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_18doc2vec_corpusfile_3d2v_train_epoch_dm(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_18doc2vec_corpusfile_2d2v_train_epoch_dm[] = "d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None)\nTrain distributed memory model (\"PV-DM\") by training on a corpus file.\n This method implements the DM model with a projection (input) layer that is either the sum or mean of the context\n vectors, depending on the model's `dm_mean` configuration field.\n\n Called internally from :meth:`~gensim.models.doc2vec.Doc2Vec.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.doc2vec.Doc2Vec`\n The Doc2Vec model instance to train.\n corpus_file : str\n Path to corpus file.\n _cur_epoch : int\n Current epoch number. Used for calculating and decaying learning rate.\n work : np.ndarray\n Private working memory for each worker.\n neu1 : np.ndarray\n Private working memory for each worker.\n learn_doctags : bool, optional\n Whether the tag vectors should be updated.\n learn_words : bool, optional\n Whether the word vectors should be updated.\n learn_hidden : bool, optional\n Whether or not the weights of the hidden layer will be updated.\n word_vectors : numpy.ndarray, optional\n The vector representation for each word in the vocabulary. If None, these will be retrieved from the model.\n word_locks : numpy.ndarray, optional\n A learning lock factor for each weight in the hidden layer for words, value 0 completely blocks updates,\n a value of 1 allows to update word-vectors.\n doctag_vectors : numpy.ndarray, optional\n Vector representations of the tags. 
If None, these will be retrieved from the model.\n doctag_locks : numpy.ndarray, optional""\n The lock factors for each tag, same as `word_locks`, but for document-vectors.\n\n Returns\n -------\n int\n Number of words in the input document that were actually used for training.\n\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_18doc2vec_corpusfile_3d2v_train_epoch_dm = {"d2v_train_epoch_dm", (PyCFunction)__pyx_pw_6gensim_6models_18doc2vec_corpusfile_3d2v_train_epoch_dm, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_18doc2vec_corpusfile_2d2v_train_epoch_dm}; +static PyObject *__pyx_pw_6gensim_6models_18doc2vec_corpusfile_3d2v_train_epoch_dm(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_corpus_file = 0; + PyObject *__pyx_v_offset = 0; + PyObject *__pyx_v_start_doctag = 0; + PyObject *__pyx_v__cython_vocab = 0; + PyObject *__pyx_v__cur_epoch = 0; + PyObject *__pyx_v__expected_examples = 0; + PyObject *__pyx_v__expected_words = 0; + PyObject *__pyx_v_work = 0; + PyObject *__pyx_v_neu1 = 0; + PyObject *__pyx_v_docvecs_count = 0; + PyObject *__pyx_v_word_vectors = 0; + PyObject *__pyx_v_word_locks = 0; + PyObject *__pyx_v_learn_doctags = 0; + PyObject *__pyx_v_learn_words = 0; + PyObject *__pyx_v_learn_hidden = 0; + PyObject *__pyx_v_doctag_vectors = 0; + PyObject *__pyx_v_doctag_locks = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("d2v_train_epoch_dm (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_corpus_file,&__pyx_n_s_offset,&__pyx_n_s_start_doctag,&__pyx_n_s_cython_vocab,&__pyx_n_s_cur_epoch,&__pyx_n_s_expected_examples,&__pyx_n_s_expected_words,&__pyx_n_s_work,&__pyx_n_s_neu1,&__pyx_n_s_docvecs_count,&__pyx_n_s_word_vectors,&__pyx_n_s_word_locks,&__pyx_n_s_learn_doctags,&__pyx_n_s_learn_words,&__pyx_n_s_learn_hidden,&__pyx_n_s_doctag_vectors,&__pyx_n_s_doctag_locks,0}; + PyObject* values[18] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; + + /* "gensim/models/doc2vec_corpusfile.pyx":232 + * + * def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, # <<<<<<<<<<<<<< + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None): + * """Train distributed memory model ("PV-DM") by training on a corpus file. + */ + values[11] = ((PyObject *)Py_None); + values[12] = ((PyObject *)Py_None); + + /* "gensim/models/doc2vec_corpusfile.pyx":233 + * def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None): # <<<<<<<<<<<<<< + * """Train distributed memory model ("PV-DM") by training on a corpus file. 
+ * This method implements the DM model with a projection (input) layer that is either the sum or mean of the context + */ + values[13] = ((PyObject *)Py_True); + values[14] = ((PyObject *)Py_True); + values[15] = ((PyObject *)Py_True); + values[16] = ((PyObject *)Py_None); + values[17] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 18: values[17] = PyTuple_GET_ITEM(__pyx_args, 17); + CYTHON_FALLTHROUGH; + case 17: values[16] = PyTuple_GET_ITEM(__pyx_args, 16); + CYTHON_FALLTHROUGH; + case 16: values[15] = PyTuple_GET_ITEM(__pyx_args, 15); + CYTHON_FALLTHROUGH; + case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); + CYTHON_FALLTHROUGH; + case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); + CYTHON_FALLTHROUGH; + case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); + CYTHON_FALLTHROUGH; + case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); + CYTHON_FALLTHROUGH; + case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); + CYTHON_FALLTHROUGH; + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_corpus_file)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 1); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 2); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start_doctag)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 3); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cython_vocab)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 4); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cur_epoch)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 5); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_examples)) != 0)) kw_args--; + else { + 
__Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 6); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_words)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 7); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 8); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 9: + if (likely((values[9] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 9); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 10: + if (likely((values[10] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_docvecs_count)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, 10); __PYX_ERR(0, 231, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 11: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_vectors); + if (value) { values[11] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 12: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_locks); + if (value) { values[12] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 13: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_doctags); + if (value) { values[13] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 14: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_words); + if (value) { values[14] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 15: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_hidden); + if (value) { values[15] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 16: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_vectors); + if (value) { values[16] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 17: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_locks); + if (value) { values[17] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "d2v_train_epoch_dm") < 0)) __PYX_ERR(0, 231, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 18: values[17] = PyTuple_GET_ITEM(__pyx_args, 17); + CYTHON_FALLTHROUGH; + case 17: values[16] = PyTuple_GET_ITEM(__pyx_args, 16); + CYTHON_FALLTHROUGH; + case 16: values[15] = PyTuple_GET_ITEM(__pyx_args, 15); + CYTHON_FALLTHROUGH; + case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); + CYTHON_FALLTHROUGH; + case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); + CYTHON_FALLTHROUGH; + case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); + CYTHON_FALLTHROUGH; + case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); + CYTHON_FALLTHROUGH; + case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); + values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + 
values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_model = values[0]; + __pyx_v_corpus_file = values[1]; + __pyx_v_offset = values[2]; + __pyx_v_start_doctag = values[3]; + __pyx_v__cython_vocab = values[4]; + __pyx_v__cur_epoch = values[5]; + __pyx_v__expected_examples = values[6]; + __pyx_v__expected_words = values[7]; + __pyx_v_work = values[8]; + __pyx_v_neu1 = values[9]; + __pyx_v_docvecs_count = values[10]; + __pyx_v_word_vectors = values[11]; + __pyx_v_word_locks = values[12]; + __pyx_v_learn_doctags = values[13]; + __pyx_v_learn_words = values[14]; + __pyx_v_learn_hidden = values[15]; + __pyx_v_doctag_vectors = values[16]; + __pyx_v_doctag_locks = values[17]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm", 0, 11, 18, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 231, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.doc2vec_corpusfile.d2v_train_epoch_dm", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_18doc2vec_corpusfile_2d2v_train_epoch_dm(__pyx_self, __pyx_v_model, __pyx_v_corpus_file, __pyx_v_offset, __pyx_v_start_doctag, __pyx_v__cython_vocab, __pyx_v__cur_epoch, __pyx_v__expected_examples, __pyx_v__expected_words, __pyx_v_work, __pyx_v_neu1, __pyx_v_docvecs_count, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); + + /* "gensim/models/doc2vec_corpusfile.pyx":231 + * + * + * def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None): + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_18doc2vec_corpusfile_2d2v_train_epoch_dm(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v_start_doctag, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_docvecs_count, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { + struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig __pyx_v_c; + int __pyx_v_cur_epoch; + int __pyx_v_num_epochs; + int __pyx_v_expected_examples; + int __pyx_v_expected_words; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_start_alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_end_alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_input_stream = 0; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_vocab = 0; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + 
int __pyx_v_m; + int __pyx_v_document_len; + int __pyx_v_effective_words; + int __pyx_v_total_effective_words; + int __pyx_v_total_documents; + int __pyx_v_total_words; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_count; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_inv_count; + std::vector __pyx_v_doc_words; + int __pyx_v__doc_tag; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_5; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config __pyx_t_8; + int __pyx_t_9; + std::vector __pyx_t_10; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + int __pyx_t_15; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + __Pyx_RefNannySetupContext("d2v_train_epoch_dm", 0); + + /* "gensim/models/doc2vec_corpusfile.pyx":277 + * cdef Doc2VecConfig c + * + * cdef int cur_epoch = _cur_epoch # <<<<<<<<<<<<<< + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v__cur_epoch); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 277, __pyx_L1_error) + __pyx_v_cur_epoch = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":278 + * + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs # <<<<<<<<<<<<<< + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_epochs); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 278, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 278, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_num_epochs = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":279 + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) # <<<<<<<<<<<<<< + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + */ + __pyx_t_3 = (__pyx_v__expected_examples == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_examples); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 279, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_examples = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":280 + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) # <<<<<<<<<<<<<< + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + */ + __pyx_t_3 = (__pyx_v__expected_words == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_words); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 280, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + 
__pyx_v_expected_words = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":281 + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha # <<<<<<<<<<<<<< + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 281, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 281, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_start_alpha = __pyx_t_5; + + /* "gensim/models/doc2vec_corpusfile.pyx":282 + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha # <<<<<<<<<<<<<< + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_min_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 282, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_end_alpha = __pyx_t_5; + + /* "gensim/models/doc2vec_corpusfile.pyx":283 + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) # <<<<<<<<<<<<<< + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 283, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_6 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 283, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v__alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_6, __pyx_v_end_alpha, __pyx_v_cur_epoch, __pyx_v_num_epochs); + + /* "gensim/models/doc2vec_corpusfile.pyx":285 + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) # <<<<<<<<<<<<<< + * cdef CythonVocab vocab = _cython_vocab + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 285, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_corpus_file); + __Pyx_GIVEREF(__pyx_v_corpus_file); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_corpus_file); + __Pyx_INCREF(__pyx_v_offset); + __Pyx_GIVEREF(__pyx_v_offset); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_offset); + __pyx_t_7 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), __pyx_t_2, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 285, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_input_stream = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":286 + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, 
offset) + * cdef CythonVocab vocab = _cython_vocab # <<<<<<<<<<<<<< + * + * cdef int i, j, k, m, document_len + */ + if (!(likely(((__pyx_v__cython_vocab) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__cython_vocab, __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab))))) __PYX_ERR(0, 286, __pyx_L1_error) + __pyx_t_7 = __pyx_v__cython_vocab; + __Pyx_INCREF(__pyx_t_7); + __pyx_v_vocab = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":289 + * + * cdef int i, j, k, m, document_len + * cdef int effective_words = 0 # <<<<<<<<<<<<<< + * cdef int total_effective_words = 0, total_documents = 0, total_words = 0 + * cdef int sent_idx, idx_start, idx_end + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":290 + * cdef int i, j, k, m, document_len + * cdef int effective_words = 0 + * cdef int total_effective_words = 0, total_documents = 0, total_words = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * cdef REAL_t count, inv_count = 1.0 + */ + __pyx_v_total_effective_words = 0; + __pyx_v_total_documents = 0; + __pyx_v_total_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":292 + * cdef int total_effective_words = 0, total_documents = 0, total_words = 0 + * cdef int sent_idx, idx_start, idx_end + * cdef REAL_t count, inv_count = 1.0 # <<<<<<<<<<<<<< + * + * cdef vector[string] doc_words + */ + __pyx_v_inv_count = 1.0; + + /* "gensim/models/doc2vec_corpusfile.pyx":295 + * + * cdef vector[string] doc_words + * cdef int _doc_tag = start_doctag # <<<<<<<<<<<<<< + * + * init_d2v_config( + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_start_doctag); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 295, __pyx_L1_error) + __pyx_v__doc_tag = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":298 + * + * init_d2v_config( + * &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=False, # <<<<<<<<<<<<<< + * work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + * doctag_vectors=doctag_vectors, doctag_locks=doctag_locks, docvecs_count=docvecs_count) + */ + __pyx_t_7 = PyFloat_FromDouble(__pyx_v__alpha); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 298, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + + /* "gensim/models/doc2vec_corpusfile.pyx":297 + * cdef int _doc_tag = start_doctag + * + * init_d2v_config( # <<<<<<<<<<<<<< + * &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=False, + * work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + */ + __pyx_t_8.__pyx_n = 8; + __pyx_t_8.train_words = Py_False; + __pyx_t_8.work = __pyx_v_work; + __pyx_t_8.neu1 = __pyx_v_neu1; + __pyx_t_8.word_vectors = __pyx_v_word_vectors; + __pyx_t_8.word_locks = __pyx_v_word_locks; + __pyx_t_8.doctag_vectors = __pyx_v_doctag_vectors; + __pyx_t_8.doctag_locks = __pyx_v_doctag_locks; + __pyx_t_8.docvecs_count = __pyx_v_docvecs_count; + __pyx_t_2 = __pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config((&__pyx_v_c), __pyx_v_model, __pyx_t_7, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, &__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 297, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":303 + * + * # release GIL & train on the full corpus, document by document + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not 
(input_stream.is_eof() or total_words > expected_words / c.workers): + */ + { + #ifdef WITH_THREAD + PyThreadState *_save; + Py_UNBLOCK_THREADS + __Pyx_FastGIL_Remember(); + #endif + /*try:*/ { + + /* "gensim/models/doc2vec_corpusfile.pyx":304 + * # release GIL & train on the full corpus, document by document + * with nogil: + * input_stream.reset() # <<<<<<<<<<<<<< + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_words = 0 + */ + ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->reset(__pyx_v_input_stream, 0); + + /* "gensim/models/doc2vec_corpusfile.pyx":305 + * with nogil: + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): # <<<<<<<<<<<<<< + * effective_words = 0 + * + */ + while (1) { + __pyx_t_9 = (((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->is_eof(__pyx_v_input_stream, 0) != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_3 = __pyx_t_9; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_9 = ((__pyx_v_total_words > (__pyx_v_expected_words / __pyx_v_c.workers)) != 0); + __pyx_t_3 = __pyx_t_9; + __pyx_L8_bool_binop_done:; + __pyx_t_9 = ((!__pyx_t_3) != 0); + if (!__pyx_t_9) break; + + /* "gensim/models/doc2vec_corpusfile.pyx":306 + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_words = 0 # <<<<<<<<<<<<<< + * + * doc_words = input_stream.read_sentence() + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":308 + * effective_words = 0 + * + * doc_words = input_stream.read_sentence() # <<<<<<<<<<<<<< + * + * if doc_words.empty(): + */ + __pyx_t_10 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->read_sentence(__pyx_v_input_stream, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 308, __pyx_L4_error) + __pyx_v_doc_words = __pyx_t_10; + + /* "gensim/models/doc2vec_corpusfile.pyx":310 + * doc_words = input_stream.read_sentence() + * + * if doc_words.empty(): # <<<<<<<<<<<<<< + * continue + * + */ + __pyx_t_9 = (__pyx_v_doc_words.empty() != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":311 + * + * if doc_words.empty(): + * continue # <<<<<<<<<<<<<< + * + * prepare_c_structures_for_batch( + */ + goto __pyx_L6_continue; + + /* "gensim/models/doc2vec_corpusfile.pyx":310 + * doc_words = input_stream.read_sentence() + * + * if doc_words.empty(): # <<<<<<<<<<<<<< + * continue + * + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":315 + * prepare_c_structures_for_batch( + * doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, &c.next_random, + * vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, c.points, c.reduced_windows, # <<<<<<<<<<<<<< + * &document_len, c.train_words, c.docvecs_count, _doc_tag) + * + */ + __pyx_t_11 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_v_vocab->__pyx_vtab)->get_vocab_ptr(__pyx_v_vocab); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 315, __pyx_L4_error) + + /* "gensim/models/doc2vec_corpusfile.pyx":313 + * continue + * + * prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, &c.next_random, + * vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, c.points, c.reduced_windows, + */ + 
__pyx_f_6gensim_6models_18doc2vec_corpusfile_prepare_c_structures_for_batch(__pyx_v_doc_words, __pyx_v_c.sample, __pyx_v_c.hs, __pyx_v_c.window, (&__pyx_v_total_words), (&__pyx_v_effective_words), (&__pyx_v_c.next_random), __pyx_t_11, __pyx_v_c.indexes, __pyx_v_c.codelens, __pyx_v_c.codes, __pyx_v_c.points, __pyx_v_c.reduced_windows, (&__pyx_v_document_len), __pyx_v_c.train_words, __pyx_v_c.docvecs_count, __pyx_v__doc_tag); + + /* "gensim/models/doc2vec_corpusfile.pyx":318 + * &document_len, c.train_words, c.docvecs_count, _doc_tag) + * + * for i in range(document_len): # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] + * if j < 0: + */ + __pyx_t_1 = __pyx_v_document_len; + __pyx_t_4 = __pyx_t_1; + for (__pyx_t_12 = 0; __pyx_t_12 < __pyx_t_4; __pyx_t_12+=1) { + __pyx_v_i = __pyx_t_12; + + /* "gensim/models/doc2vec_corpusfile.pyx":319 + * + * for i in range(document_len): + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< + * if j < 0: + * j = 0 + */ + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/doc2vec_corpusfile.pyx":320 + * for i in range(document_len): + * j = i - c.window + c.reduced_windows[i] + * if j < 0: # <<<<<<<<<<<<<< + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] + */ + __pyx_t_9 = ((__pyx_v_j < 0) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":321 + * j = i - c.window + c.reduced_windows[i] + * if j < 0: + * j = 0 # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > document_len: + */ + __pyx_v_j = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":320 + * for i in range(document_len): + * j = i - c.window + c.reduced_windows[i] + * if j < 0: # <<<<<<<<<<<<<< + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":322 + * if j < 0: + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< + * if k > document_len: + * k = document_len + */ + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/doc2vec_corpusfile.pyx":323 + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > document_len: # <<<<<<<<<<<<<< + * k = document_len + * + */ + __pyx_t_9 = ((__pyx_v_k > __pyx_v_document_len) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":324 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > document_len: + * k = document_len # <<<<<<<<<<<<<< + * + * # compose l1 (in _neu1) & clear _work + */ + __pyx_v_k = __pyx_v_document_len; + + /* "gensim/models/doc2vec_corpusfile.pyx":323 + * j = 0 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > document_len: # <<<<<<<<<<<<<< + * k = document_len + * + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":327 + * + * # compose l1 (in _neu1) & clear _work + * memset(c.neu1, 0, c.layer1_size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< + * count = 0.0 + * for m in range(j, k): + */ + (void)(memset(__pyx_v_c.neu1, 0, (__pyx_v_c.layer1_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + + /* "gensim/models/doc2vec_corpusfile.pyx":328 + * # compose l1 (in _neu1) & clear _work + * memset(c.neu1, 0, c.layer1_size * cython.sizeof(REAL_t)) + * count = 0.0 # <<<<<<<<<<<<<< + * for m in range(j, k): + * if m == i: + */ + __pyx_v_count = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.0); + + /* "gensim/models/doc2vec_corpusfile.pyx":329 + * memset(c.neu1, 0, c.layer1_size * cython.sizeof(REAL_t)) + * count = 
0.0 + * for m in range(j, k): # <<<<<<<<<<<<<< + * if m == i: + * continue + */ + __pyx_t_13 = __pyx_v_k; + __pyx_t_14 = __pyx_t_13; + for (__pyx_t_15 = __pyx_v_j; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_m = __pyx_t_15; + + /* "gensim/models/doc2vec_corpusfile.pyx":330 + * count = 0.0 + * for m in range(j, k): + * if m == i: # <<<<<<<<<<<<<< + * continue + * else: + */ + __pyx_t_9 = ((__pyx_v_m == __pyx_v_i) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":331 + * for m in range(j, k): + * if m == i: + * continue # <<<<<<<<<<<<<< + * else: + * count += ONEF + */ + goto __pyx_L15_continue; + + /* "gensim/models/doc2vec_corpusfile.pyx":330 + * count = 0.0 + * for m in range(j, k): + * if m == i: # <<<<<<<<<<<<<< + * continue + * else: + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":333 + * continue + * else: + * count += ONEF # <<<<<<<<<<<<<< + * our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) + * + */ + /*else*/ { + __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_18doc2vec_corpusfile_ONEF); + + /* "gensim/models/doc2vec_corpusfile.pyx":334 + * else: + * count += ONEF + * our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) # <<<<<<<<<<<<<< + * + * if _doc_tag < c.docvecs_count: + */ + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.layer1_size), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONEF), (&(__pyx_v_c.word_vectors[((__pyx_v_c.indexes[__pyx_v_m]) * __pyx_v_c.layer1_size)])), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE), __pyx_v_c.neu1, (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE)); + } + __pyx_L15_continue:; + } + + /* "gensim/models/doc2vec_corpusfile.pyx":336 + * our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) + * + * if _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * count += ONEF + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE, c.neu1, &ONE) + */ + __pyx_t_9 = ((__pyx_v__doc_tag < __pyx_v_c.docvecs_count) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":337 + * + * if _doc_tag < c.docvecs_count: + * count += ONEF # <<<<<<<<<<<<<< + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE, c.neu1, &ONE) + * if count > (0.5): + */ + __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_18doc2vec_corpusfile_ONEF); + + /* "gensim/models/doc2vec_corpusfile.pyx":338 + * if _doc_tag < c.docvecs_count: + * count += ONEF + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE, c.neu1, &ONE) # <<<<<<<<<<<<<< + * if count > (0.5): + * inv_count = ONEF/count + */ + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.layer1_size), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONEF), (&(__pyx_v_c.doctag_vectors[(__pyx_v__doc_tag * __pyx_v_c.layer1_size)])), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE), __pyx_v_c.neu1, (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE)); + + /* "gensim/models/doc2vec_corpusfile.pyx":336 + * our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) + * + * if _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * count += ONEF + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE, c.neu1, &ONE) + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":339 + * count += ONEF + * our_saxpy(&c.layer1_size, &ONEF, 
&c.doctag_vectors[_doc_tag * c.layer1_size], &ONE, c.neu1, &ONE) + * if count > (0.5): # <<<<<<<<<<<<<< + * inv_count = ONEF/count + * if c.cbow_mean: + */ + __pyx_t_9 = ((__pyx_v_count > ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.5)) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":340 + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE, c.neu1, &ONE) + * if count > (0.5): + * inv_count = ONEF/count # <<<<<<<<<<<<<< + * if c.cbow_mean: + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + */ + __pyx_v_inv_count = (__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONEF / __pyx_v_count); + + /* "gensim/models/doc2vec_corpusfile.pyx":339 + * count += ONEF + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE, c.neu1, &ONE) + * if count > (0.5): # <<<<<<<<<<<<<< + * inv_count = ONEF/count + * if c.cbow_mean: + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":341 + * if count > (0.5): + * inv_count = ONEF/count + * if c.cbow_mean: # <<<<<<<<<<<<<< + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + */ + __pyx_t_9 = (__pyx_v_c.cbow_mean != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":342 + * inv_count = ONEF/count + * if c.cbow_mean: + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.hs: + */ + __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_c.layer1_size), (&__pyx_v_inv_count), __pyx_v_c.neu1, (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE)); + + /* "gensim/models/doc2vec_corpusfile.pyx":341 + * if count > (0.5): + * inv_count = ONEF/count + * if c.cbow_mean: # <<<<<<<<<<<<<< + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":343 + * if c.cbow_mean: + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error # <<<<<<<<<<<<<< + * if c.hs: + * fast_document_dm_hs( + */ + (void)(memset(__pyx_v_c.work, 0, (__pyx_v_c.layer1_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + + /* "gensim/models/doc2vec_corpusfile.pyx":344 + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) 
+ * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dm_hs( + * c.points[i], c.codes[i], c.codelens[i], c.neu1, + */ + __pyx_t_9 = (__pyx_v_c.hs != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":345 + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.hs: + * fast_document_dm_hs( # <<<<<<<<<<<<<< + * c.points[i], c.codes[i], c.codelens[i], c.neu1, + * c.syn1, c.alpha, c.work, c.layer1_size, c.learn_hidden) + */ + __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.neu1, __pyx_v_c.syn1, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.layer1_size, __pyx_v_c.learn_hidden); + + /* "gensim/models/doc2vec_corpusfile.pyx":344 + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dm_hs( + * c.points[i], c.codes[i], c.codelens[i], c.neu1, + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":349 + * c.syn1, c.alpha, c.work, c.layer1_size, c.learn_hidden) + * + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dm_neg( + * c.negative, c.cum_table, c.cum_table_len, c.next_random, c.neu1, + */ + __pyx_t_9 = (__pyx_v_c.negative != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":350 + * + * if c.negative: + * c.next_random = fast_document_dm_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.next_random, c.neu1, + * c.syn1neg, c.indexes[i], c.alpha, c.work, c.layer1_size, c.learn_hidden) + */ + __pyx_v_c.next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.next_random, __pyx_v_c.neu1, __pyx_v_c.syn1neg, (__pyx_v_c.indexes[__pyx_v_i]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.layer1_size, __pyx_v_c.learn_hidden); + + /* "gensim/models/doc2vec_corpusfile.pyx":349 + * c.syn1, c.alpha, c.work, c.layer1_size, c.learn_hidden) + * + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dm_neg( + * c.negative, c.cum_table, c.cum_table_len, c.next_random, c.neu1, + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":354 + * c.syn1neg, c.indexes[i], c.alpha, c.work, c.layer1_size, c.learn_hidden) + * + * if not c.cbow_mean: # <<<<<<<<<<<<<< + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) + * # apply accumulated error in work + */ + __pyx_t_9 = ((!(__pyx_v_c.cbow_mean != 0)) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":355 + * + * if not c.cbow_mean: + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< + * # apply accumulated error in work + * if c.learn_doctags and _doc_tag < c.docvecs_count: + */ + __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_c.layer1_size), (&__pyx_v_inv_count), __pyx_v_c.work, (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE)); + + /* "gensim/models/doc2vec_corpusfile.pyx":354 + * c.syn1neg, c.indexes[i], c.alpha, c.work, c.layer1_size, c.learn_hidden) + * + * if not c.cbow_mean: # <<<<<<<<<<<<<< + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) 
+ * # apply accumulated error in work + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":357 + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) + * # apply accumulated error in work + * if c.learn_doctags and _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * our_saxpy(&c.layer1_size, &c.doctag_locks[_doc_tag], c.work, + * &ONE, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE) + */ + __pyx_t_3 = (__pyx_v_c.learn_doctags != 0); + if (__pyx_t_3) { + } else { + __pyx_t_9 = __pyx_t_3; + goto __pyx_L25_bool_binop_done; + } + __pyx_t_3 = ((__pyx_v__doc_tag < __pyx_v_c.docvecs_count) != 0); + __pyx_t_9 = __pyx_t_3; + __pyx_L25_bool_binop_done:; + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":358 + * # apply accumulated error in work + * if c.learn_doctags and _doc_tag < c.docvecs_count: + * our_saxpy(&c.layer1_size, &c.doctag_locks[_doc_tag], c.work, # <<<<<<<<<<<<<< + * &ONE, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE) + * if c.learn_words: + */ + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.layer1_size), (&(__pyx_v_c.doctag_locks[__pyx_v__doc_tag])), __pyx_v_c.work, (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE), (&(__pyx_v_c.doctag_vectors[(__pyx_v__doc_tag * __pyx_v_c.layer1_size)])), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE)); + + /* "gensim/models/doc2vec_corpusfile.pyx":357 + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) + * # apply accumulated error in work + * if c.learn_doctags and _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * our_saxpy(&c.layer1_size, &c.doctag_locks[_doc_tag], c.work, + * &ONE, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE) + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":360 + * our_saxpy(&c.layer1_size, &c.doctag_locks[_doc_tag], c.work, + * &ONE, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE) + * if c.learn_words: # <<<<<<<<<<<<<< + * for m in range(j, k): + * if m == i: + */ + __pyx_t_9 = (__pyx_v_c.learn_words != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":361 + * &ONE, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE) + * if c.learn_words: + * for m in range(j, k): # <<<<<<<<<<<<<< + * if m == i: + * continue + */ + __pyx_t_13 = __pyx_v_k; + __pyx_t_14 = __pyx_t_13; + for (__pyx_t_15 = __pyx_v_j; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_m = __pyx_t_15; + + /* "gensim/models/doc2vec_corpusfile.pyx":362 + * if c.learn_words: + * for m in range(j, k): + * if m == i: # <<<<<<<<<<<<<< + * continue + * else: + */ + __pyx_t_9 = ((__pyx_v_m == __pyx_v_i) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":363 + * for m in range(j, k): + * if m == i: + * continue # <<<<<<<<<<<<<< + * else: + * our_saxpy(&c.layer1_size, &c.word_locks[c.indexes[m]], c.work, &ONE, + */ + goto __pyx_L28_continue; + + /* "gensim/models/doc2vec_corpusfile.pyx":362 + * if c.learn_words: + * for m in range(j, k): + * if m == i: # <<<<<<<<<<<<<< + * continue + * else: + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":365 + * continue + * else: + * our_saxpy(&c.layer1_size, &c.word_locks[c.indexes[m]], c.work, &ONE, # <<<<<<<<<<<<<< + * &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE) + * + */ + /*else*/ { + + /* "gensim/models/doc2vec_corpusfile.pyx":366 + * else: + * our_saxpy(&c.layer1_size, &c.word_locks[c.indexes[m]], c.work, &ONE, + * &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE) # <<<<<<<<<<<<<< + * + * total_documents += 1 + */ + 
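/* apply the accumulated error in c.work to each context word vector, scaled by that word's lock factor (word_locks: 0.0 blocks the update, 1.0 applies it in full) */ + 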
__pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.layer1_size), (&(__pyx_v_c.word_locks[(__pyx_v_c.indexes[__pyx_v_m])])), __pyx_v_c.work, (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE), (&(__pyx_v_c.word_vectors[((__pyx_v_c.indexes[__pyx_v_m]) * __pyx_v_c.layer1_size)])), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE)); + } + __pyx_L28_continue:; + } + + /* "gensim/models/doc2vec_corpusfile.pyx":360 + * our_saxpy(&c.layer1_size, &c.doctag_locks[_doc_tag], c.work, + * &ONE, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE) + * if c.learn_words: # <<<<<<<<<<<<<< + * for m in range(j, k): + * if m == i: + */ + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":368 + * &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE) + * + * total_documents += 1 # <<<<<<<<<<<<<< + * total_effective_words += effective_words + * _doc_tag += 1 + */ + __pyx_v_total_documents = (__pyx_v_total_documents + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":369 + * + * total_documents += 1 + * total_effective_words += effective_words # <<<<<<<<<<<<<< + * _doc_tag += 1 + * + */ + __pyx_v_total_effective_words = (__pyx_v_total_effective_words + __pyx_v_effective_words); + + /* "gensim/models/doc2vec_corpusfile.pyx":370 + * total_documents += 1 + * total_effective_words += effective_words + * _doc_tag += 1 # <<<<<<<<<<<<<< + * + * c.alpha = get_next_alpha(start_alpha, end_alpha, total_documents, total_words, expected_examples, + */ + __pyx_v__doc_tag = (__pyx_v__doc_tag + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":372 + * _doc_tag += 1 + * + * c.alpha = get_next_alpha(start_alpha, end_alpha, total_documents, total_words, expected_examples, # <<<<<<<<<<<<<< + * expected_words, cur_epoch, num_epochs) + * + */ + __pyx_v_c.alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_v_start_alpha, __pyx_v_end_alpha, __pyx_v_total_documents, __pyx_v_total_words, __pyx_v_expected_examples, __pyx_v_expected_words, __pyx_v_cur_epoch, __pyx_v_num_epochs); + __pyx_L6_continue:; + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":303 + * + * # release GIL & train on the full corpus, document by document + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + /*finally:*/ { + /*normal exit:*/{ + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L5; + } + __pyx_L4_error: { + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L1_error; + } + __pyx_L5:; + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":375 + * expected_words, cur_epoch, num_epochs) + * + * return total_documents, total_effective_words, total_words # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_total_documents); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_total_effective_words); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_16 = __Pyx_PyInt_From_int(__pyx_v_total_words); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __pyx_t_17 = PyTuple_New(3); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_17, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_17, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_16); + 
PyTuple_SET_ITEM(__pyx_t_17, 2, __pyx_t_16); + __pyx_t_2 = 0; + __pyx_t_7 = 0; + __pyx_t_16 = 0; + __pyx_r = __pyx_t_17; + __pyx_t_17 = 0; + goto __pyx_L0; + + /* "gensim/models/doc2vec_corpusfile.pyx":231 + * + * + * def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_XDECREF(__pyx_t_17); + __Pyx_AddTraceback("gensim.models.doc2vec_corpusfile.d2v_train_epoch_dm", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_input_stream); + __Pyx_XDECREF((PyObject *)__pyx_v_vocab); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/doc2vec_corpusfile.pyx":378 + * + * + * def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_18doc2vec_corpusfile_5d2v_train_epoch_dm_concat(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_18doc2vec_corpusfile_4d2v_train_epoch_dm_concat[] = "d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None)\nTrain distributed memory model (\"PV-DM\") by training on a corpus file, using a concatenation of the context\n window word vectors (rather than a sum or average).\n This might be slower since the input at each batch will be significantly larger.\n\n Called internally from :meth:`~gensim.models.doc2vec.Doc2Vec.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.doc2vec.Doc2Vec`\n The FastText model instance to train.\n corpus_file : str\n Path to corpus file.\n _cur_epoch : int\n Current epoch number. Used for calculating and decaying learning rate.\n work : np.ndarray\n Private working memory for each worker.\n neu1 : np.ndarray\n Private working memory for each worker.\n learn_doctags : bool, optional\n Whether the tag vectors should be updated.\n learn_words : bool, optional\n Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both**\n `learn_words` and `train_words` are set to True.\n learn_hidden : bool, optional\n Whether or not the weights of the hidden layer will be updated.\n word_vectors : numpy.ndarray, optional\n The vector representation for each word in the vocabulary. If None, these will be retrieved from the model.\n word_locks : numpy.ndarray, optional\n A learning lock factor for each weight in the hidden layer for words, value 0 completely blocks updates,\n a value of 1 allows to update word-vectors.\n doctag_vectors : numpy.ndarray, optional\n Vector representations of the tags. 
If None, these will be retrieved from the model.\n doctag_locks : numpy.ndarray, optional""\n The lock factors for each tag, same as `word_locks`, but for document-vectors.\n\n Returns\n -------\n int\n Number of words in the input document that were actually used for training.\n\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_18doc2vec_corpusfile_5d2v_train_epoch_dm_concat = {"d2v_train_epoch_dm_concat", (PyCFunction)__pyx_pw_6gensim_6models_18doc2vec_corpusfile_5d2v_train_epoch_dm_concat, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_18doc2vec_corpusfile_4d2v_train_epoch_dm_concat}; +static PyObject *__pyx_pw_6gensim_6models_18doc2vec_corpusfile_5d2v_train_epoch_dm_concat(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_corpus_file = 0; + PyObject *__pyx_v_offset = 0; + PyObject *__pyx_v_start_doctag = 0; + PyObject *__pyx_v__cython_vocab = 0; + PyObject *__pyx_v__cur_epoch = 0; + PyObject *__pyx_v__expected_examples = 0; + PyObject *__pyx_v__expected_words = 0; + PyObject *__pyx_v_work = 0; + PyObject *__pyx_v_neu1 = 0; + PyObject *__pyx_v_docvecs_count = 0; + PyObject *__pyx_v_word_vectors = 0; + PyObject *__pyx_v_word_locks = 0; + PyObject *__pyx_v_learn_doctags = 0; + PyObject *__pyx_v_learn_words = 0; + PyObject *__pyx_v_learn_hidden = 0; + PyObject *__pyx_v_doctag_vectors = 0; + PyObject *__pyx_v_doctag_locks = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("d2v_train_epoch_dm_concat (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_corpus_file,&__pyx_n_s_offset,&__pyx_n_s_start_doctag,&__pyx_n_s_cython_vocab,&__pyx_n_s_cur_epoch,&__pyx_n_s_expected_examples,&__pyx_n_s_expected_words,&__pyx_n_s_work,&__pyx_n_s_neu1,&__pyx_n_s_docvecs_count,&__pyx_n_s_word_vectors,&__pyx_n_s_word_locks,&__pyx_n_s_learn_doctags,&__pyx_n_s_learn_words,&__pyx_n_s_learn_hidden,&__pyx_n_s_doctag_vectors,&__pyx_n_s_doctag_locks,0}; + PyObject* values[18] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; + + /* "gensim/models/doc2vec_corpusfile.pyx":379 + * + * def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, # <<<<<<<<<<<<<< + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, + * doctag_locks=None): + */ + values[11] = ((PyObject *)Py_None); + values[12] = ((PyObject *)Py_None); + + /* "gensim/models/doc2vec_corpusfile.pyx":380 + * def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, # <<<<<<<<<<<<<< + * doctag_locks=None): + * """Train distributed memory model ("PV-DM") by training on a corpus file, using a concatenation of the context + */ + values[13] = ((PyObject *)Py_True); + values[14] = ((PyObject *)Py_True); + values[15] = ((PyObject *)Py_True); + values[16] = ((PyObject *)Py_None); + + /* "gensim/models/doc2vec_corpusfile.pyx":381 + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, + * doctag_locks=None): # <<<<<<<<<<<<<< + * """Train distributed memory model ("PV-DM") by training on a corpus file, using a concatenation of the 
context + * window word vectors (rather than a sum or average). + */ + values[17] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 18: values[17] = PyTuple_GET_ITEM(__pyx_args, 17); + CYTHON_FALLTHROUGH; + case 17: values[16] = PyTuple_GET_ITEM(__pyx_args, 16); + CYTHON_FALLTHROUGH; + case 16: values[15] = PyTuple_GET_ITEM(__pyx_args, 15); + CYTHON_FALLTHROUGH; + case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); + CYTHON_FALLTHROUGH; + case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); + CYTHON_FALLTHROUGH; + case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); + CYTHON_FALLTHROUGH; + case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); + CYTHON_FALLTHROUGH; + case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); + CYTHON_FALLTHROUGH; + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_corpus_file)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 1); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 2); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start_doctag)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 3); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cython_vocab)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 4); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cur_epoch)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 5); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_examples)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 6); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, 
__pyx_n_s_expected_words)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 7); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 8); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 9: + if (likely((values[9] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 9); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 10: + if (likely((values[10] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_docvecs_count)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, 10); __PYX_ERR(0, 378, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 11: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_vectors); + if (value) { values[11] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 12: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_locks); + if (value) { values[12] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 13: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_doctags); + if (value) { values[13] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 14: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_words); + if (value) { values[14] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 15: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_hidden); + if (value) { values[15] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 16: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_vectors); + if (value) { values[16] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 17: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_locks); + if (value) { values[17] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "d2v_train_epoch_dm_concat") < 0)) __PYX_ERR(0, 378, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 18: values[17] = PyTuple_GET_ITEM(__pyx_args, 17); + CYTHON_FALLTHROUGH; + case 17: values[16] = PyTuple_GET_ITEM(__pyx_args, 16); + CYTHON_FALLTHROUGH; + case 16: values[15] = PyTuple_GET_ITEM(__pyx_args, 15); + CYTHON_FALLTHROUGH; + case 15: values[14] = PyTuple_GET_ITEM(__pyx_args, 14); + CYTHON_FALLTHROUGH; + case 14: values[13] = PyTuple_GET_ITEM(__pyx_args, 13); + CYTHON_FALLTHROUGH; + case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); + CYTHON_FALLTHROUGH; + case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); + CYTHON_FALLTHROUGH; + case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); + values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[0] = 
PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_model = values[0]; + __pyx_v_corpus_file = values[1]; + __pyx_v_offset = values[2]; + __pyx_v_start_doctag = values[3]; + __pyx_v__cython_vocab = values[4]; + __pyx_v__cur_epoch = values[5]; + __pyx_v__expected_examples = values[6]; + __pyx_v__expected_words = values[7]; + __pyx_v_work = values[8]; + __pyx_v_neu1 = values[9]; + __pyx_v_docvecs_count = values[10]; + __pyx_v_word_vectors = values[11]; + __pyx_v_word_locks = values[12]; + __pyx_v_learn_doctags = values[13]; + __pyx_v_learn_words = values[14]; + __pyx_v_learn_hidden = values[15]; + __pyx_v_doctag_vectors = values[16]; + __pyx_v_doctag_locks = values[17]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("d2v_train_epoch_dm_concat", 0, 11, 18, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 378, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.doc2vec_corpusfile.d2v_train_epoch_dm_concat", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_18doc2vec_corpusfile_4d2v_train_epoch_dm_concat(__pyx_self, __pyx_v_model, __pyx_v_corpus_file, __pyx_v_offset, __pyx_v_start_doctag, __pyx_v__cython_vocab, __pyx_v__cur_epoch, __pyx_v__expected_examples, __pyx_v__expected_words, __pyx_v_work, __pyx_v_neu1, __pyx_v_docvecs_count, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); + + /* "gensim/models/doc2vec_corpusfile.pyx":378 + * + * + * def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_18doc2vec_corpusfile_4d2v_train_epoch_dm_concat(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v_start_doctag, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_docvecs_count, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { + struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig __pyx_v_c; + int __pyx_v_cur_epoch; + int __pyx_v_num_epochs; + int __pyx_v_expected_examples; + int __pyx_v_expected_words; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_start_alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_end_alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_input_stream = 0; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_vocab = 0; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_m; + int __pyx_v_n; + int __pyx_v_document_len; + int __pyx_v_effective_words; + int __pyx_v_total_effective_words; + int 
__pyx_v_total_documents; + int __pyx_v_total_words; + std::vector<std::string> __pyx_v_doc_words; + int __pyx_v__doc_tag; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_5; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config __pyx_t_8; + int __pyx_t_9; + std::vector<std::string> __pyx_t_10; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + int __pyx_t_15; + int __pyx_t_16; + long __pyx_t_17; + long __pyx_t_18; + PyObject *__pyx_t_19 = NULL; + PyObject *__pyx_t_20 = NULL; + __Pyx_RefNannySetupContext("d2v_train_epoch_dm_concat", 0); + + /* "gensim/models/doc2vec_corpusfile.pyx":425 + * cdef Doc2VecConfig c + * + * cdef int cur_epoch = _cur_epoch # <<<<<<<<<<<<<< + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v__cur_epoch); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 425, __pyx_L1_error) + __pyx_v_cur_epoch = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":426 + * + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs # <<<<<<<<<<<<<< + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_epochs); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 426, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 426, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_num_epochs = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":427 + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) # <<<<<<<<<<<<<< + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + */ + __pyx_t_3 = (__pyx_v__expected_examples == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_examples); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 427, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_examples = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":428 + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) # <<<<<<<<<<<<<< + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + */ + __pyx_t_3 = (__pyx_v__expected_words == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_words); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 428, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_words = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":429 + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int 
expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha # <<<<<<<<<<<<<< + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_start_alpha = __pyx_t_5; + + /* "gensim/models/doc2vec_corpusfile.pyx":430 + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha # <<<<<<<<<<<<<< + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_min_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 430, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 430, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_end_alpha = __pyx_t_5; + + /* "gensim/models/doc2vec_corpusfile.pyx":431 + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) # <<<<<<<<<<<<<< + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_6 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v__alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_6, __pyx_v_end_alpha, __pyx_v_cur_epoch, __pyx_v_num_epochs); + + /* "gensim/models/doc2vec_corpusfile.pyx":433 + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) # <<<<<<<<<<<<<< + * cdef CythonVocab vocab = _cython_vocab + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_corpus_file); + __Pyx_GIVEREF(__pyx_v_corpus_file); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_corpus_file); + __Pyx_INCREF(__pyx_v_offset); + __Pyx_GIVEREF(__pyx_v_offset); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_offset); + __pyx_t_7 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), __pyx_t_2, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_input_stream = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":434 + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + * cdef CythonVocab vocab = _cython_vocab # <<<<<<<<<<<<<< + * + * cdef int i, j, k, m, n, document_len + */ + if (!(likely(((__pyx_v__cython_vocab) == Py_None) || 
likely(__Pyx_TypeTest(__pyx_v__cython_vocab, __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab))))) __PYX_ERR(0, 434, __pyx_L1_error) + __pyx_t_7 = __pyx_v__cython_vocab; + __Pyx_INCREF(__pyx_t_7); + __pyx_v_vocab = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":437 + * + * cdef int i, j, k, m, n, document_len + * cdef int effective_words = 0 # <<<<<<<<<<<<<< + * cdef int total_effective_words = 0, total_documents = 0, total_words = 0 + * cdef int sent_idx, idx_start, idx_end + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":438 + * cdef int i, j, k, m, n, document_len + * cdef int effective_words = 0 + * cdef int total_effective_words = 0, total_documents = 0, total_words = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * + */ + __pyx_v_total_effective_words = 0; + __pyx_v_total_documents = 0; + __pyx_v_total_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":442 + * + * cdef vector[string] doc_words + * cdef int _doc_tag = start_doctag # <<<<<<<<<<<<<< + * + * init_d2v_config( + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_start_doctag); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 442, __pyx_L1_error) + __pyx_v__doc_tag = __pyx_t_1; + + /* "gensim/models/doc2vec_corpusfile.pyx":445 + * + * init_d2v_config( + * &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=False, # <<<<<<<<<<<<<< + * work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + * doctag_vectors=doctag_vectors, doctag_locks=doctag_locks, docvecs_count=docvecs_count) + */ + __pyx_t_7 = PyFloat_FromDouble(__pyx_v__alpha); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 445, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + + /* "gensim/models/doc2vec_corpusfile.pyx":444 + * cdef int _doc_tag = start_doctag + * + * init_d2v_config( # <<<<<<<<<<<<<< + * &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=False, + * work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + */ + __pyx_t_8.__pyx_n = 8; + __pyx_t_8.train_words = Py_False; + __pyx_t_8.work = __pyx_v_work; + __pyx_t_8.neu1 = __pyx_v_neu1; + __pyx_t_8.word_vectors = __pyx_v_word_vectors; + __pyx_t_8.word_locks = __pyx_v_word_locks; + __pyx_t_8.doctag_vectors = __pyx_v_doctag_vectors; + __pyx_t_8.doctag_locks = __pyx_v_doctag_locks; + __pyx_t_8.docvecs_count = __pyx_v_docvecs_count; + __pyx_t_2 = __pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config((&__pyx_v_c), __pyx_v_model, __pyx_t_7, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, &__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 444, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":450 + * + * # release GIL & train on the full corpus, document by document + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + { + #ifdef WITH_THREAD + PyThreadState *_save; + Py_UNBLOCK_THREADS + __Pyx_FastGIL_Remember(); + #endif + /*try:*/ { + + /* "gensim/models/doc2vec_corpusfile.pyx":451 + * # release GIL & train on the full corpus, document by document + * with nogil: + * input_stream.reset() # <<<<<<<<<<<<<< + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_words = 0 + */ + ((struct 
__pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->reset(__pyx_v_input_stream, 0); + + /* "gensim/models/doc2vec_corpusfile.pyx":452 + * with nogil: + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): # <<<<<<<<<<<<<< + * effective_words = 0 + * + */ + while (1) { + __pyx_t_9 = (((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->is_eof(__pyx_v_input_stream, 0) != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_3 = __pyx_t_9; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_9 = ((__pyx_v_total_words > (__pyx_v_expected_words / __pyx_v_c.workers)) != 0); + __pyx_t_3 = __pyx_t_9; + __pyx_L8_bool_binop_done:; + __pyx_t_9 = ((!__pyx_t_3) != 0); + if (!__pyx_t_9) break; + + /* "gensim/models/doc2vec_corpusfile.pyx":453 + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_words = 0 # <<<<<<<<<<<<<< + * + * doc_words = input_stream.read_sentence() + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":455 + * effective_words = 0 + * + * doc_words = input_stream.read_sentence() # <<<<<<<<<<<<<< + * _doc_tag = total_documents + * c.doctag_len = _doc_tag < c.docvecs_count + */ + __pyx_t_10 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->read_sentence(__pyx_v_input_stream, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 455, __pyx_L4_error) + __pyx_v_doc_words = __pyx_t_10; + + /* "gensim/models/doc2vec_corpusfile.pyx":456 + * + * doc_words = input_stream.read_sentence() + * _doc_tag = total_documents # <<<<<<<<<<<<<< + * c.doctag_len = _doc_tag < c.docvecs_count + * + */ + __pyx_v__doc_tag = __pyx_v_total_documents; + + /* "gensim/models/doc2vec_corpusfile.pyx":457 + * doc_words = input_stream.read_sentence() + * _doc_tag = total_documents + * c.doctag_len = _doc_tag < c.docvecs_count # <<<<<<<<<<<<<< + * + * # skip doc either empty or without expected number of tags + */ + __pyx_v_c.doctag_len = (__pyx_v__doc_tag < __pyx_v_c.docvecs_count); + + /* "gensim/models/doc2vec_corpusfile.pyx":460 + * + * # skip doc either empty or without expected number of tags + * if doc_words.empty() or c.expected_doctag_len != c.doctag_len: # <<<<<<<<<<<<<< + * continue + * + */ + __pyx_t_3 = (__pyx_v_doc_words.empty() != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_9 = __pyx_t_3; + goto __pyx_L11_bool_binop_done; + } + __pyx_t_3 = ((__pyx_v_c.expected_doctag_len != __pyx_v_c.doctag_len) != 0); + __pyx_t_9 = __pyx_t_3; + __pyx_L11_bool_binop_done:; + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":461 + * # skip doc either empty or without expected number of tags + * if doc_words.empty() or c.expected_doctag_len != c.doctag_len: + * continue # <<<<<<<<<<<<<< + * + * prepare_c_structures_for_batch( + */ + goto __pyx_L6_continue; + + /* "gensim/models/doc2vec_corpusfile.pyx":460 + * + * # skip doc either empty or without expected number of tags + * if doc_words.empty() or c.expected_doctag_len != c.doctag_len: # <<<<<<<<<<<<<< + * continue + * + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":465 + * prepare_c_structures_for_batch( + * doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, + * &c.next_random, vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, # <<<<<<<<<<<<<< + * c.points, NULL, &document_len, c.train_words, 
c.docvecs_count, _doc_tag) + * + */ + __pyx_t_11 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_v_vocab->__pyx_vtab)->get_vocab_ptr(__pyx_v_vocab); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 465, __pyx_L4_error) + + /* "gensim/models/doc2vec_corpusfile.pyx":463 + * continue + * + * prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, + * &c.next_random, vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, + */ + __pyx_f_6gensim_6models_18doc2vec_corpusfile_prepare_c_structures_for_batch(__pyx_v_doc_words, __pyx_v_c.sample, __pyx_v_c.hs, __pyx_v_c.window, (&__pyx_v_total_words), (&__pyx_v_effective_words), (&__pyx_v_c.next_random), __pyx_t_11, __pyx_v_c.indexes, __pyx_v_c.codelens, __pyx_v_c.codes, __pyx_v_c.points, NULL, (&__pyx_v_document_len), __pyx_v_c.train_words, __pyx_v_c.docvecs_count, __pyx_v__doc_tag); + + /* "gensim/models/doc2vec_corpusfile.pyx":468 + * c.points, NULL, &document_len, c.train_words, c.docvecs_count, _doc_tag) + * + * for i in range(document_len): # <<<<<<<<<<<<<< + * j = i - c.window # negative OK: will pad with null word + * k = i + c.window + 1 # past document end OK: will pad with null word + */ + __pyx_t_1 = __pyx_v_document_len; + __pyx_t_4 = __pyx_t_1; + for (__pyx_t_12 = 0; __pyx_t_12 < __pyx_t_4; __pyx_t_12+=1) { + __pyx_v_i = __pyx_t_12; + + /* "gensim/models/doc2vec_corpusfile.pyx":469 + * + * for i in range(document_len): + * j = i - c.window # negative OK: will pad with null word # <<<<<<<<<<<<<< + * k = i + c.window + 1 # past document end OK: will pad with null word + * + */ + __pyx_v_j = (__pyx_v_i - __pyx_v_c.window); + + /* "gensim/models/doc2vec_corpusfile.pyx":470 + * for i in range(document_len): + * j = i - c.window # negative OK: will pad with null word + * k = i + c.window + 1 # past document end OK: will pad with null word # <<<<<<<<<<<<<< + * + * # compose l1 & clear work + */ + __pyx_v_k = ((__pyx_v_i + __pyx_v_c.window) + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":473 + * + * # compose l1 & clear work + * if _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * # doc vector(s) + * memcpy(&c.neu1[0], &c.doctag_vectors[_doc_tag * c.vector_size], + */ + __pyx_t_9 = ((__pyx_v__doc_tag < __pyx_v_c.docvecs_count) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":475 + * if _doc_tag < c.docvecs_count: + * # doc vector(s) + * memcpy(&c.neu1[0], &c.doctag_vectors[_doc_tag * c.vector_size], # <<<<<<<<<<<<<< + * c.vector_size * cython.sizeof(REAL_t)) + * n = 0 + */ + (void)(memcpy((&(__pyx_v_c.neu1[0])), (&(__pyx_v_c.doctag_vectors[(__pyx_v__doc_tag * __pyx_v_c.vector_size)])), (__pyx_v_c.vector_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + + /* "gensim/models/doc2vec_corpusfile.pyx":473 + * + * # compose l1 & clear work + * if _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * # doc vector(s) + * memcpy(&c.neu1[0], &c.doctag_vectors[_doc_tag * c.vector_size], + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":477 + * memcpy(&c.neu1[0], &c.doctag_vectors[_doc_tag * c.vector_size], + * c.vector_size * cython.sizeof(REAL_t)) + * n = 0 # <<<<<<<<<<<<<< + * for m in range(j, k): + * # word vectors in window + */ + __pyx_v_n = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":478 + * c.vector_size * cython.sizeof(REAL_t)) + * n = 0 + * for m in range(j, k): # <<<<<<<<<<<<<< + * # word vectors in window + * if m == i: + */ + __pyx_t_13 = __pyx_v_k; + __pyx_t_14 = __pyx_t_13; + for 
(__pyx_t_15 = __pyx_v_j; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_m = __pyx_t_15; + + /* "gensim/models/doc2vec_corpusfile.pyx":480 + * for m in range(j, k): + * # word vectors in window + * if m == i: # <<<<<<<<<<<<<< + * continue + * if m < 0 or m >= document_len: + */ + __pyx_t_9 = ((__pyx_v_m == __pyx_v_i) != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":481 + * # word vectors in window + * if m == i: + * continue # <<<<<<<<<<<<<< + * if m < 0 or m >= document_len: + * c.window_indexes[n] = c.null_word_index + */ + goto __pyx_L16_continue; + + /* "gensim/models/doc2vec_corpusfile.pyx":480 + * for m in range(j, k): + * # word vectors in window + * if m == i: # <<<<<<<<<<<<<< + * continue + * if m < 0 or m >= document_len: + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":482 + * if m == i: + * continue + * if m < 0 or m >= document_len: # <<<<<<<<<<<<<< + * c.window_indexes[n] = c.null_word_index + * else: + */ + __pyx_t_3 = ((__pyx_v_m < 0) != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_9 = __pyx_t_3; + goto __pyx_L20_bool_binop_done; + } + __pyx_t_3 = ((__pyx_v_m >= __pyx_v_document_len) != 0); + __pyx_t_9 = __pyx_t_3; + __pyx_L20_bool_binop_done:; + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":483 + * continue + * if m < 0 or m >= document_len: + * c.window_indexes[n] = c.null_word_index # <<<<<<<<<<<<<< + * else: + * c.window_indexes[n] = c.indexes[m] + */ + __pyx_t_16 = __pyx_v_c.null_word_index; + (__pyx_v_c.window_indexes[__pyx_v_n]) = __pyx_t_16; + + /* "gensim/models/doc2vec_corpusfile.pyx":482 + * if m == i: + * continue + * if m < 0 or m >= document_len: # <<<<<<<<<<<<<< + * c.window_indexes[n] = c.null_word_index + * else: + */ + goto __pyx_L19; + } + + /* "gensim/models/doc2vec_corpusfile.pyx":485 + * c.window_indexes[n] = c.null_word_index + * else: + * c.window_indexes[n] = c.indexes[m] # <<<<<<<<<<<<<< + * n += 1 + * for m in range(2 * c.window): + */ + /*else*/ { + (__pyx_v_c.window_indexes[__pyx_v_n]) = (__pyx_v_c.indexes[__pyx_v_m]); + } + __pyx_L19:; + + /* "gensim/models/doc2vec_corpusfile.pyx":486 + * else: + * c.window_indexes[n] = c.indexes[m] + * n += 1 # <<<<<<<<<<<<<< + * for m in range(2 * c.window): + * memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], + */ + __pyx_v_n = (__pyx_v_n + 1); + __pyx_L16_continue:; + } + + /* "gensim/models/doc2vec_corpusfile.pyx":487 + * c.window_indexes[n] = c.indexes[m] + * n += 1 + * for m in range(2 * c.window): # <<<<<<<<<<<<<< + * memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], + * c.vector_size * cython.sizeof(REAL_t)) + */ + __pyx_t_17 = (2 * __pyx_v_c.window); + __pyx_t_18 = __pyx_t_17; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_18; __pyx_t_13+=1) { + __pyx_v_m = __pyx_t_13; + + /* "gensim/models/doc2vec_corpusfile.pyx":488 + * n += 1 + * for m in range(2 * c.window): + * memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], # <<<<<<<<<<<<<< + * c.vector_size * cython.sizeof(REAL_t)) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + */ + (void)(memcpy((&(__pyx_v_c.neu1[((__pyx_v_c.doctag_len + __pyx_v_m) * __pyx_v_c.vector_size)])), (&(__pyx_v_c.word_vectors[((__pyx_v_c.window_indexes[__pyx_v_m]) * __pyx_v_c.vector_size)])), (__pyx_v_c.vector_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + } + + /* 
"gensim/models/doc2vec_corpusfile.pyx":490 + * memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], + * c.vector_size * cython.sizeof(REAL_t)) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error # <<<<<<<<<<<<<< + * + * if c.hs: + */ + (void)(memset(__pyx_v_c.work, 0, (__pyx_v_c.layer1_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + + /* "gensim/models/doc2vec_corpusfile.pyx":492 + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dmc_hs( + * c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, + */ + __pyx_t_9 = (__pyx_v_c.hs != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":493 + * + * if c.hs: + * fast_document_dmc_hs( # <<<<<<<<<<<<<< + * c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, + * c.alpha, c.work, c.layer1_size, c.vector_size, c.learn_hidden) + */ + __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.neu1, __pyx_v_c.syn1, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.layer1_size, __pyx_v_c.vector_size, __pyx_v_c.learn_hidden); + + /* "gensim/models/doc2vec_corpusfile.pyx":492 + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dmc_hs( + * c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":497 + * c.alpha, c.work, c.layer1_size, c.vector_size, c.learn_hidden) + * + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dmc_neg( + * c.negative, c.cum_table, c.cum_table_len, c.next_random, c.neu1, c.syn1neg, + */ + __pyx_t_9 = (__pyx_v_c.negative != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":498 + * + * if c.negative: + * c.next_random = fast_document_dmc_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.next_random, c.neu1, c.syn1neg, + * c.indexes[i], c.alpha, c.work, c.layer1_size, c.vector_size, c.learn_hidden) + */ + __pyx_v_c.next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.next_random, __pyx_v_c.neu1, __pyx_v_c.syn1neg, (__pyx_v_c.indexes[__pyx_v_i]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.layer1_size, __pyx_v_c.vector_size, __pyx_v_c.learn_hidden); + + /* "gensim/models/doc2vec_corpusfile.pyx":497 + * c.alpha, c.work, c.layer1_size, c.vector_size, c.learn_hidden) + * + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dmc_neg( + * c.negative, c.cum_table, c.cum_table_len, c.next_random, c.neu1, c.syn1neg, + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":502 + * c.indexes[i], c.alpha, c.work, c.layer1_size, c.vector_size, c.learn_hidden) + * + * if c.learn_doctags and _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * our_saxpy(&c.vector_size, &c.doctag_locks[_doc_tag], &c.work[m * c.vector_size], + * &ONE, &c.doctag_vectors[_doc_tag * c.vector_size], &ONE) + */ + __pyx_t_3 = (__pyx_v_c.learn_doctags != 0); + if (__pyx_t_3) { + } else { + __pyx_t_9 = __pyx_t_3; + goto __pyx_L27_bool_binop_done; + } + __pyx_t_3 = ((__pyx_v__doc_tag < __pyx_v_c.docvecs_count) != 0); + __pyx_t_9 = __pyx_t_3; + __pyx_L27_bool_binop_done:; + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":503 + * + * if 
c.learn_doctags and _doc_tag < c.docvecs_count: + * our_saxpy(&c.vector_size, &c.doctag_locks[_doc_tag], &c.work[m * c.vector_size], # <<<<<<<<<<<<<< + * &ONE, &c.doctag_vectors[_doc_tag * c.vector_size], &ONE) + * if c.learn_words: + */ + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.vector_size), (&(__pyx_v_c.doctag_locks[__pyx_v__doc_tag])), (&(__pyx_v_c.work[(__pyx_v_m * __pyx_v_c.vector_size)])), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE), (&(__pyx_v_c.doctag_vectors[(__pyx_v__doc_tag * __pyx_v_c.vector_size)])), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE)); + + /* "gensim/models/doc2vec_corpusfile.pyx":502 + * c.indexes[i], c.alpha, c.work, c.layer1_size, c.vector_size, c.learn_hidden) + * + * if c.learn_doctags and _doc_tag < c.docvecs_count: # <<<<<<<<<<<<<< + * our_saxpy(&c.vector_size, &c.doctag_locks[_doc_tag], &c.work[m * c.vector_size], + * &ONE, &c.doctag_vectors[_doc_tag * c.vector_size], &ONE) + */ + } + + /* "gensim/models/doc2vec_corpusfile.pyx":505 + * our_saxpy(&c.vector_size, &c.doctag_locks[_doc_tag], &c.work[m * c.vector_size], + * &ONE, &c.doctag_vectors[_doc_tag * c.vector_size], &ONE) + * if c.learn_words: # <<<<<<<<<<<<<< + * for m in range(2 * c.window): + * our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], + */ + __pyx_t_9 = (__pyx_v_c.learn_words != 0); + if (__pyx_t_9) { + + /* "gensim/models/doc2vec_corpusfile.pyx":506 + * &ONE, &c.doctag_vectors[_doc_tag * c.vector_size], &ONE) + * if c.learn_words: + * for m in range(2 * c.window): # <<<<<<<<<<<<<< + * our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], + * &ONE, &c.word_vectors[c.window_indexes[m] * c.vector_size], &ONE) + */ + __pyx_t_17 = (2 * __pyx_v_c.window); + __pyx_t_18 = __pyx_t_17; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_18; __pyx_t_13+=1) { + __pyx_v_m = __pyx_t_13; + + /* "gensim/models/doc2vec_corpusfile.pyx":507 + * if c.learn_words: + * for m in range(2 * c.window): + * our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], # <<<<<<<<<<<<<< + * &ONE, &c.word_vectors[c.window_indexes[m] * c.vector_size], &ONE) + * + */ + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.vector_size), (&(__pyx_v_c.word_locks[(__pyx_v_c.window_indexes[__pyx_v_m])])), (&(__pyx_v_c.work[((__pyx_v_c.doctag_len + __pyx_v_m) * __pyx_v_c.vector_size)])), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE), (&(__pyx_v_c.word_vectors[((__pyx_v_c.window_indexes[__pyx_v_m]) * __pyx_v_c.vector_size)])), (&__pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE)); + } + + /* "gensim/models/doc2vec_corpusfile.pyx":505 + * our_saxpy(&c.vector_size, &c.doctag_locks[_doc_tag], &c.work[m * c.vector_size], + * &ONE, &c.doctag_vectors[_doc_tag * c.vector_size], &ONE) + * if c.learn_words: # <<<<<<<<<<<<<< + * for m in range(2 * c.window): + * our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], + */ + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":510 + * &ONE, &c.word_vectors[c.window_indexes[m] * c.vector_size], &ONE) + * + * total_documents += 1 # <<<<<<<<<<<<<< + * total_effective_words += effective_words + * _doc_tag += 1 + */ + __pyx_v_total_documents = (__pyx_v_total_documents + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":511 + * + * total_documents += 1 + * total_effective_words += effective_words # <<<<<<<<<<<<<< + * _doc_tag += 1 + * + */ + 
__pyx_v_total_effective_words = (__pyx_v_total_effective_words + __pyx_v_effective_words); + + /* "gensim/models/doc2vec_corpusfile.pyx":512 + * total_documents += 1 + * total_effective_words += effective_words + * _doc_tag += 1 # <<<<<<<<<<<<<< + * + * c.alpha = get_next_alpha(start_alpha, end_alpha, total_documents, total_words, expected_examples, + */ + __pyx_v__doc_tag = (__pyx_v__doc_tag + 1); + + /* "gensim/models/doc2vec_corpusfile.pyx":514 + * _doc_tag += 1 + * + * c.alpha = get_next_alpha(start_alpha, end_alpha, total_documents, total_words, expected_examples, # <<<<<<<<<<<<<< + * expected_words, cur_epoch, num_epochs) + * + */ + __pyx_v_c.alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_v_start_alpha, __pyx_v_end_alpha, __pyx_v_total_documents, __pyx_v_total_words, __pyx_v_expected_examples, __pyx_v_expected_words, __pyx_v_cur_epoch, __pyx_v_num_epochs); + __pyx_L6_continue:; + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":450 + * + * # release GIL & train on the full corpus, document by document + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + /*finally:*/ { + /*normal exit:*/{ + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L5; + } + __pyx_L4_error: { + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L1_error; + } + __pyx_L5:; + } + } + + /* "gensim/models/doc2vec_corpusfile.pyx":517 + * expected_words, cur_epoch, num_epochs) + * + * return total_documents, total_effective_words, total_words # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_total_documents); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 517, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_total_effective_words); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 517, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_19 = __Pyx_PyInt_From_int(__pyx_v_total_words); if (unlikely(!__pyx_t_19)) __PYX_ERR(0, 517, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_19); + __pyx_t_20 = PyTuple_New(3); if (unlikely(!__pyx_t_20)) __PYX_ERR(0, 517, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_20); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_20, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_20, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_19); + PyTuple_SET_ITEM(__pyx_t_20, 2, __pyx_t_19); + __pyx_t_2 = 0; + __pyx_t_7 = 0; + __pyx_t_19 = 0; + __pyx_r = __pyx_t_20; + __pyx_t_20 = 0; + goto __pyx_L0; + + /* "gensim/models/doc2vec_corpusfile.pyx":378 + * + * + * def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_19); + __Pyx_XDECREF(__pyx_t_20); + __Pyx_AddTraceback("gensim.models.doc2vec_corpusfile.d2v_train_epoch_dm_concat", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_input_stream); + __Pyx_XDECREF((PyObject *)__pyx_v_vocab); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + * 
# experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fulfill the PEP. + */ + +/* Python wrapper */ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); + __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_v_i; + int __pyx_v_ndim; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + int __pyx_v_t; + char *__pyx_v_f; + PyArray_Descr *__pyx_v_descr = 0; + int __pyx_v_offset; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + char *__pyx_t_8; + if (__pyx_v_info == NULL) { + PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete"); + return -1; + } + __Pyx_RefNannySetupContext("__getbuffer__", 0); + __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(__pyx_v_info->obj); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + * + * cdef int i, ndim + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + */ + __pyx_v_endian_detector = 1; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + * cdef int i, ndim + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * + * ndim = PyArray_NDIM(self) + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + */ + __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + __pyx_t_2 = (((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not C contiguous") + * + */ + __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_C_CONTIGUOUS) != 0)) != 0); + 
__pyx_t_1 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 229, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + __pyx_t_2 = (((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L7_bool_binop_done; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not Fortran contiguous") + * + */ + __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_F_CONTIGUOUS) != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L7_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 233, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == 
pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + * raise ValueError(u"ndarray is not Fortran contiguous") + * + * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + * + * info.buf = PyArray_DATA(self) + * info.ndim = ndim # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * # Allocate new buffer for strides and shape info. + */ + __pyx_v_info->ndim = __pyx_v_ndim; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< + * info.shape = info.strides + ndim + * for i in range(ndim): + */ + __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + * # This is allocated as one block, strides first. + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) + * info.shape = info.strides + ndim # <<<<<<<<<<<<<< + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + */ + __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) + * info.shape = info.strides + ndim + * for i in range(ndim): # <<<<<<<<<<<<<< + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] + */ + __pyx_t_4 = __pyx_v_ndim; + __pyx_t_5 = __pyx_t_4; + for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { + __pyx_v_i = __pyx_t_6; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + * info.shape = info.strides + ndim + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + */ + (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< + * else: + * info.strides = PyArray_STRIDES(self) + */ + (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. 
+ * # This is allocated as one block, strides first. + */ + goto __pyx_L9; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + */ + /*else*/ { + __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + * else: + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + */ + __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self)); + } + __pyx_L9:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL # <<<<<<<<<<<<<< + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) + */ + __pyx_v_info->suboffsets = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< + * info.readonly = not PyArray_ISWRITEABLE(self) + * + */ + __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< + * + * cdef int t + */ + __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + * + * cdef int t + * cdef char* f = NULL # <<<<<<<<<<<<<< + * cdef dtype descr = self.descr + * cdef int offset + */ + __pyx_v_f = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + * cdef int t + * cdef char* f = NULL + * cdef dtype descr = self.descr # <<<<<<<<<<<<<< + * cdef int offset + * + */ + __pyx_t_3 = ((PyObject *)__pyx_v_self->descr); + __Pyx_INCREF(__pyx_t_3); + __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + * cdef int offset + * + * info.obj = self # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(descr): + */ + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = ((PyObject *)__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + * info.obj = self + * + * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + * + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num # <<<<<<<<<<<<<< + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + */ + __pyx_t_4 = __pyx_v_descr->type_num; + __pyx_v_t = __pyx_t_4; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if 
not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_2 = ((__pyx_v_descr->byteorder == '>') != 0); + if (!__pyx_t_2) { + goto __pyx_L15_next_or; + } else { + } + __pyx_t_2 = (__pyx_v_little_endian != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L14_bool_binop_done; + } + __pyx_L15_next_or:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + */ + __pyx_t_2 = ((__pyx_v_descr->byteorder == '<') != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L14_bool_binop_done; + } + __pyx_t_2 = ((!(__pyx_v_little_endian != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L14_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 263, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + */ + switch (__pyx_v_t) { + case NPY_BYTE: + __pyx_v_f = ((char *)"b"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + */ + case NPY_UBYTE: + __pyx_v_f = ((char *)"B"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + */ + case NPY_SHORT: + __pyx_v_f = ((char *)"h"); + break; + + 
/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + */ + case NPY_USHORT: + __pyx_v_f = ((char *)"H"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + */ + case NPY_INT: + __pyx_v_f = ((char *)"i"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + */ + case NPY_UINT: + __pyx_v_f = ((char *)"I"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + */ + case NPY_LONG: + __pyx_v_f = ((char *)"l"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + */ + case NPY_ULONG: + __pyx_v_f = ((char *)"L"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + */ + case NPY_LONGLONG: + __pyx_v_f = ((char *)"q"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + */ + case NPY_ULONGLONG: + __pyx_v_f = ((char *)"Q"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + */ + case NPY_FLOAT: + __pyx_v_f = ((char *)"f"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + */ + case NPY_DOUBLE: + __pyx_v_f = ((char *)"d"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + */ + case NPY_LONGDOUBLE: + __pyx_v_f = ((char *)"g"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == 
NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + */ + case NPY_CFLOAT: + __pyx_v_f = ((char *)"Zf"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" + */ + case NPY_CDOUBLE: + __pyx_v_f = ((char *)"Zd"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f = "O" + * else: + */ + case NPY_CLONGDOUBLE: + __pyx_v_f = ((char *)"Zg"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + case NPY_OBJECT: + __pyx_v_f = ((char *)"O"); + break; + default: + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + * elif t == NPY_OBJECT: f = "O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * info.format = f + * return + */ + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_7); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 282, __pyx_L1_error) + break; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f # <<<<<<<<<<<<<< + * return + * else: + */ + __pyx_v_info->format = __pyx_v_f; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f + * return # <<<<<<<<<<<<<< + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + * info.obj = self + * + * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + * return + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + */ + /*else*/ { + __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual 
alignment # <<<<<<<<<<<<<< + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, + */ + (__pyx_v_info->format[0]) = '^'; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + * info.format = PyObject_Malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 # <<<<<<<<<<<<<< + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + */ + __pyx_v_offset = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< + * info.format + _buffer_format_string_len, + * &offset) + */ + __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) + __pyx_v_f = __pyx_t_8; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + * info.format + _buffer_format_string_len, + * &offset) + * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + */ + (__pyx_v_f[0]) = '\x00'; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fulfill the PEP. + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (__pyx_v_info->obj != NULL) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0; + } + goto __pyx_L2; + __pyx_L0:; + if (__pyx_v_info->obj == Py_None) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0; + } + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_descr); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + */ + +/* Python wrapper */ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); + __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__releasebuffer__", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + * + * def 
__releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * PyObject_Free(info.strides) + */ + PyObject_Free(__pyx_v_info->format); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * PyObject_Free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * PyObject_Free(info.strides) # <<<<<<<<<<<<<< + * # info.shape was stored after info.strides in the same block + * + */ + PyObject_Free(__pyx_v_info->strides); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * PyObject_Free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + * + * cdef inline object PyArray_MultiIterNew1(a): + * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew2(a, b): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 776, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + + /* function exit code */ + __pyx_L1_error:; + 
__Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + * + * cdef inline object PyArray_MultiIterNew2(a, b): + * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 779, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 782, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + * return PyArray_MultiIterNew(3, a, b, c) 
+ * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 785, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 788, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< + * if 
PyDataType_HASSUBARRAY(d): + * return d.subarray.shape + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< + * return d.subarray.shape + * else: + */ + __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape # <<<<<<<<<<<<<< + * else: + * return () + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape)); + __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< + * return d.subarray.shape + * else: + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + * return d.subarray.shape + * else: + * return () # <<<<<<<<<<<<<< + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_empty_tuple); + __pyx_r = __pyx_empty_tuple; + goto __pyx_L0; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + * return () + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. 
+ */ + +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) { + PyArray_Descr *__pyx_v_child = 0; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + PyObject *__pyx_v_fields = 0; + PyObject *__pyx_v_childname = NULL; + PyObject *__pyx_v_new_offset = NULL; + PyObject *__pyx_v_t = NULL; + char *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + long __pyx_t_8; + char *__pyx_t_9; + __Pyx_RefNannySetupContext("_util_dtypestring", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + * + * cdef dtype child + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * cdef tuple fields + */ + __pyx_v_endian_detector = 1; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + * cdef dtype child + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * cdef tuple fields + * + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + if (unlikely(__pyx_v_descr->names == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(1, 805, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_descr->names; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(1, 805, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 805, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + * + * for childname in descr.names: + * fields = descr.fields[childname] # <<<<<<<<<<<<<< + * child, new_offset = fields + * + */ + if (unlikely(__pyx_v_descr->fields == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 806, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_PyDict_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 806, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(1, 806, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + * for childname in descr.names: + * fields = descr.fields[childname] + * child, new_offset = fields # <<<<<<<<<<<<<< + * + * if (end - f) - (new_offset - offset[0]) < 15: + */ + if (likely(__pyx_v_fields != Py_None)) { + PyObject* sequence = __pyx_v_fields; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if 
(unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(1, 807, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 807, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 807, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(1, 807, __pyx_L1_error) + } + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) __PYX_ERR(1, 807, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_child, ((PyArray_Descr *)__pyx_t_3)); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); + __pyx_t_4 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + __pyx_t_4 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 809, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 809, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 809, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 810, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 810, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_7 = ((__pyx_v_child->byteorder == '>') != 0); + if (!__pyx_t_7) { + goto __pyx_L8_next_or; + } else { + } + __pyx_t_7 = (__pyx_v_little_endian != 0); + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L7_bool_binop_done; + } + __pyx_L8_next_or:; + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + * + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * # One could encode it in the format string and have Cython + */ + __pyx_t_7 = ((__pyx_v_child->byteorder == '<') != 0); + if (__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_7 = ((!(__pyx_v_little_endian != 0)) != 0); + __pyx_t_6 = __pyx_t_7; + __pyx_L7_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 814, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 814, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + * + * # Output padding bytes + * while offset[0] < new_offset: # <<<<<<<<<<<<<< + * f[0] = 120 # "x"; pad byte + * f += 1 + */ + while (1) { + __pyx_t_3 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 824, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_3, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 824, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 824, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!__pyx_t_6) break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + * # Output padding bytes + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< + * f += 1 + * offset[0] += 1 + */ + (__pyx_v_f[0]) = 0x78; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte + * f += 1 # <<<<<<<<<<<<<< + * offset[0] += 1 + * + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + * f[0] = 120 # "x"; pad byte + * f += 1 + * offset[0] += 1 # <<<<<<<<<<<<<< + * + * offset[0] += child.itemsize + */ + __pyx_t_8 = 0; + 
(__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + * offset[0] += 1 + * + * offset[0] += child.itemsize # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(child): + */ + __pyx_t_8 = 0; + (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); + if (__pyx_t_6) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + * + * if not PyDataType_HASFIELDS(child): + * t = child.type_num # <<<<<<<<<<<<<< + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") + */ + __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_child->type_num); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 832, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); + __pyx_t_4 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 834, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 834, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + * + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_BYTE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 837, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 837, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 837, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 98; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + */ + __pyx_t_3 = 
__Pyx_PyInt_From_enum__NPY_TYPES(NPY_UBYTE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 838, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 838, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 838, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 66; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_SHORT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 839, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 839, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 839, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x68; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_USHORT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 840, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 840, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 840, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 72; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_INT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 841, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 841, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 841, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x69; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UINT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 
842, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 842, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 842, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 73; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 843, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 843, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 843, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x6C; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 844, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 844, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 844, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 76; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGLONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 845, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 845, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 845, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x71; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONGLONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 846, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); 
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 846, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 846, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 81; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_FLOAT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 847, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 847, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 847, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x66; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_DOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 848, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 848, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 848, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x64; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 849, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 849, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 849, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x67; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + */ + __pyx_t_3 = 
__Pyx_PyInt_From_enum__NPY_TYPES(NPY_CFLOAT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 850, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 850, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 850, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x66; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 851, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 851, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 851, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x64; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CLONGDOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 852, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 852, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 852, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x67; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_OBJECT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (likely(__pyx_t_6)) { + (__pyx_v_f[0]) = 79; + goto __pyx_L15; + } + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * f += 1 + * else: + */ + /*else*/ { + __pyx_t_3 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 855, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 855, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 855, __pyx_L1_error) + } + __pyx_L15:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * f += 1 # <<<<<<<<<<<<<< + * else: + * # Cython ignores struct boundary information ("T{...}"), + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + goto __pyx_L13; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + * # Cython ignores struct boundary information ("T{...}"), + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< + * return f + * + */ + /*else*/ { + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 860, __pyx_L1_error) + __pyx_v_f = __pyx_t_9; + } + __pyx_L13:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) + * return f # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + * return () + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. 
+ */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_child); + __Pyx_XDECREF(__pyx_v_fields); + __Pyx_XDECREF(__pyx_v_childname); + __Pyx_XDECREF(__pyx_v_new_offset); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + +static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) { + PyObject *__pyx_v_baseptr; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + __Pyx_RefNannySetupContext("set_array_base", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + __pyx_t_1 = (__pyx_v_base == Py_None); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + * cdef PyObject* baseptr + * if base is None: + * baseptr = NULL # <<<<<<<<<<<<<< + * else: + * Py_INCREF(base) # important to do this before decref below! + */ + __pyx_v_baseptr = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + goto __pyx_L3; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + * baseptr = NULL + * else: + * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< + * baseptr = base + * Py_XDECREF(arr.base) + */ + /*else*/ { + Py_INCREF(__pyx_v_base); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + * else: + * Py_INCREF(base) # important to do this before decref below! + * baseptr = base # <<<<<<<<<<<<<< + * Py_XDECREF(arr.base) + * arr.base = baseptr + */ + __pyx_v_baseptr = ((PyObject *)__pyx_v_base); + } + __pyx_L3:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + * Py_INCREF(base) # important to do this before decref below! 
+ * baseptr = base + * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< + * arr.base = baseptr + * + */ + Py_XDECREF(__pyx_v_arr->base); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + * baseptr = base + * Py_XDECREF(arr.base) + * arr.base = baseptr # <<<<<<<<<<<<<< + * + * cdef inline object get_array_base(ndarray arr): + */ + __pyx_v_arr->base = __pyx_v_baseptr; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("get_array_base", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: + * return None # <<<<<<<<<<<<<< + * else: + * return arr.base + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + * return None + * else: + * return arr.base # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_arr->base)); + __pyx_r = ((PyObject *)__pyx_v_arr->base); + goto __pyx_L0; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + * # Versions of the import_* functions which are more suitable for + * # Cython code. + * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< + * try: + * _import_array() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_array", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. 
+ * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + * cdef inline int import_array() except -1: + * try: + * _import_array() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") + */ + __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. + * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + * try: + * _import_array() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.multiarray failed to import") + * + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 999, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + * _import_array() + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_umath() except -1: + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1000, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1000, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. + * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + * # Versions of the import_* functions which are more suitable for + * # Cython code. 
+ * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< + * try: + * _import_array() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + * raise ImportError("numpy.core.multiarray failed to import") + * + * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_umath", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + * cdef inline int import_umath() except -1: + * try: + * _import_umath() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + * try: + * _import_umath() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.umath failed to import") + * + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1005, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_ufunc() except -1: + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1006, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1006, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + * raise ImportError("numpy.core.multiarray failed to import") + * + * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_ufunc", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + * cdef inline int import_ufunc() except -1: + * try: + * _import_umath() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + * try: + * _import_umath() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1011, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + * 
_import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1012, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1012, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_doc2vec_corpusfile(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_doc2vec_corpusfile}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "doc2vec_corpusfile", + __pyx_k_Optimized_cython_functions_for_f, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_n_s_CORPUSFILE_VERSION, __pyx_k_CORPUSFILE_VERSION, sizeof(__pyx_k_CORPUSFILE_VERSION), 0, 0, 1, 1}, + {&__pyx_kp_u_Format_string_allocated_too_shor, __pyx_k_Format_string_allocated_too_shor, sizeof(__pyx_k_Format_string_allocated_too_shor), 0, 1, 0, 0}, + {&__pyx_kp_u_Format_string_allocated_too_shor_2, __pyx_k_Format_string_allocated_too_shor_2, sizeof(__pyx_k_Format_string_allocated_too_shor_2), 0, 1, 0, 0}, + {&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1}, + {&__pyx_kp_u_Non_native_byte_order_not_suppor, __pyx_k_Non_native_byte_order_not_suppor, sizeof(__pyx_k_Non_native_byte_order_not_suppor), 0, 1, 0, 0}, + {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1}, + {&__pyx_n_s__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 0, 1, 1}, + {&__pyx_n_s_alpha, __pyx_k_alpha, sizeof(__pyx_k_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_alpha_2, __pyx_k_alpha_2, sizeof(__pyx_k_alpha_2), 0, 0, 1, 
1}, + {&__pyx_n_s_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_corpus_file, __pyx_k_corpus_file, sizeof(__pyx_k_corpus_file), 0, 0, 1, 1}, + {&__pyx_n_s_count, __pyx_k_count, sizeof(__pyx_k_count), 0, 0, 1, 1}, + {&__pyx_n_s_cur_epoch, __pyx_k_cur_epoch, sizeof(__pyx_k_cur_epoch), 0, 0, 1, 1}, + {&__pyx_n_s_cur_epoch_2, __pyx_k_cur_epoch_2, sizeof(__pyx_k_cur_epoch_2), 0, 0, 1, 1}, + {&__pyx_n_s_cython_vocab, __pyx_k_cython_vocab, sizeof(__pyx_k_cython_vocab), 0, 0, 1, 1}, + {&__pyx_n_s_d2v_train_epoch_dbow, __pyx_k_d2v_train_epoch_dbow, sizeof(__pyx_k_d2v_train_epoch_dbow), 0, 0, 1, 1}, + {&__pyx_n_s_d2v_train_epoch_dm, __pyx_k_d2v_train_epoch_dm, sizeof(__pyx_k_d2v_train_epoch_dm), 0, 0, 1, 1}, + {&__pyx_n_s_d2v_train_epoch_dm_concat, __pyx_k_d2v_train_epoch_dm_concat, sizeof(__pyx_k_d2v_train_epoch_dm_concat), 0, 0, 1, 1}, + {&__pyx_n_s_doc_tag, __pyx_k_doc_tag, sizeof(__pyx_k_doc_tag), 0, 0, 1, 1}, + {&__pyx_n_s_doc_words, __pyx_k_doc_words, sizeof(__pyx_k_doc_words), 0, 0, 1, 1}, + {&__pyx_n_s_doctag_locks, __pyx_k_doctag_locks, sizeof(__pyx_k_doctag_locks), 0, 0, 1, 1}, + {&__pyx_n_s_doctag_vectors, __pyx_k_doctag_vectors, sizeof(__pyx_k_doctag_vectors), 0, 0, 1, 1}, + {&__pyx_n_s_document_len, __pyx_k_document_len, sizeof(__pyx_k_document_len), 0, 0, 1, 1}, + {&__pyx_n_s_docvecs_count, __pyx_k_docvecs_count, sizeof(__pyx_k_docvecs_count), 0, 0, 1, 1}, + {&__pyx_n_s_effective_words, __pyx_k_effective_words, sizeof(__pyx_k_effective_words), 0, 0, 1, 1}, + {&__pyx_n_s_end_alpha, __pyx_k_end_alpha, sizeof(__pyx_k_end_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_epochs, __pyx_k_epochs, sizeof(__pyx_k_epochs), 0, 0, 1, 1}, + {&__pyx_n_s_expected_examples, __pyx_k_expected_examples, sizeof(__pyx_k_expected_examples), 0, 0, 1, 1}, + {&__pyx_n_s_expected_examples_2, __pyx_k_expected_examples_2, sizeof(__pyx_k_expected_examples_2), 0, 0, 1, 1}, + {&__pyx_n_s_expected_words, __pyx_k_expected_words, sizeof(__pyx_k_expected_words), 0, 0, 1, 1}, + {&__pyx_n_s_expected_words_2, __pyx_k_expected_words_2, sizeof(__pyx_k_expected_words_2), 0, 0, 1, 1}, + {&__pyx_n_s_fblas, __pyx_k_fblas, sizeof(__pyx_k_fblas), 0, 0, 1, 1}, + {&__pyx_kp_s_gensim_models_doc2vec_corpusfile, __pyx_k_gensim_models_doc2vec_corpusfile, sizeof(__pyx_k_gensim_models_doc2vec_corpusfile), 0, 0, 1, 0}, + {&__pyx_n_s_gensim_models_doc2vec_corpusfile_2, __pyx_k_gensim_models_doc2vec_corpusfile_2, sizeof(__pyx_k_gensim_models_doc2vec_corpusfile_2), 0, 0, 1, 1}, + {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, + {&__pyx_n_s_idx_end, __pyx_k_idx_end, sizeof(__pyx_k_idx_end), 0, 0, 1, 1}, + {&__pyx_n_s_idx_start, __pyx_k_idx_start, sizeof(__pyx_k_idx_start), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_input_stream, __pyx_k_input_stream, sizeof(__pyx_k_input_stream), 0, 0, 1, 1}, + {&__pyx_n_s_inv_count, __pyx_k_inv_count, sizeof(__pyx_k_inv_count), 0, 0, 1, 1}, + {&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1}, + {&__pyx_n_s_k, __pyx_k_k, sizeof(__pyx_k_k), 0, 0, 1, 1}, + {&__pyx_n_s_learn_doctags, __pyx_k_learn_doctags, sizeof(__pyx_k_learn_doctags), 0, 0, 1, 1}, + {&__pyx_n_s_learn_hidden, __pyx_k_learn_hidden, sizeof(__pyx_k_learn_hidden), 0, 0, 1, 1}, + {&__pyx_n_s_learn_words, __pyx_k_learn_words, sizeof(__pyx_k_learn_words), 0, 0, 1, 1}, + {&__pyx_n_s_m, __pyx_k_m, sizeof(__pyx_k_m), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, 
sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_min_alpha, __pyx_k_min_alpha, sizeof(__pyx_k_min_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_model, __pyx_k_model, sizeof(__pyx_k_model), 0, 0, 1, 1}, + {&__pyx_n_s_n, __pyx_k_n, sizeof(__pyx_k_n), 0, 0, 1, 1}, + {&__pyx_kp_u_ndarray_is_not_C_contiguous, __pyx_k_ndarray_is_not_C_contiguous, sizeof(__pyx_k_ndarray_is_not_C_contiguous), 0, 1, 0, 0}, + {&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0}, + {&__pyx_n_s_neu1, __pyx_k_neu1, sizeof(__pyx_k_neu1), 0, 0, 1, 1}, + {&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1}, + {&__pyx_n_s_num_epochs, __pyx_k_num_epochs, sizeof(__pyx_k_num_epochs), 0, 0, 1, 1}, + {&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1}, + {&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0}, + {&__pyx_kp_s_numpy_core_umath_failed_to_impor, __pyx_k_numpy_core_umath_failed_to_impor, sizeof(__pyx_k_numpy_core_umath_failed_to_impor), 0, 0, 1, 0}, + {&__pyx_n_s_offset, __pyx_k_offset, sizeof(__pyx_k_offset), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + {&__pyx_n_s_scipy_linalg_blas, __pyx_k_scipy_linalg_blas, sizeof(__pyx_k_scipy_linalg_blas), 0, 0, 1, 1}, + {&__pyx_n_s_sent_idx, __pyx_k_sent_idx, sizeof(__pyx_k_sent_idx), 0, 0, 1, 1}, + {&__pyx_n_s_start_alpha, __pyx_k_start_alpha, sizeof(__pyx_k_start_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_start_doctag, __pyx_k_start_doctag, sizeof(__pyx_k_start_doctag), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_total_documents, __pyx_k_total_documents, sizeof(__pyx_k_total_documents), 0, 0, 1, 1}, + {&__pyx_n_s_total_effective_words, __pyx_k_total_effective_words, sizeof(__pyx_k_total_effective_words), 0, 0, 1, 1}, + {&__pyx_n_s_total_words, __pyx_k_total_words, sizeof(__pyx_k_total_words), 0, 0, 1, 1}, + {&__pyx_n_s_train_words, __pyx_k_train_words, sizeof(__pyx_k_train_words), 0, 0, 1, 1}, + {&__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_k_unknown_dtype_code_in_numpy_pxd, sizeof(__pyx_k_unknown_dtype_code_in_numpy_pxd), 0, 1, 0, 0}, + {&__pyx_n_s_vocab, __pyx_k_vocab, sizeof(__pyx_k_vocab), 0, 0, 1, 1}, + {&__pyx_n_s_word_locks, __pyx_k_word_locks, sizeof(__pyx_k_word_locks), 0, 0, 1, 1}, + {&__pyx_n_s_word_vectors, __pyx_k_word_vectors, sizeof(__pyx_k_word_vectors), 0, 0, 1, 1}, + {&__pyx_n_s_work, __pyx_k_work, sizeof(__pyx_k_work), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(0, 25, __pyx_L1_error) + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 88, __pyx_L1_error) + __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(1, 229, __pyx_L1_error) + __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(1, 810, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple_)) __PYX_ERR(1, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 810, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 814, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 834, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + * _import_array() + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_umath() except 
-1: + */ + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 1000, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_ufunc() except -1: + */ + __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 1006, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + */ + __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 1012, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + + /* "gensim/models/doc2vec_corpusfile.pyx":95 + * + * + * def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + */ + __pyx_tuple__11 = PyTuple_Pack(42, __pyx_n_s_model, __pyx_n_s_corpus_file, __pyx_n_s_offset, __pyx_n_s_start_doctag, __pyx_n_s_cython_vocab, __pyx_n_s_cur_epoch, __pyx_n_s_expected_examples, __pyx_n_s_expected_words, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_docvecs_count, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_train_words, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_c, __pyx_n_s_cur_epoch_2, __pyx_n_s_num_epochs, __pyx_n_s_expected_examples_2, __pyx_n_s_expected_words_2, __pyx_n_s_start_alpha, __pyx_n_s_end_alpha, __pyx_n_s_alpha_2, __pyx_n_s_input_stream, __pyx_n_s_vocab, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_document_len, __pyx_n_s_effective_words, __pyx_n_s_total_effective_words, __pyx_n_s_total_documents, __pyx_n_s_total_words, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_doc_words, __pyx_n_s_doc_tag, __pyx_n_s_k); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); + __pyx_codeobj__12 = (PyObject*)__Pyx_PyCode_New(19, 0, 42, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__11, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_doc2vec_corpusfile, __pyx_n_s_d2v_train_epoch_dbow, 95, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__12)) __PYX_ERR(0, 95, __pyx_L1_error) + + /* "gensim/models/doc2vec_corpusfile.pyx":231 + * + * + * def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None): + */ + __pyx_tuple__13 = PyTuple_Pack(44, __pyx_n_s_model, __pyx_n_s_corpus_file, __pyx_n_s_offset, __pyx_n_s_start_doctag, __pyx_n_s_cython_vocab, __pyx_n_s_cur_epoch, __pyx_n_s_expected_examples, __pyx_n_s_expected_words, 
__pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_docvecs_count, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_c, __pyx_n_s_cur_epoch_2, __pyx_n_s_num_epochs, __pyx_n_s_expected_examples_2, __pyx_n_s_expected_words_2, __pyx_n_s_start_alpha, __pyx_n_s_end_alpha, __pyx_n_s_alpha_2, __pyx_n_s_input_stream, __pyx_n_s_vocab, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_m, __pyx_n_s_document_len, __pyx_n_s_effective_words, __pyx_n_s_total_effective_words, __pyx_n_s_total_documents, __pyx_n_s_total_words, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_count, __pyx_n_s_inv_count, __pyx_n_s_doc_words, __pyx_n_s_doc_tag); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(0, 231, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__13); + __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(18, 0, 44, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_doc2vec_corpusfile, __pyx_n_s_d2v_train_epoch_dm, 231, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(0, 231, __pyx_L1_error) + + /* "gensim/models/doc2vec_corpusfile.pyx":378 + * + * + * def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, + */ + __pyx_tuple__15 = PyTuple_Pack(43, __pyx_n_s_model, __pyx_n_s_corpus_file, __pyx_n_s_offset, __pyx_n_s_start_doctag, __pyx_n_s_cython_vocab, __pyx_n_s_cur_epoch, __pyx_n_s_expected_examples, __pyx_n_s_expected_words, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_docvecs_count, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_c, __pyx_n_s_cur_epoch_2, __pyx_n_s_num_epochs, __pyx_n_s_expected_examples_2, __pyx_n_s_expected_words_2, __pyx_n_s_start_alpha, __pyx_n_s_end_alpha, __pyx_n_s_alpha_2, __pyx_n_s_input_stream, __pyx_n_s_vocab, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_m, __pyx_n_s_n, __pyx_n_s_document_len, __pyx_n_s_effective_words, __pyx_n_s_total_effective_words, __pyx_n_s_total_documents, __pyx_n_s_total_words, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_doc_words, __pyx_n_s_doc_tag); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); + __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(18, 0, 43, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_doc2vec_corpusfile, __pyx_n_s_d2v_train_epoch_dm_concat, 378, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_modinit_global_init_code(void); 
/*proto*/ +static int __Pyx_modinit_variable_export_code(void); /*proto*/ +static int __Pyx_modinit_function_export_code(void); /*proto*/ +static int __Pyx_modinit_type_init_code(void); /*proto*/ +static int __Pyx_modinit_type_import_code(void); /*proto*/ +static int __Pyx_modinit_variable_import_code(void); /*proto*/ +static int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_ptype_5numpy_dtype = __Pyx_ImportType("numpy", "dtype", sizeof(PyArray_Descr), 0); if (unlikely(!__pyx_ptype_5numpy_dtype)) __PYX_ERR(1, 164, __pyx_L1_error) + __pyx_ptype_5numpy_flatiter = __Pyx_ImportType("numpy", "flatiter", sizeof(PyArrayIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_flatiter)) __PYX_ERR(1, 186, __pyx_L1_error) + __pyx_ptype_5numpy_broadcast = __Pyx_ImportType("numpy", "broadcast", sizeof(PyArrayMultiIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_broadcast)) __PYX_ERR(1, 190, __pyx_L1_error) + __pyx_ptype_5numpy_ndarray = __Pyx_ImportType("numpy", "ndarray", sizeof(PyArrayObject), 0); if (unlikely(!__pyx_ptype_5numpy_ndarray)) __PYX_ERR(1, 199, __pyx_L1_error) + __pyx_ptype_5numpy_ufunc = __Pyx_ImportType("numpy", "ufunc", sizeof(PyUFuncObject), 0); if (unlikely(!__pyx_ptype_5numpy_ufunc)) __PYX_ERR(1, 872, __pyx_L1_error) + __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = __Pyx_ImportType("gensim.models.word2vec_corpusfile", "CythonLineSentence", sizeof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), 1); if (unlikely(!__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence)) __PYX_ERR(3, 33, __pyx_L1_error) + __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = (struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence*)__Pyx_GetVtable(__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence->tp_dict); if (unlikely(!__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence)) __PYX_ERR(3, 33, __pyx_L1_error) + __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab = __Pyx_ImportType("gensim.models.word2vec_corpusfile", "CythonVocab", sizeof(struct 
__pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab), 1); if (unlikely(!__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab)) __PYX_ERR(3, 61, __pyx_L1_error) + __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab = (struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab*)__Pyx_GetVtable(__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab->tp_dict); if (unlikely(!__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab)) __PYX_ERR(3, 61, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __pyx_t_1 = __Pyx_ImportModule("gensim.models.word2vec_inner"); if (!__pyx_t_1) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "scopy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_scopy, "__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "saxpy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_saxpy, "__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "sdot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_sdot, "__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "dsdot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_dsdot, "__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "snrm2", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_snrm2, "__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "sscal", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_sscal, "__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "EXP_TABLE", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE, "__pyx_t_6gensim_6models_14word2vec_inner_REAL_t [0x3E8]") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "our_dot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_our_dot, "__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "our_saxpy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy, "__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __pyx_t_1 = __Pyx_ImportModule("gensim.models.word2vec_inner"); if (!__pyx_t_1) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_1, "random_int32", (void (**)(void))&__pyx_f_6gensim_6models_14word2vec_inner_random_int32, "unsigned PY_LONG_LONG (unsigned PY_LONG_LONG *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) 
+ Py_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = __Pyx_ImportModule("gensim.models.doc2vec_inner"); if (!__pyx_t_2) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fast_document_dbow_hs", (void (**)(void))&__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fast_document_dbow_neg", (void (**)(void))&__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fast_document_dm_hs", (void (**)(void))&__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fast_document_dm_neg", (void (**)(void))&__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fast_document_dmc_hs", (void (**)(void))&__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int const , int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fast_document_dmc_neg", (void (**)(void))&__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int const , int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "init_d2v_config", (void (**)(void))&__pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config, 
"PyObject *(struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config *__pyx_optional_args)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_3 = __Pyx_ImportModule("gensim.models.word2vec_corpusfile"); if (!__pyx_t_3) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_3, "get_alpha", (void (**)(void))&__pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha, "__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_3, "get_next_alpha", (void (**)(void))&__pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha, "__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int, int, int, int, int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_RefNannyFinishContext(); + return -1; +} + + +#if PY_MAJOR_VERSION < 3 +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) + #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initdoc2vec_corpusfile(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initdoc2vec_corpusfile(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_doc2vec_corpusfile(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_doc2vec_corpusfile(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + result = PyDict_SetItemString(moddict, to_name, value); + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad; + return 
module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static int __pyx_pymod_exec_doc2vec_corpusfile(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m && __pyx_m == __pyx_pyinit_module) return 0; + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_doc2vec_corpusfile(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("doc2vec_corpusfile", __pyx_methods, __pyx_k_Optimized_cython_functions_for_f, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_gensim__models__doc2vec_corpusfile) { + if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "gensim.models.doc2vec_corpusfile")) { + if (unlikely(PyDict_SetItemString(modules, "gensim.models.doc2vec_corpusfile", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + (void)__Pyx_modinit_type_init_code(); + if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error; + if (unlikely(__Pyx_modinit_variable_import_code() != 0)) goto __pyx_L1_error; + if (unlikely(__Pyx_modinit_function_import_code() != 0)) goto __pyx_L1_error; + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "gensim/models/doc2vec_corpusfile.pyx":14 + * + * import cython + * import numpy as np # <<<<<<<<<<<<<< + * cimport numpy as np + * + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_numpy, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_np, __pyx_t_1) < 0) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":23 + * + * # scipy <= 0.15 + * try: # <<<<<<<<<<<<<< + * from scipy.linalg.blas import fblas + * except ImportError: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "gensim/models/doc2vec_corpusfile.pyx":24 + * # scipy <= 0.15 + * try: + * from scipy.linalg.blas import fblas # <<<<<<<<<<<<<< + * except ImportError: + * # in scipy > 0.15, fblas function has been removed + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 24, __pyx_L2_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_fblas); + __Pyx_GIVEREF(__pyx_n_s_fblas); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_fblas); + __pyx_t_5 = __Pyx_Import(__pyx_n_s_scipy_linalg_blas, __pyx_t_1, -1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 24, __pyx_L2_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_5, __pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 24, __pyx_L2_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_fblas, __pyx_t_1) < 0) __PYX_ERR(0, 24, __pyx_L2_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":23 + * + * # scipy <= 0.15 + * try: # <<<<<<<<<<<<<< + * from scipy.linalg.blas import 
fblas + * except ImportError: + */ + } + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L7_try_end; + __pyx_L2_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":25 + * try: + * from scipy.linalg.blas import fblas + * except ImportError: # <<<<<<<<<<<<<< + * # in scipy > 0.15, fblas function has been removed + * import scipy.linalg.blas as fblas + */ + __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_ImportError); + if (__pyx_t_6) { + __Pyx_AddTraceback("gensim.models.doc2vec_corpusfile", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_7) < 0) __PYX_ERR(0, 25, __pyx_L4_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_7); + + /* "gensim/models/doc2vec_corpusfile.pyx":27 + * except ImportError: + * # in scipy > 0.15, fblas function has been removed + * import scipy.linalg.blas as fblas # <<<<<<<<<<<<<< + * + * from gensim.models.doc2vec_inner cimport ( + */ + __pyx_t_8 = PyList_New(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 27, __pyx_L4_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_n_s__10); + __Pyx_GIVEREF(__pyx_n_s__10); + PyList_SET_ITEM(__pyx_t_8, 0, __pyx_n_s__10); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_scipy_linalg_blas, __pyx_t_8, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 27, __pyx_L4_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_fblas, __pyx_t_9) < 0) __PYX_ERR(0, 27, __pyx_L4_except_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L3_exception_handled; + } + goto __pyx_L4_except_error; + __pyx_L4_except_error:; + + /* "gensim/models/doc2vec_corpusfile.pyx":23 + * + * # scipy <= 0.15 + * try: # <<<<<<<<<<<<<< + * from scipy.linalg.blas import fblas + * except ImportError: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L3_exception_handled:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + __pyx_L7_try_end:; + } + + /* "gensim/models/doc2vec_corpusfile.pyx":53 + * DEF MAX_DOCUMENT_LEN = 10000 + * + * cdef int ONE = 1 # <<<<<<<<<<<<<< + * cdef REAL_t ONEF = 1.0 + * + */ + __pyx_v_6gensim_6models_18doc2vec_corpusfile_ONE = 1; + + /* "gensim/models/doc2vec_corpusfile.pyx":54 + * + * cdef int ONE = 1 + * cdef REAL_t ONEF = 1.0 # <<<<<<<<<<<<<< + * + * + */ + __pyx_v_6gensim_6models_18doc2vec_corpusfile_ONEF = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)1.0); + + /* "gensim/models/doc2vec_corpusfile.pyx":95 + * + * + * def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + */ + __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_18doc2vec_corpusfile_1d2v_train_epoch_dbow, NULL, __pyx_n_s_gensim_models_doc2vec_corpusfile_2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 95, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_d2v_train_epoch_dbow, __pyx_t_7) < 0) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":231 + * + * + * def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None): + */ + __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_18doc2vec_corpusfile_3d2v_train_epoch_dm, NULL, __pyx_n_s_gensim_models_doc2vec_corpusfile_2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 231, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_d2v_train_epoch_dm, __pyx_t_7) < 0) __PYX_ERR(0, 231, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":378 + * + * + * def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, # <<<<<<<<<<<<<< + * _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + * learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, + */ + __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_18doc2vec_corpusfile_5d2v_train_epoch_dm_concat, NULL, __pyx_n_s_gensim_models_doc2vec_corpusfile_2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_d2v_train_epoch_dm_concat, __pyx_t_7) < 0) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "gensim/models/doc2vec_corpusfile.pyx":520 + * + * + * CORPUSFILE_VERSION = 1 # <<<<<<<<<<<<<< + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CORPUSFILE_VERSION, __pyx_int_1) < 0) __PYX_ERR(0, 520, __pyx_L1_error) + + /* "gensim/models/doc2vec_corpusfile.pyx":1 + * #!/usr/bin/env cython # <<<<<<<<<<<<<< + * # cython: boundscheck=False + * # cython: wraparound=False + */ + __pyx_t_7 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_7) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init gensim.models.doc2vec_corpusfile", 0, __pyx_lineno, __pyx_filename); + } + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init gensim.models.doc2vec_corpusfile"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if 
(!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))); + 
assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs); + } +} +#endif + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +#include "frameobject.h" +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = f->f_localsplus; + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + 
Py_DECREF(args); + return result; +} +#endif + +/* DictGetItem */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) + PyErr_SetObject(PyExc_KeyError, args); + Py_XDECREF(args); + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/* RaiseTooManyValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? "" : "s"); +} + +/* RaiseNoneIterError */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if PY_VERSION_HEX >= 0x030700A2 + *type = tstate->exc_state.exc_type; + *value = tstate->exc_state.exc_value; + *tb = tstate->exc_state.exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if PY_VERSION_HEX >= 0x030700A2 + tmp_type = tstate->exc_state.exc_type; + tmp_value = tstate->exc_state.exc_value; + tmp_tb = tstate->exc_state.exc_traceback; + tstate->exc_state.exc_type = type; + tstate->exc_state.exc_value = value; + tstate->exc_state.exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { + PyObject *exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + if (unlikely(PyTuple_Check(err))) + return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) { +#endif + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + 
PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if PY_VERSION_HEX >= 0x030700A2 + tmp_type = tstate->exc_state.exc_type; + tmp_value = tstate->exc_state.exc_value; + tmp_tb = tstate->exc_state.exc_traceback; + tstate->exc_state.exc_type = local_type; + tstate->exc_state.exc_value = local_value; + tstate->exc_state.exc_traceback = local_tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* GetVTable */ + static void* __Pyx_GetVtable(PyObject *dict) { + void* ptr; + PyObject *ob = PyObject_GetItem(dict, __pyx_n_s_pyx_vtable); + if (!ob) + goto bad; +#if PY_VERSION_HEX >= 0x02070000 + ptr = PyCapsule_GetPointer(ob, 0); +#else + ptr = PyCObject_AsVoidPtr(ob); +#endif + if (!ptr && !PyErr_Occurred()) + PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); + Py_DECREF(ob); + return ptr; +bad: + Py_XDECREF(ob); + return NULL; +} + +/* Import */ + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ + static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import 
name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* CLineInTraceback */ + #ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + use_cline = __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback); + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (PyObject_Not(use_cline) != 0) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ + static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + 
__pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ + #include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ + #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* None */ + static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void) { + int err; + #ifdef WITH_THREAD + PyGILState_STATE _save = PyGILState_Ensure(); + #endif + err = !!PyErr_Occurred(); + #ifdef WITH_THREAD + PyGILState_Release(_save); + #endif + return err; +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* Declarations */ + #if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, 
float y) { + return ::std::complex< float >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return x + y*(__pyx_t_float_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + __pyx_t_float_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +/* Arithmetic */ + #if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + #if 1 + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + if (b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabsf(b.real) >= fabsf(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.imag); + } else { + float r = b.imag / b.real; + float s = 1.0 / (b.real + b.imag * r); + return __pyx_t_float_complex_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + float r = b.real / b.imag; + float s = 1.0 / (b.imag + b.real * r); + return __pyx_t_float_complex_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + if (b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real); + } else { + float denom = b.real * b.real + b.imag * b.imag; + return __pyx_t_float_complex_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + (a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrtf(z.real*z.real + z.imag*z.imag); + #else + return hypotf(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + float denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + 
} + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(a, a); + case 3: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(z, a); + case 4: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = powf(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2f(0, -1); + } + } else { + r = __Pyx_c_abs_float(a); + theta = atan2f(a.imag, a.real); + } + lnr = logf(r); + z_r = expf(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cosf(z_theta); + z.imag = z_r * sinf(z_theta); + return z; + } + #endif +#endif + +/* Declarations */ + #if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return ::std::complex< double >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return x + y*(__pyx_t_double_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + __pyx_t_double_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +/* Arithmetic */ + #if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + #if 1 + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + if (b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabs(b.real) >= fabs(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.imag); + } else { + double r = b.imag / b.real; + double s = 1.0 / (b.real + b.imag * r); + return __pyx_t_double_complex_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + double r = b.real / b.imag; + double s = 1.0 / (b.imag + b.real * r); + return __pyx_t_double_complex_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + if (b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real); + } else { + double denom = b.real * b.real + b.imag * b.imag; + return __pyx_t_double_complex_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + (a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + 
static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt(z.real*z.real + z.imag*z.imag); + #else + return hypot(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + double denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(a, a); + case 3: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(z, a); + case 4: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = pow(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2(0, -1); + } + } else { + r = __Pyx_c_abs_double(a); + theta = atan2(a.imag, a.real); + } + lnr = log(r); + z_r = exp(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos(z_theta); + z.imag = z_r * sin(z_theta); + return z; + } + #endif +#endif + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value) { + const enum NPY_TYPES neg_one = (enum NPY_TYPES) -1, const_zero = (enum NPY_TYPES) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(enum NPY_TYPES) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(enum NPY_TYPES) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum NPY_TYPES) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(enum NPY_TYPES), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if 
(likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } 
else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntFromPy */ + static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 
0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) 
(((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* FastTypeChecks */ + #if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int 
__Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { + if (likely(err == exc_type)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); + } + return PyErr_GivenExceptionMatches(err, exc_type); +} +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { + if (likely(err == exc_type1 || err == exc_type2)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); + } + return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); +} +#endif + +/* CheckBinaryVersion */ + static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* ModuleImport */ + #ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +/* TypeImport */ + #ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = 
PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s has the wrong size, try recompiling. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +/* VoidPtrImport */ + #ifndef __PYX_HAVE_RT_ImportVoidPtr +#define __PYX_HAVE_RT_ImportVoidPtr +static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); + if (!d) + goto bad; + cobj = PyDict_GetItemString(d, name); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C variable %.200s", + PyModule_GetName(module), name); + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), name, sig, PyCapsule_GetName(cobj)); + goto bad; + } + *p = PyCapsule_GetPointer(cobj, sig); +#else + {const char *desc, *s1, *s2; + desc = (const char *)PyCObject_GetDesc(cobj); + if (!desc) + goto bad; + s1 = desc; s2 = sig; + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + if (*s1 != *s2) { + PyErr_Format(PyExc_TypeError, + "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), name, sig, desc); + goto bad; + } + *p = PyCObject_AsVoidPtr(cobj);} +#endif + if (!(*p)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} +#endif + +/* FunctionImport */ + #ifndef __PYX_HAVE_RT_ImportFunction +#define __PYX_HAVE_RT_ImportFunction +static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); + if (!d) + goto bad; + cobj = PyDict_GetItemString(d, funcname); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C function %.200s", + PyModule_GetName(module), funcname); + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); + goto bad; + } + tmp.p = PyCapsule_GetPointer(cobj, sig); +#else + {const char *desc, *s1, *s2; + desc = (const char 
*)PyCObject_GetDesc(cobj); + if (!desc) + goto bad; + s1 = desc; s2 = sig; + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + if (*s1 != *s2) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, desc); + goto bad; + } + tmp.p = PyCObject_AsVoidPtr(cobj);} +#endif + *f = tmp.fp; + if (!(*f)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} +#endif + +/* InitStrings */ + static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int 
is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(x); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/gensim/models/doc2vec_corpusfile.pyx b/gensim/models/doc2vec_corpusfile.pyx new file mode 100644 index 0000000000..39a12483af --- /dev/null +++ b/gensim/models/doc2vec_corpusfile.pyx @@ -0,0 +1,520 @@ +#!/usr/bin/env cython +# cython: boundscheck=False +# cython: wraparound=False +# cython: cdivision=True +# cython: embedsignature=True +# coding: utf-8 +# +# Copyright (C) 2018 Dmitry Persiyanov +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +"""Optimized cython functions for file-based training :class:`~gensim.models.doc2vec.Doc2Vec` model.""" + +import cython +import numpy as np +cimport numpy as np + +from libcpp.string cimport string +from libcpp.vector cimport vector + +from libc.string cimport memset, memcpy + +# scipy <= 0.15 +try: + from scipy.linalg.blas import fblas +except ImportError: + # in scipy > 0.15, fblas function has been removed + import scipy.linalg.blas as fblas + +from gensim.models.doc2vec_inner cimport ( + fast_document_dbow_hs, + fast_document_dbow_neg, + fast_document_dm_hs, + fast_document_dm_neg, + fast_document_dmc_hs, + fast_document_dmc_neg, + init_d2v_config, + Doc2VecConfig +) + +from gensim.models.word2vec_inner cimport random_int32, sscal, REAL_t, our_saxpy + +from gensim.models.word2vec_corpusfile cimport ( + VocabItem, + CythonVocab, + CythonLineSentence, + get_alpha, + get_next_alpha, + cvocab_t +) + +DEF MAX_DOCUMENT_LEN = 10000 + +cdef int ONE = 1 +cdef REAL_t ONEF = 1.0 + + +cdef void prepare_c_structures_for_batch(vector[string] &doc_words, int sample, int hs, int window, int *total_words, + int *effective_words, unsigned long long *next_random, cvocab_t *vocab, + np.uint32_t *indexes, int *codelens, np.uint8_t **codes, np.uint32_t **points, + np.uint32_t *reduced_windows, int *document_len, int train_words, + int docvecs_count, int doc_tag) nogil: + cdef VocabItem predict_word + cdef string token + cdef int i = 0 + + total_words[0] += 
doc_words.size()
+
+    for token in doc_words:
+        if vocab[0].find(token) == vocab[0].end():  # shrink document to leave out word
+            continue  # leaving i unchanged
+
+        predict_word = vocab[0][token]
+        if sample and predict_word.sample_int < random_int32(next_random):
+            continue
+        indexes[i] = predict_word.index
+        if hs:
+            codelens[i] = predict_word.code_len
+            codes[i] = predict_word.code
+            points[i] = predict_word.point
+
+        effective_words[0] += 1
+        i += 1
+        if i == MAX_DOCUMENT_LEN:
+            break  # TODO: log warning, tally overflow?
+    document_len[0] = i
+
+    if train_words and reduced_windows != NULL:
+        for i in range(document_len[0]):
+            reduced_windows[i] = random_int32(next_random) % window
+
+    if doc_tag < docvecs_count:
+        effective_words[0] += 1
+
+
+def d2v_train_epoch_dbow(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples,
+                         _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None,
+                         train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True,
+                         doctag_vectors=None, doctag_locks=None):
+    """Train distributed bag of words model ("PV-DBOW") by training on a corpus file.
+
+    Called internally from :meth:`~gensim.models.doc2vec.Doc2Vec.train`.
+
+    Parameters
+    ----------
+    model : :class:`~gensim.models.doc2vec.Doc2Vec`
+        The Doc2Vec model instance to train.
+    corpus_file : str
+        Path to corpus file.
+    _cur_epoch : int
+        Current epoch number. Used for calculating and decaying learning rate.
+    work : np.ndarray
+        Private working memory for each worker.
+    neu1 : np.ndarray
+        Private working memory for each worker.
+    train_words : bool, optional
+        Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both** `learn_words`
+        and `train_words` are set to True.
+    learn_doctags : bool, optional
+        Whether the tag vectors should be updated.
+    learn_words : bool, optional
+        Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both**
+        `learn_words` and `train_words` are set to True.
+    learn_hidden : bool, optional
+        Whether or not the weights of the hidden layer will be updated.
+    word_vectors : numpy.ndarray, optional
+        The vector representation for each word in the vocabulary. If None, these will be retrieved from the model.
+    word_locks : numpy.ndarray, optional
+        A learning lock factor for each weight in the hidden layer for words: a value of 0 completely blocks updates,
+        a value of 1 allows full updates to the word-vectors.
+    doctag_vectors : numpy.ndarray, optional
+        Vector representations of the tags. If None, these will be retrieved from the model.
+    doctag_locks : numpy.ndarray, optional
+        The lock factors for each tag, same as `word_locks`, but for document-vectors.
+
+    Returns
+    -------
+    (int, int, int)
+        Number of documents, number of effective words, and total number of words processed in this epoch. 
+ + """ + cdef Doc2VecConfig c + + cdef int cur_epoch = _cur_epoch + cdef int num_epochs = model.epochs + cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + cdef int expected_words = (-1 if _expected_words is None else _expected_words) + cdef REAL_t start_alpha = model.alpha + cdef REAL_t end_alpha = model.min_alpha + cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + + cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + cdef CythonVocab vocab = _cython_vocab + + cdef int i, j, document_len + cdef int effective_words = 0 + cdef int total_effective_words = 0, total_documents = 0, total_words = 0 + cdef int sent_idx, idx_start, idx_end + + cdef vector[string] doc_words + cdef int _doc_tag = start_doctag + + init_d2v_config( + &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=train_words, + work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + doctag_vectors=doctag_vectors, doctag_locks=doctag_locks, docvecs_count=docvecs_count) + + # release GIL & train on the full corpus, document by document + with nogil: + input_stream.reset() + while not (input_stream.is_eof() or total_words > expected_words / c.workers): + effective_words = 0 + + doc_words = input_stream.read_sentence() + + if doc_words.empty(): + continue + + prepare_c_structures_for_batch( + doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, + &c.next_random, vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, c.points, + c.reduced_windows, &document_len, c.train_words, c.docvecs_count, _doc_tag) + + for i in range(document_len): + if c.train_words: # simultaneous skip-gram wordvec-training + j = i - c.window + c.reduced_windows[i] + if j < 0: + j = 0 + k = i + c.window + 1 - c.reduced_windows[i] + if k > document_len: + k = document_len + for j in range(j, k): + if j == i: + continue + if c.hs: + # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + fast_document_dbow_hs( + c.points[i], c.codes[i], c.codelens[i], c.word_vectors, c.syn1, c.layer1_size, + c.indexes[j], c.alpha, c.work, c.learn_words, c.learn_hidden, c.word_locks) + + if c.negative: + # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose + c.next_random = fast_document_dbow_neg( + c.negative, c.cum_table, c.cum_table_len, c.word_vectors, c.syn1neg, + c.layer1_size, c.indexes[i], c.indexes[j], c.alpha, c.work, + c.next_random, c.learn_words, c.learn_hidden, c.word_locks) + + # docvec-training + if _doc_tag < c.docvecs_count: + if c.hs: + fast_document_dbow_hs( + c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + _doc_tag, c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + + if c.negative: + c.next_random = fast_document_dbow_neg( + c.negative, c.cum_table, c.cum_table_len, c.doctag_vectors, c.syn1neg, + c.layer1_size, c.indexes[i], _doc_tag, c.alpha, c.work, c.next_random, + c.learn_doctags, c.learn_hidden, c.doctag_locks) + + total_documents += 1 + total_effective_words += effective_words + _doc_tag += 1 + + c.alpha = get_next_alpha( + start_alpha, end_alpha, total_documents, total_words, + expected_examples, expected_words, cur_epoch, num_epochs) + + return total_documents, total_effective_words, total_words + + +def d2v_train_epoch_dm(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples, + _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None, + 
learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None, doctag_locks=None):
+    """Train distributed memory model ("PV-DM") by training on a corpus file.
+    This method implements the DM model with a projection (input) layer that is either the sum or mean of the context
+    vectors, depending on the model's `dm_mean` configuration field.
+
+    Called internally from :meth:`~gensim.models.doc2vec.Doc2Vec.train`.
+
+    Parameters
+    ----------
+    model : :class:`~gensim.models.doc2vec.Doc2Vec`
+        The Doc2Vec model instance to train.
+    corpus_file : str
+        Path to corpus file.
+    _cur_epoch : int
+        Current epoch number. Used for calculating and decaying learning rate.
+    work : np.ndarray
+        Private working memory for each worker.
+    neu1 : np.ndarray
+        Private working memory for each worker.
+    learn_doctags : bool, optional
+        Whether the tag vectors should be updated.
+    learn_words : bool, optional
+        Whether the word vectors should be updated.
+    learn_hidden : bool, optional
+        Whether or not the weights of the hidden layer will be updated.
+    word_vectors : numpy.ndarray, optional
+        The vector representation for each word in the vocabulary. If None, these will be retrieved from the model.
+    word_locks : numpy.ndarray, optional
+        A learning lock factor for each weight in the hidden layer for words, value 0 completely blocks updates,
+        a value of 1 allows updating word-vectors.
+    doctag_vectors : numpy.ndarray, optional
+        Vector representations of the tags. If None, these will be retrieved from the model.
+    doctag_locks : numpy.ndarray, optional
+        The lock factors for each tag, same as `word_locks`, but for document-vectors.
+
+    Returns
+    -------
+    tuple of (int, int, int)
+        Number of documents processed, number of words that were actually used for training, and total
+        number of words seen in the processed part of the corpus file.
+ + """ + cdef Doc2VecConfig c + + cdef int cur_epoch = _cur_epoch + cdef int num_epochs = model.epochs + cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + cdef int expected_words = (-1 if _expected_words is None else _expected_words) + cdef REAL_t start_alpha = model.alpha + cdef REAL_t end_alpha = model.min_alpha + cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + + cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + cdef CythonVocab vocab = _cython_vocab + + cdef int i, j, k, m, document_len + cdef int effective_words = 0 + cdef int total_effective_words = 0, total_documents = 0, total_words = 0 + cdef int sent_idx, idx_start, idx_end + cdef REAL_t count, inv_count = 1.0 + + cdef vector[string] doc_words + cdef int _doc_tag = start_doctag + + init_d2v_config( + &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=False, + work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + doctag_vectors=doctag_vectors, doctag_locks=doctag_locks, docvecs_count=docvecs_count) + + # release GIL & train on the full corpus, document by document + with nogil: + input_stream.reset() + while not (input_stream.is_eof() or total_words > expected_words / c.workers): + effective_words = 0 + + doc_words = input_stream.read_sentence() + + if doc_words.empty(): + continue + + prepare_c_structures_for_batch( + doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, &c.next_random, + vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, c.points, c.reduced_windows, + &document_len, c.train_words, c.docvecs_count, _doc_tag) + + for i in range(document_len): + j = i - c.window + c.reduced_windows[i] + if j < 0: + j = 0 + k = i + c.window + 1 - c.reduced_windows[i] + if k > document_len: + k = document_len + + # compose l1 (in _neu1) & clear _work + memset(c.neu1, 0, c.layer1_size * cython.sizeof(REAL_t)) + count = 0.0 + for m in range(j, k): + if m == i: + continue + else: + count += ONEF + our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) + + if _doc_tag < c.docvecs_count: + count += ONEF + our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE, c.neu1, &ONE) + if count > (0.5): + inv_count = ONEF/count + if c.cbow_mean: + sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + if c.hs: + fast_document_dm_hs( + c.points[i], c.codes[i], c.codelens[i], c.neu1, + c.syn1, c.alpha, c.work, c.layer1_size, c.learn_hidden) + + if c.negative: + c.next_random = fast_document_dm_neg( + c.negative, c.cum_table, c.cum_table_len, c.next_random, c.neu1, + c.syn1neg, c.indexes[i], c.alpha, c.work, c.layer1_size, c.learn_hidden) + + if not c.cbow_mean: + sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) 
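+                # At this point `work` holds the accumulated error gradient for the projection layer.
+                # With `cbow_mean` set, `neu1` was already scaled by 1/count before the forward pass,
+                # so `work` is applied as-is; with a plain sum projection the 1/count scaling was just
+                # applied to `work` above instead, before it is fanned back out to the vectors below.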
+                # apply accumulated error in work
+                if c.learn_doctags and _doc_tag < c.docvecs_count:
+                    our_saxpy(&c.layer1_size, &c.doctag_locks[_doc_tag], c.work,
+                              &ONE, &c.doctag_vectors[_doc_tag * c.layer1_size], &ONE)
+                if c.learn_words:
+                    for m in range(j, k):
+                        if m == i:
+                            continue
+                        else:
+                            our_saxpy(&c.layer1_size, &c.word_locks[c.indexes[m]], c.work, &ONE,
+                                      &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE)
+
+            total_documents += 1
+            total_effective_words += effective_words
+            _doc_tag += 1
+
+            c.alpha = get_next_alpha(start_alpha, end_alpha, total_documents, total_words, expected_examples,
+                                     expected_words, cur_epoch, num_epochs)
+
+    return total_documents, total_effective_words, total_words
+
+
+def d2v_train_epoch_dm_concat(model, corpus_file, offset, start_doctag, _cython_vocab, _cur_epoch, _expected_examples,
+                              _expected_words, work, neu1, docvecs_count, word_vectors=None, word_locks=None,
+                              learn_doctags=True, learn_words=True, learn_hidden=True, doctag_vectors=None,
+                              doctag_locks=None):
+    """Train distributed memory model ("PV-DM") by training on a corpus file, using a concatenation of the context
+    window word vectors (rather than a sum or average).
+    This might be slower since the input at each batch will be significantly larger.
+
+    Called internally from :meth:`~gensim.models.doc2vec.Doc2Vec.train`.
+
+    Parameters
+    ----------
+    model : :class:`~gensim.models.doc2vec.Doc2Vec`
+        The Doc2Vec model instance to train.
+    corpus_file : str
+        Path to corpus file.
+    _cur_epoch : int
+        Current epoch number. Used for calculating and decaying learning rate.
+    work : np.ndarray
+        Private working memory for each worker.
+    neu1 : np.ndarray
+        Private working memory for each worker.
+    learn_doctags : bool, optional
+        Whether the tag vectors should be updated.
+    learn_words : bool, optional
+        Whether the word vectors should be updated.
+    learn_hidden : bool, optional
+        Whether or not the weights of the hidden layer will be updated.
+    word_vectors : numpy.ndarray, optional
+        The vector representation for each word in the vocabulary. If None, these will be retrieved from the model.
+    word_locks : numpy.ndarray, optional
+        A learning lock factor for each weight in the hidden layer for words, value 0 completely blocks updates,
+        a value of 1 allows updating word-vectors.
+    doctag_vectors : numpy.ndarray, optional
+        Vector representations of the tags. If None, these will be retrieved from the model.
+    doctag_locks : numpy.ndarray, optional
+        The lock factors for each tag, same as `word_locks`, but for document-vectors.
+
+    Returns
+    -------
+    tuple of (int, int, int)
+        Number of documents processed, number of words that were actually used for training, and total
+        number of words seen in the processed part of the corpus file.
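+
+    Notes
+    -----
+    With concatenation the projection layer is not a single summed or averaged vector: the document
+    vector(s) and the ``2 * window`` context word vectors are laid out side by side, so the effective
+    input width is ``(dm_tag_count + 2 * window) * vector_size``, with positions that fall outside the
+    document padded by the null word. For example, one document tag with ``window=5`` and
+    ``vector_size=100`` gives a 1100-dimensional input to the hidden layer.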
+ + """ + cdef Doc2VecConfig c + + cdef int cur_epoch = _cur_epoch + cdef int num_epochs = model.epochs + cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + cdef int expected_words = (-1 if _expected_words is None else _expected_words) + cdef REAL_t start_alpha = model.alpha + cdef REAL_t end_alpha = model.min_alpha + cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + + cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + cdef CythonVocab vocab = _cython_vocab + + cdef int i, j, k, m, n, document_len + cdef int effective_words = 0 + cdef int total_effective_words = 0, total_documents = 0, total_words = 0 + cdef int sent_idx, idx_start, idx_end + + cdef vector[string] doc_words + cdef int _doc_tag = start_doctag + + init_d2v_config( + &c, model, _alpha, learn_doctags, learn_words, learn_hidden, train_words=False, + work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + doctag_vectors=doctag_vectors, doctag_locks=doctag_locks, docvecs_count=docvecs_count) + + # release GIL & train on the full corpus, document by document + with nogil: + input_stream.reset() + while not (input_stream.is_eof() or total_words > expected_words / c.workers): + effective_words = 0 + + doc_words = input_stream.read_sentence() + _doc_tag = total_documents + c.doctag_len = _doc_tag < c.docvecs_count + + # skip doc either empty or without expected number of tags + if doc_words.empty() or c.expected_doctag_len != c.doctag_len: + continue + + prepare_c_structures_for_batch( + doc_words, c.sample, c.hs, c.window, &total_words, &effective_words, + &c.next_random, vocab.get_vocab_ptr(), c.indexes, c.codelens, c.codes, + c.points, NULL, &document_len, c.train_words, c.docvecs_count, _doc_tag) + + for i in range(document_len): + j = i - c.window # negative OK: will pad with null word + k = i + c.window + 1 # past document end OK: will pad with null word + + # compose l1 & clear work + if _doc_tag < c.docvecs_count: + # doc vector(s) + memcpy(&c.neu1[0], &c.doctag_vectors[_doc_tag * c.vector_size], + c.vector_size * cython.sizeof(REAL_t)) + n = 0 + for m in range(j, k): + # word vectors in window + if m == i: + continue + if m < 0 or m >= document_len: + c.window_indexes[n] = c.null_word_index + else: + c.window_indexes[n] = c.indexes[m] + n += 1 + for m in range(2 * c.window): + memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], + c.vector_size * cython.sizeof(REAL_t)) + memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + + if c.hs: + fast_document_dmc_hs( + c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, + c.alpha, c.work, c.layer1_size, c.vector_size, c.learn_hidden) + + if c.negative: + c.next_random = fast_document_dmc_neg( + c.negative, c.cum_table, c.cum_table_len, c.next_random, c.neu1, c.syn1neg, + c.indexes[i], c.alpha, c.work, c.layer1_size, c.vector_size, c.learn_hidden) + + if c.learn_doctags and _doc_tag < c.docvecs_count: + our_saxpy(&c.vector_size, &c.doctag_locks[_doc_tag], &c.work[m * c.vector_size], + &ONE, &c.doctag_vectors[_doc_tag * c.vector_size], &ONE) + if c.learn_words: + for m in range(2 * c.window): + our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], + &ONE, &c.word_vectors[c.window_indexes[m] * c.vector_size], &ONE) + + total_documents += 1 + total_effective_words += effective_words + _doc_tag += 1 + + c.alpha = 
get_next_alpha(start_alpha, end_alpha, total_documents, total_words, expected_examples, + expected_words, cur_epoch, num_epochs) + + return total_documents, total_effective_words, total_words + + +CORPUSFILE_VERSION = 1 diff --git a/gensim/models/doc2vec_inner.c b/gensim/models/doc2vec_inner.c index 56d0478e05..6ab9beeb93 100644 --- a/gensim/models/doc2vec_inner.c +++ b/gensim/models/doc2vec_inner.c @@ -1,4 +1,4 @@ -/* Generated by Cython 0.28.3 */ +/* Generated by Cython 0.28.2 */ #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -7,7 +7,7 @@ #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. #else -#define CYTHON_ABI "0_28_3" +#define CYTHON_ABI "0_28_2" #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof @@ -453,7 +453,6 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact - #define PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -651,7 +650,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -759,7 +758,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_cython_runtime; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -809,7 +808,7 @@ static const char *__pyx_f[] = { #endif -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. 
* * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -818,7 +817,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -827,7 +826,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -836,7 +835,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -845,7 +844,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -854,7 +853,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -863,7 +862,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -872,7 +871,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -881,7 +880,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -890,7 +889,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -899,7 
+898,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -908,7 +907,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -917,7 +916,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -926,7 +925,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -935,7 +934,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -944,7 +943,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -953,7 +952,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -962,7 +961,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -971,7 +970,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * 
ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -980,7 +979,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -989,7 +988,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -998,9 +997,9 @@ typedef npy_double __pyx_t_5numpy_double_t; */ typedef npy_longdouble __pyx_t_5numpy_longdouble_t; -/* "word2vec_inner.pxd":12 +/* "word2vec_inner.pxd":19 + * void* PyCObject_AsVoidPtr(object obj) * - * cimport numpy as np * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< * * # BLAS routine signatures @@ -1033,7 +1032,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do /*--- Type declarations ---*/ -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1042,7 +1041,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1051,7 +1050,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1060,7 +1059,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -1068,8 +1067,10 @@ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; * cdef inline object PyArray_MultiIterNew1(a): */ typedef npy_cdouble __pyx_t_5numpy_complex_t; +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig; +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config; -/* "word2vec_inner.pxd":15 +/* "word2vec_inner.pxd":22 * * # BLAS routine signatures * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1078,7 +1079,7 @@ typedef npy_cdouble __pyx_t_5numpy_complex_t; */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const *, float const *, int const *, float *, int const *); -/* 
"word2vec_inner.pxd":16 +/* "word2vec_inner.pxd":23 * # BLAS routine signatures * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1087,7 +1088,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const *, */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); -/* "word2vec_inner.pxd":17 +/* "word2vec_inner.pxd":24 * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1096,7 +1097,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, */ typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "word2vec_inner.pxd":18 +/* "word2vec_inner.pxd":25 * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1105,7 +1106,7 @@ typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, */ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "word2vec_inner.pxd":19 +/* "word2vec_inner.pxd":26 * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< @@ -1114,7 +1115,7 @@ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const * */ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const *, float const *, int const *); -/* "word2vec_inner.pxd":20 +/* "word2vec_inner.pxd":27 * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< @@ -1123,7 +1124,7 @@ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const * */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, float const *, float const *, int const *); -/* "word2vec_inner.pxd":35 +/* "word2vec_inner.pxd":44 * * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1132,7 +1133,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, */ typedef 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "word2vec_inner.pxd":36 +/* "word2vec_inner.pxd":45 * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1141,6 +1142,117 @@ typedef __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6model */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); +/* "word2vec_inner.pxd":51 + * + * + * cdef struct Word2VecConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, compute_loss, size, window, cbow_mean, workers + * REAL_t running_training_loss, alpha + */ +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig { + int hs; + int negative; + int sample; + int compute_loss; + int size; + int window; + int cbow_mean; + int workers; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t running_training_loss; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + int sentence_idx[(0x2710 + 1)]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; +}; + +/* "word2vec_inner.pxd":125 + * + * + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=*) # <<<<<<<<<<<<<< + */ +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config { + int __pyx_n; + PyObject *_neu1; +}; +struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig; +struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config; + +/* "gensim/models/doc2vec_inner.pxd":23 + * + * + * cdef struct Doc2VecConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, learn_doctags, learn_words, learn_hidden, train_words, cbow_mean + * int document_len, doctag_len, window, expected_doctag_len, null_word_index, workers, docvecs_count + */ +struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig { + int hs; + int negative; + int sample; + int learn_doctags; + int learn_words; + int learn_hidden; + int train_words; + int cbow_mean; + int document_len; + int doctag_len; + int window; + int expected_doctag_len; + int null_word_index; + int workers; + int docvecs_count; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_vectors; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *doctag_vectors; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *doctag_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + int 
layer1_size; + int vector_size; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t doctag_indexes[0x2710]; + __pyx_t_5numpy_uint32_t window_indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; +}; + +/* "gensim/models/doc2vec_inner.pxd":91 + * + * + * cdef init_d2v_config(Doc2VecConfig *c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=*, work=*, # <<<<<<<<<<<<<< + * neu1=*, word_vectors=*, word_locks=*, doctag_vectors=*, doctag_locks=*, docvecs_count=*) + */ +struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config { + int __pyx_n; + PyObject *train_words; + PyObject *work; + PyObject *neu1; + PyObject *word_vectors; + PyObject *word_locks; + PyObject *doctag_vectors; + PyObject *doctag_locks; + PyObject *docvecs_count; +}; + /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY @@ -1215,17 +1327,22 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif /* ExtTypeTest.proto */ static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); @@ -1240,11 +1357,17 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg /* GetModuleGlobalName.proto */ static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); -/* PySequenceContains.proto */ -static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { - int result = PySequence_Contains(seq, item); - return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); -} +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); /* GetItemInt.proto */ #define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ @@ -1293,17 +1416,6 @@ static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObje #define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) #endif -/* DictGetItem.proto */ -#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY -static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); -#define __Pyx_PyObject_Dict_GetItem(obj, name)\ - (likely(PyDict_CheckExact(obj)) ?\ - __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) -#else -#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) -#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) -#endif - /* PyThreadStateGet.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; @@ -1560,6 +1672,9 @@ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObj /* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); +/* FunctionExport.proto */ +static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); + /* PyIdentifierFromString.proto */ #if !defined(__Pyx_PyIdentifier_FromString) #if PY_MAJOR_VERSION < 3 @@ -1585,8 +1700,6 @@ static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (** static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); -/* Module declarations from 'cython' */ - /* Module declarations from 'cpython.buffer' */ /* Module declarations from 'libc.string' */ @@ -1638,6 +1751,8 @@ static __pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr *__pyx_vp_6gensim_ static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_bisect_left)(__pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG); /*proto*/ static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_random_int32)(unsigned PY_LONG_LONG *); /*proto*/ +/* Module declarations from 'cython' */ + /* Module declarations from 'gensim.models.doc2vec_inner' */ static int __pyx_v_6gensim_6models_13doc2vec_inner_ONE; static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_6gensim_6models_13doc2vec_inner_ONEF; @@ -1647,6 +1762,7 @@ static void __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_hs(__pyx_t_ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int); /*proto*/ static void __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int const , int); /*proto*/ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int const , int); /*proto*/ +static PyObject *__pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config(struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config *__pyx_optional_args); /*proto*/ #define __Pyx_MODULE_NAME "gensim.models.doc2vec_inner" extern int __pyx_module_is_main_gensim__models__doc2vec_inner; int __pyx_module_is_main_gensim__models__doc2vec_inner = 0; @@ -1657,28 +1773,26 @@ static PyObject *__pyx_builtin_range; static PyObject *__pyx_builtin_enumerate; static PyObject *__pyx_builtin_ValueError; static PyObject *__pyx_builtin_RuntimeError; +static const char __pyx_k_[] = "\000"; +static const char __pyx_k_c[] = "c"; static const char __pyx_k_i[] = "i"; static const char __pyx_k_j[] = "j"; static const char __pyx_k_k[] = "k"; static const char __pyx_k_m[] = "m"; static const char __pyx_k_n[] = "n"; -static const char __pyx_k_r[] = "r"; -static const char __pyx_k__5[] = "\000"; static const char __pyx_k_hs[] = "hs"; static const char __pyx_k_np[] = "np"; static const char __pyx_k_wv[] = "wv"; -static const char __pyx_k__17[] = "*"; +static const char __pyx_k__13[] = "*"; static const char __pyx_k_REAL[] = "REAL"; static const char __pyx_k_code[] = "code"; static const char __pyx_k_item[] = "item"; static const char __pyx_k_main[] = "__main__"; static const char __pyx_k_neu1[] = "neu1"; -static const char __pyx_k_size[] = "size"; static const char __pyx_k_syn1[] = "syn1"; static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_work[] = "work"; static const char __pyx_k_alpha[] = "alpha"; -static const char __pyx_k_codes[] = "codes"; static const char __pyx_k_count[] = "count"; static const char __pyx_k_dtype[] = "dtype"; static const char __pyx_k_fblas[] = "fblas"; @@ -1691,22 +1805,17 @@ static const char __pyx_k_token[] = "token"; static const char __pyx_k_vocab[] = "vocab"; static const char __pyx_k_zeros[] = "zeros"; static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_neu1_2[] = "_neu1"; -static const char __pyx_k_points[] = "points"; static const char __pyx_k_random[] = "random"; static const char __pyx_k_result[] = "result"; static const char __pyx_k_sample[] = "sample"; static const char __pyx_k_window[] = "window"; -static const char __pyx_k_work_2[] = "_work"; -static const char __pyx_k_alpha_2[] = "_alpha"; static const char __pyx_k_docvecs[] = "docvecs"; static const char __pyx_k_float32[] = "float32"; -static const char __pyx_k_indexes[] = "indexes"; static const char __pyx_k_randint[] = "randint"; static const char __pyx_k_syn1neg[] = "syn1neg"; static const char __pyx_k_vectors[] = "vectors"; static const char __pyx_k_vlookup[] = "vlookup"; -static const char __pyx_k_codelens[] = "codelens"; +static const char __pyx_k_workers[] = "workers"; static const char __pyx_k_negative[] = "negative"; 
static const char __pyx_k_cbow_mean[] = "cbow_mean"; static const char __pyx_k_cum_table[] = "cum_table"; @@ -1714,7 +1823,6 @@ static const char __pyx_k_doc_words[] = "doc_words"; static const char __pyx_k_enumerate[] = "enumerate"; static const char __pyx_k_inv_count[] = "inv_count"; static const char __pyx_k_ValueError[] = "ValueError"; -static const char __pyx_k_doctag_len[] = "doctag_len"; static const char __pyx_k_sample_int[] = "sample_int"; static const char __pyx_k_trainables[] = "trainables"; static const char __pyx_k_vocabulary[] = "vocabulary"; @@ -1722,39 +1830,23 @@ static const char __pyx_k_word_locks[] = "word_locks"; static const char __pyx_k_ImportError[] = "ImportError"; static const char __pyx_k_layer1_size[] = "layer1_size"; static const char __pyx_k_learn_words[] = "learn_words"; -static const char __pyx_k_next_random[] = "next_random"; static const char __pyx_k_train_words[] = "train_words"; static const char __pyx_k_vector_size[] = "vector_size"; static const char __pyx_k_RuntimeError[] = "RuntimeError"; static const char __pyx_k_dm_tag_count[] = "dm_tag_count"; static const char __pyx_k_doctag_locks[] = "doctag_locks"; -static const char __pyx_k_document_len[] = "document_len"; static const char __pyx_k_learn_hidden[] = "learn_hidden"; static const char __pyx_k_predict_word[] = "predict_word"; static const char __pyx_k_vectors_docs[] = "vectors_docs"; -static const char __pyx_k_word_locks_2[] = "_word_locks"; static const char __pyx_k_word_vectors[] = "word_vectors"; -static const char __pyx_k_cum_table_len[] = "cum_table_len"; static const char __pyx_k_learn_doctags[] = "learn_doctags"; -static const char __pyx_k_learn_words_2[] = "_learn_words"; -static const char __pyx_k_train_words_2[] = "_train_words"; static const char __pyx_k_vectors_lockf[] = "vectors_lockf"; static const char __pyx_k_doctag_indexes[] = "doctag_indexes"; -static const char __pyx_k_doctag_locks_2[] = "_doctag_locks"; static const char __pyx_k_doctag_vectors[] = "doctag_vectors"; -static const char __pyx_k_learn_hidden_2[] = "_learn_hidden"; -static const char __pyx_k_window_indexes[] = "window_indexes"; -static const char __pyx_k_word_vectors_2[] = "_word_vectors"; -static const char __pyx_k_learn_doctags_2[] = "_learn_doctags"; -static const char __pyx_k_null_word_index[] = "null_word_index"; -static const char __pyx_k_reduced_windows[] = "reduced_windows"; -static const char __pyx_k_doctag_indexes_2[] = "_doctag_indexes"; -static const char __pyx_k_doctag_vectors_2[] = "_doctag_vectors"; static const char __pyx_k_scipy_linalg_blas[] = "scipy.linalg.blas"; static const char __pyx_k_train_document_dm[] = "train_document_dm"; static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static const char __pyx_k_vectors_docs_lockf[] = "vectors_docs_lockf"; -static const char __pyx_k_expected_doctag_len[] = "expected_doctag_len"; static const char __pyx_k_train_document_dbow[] = "train_document_dbow"; static const char __pyx_k_train_document_dm_concat[] = "train_document_dm_concat"; static const char __pyx_k_gensim_models_doc2vec_inner[] = "gensim.models.doc2vec_inner"; @@ -1768,6 +1860,7 @@ static const char __pyx_k_Optimized_cython_functions_for_t[] = "Optimized cython static const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = "ndarray is not Fortran contiguous"; static const char __pyx_k_numpy_core_umath_failed_to_impor[] = "numpy.core.umath failed to import"; static const char __pyx_k_Format_string_allocated_too_shor_2[] = "Format string allocated too short."; +static PyObject 
*__pyx_kp_s_; static PyObject *__pyx_kp_u_Format_string_allocated_too_shor; static PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2; static PyObject *__pyx_n_s_ImportError; @@ -1775,32 +1868,22 @@ static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor; static PyObject *__pyx_n_s_REAL; static PyObject *__pyx_n_s_RuntimeError; static PyObject *__pyx_n_s_ValueError; -static PyObject *__pyx_n_s__17; -static PyObject *__pyx_kp_s__5; +static PyObject *__pyx_n_s__13; static PyObject *__pyx_n_s_alpha; -static PyObject *__pyx_n_s_alpha_2; +static PyObject *__pyx_n_s_c; static PyObject *__pyx_n_s_cbow_mean; static PyObject *__pyx_n_s_cline_in_traceback; static PyObject *__pyx_n_s_code; -static PyObject *__pyx_n_s_codelens; -static PyObject *__pyx_n_s_codes; static PyObject *__pyx_n_s_count; static PyObject *__pyx_n_s_cum_table; -static PyObject *__pyx_n_s_cum_table_len; static PyObject *__pyx_n_s_dm_tag_count; static PyObject *__pyx_n_s_doc_words; static PyObject *__pyx_n_s_doctag_indexes; -static PyObject *__pyx_n_s_doctag_indexes_2; -static PyObject *__pyx_n_s_doctag_len; static PyObject *__pyx_n_s_doctag_locks; -static PyObject *__pyx_n_s_doctag_locks_2; static PyObject *__pyx_n_s_doctag_vectors; -static PyObject *__pyx_n_s_doctag_vectors_2; -static PyObject *__pyx_n_s_document_len; static PyObject *__pyx_n_s_docvecs; static PyObject *__pyx_n_s_dtype; static PyObject *__pyx_n_s_enumerate; -static PyObject *__pyx_n_s_expected_doctag_len; static PyObject *__pyx_n_s_fblas; static PyObject *__pyx_n_s_float32; static PyObject *__pyx_n_s_gensim_models_doc2vec_inner; @@ -1809,18 +1892,14 @@ static PyObject *__pyx_n_s_hs; static PyObject *__pyx_n_s_i; static PyObject *__pyx_n_s_import; static PyObject *__pyx_n_s_index; -static PyObject *__pyx_n_s_indexes; static PyObject *__pyx_n_s_inv_count; static PyObject *__pyx_n_s_item; static PyObject *__pyx_n_s_j; static PyObject *__pyx_n_s_k; static PyObject *__pyx_n_s_layer1_size; static PyObject *__pyx_n_s_learn_doctags; -static PyObject *__pyx_n_s_learn_doctags_2; static PyObject *__pyx_n_s_learn_hidden; -static PyObject *__pyx_n_s_learn_hidden_2; static PyObject *__pyx_n_s_learn_words; -static PyObject *__pyx_n_s_learn_words_2; static PyObject *__pyx_n_s_m; static PyObject *__pyx_n_s_main; static PyObject *__pyx_n_s_model; @@ -1829,26 +1908,19 @@ static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous; static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou; static PyObject *__pyx_n_s_negative; static PyObject *__pyx_n_s_neu1; -static PyObject *__pyx_n_s_neu1_2; -static PyObject *__pyx_n_s_next_random; static PyObject *__pyx_n_s_np; -static PyObject *__pyx_n_s_null_word_index; static PyObject *__pyx_n_s_numpy; static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to; static PyObject *__pyx_kp_s_numpy_core_umath_failed_to_impor; static PyObject *__pyx_n_s_point; -static PyObject *__pyx_n_s_points; static PyObject *__pyx_n_s_predict_word; -static PyObject *__pyx_n_s_r; static PyObject *__pyx_n_s_randint; static PyObject *__pyx_n_s_random; static PyObject *__pyx_n_s_range; -static PyObject *__pyx_n_s_reduced_windows; static PyObject *__pyx_n_s_result; static PyObject *__pyx_n_s_sample; static PyObject *__pyx_n_s_sample_int; static PyObject *__pyx_n_s_scipy_linalg_blas; -static PyObject *__pyx_n_s_size; static PyObject *__pyx_n_s_syn1; static PyObject *__pyx_n_s_syn1neg; static PyObject *__pyx_n_s_test; @@ -1857,7 +1929,6 @@ static PyObject *__pyx_n_s_train_document_dbow; static PyObject *__pyx_n_s_train_document_dm; static PyObject 
*__pyx_n_s_train_document_dm_concat; static PyObject *__pyx_n_s_train_words; -static PyObject *__pyx_n_s_train_words_2; static PyObject *__pyx_n_s_trainables; static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd; static PyObject *__pyx_n_s_vector_size; @@ -1869,13 +1940,10 @@ static PyObject *__pyx_n_s_vlookup; static PyObject *__pyx_n_s_vocab; static PyObject *__pyx_n_s_vocabulary; static PyObject *__pyx_n_s_window; -static PyObject *__pyx_n_s_window_indexes; static PyObject *__pyx_n_s_word_locks; -static PyObject *__pyx_n_s_word_locks_2; static PyObject *__pyx_n_s_word_vectors; -static PyObject *__pyx_n_s_word_vectors_2; static PyObject *__pyx_n_s_work; -static PyObject *__pyx_n_s_work_2; +static PyObject *__pyx_n_s_workers; static PyObject *__pyx_n_s_wv; static PyObject *__pyx_n_s_zeros; static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_doc_words, PyObject *__pyx_v_doctag_indexes, PyObject *__pyx_v_alpha, PyObject *__pyx_v_work, PyObject *__pyx_v_train_words, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks); /* proto */ @@ -1885,10 +1953,10 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ static PyObject *__pyx_int_0; static PyObject *__pyx_int_16777216; -static PyObject *__pyx_tuple_; static PyObject *__pyx_tuple__2; static PyObject *__pyx_tuple__3; static PyObject *__pyx_tuple__4; +static PyObject *__pyx_tuple__5; static PyObject *__pyx_tuple__6; static PyObject *__pyx_tuple__7; static PyObject *__pyx_tuple__8; @@ -1896,16 +1964,12 @@ static PyObject *__pyx_tuple__9; static PyObject *__pyx_tuple__10; static PyObject *__pyx_tuple__11; static PyObject *__pyx_tuple__12; -static PyObject *__pyx_tuple__13; static PyObject *__pyx_tuple__14; -static PyObject *__pyx_tuple__15; static PyObject *__pyx_tuple__16; static PyObject *__pyx_tuple__18; -static PyObject *__pyx_tuple__20; -static PyObject *__pyx_tuple__22; +static PyObject *__pyx_codeobj__15; +static PyObject *__pyx_codeobj__17; static PyObject *__pyx_codeobj__19; -static PyObject *__pyx_codeobj__21; -static PyObject *__pyx_codeobj__23; /* Late includes */ /* "gensim/models/doc2vec_inner.pyx":37 @@ -3286,906 +3350,1193 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_13doc2vec_inner_fast_docume /* "gensim/models/doc2vec_inner.pyx":223 * * - * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, # <<<<<<<<<<<<<< - * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, - * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): + * cdef init_d2v_config(Doc2VecConfig *c, model, alpha, learn_doctags, learn_words, learn_hidden, # <<<<<<<<<<<<<< + * train_words=False, work=None, neu1=None, word_vectors=None, word_locks=None, doctag_vectors=None, + * doctag_locks=None, docvecs_count=0): */ -/* Python wrapper */ -static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_1train_document_dbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_6gensim_6models_13doc2vec_inner_train_document_dbow[] = "train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, train_words=False, 
learn_doctags=True, learn_words=True, learn_hidden=True, word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None)\nUpdate distributed bag of words model (\"PV-DBOW\") by training on a single document.\n\n Called internally from :meth:`~gensim.models.doc2vec.Doc2Vec.train` and\n :meth:`~gensim.models.doc2vec.Doc2Vec.infer_vector`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.doc2vec.Doc2Vec`\n The model to train.\n doc_words : list of str\n The input document as a list of words to be used for training. Each word will be looked up in\n the model's vocabulary.\n doctag_indexes : list of int\n Indices into `doctag_vectors` used to obtain the tags of the document.\n alpha : float\n Learning rate.\n work : list of float, optional\n Updates to be performed on each neuron in the hidden layer of the underlying network.\n train_words : bool, optional\n Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both** `learn_words`\n and `train_words` are set to True.\n learn_doctags : bool, optional\n Whether the tag vectors should be updated.\n learn_words : bool, optional\n Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both**\n `learn_words` and `train_words` are set to True.\n learn_hidden : bool, optional\n Whether or not the weights of the hidden layer will be updated.\n word_vectors : numpy.ndarray, optional\n The vector representation for each word in the vocabulary. If None, these will be retrieved from the model.\n word_locks : numpy.ndarray, optional\n A learning lock factor for each weight in the hidden layer for words, value 0 completely blocks updates,\n a value of 1 allows to update word-vectors.\n doctag_vectors : numpy.ndarray, ""optional\n Vector representations of the tags. 
If None, these will be retrieved from the model.\n doctag_locks : numpy.ndarray, optional\n The lock factors for each tag, same as `word_locks`, but for document-vectors.\n\n Returns\n -------\n int\n Number of words in the input document that were actually used for training.\n\n "; -static PyMethodDef __pyx_mdef_6gensim_6models_13doc2vec_inner_1train_document_dbow = {"train_document_dbow", (PyCFunction)__pyx_pw_6gensim_6models_13doc2vec_inner_1train_document_dbow, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_13doc2vec_inner_train_document_dbow}; -static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_1train_document_dbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_model = 0; - PyObject *__pyx_v_doc_words = 0; - PyObject *__pyx_v_doctag_indexes = 0; - PyObject *__pyx_v_alpha = 0; - PyObject *__pyx_v_work = 0; - PyObject *__pyx_v_train_words = 0; - PyObject *__pyx_v_learn_doctags = 0; - PyObject *__pyx_v_learn_words = 0; - PyObject *__pyx_v_learn_hidden = 0; - PyObject *__pyx_v_word_vectors = 0; - PyObject *__pyx_v_word_locks = 0; - PyObject *__pyx_v_doctag_vectors = 0; - PyObject *__pyx_v_doctag_locks = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("train_document_dbow (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_doc_words,&__pyx_n_s_doctag_indexes,&__pyx_n_s_alpha,&__pyx_n_s_work,&__pyx_n_s_train_words,&__pyx_n_s_learn_doctags,&__pyx_n_s_learn_words,&__pyx_n_s_learn_hidden,&__pyx_n_s_word_vectors,&__pyx_n_s_word_locks,&__pyx_n_s_doctag_vectors,&__pyx_n_s_doctag_locks,0}; - PyObject* values[13] = {0,0,0,0,0,0,0,0,0,0,0,0,0}; - values[4] = ((PyObject *)Py_None); +static PyObject *__pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config(struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig *__pyx_v_c, PyObject *__pyx_v_model, PyObject *__pyx_v_alpha, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config *__pyx_optional_args) { - /* "gensim/models/doc2vec_inner.pyx":224 + /* "gensim/models/doc2vec_inner.pyx":224 * - * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, - * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, # <<<<<<<<<<<<<< - * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): - * """Update distributed bag of words model ("PV-DBOW") by training on a single document. 
+ * cdef init_d2v_config(Doc2VecConfig *c, model, alpha, learn_doctags, learn_words, learn_hidden, + * train_words=False, work=None, neu1=None, word_vectors=None, word_locks=None, doctag_vectors=None, # <<<<<<<<<<<<<< + * doctag_locks=None, docvecs_count=0): + * c[0].hs = model.hs */ - values[5] = ((PyObject *)Py_False); - values[6] = ((PyObject *)Py_True); - values[7] = ((PyObject *)Py_True); - values[8] = ((PyObject *)Py_True); + PyObject *__pyx_v_train_words = ((PyObject *)Py_False); + PyObject *__pyx_v_work = ((PyObject *)Py_None); + PyObject *__pyx_v_neu1 = ((PyObject *)Py_None); + PyObject *__pyx_v_word_vectors = ((PyObject *)Py_None); + PyObject *__pyx_v_word_locks = ((PyObject *)Py_None); + PyObject *__pyx_v_doctag_vectors = ((PyObject *)Py_None); - /* "gensim/models/doc2vec_inner.pyx":225 - * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, - * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, - * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): # <<<<<<<<<<<<<< - * """Update distributed bag of words model ("PV-DBOW") by training on a single document. - * + /* "gensim/models/doc2vec_inner.pyx":225 + * cdef init_d2v_config(Doc2VecConfig *c, model, alpha, learn_doctags, learn_words, learn_hidden, + * train_words=False, work=None, neu1=None, word_vectors=None, word_locks=None, doctag_vectors=None, + * doctag_locks=None, docvecs_count=0): # <<<<<<<<<<<<<< + * c[0].hs = model.hs + * c[0].negative = model.negative */ - values[9] = ((PyObject *)Py_None); - values[10] = ((PyObject *)Py_None); - values[11] = ((PyObject *)Py_None); - values[12] = ((PyObject *)Py_None); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); - CYTHON_FALLTHROUGH; - case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); - CYTHON_FALLTHROUGH; - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - CYTHON_FALLTHROUGH; - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doc_words)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_document_dbow", 0, 4, 13, 1); __PYX_ERR(0, 223, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_indexes)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_document_dbow", 0, 4, 13, 2); 
__PYX_ERR(0, 223, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 3: - if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_document_dbow", 0, 4, 13, 3); __PYX_ERR(0, 223, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 4: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work); - if (value) { values[4] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 5: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_train_words); - if (value) { values[5] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 6: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_doctags); - if (value) { values[6] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 7: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_words); - if (value) { values[7] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 8: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_hidden); - if (value) { values[8] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 9: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_vectors); - if (value) { values[9] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 10: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_locks); - if (value) { values[10] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 11: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_vectors); - if (value) { values[11] = value; kw_args--; } - } - CYTHON_FALLTHROUGH; - case 12: - if (kw_args > 0) { - PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_locks); - if (value) { values[12] = value; kw_args--; } + PyObject *__pyx_v_doctag_locks = ((PyObject *)Py_None); + PyObject *__pyx_v_docvecs_count = ((PyObject *)__pyx_int_0); + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + unsigned PY_LONG_LONG __pyx_t_9; + PyObject *__pyx_t_10 = NULL; + __Pyx_RefNannySetupContext("init_d2v_config", 0); + if (__pyx_optional_args) { + if (__pyx_optional_args->__pyx_n > 0) { + __pyx_v_train_words = __pyx_optional_args->train_words; + if (__pyx_optional_args->__pyx_n > 1) { + __pyx_v_work = __pyx_optional_args->work; + if (__pyx_optional_args->__pyx_n > 2) { + __pyx_v_neu1 = __pyx_optional_args->neu1; + if (__pyx_optional_args->__pyx_n > 3) { + __pyx_v_word_vectors = __pyx_optional_args->word_vectors; + if (__pyx_optional_args->__pyx_n > 4) { + __pyx_v_word_locks = __pyx_optional_args->word_locks; + if (__pyx_optional_args->__pyx_n > 5) { + __pyx_v_doctag_vectors = __pyx_optional_args->doctag_vectors; + if (__pyx_optional_args->__pyx_n > 6) { + __pyx_v_doctag_locks = __pyx_optional_args->doctag_locks; + if (__pyx_optional_args->__pyx_n > 7) { + __pyx_v_docvecs_count = __pyx_optional_args->docvecs_count; + } + } + } + } + } } } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_document_dbow") < 0)) __PYX_ERR(0, 223, __pyx_L3_error) - } - } else { - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 13: 
values[12] = PyTuple_GET_ITEM(__pyx_args, 12); - CYTHON_FALLTHROUGH; - case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); - CYTHON_FALLTHROUGH; - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - CYTHON_FALLTHROUGH; - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - CYTHON_FALLTHROUGH; - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - CYTHON_FALLTHROUGH; - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - CYTHON_FALLTHROUGH; - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - CYTHON_FALLTHROUGH; - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - CYTHON_FALLTHROUGH; - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } } - __pyx_v_model = values[0]; - __pyx_v_doc_words = values[1]; - __pyx_v_doctag_indexes = values[2]; - __pyx_v_alpha = values[3]; - __pyx_v_work = values[4]; - __pyx_v_train_words = values[5]; - __pyx_v_learn_doctags = values[6]; - __pyx_v_learn_words = values[7]; - __pyx_v_learn_hidden = values[8]; - __pyx_v_word_vectors = values[9]; - __pyx_v_word_locks = values[10]; - __pyx_v_doctag_vectors = values[11]; - __pyx_v_doctag_locks = values[12]; } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("train_document_dbow", 0, 4, 13, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 223, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dbow", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(__pyx_self, __pyx_v_model, __pyx_v_doc_words, __pyx_v_doctag_indexes, __pyx_v_alpha, __pyx_v_work, __pyx_v_train_words, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); - - /* "gensim/models/doc2vec_inner.pyx":223 - * - * - * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, # <<<<<<<<<<<<<< - * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, - * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_doc_words, PyObject *__pyx_v_doctag_indexes, PyObject *__pyx_v_alpha, PyObject *__pyx_v_work, PyObject *__pyx_v_train_words, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { - int __pyx_v_hs; - int __pyx_v_negative; - int __pyx_v_sample; - int __pyx_v__train_words; - int __pyx_v__learn_words; - int __pyx_v__learn_hidden; - int __pyx_v__learn_doctags; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__word_vectors; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__doctag_vectors; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__word_locks; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__doctag_locks; - 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__work; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; - int __pyx_v_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v__doctag_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_reduced_windows[0x2710]; - int __pyx_v_document_len; - int __pyx_v_doctag_len; - int __pyx_v_window; - int __pyx_v_i; - int __pyx_v_j; - long __pyx_v_result; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg; - __pyx_t_5numpy_uint32_t *__pyx_v_cum_table; - unsigned PY_LONG_LONG __pyx_v_cum_table_len; - unsigned PY_LONG_LONG __pyx_v_next_random; - PyObject *__pyx_v_vlookup = NULL; - PyObject *__pyx_v_token = NULL; - PyObject *__pyx_v_predict_word = NULL; - PyObject *__pyx_v_item = NULL; - long __pyx_v_k; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_4; - int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - unsigned PY_LONG_LONG __pyx_t_9; - PyObject *__pyx_t_10 = NULL; - PyObject *(*__pyx_t_11)(PyObject *); - __pyx_t_5numpy_uint32_t __pyx_t_12; - Py_ssize_t __pyx_t_13; - PyObject *__pyx_t_14 = NULL; - int __pyx_t_15; - PyObject *__pyx_t_16 = NULL; - long __pyx_t_17; - int __pyx_t_18; - long __pyx_t_19; - int __pyx_t_20; - int __pyx_t_21; - int __pyx_t_22; - __Pyx_RefNannySetupContext("train_document_dbow", 0); __Pyx_INCREF(__pyx_v_work); + __Pyx_INCREF(__pyx_v_neu1); __Pyx_INCREF(__pyx_v_word_vectors); __Pyx_INCREF(__pyx_v_word_locks); __Pyx_INCREF(__pyx_v_doctag_vectors); __Pyx_INCREF(__pyx_v_doctag_locks); - /* "gensim/models/doc2vec_inner.pyx":270 - * - * """ - * cdef int hs = model.hs # <<<<<<<<<<<<<< - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) + /* "gensim/models/doc2vec_inner.pyx":226 + * train_words=False, work=None, neu1=None, word_vectors=None, word_locks=None, doctag_vectors=None, + * doctag_locks=None, docvecs_count=0): + * c[0].hs = model.hs # <<<<<<<<<<<<<< + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 270, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 226, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 270, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 226, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_hs = __pyx_t_2; + (__pyx_v_c[0]).hs = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":271 - * """ - * cdef int hs = model.hs - * cdef int negative = model.negative # <<<<<<<<<<<<<< - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _train_words = train_words + /* "gensim/models/doc2vec_inner.pyx":227 + * doctag_locks=None, docvecs_count=0): + * c[0].hs = model.hs + * c[0].negative = model.negative # <<<<<<<<<<<<<< + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean */ - 
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 271, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 227, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 271, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 227, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_negative = __pyx_t_2; + (__pyx_v_c[0]).negative = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":272 - * cdef int hs = model.hs - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< - * cdef int _train_words = train_words - * cdef int _learn_words = learn_words + /* "gensim/models/doc2vec_inner.pyx":228 + * c[0].hs = model.hs + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< + * c[0].cbow_mean = model.cbow_mean + * c[0].train_words = train_words */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 228, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 228, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 228, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 228, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_sample = __pyx_t_2; + (__pyx_v_c[0]).sample = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":273 - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _train_words = train_words # <<<<<<<<<<<<<< - * cdef int _learn_words = learn_words - * cdef int _learn_hidden = learn_hidden + /* "gensim/models/doc2vec_inner.pyx":229 + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean # <<<<<<<<<<<<<< + * c[0].train_words = train_words + * c[0].learn_doctags = learn_doctags */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_train_words); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 273, __pyx_L1_error) - __pyx_v__train_words = __pyx_t_2; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_cbow_mean); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 229, __pyx_L1_error) + 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).cbow_mean = __pyx_t_2; + + /* "gensim/models/doc2vec_inner.pyx":230 + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean + * c[0].train_words = train_words # <<<<<<<<<<<<<< + * c[0].learn_doctags = learn_doctags + * c[0].learn_words = learn_words + */ + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_train_words); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 230, __pyx_L1_error) + (__pyx_v_c[0]).train_words = __pyx_t_2; + + /* "gensim/models/doc2vec_inner.pyx":231 + * c[0].cbow_mean = model.cbow_mean + * c[0].train_words = train_words + * c[0].learn_doctags = learn_doctags # <<<<<<<<<<<<<< + * c[0].learn_words = learn_words + * c[0].learn_hidden = learn_hidden + */ + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_doctags); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 231, __pyx_L1_error) + (__pyx_v_c[0]).learn_doctags = __pyx_t_2; + + /* "gensim/models/doc2vec_inner.pyx":232 + * c[0].train_words = train_words + * c[0].learn_doctags = learn_doctags + * c[0].learn_words = learn_words # <<<<<<<<<<<<<< + * c[0].learn_hidden = learn_hidden + * c[0].alpha = alpha + */ + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_words); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 232, __pyx_L1_error) + (__pyx_v_c[0]).learn_words = __pyx_t_2; + + /* "gensim/models/doc2vec_inner.pyx":233 + * c[0].learn_doctags = learn_doctags + * c[0].learn_words = learn_words + * c[0].learn_hidden = learn_hidden # <<<<<<<<<<<<<< + * c[0].alpha = alpha + * c[0].layer1_size = model.trainables.layer1_size + */ + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_hidden); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 233, __pyx_L1_error) + (__pyx_v_c[0]).learn_hidden = __pyx_t_2; + + /* "gensim/models/doc2vec_inner.pyx":234 + * c[0].learn_words = learn_words + * c[0].learn_hidden = learn_hidden + * c[0].alpha = alpha # <<<<<<<<<<<<<< + * c[0].layer1_size = model.trainables.layer1_size + * c[0].vector_size = model.docvecs.vector_size + */ + __pyx_t_4 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_4 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 234, __pyx_L1_error) + (__pyx_v_c[0]).alpha = __pyx_t_4; + + /* "gensim/models/doc2vec_inner.pyx":235 + * c[0].learn_hidden = learn_hidden + * c[0].alpha = alpha + * c[0].layer1_size = model.trainables.layer1_size # <<<<<<<<<<<<<< + * c[0].vector_size = model.docvecs.vector_size + * c[0].workers = model.workers + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 235, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + (__pyx_v_c[0]).layer1_size = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":274 - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _train_words = train_words - * cdef int _learn_words = learn_words # <<<<<<<<<<<<<< - * cdef int _learn_hidden = learn_hidden - * cdef int _learn_doctags = learn_doctags + /* "gensim/models/doc2vec_inner.pyx":236 + * c[0].alpha = alpha + * c[0].layer1_size = model.trainables.layer1_size + * 
c[0].vector_size = model.docvecs.vector_size # <<<<<<<<<<<<<< + * c[0].workers = model.workers + * c[0].docvecs_count = docvecs_count */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_words); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 274, __pyx_L1_error) - __pyx_v__learn_words = __pyx_t_2; + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_docvecs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 236, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 236, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 236, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).vector_size = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":275 - * cdef int _train_words = train_words - * cdef int _learn_words = learn_words - * cdef int _learn_hidden = learn_hidden # <<<<<<<<<<<<<< - * cdef int _learn_doctags = learn_doctags + /* "gensim/models/doc2vec_inner.pyx":237 + * c[0].layer1_size = model.trainables.layer1_size + * c[0].vector_size = model.docvecs.vector_size + * c[0].workers = model.workers # <<<<<<<<<<<<<< + * c[0].docvecs_count = docvecs_count * */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_hidden); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 275, __pyx_L1_error) - __pyx_v__learn_hidden = __pyx_t_2; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_workers); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 237, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 237, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).workers = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":276 - * cdef int _learn_words = learn_words - * cdef int _learn_hidden = learn_hidden - * cdef int _learn_doctags = learn_doctags # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":238 + * c[0].vector_size = model.docvecs.vector_size + * c[0].workers = model.workers + * c[0].docvecs_count = docvecs_count # <<<<<<<<<<<<<< * - * cdef REAL_t *_word_vectors + * c[0].window = model.window */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_doctags); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 276, __pyx_L1_error) - __pyx_v__learn_doctags = __pyx_t_2; + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_docvecs_count); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 238, __pyx_L1_error) + (__pyx_v_c[0]).docvecs_count = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":283 - * cdef REAL_t *_doctag_locks - * cdef REAL_t *_work - * cdef REAL_t _alpha = alpha # <<<<<<<<<<<<<< - * cdef int size = model.trainables.layer1_size + /* "gensim/models/doc2vec_inner.pyx":240 + * c[0].docvecs_count = docvecs_count + * + * c[0].window = model.window # <<<<<<<<<<<<<< + * c[0].expected_doctag_len = model.dm_tag_count * */ - __pyx_t_4 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_4 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 283, __pyx_L1_error) - __pyx_v__alpha = __pyx_t_4; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == 
(int)-1) && PyErr_Occurred())) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).window = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":284 - * cdef REAL_t *_work - * cdef REAL_t _alpha = alpha - * cdef int size = model.trainables.layer1_size # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":241 + * + * c[0].window = model.window + * c[0].expected_doctag_len = model.dm_tag_count # <<<<<<<<<<<<<< * - * cdef int codelens[MAX_DOCUMENT_LEN] + * if '\0' in model.wv.vocab: */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 284, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_dm_tag_count); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 241, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 284, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 241, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 284, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_size = __pyx_t_2; + (__pyx_v_c[0]).expected_doctag_len = __pyx_t_2; - /* "gensim/models/doc2vec_inner.pyx":292 - * cdef int document_len - * cdef int doctag_len - * cdef int window = model.window # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":243 + * c[0].expected_doctag_len = model.dm_tag_count + * + * if '\0' in model.wv.vocab: # <<<<<<<<<<<<<< + * c[0].null_word_index = model.wv.vocab['\0'].index * - * cdef int i, j */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 292, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 243, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 243, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_5 = (__Pyx_PySequence_ContainsTF(__pyx_kp_s_, __pyx_t_3, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 243, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_window = __pyx_t_2; + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { - /* "gensim/models/doc2vec_inner.pyx":296 - * cdef int i, j - * cdef unsigned long long r - * cdef long result = 0 # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":244 + * + * if '\0' in model.wv.vocab: + * c[0].null_word_index = model.wv.vocab['\0'].index # <<<<<<<<<<<<<< + * + * # default vectors, locks from syn0/doctag_syn0 + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vocab); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 244, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_Dict_GetItem(__pyx_t_1, __pyx_kp_s_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 244, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 244, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 244, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).null_word_index = __pyx_t_2; + + /* "gensim/models/doc2vec_inner.pyx":243 + * c[0].expected_doctag_len = model.dm_tag_count + * + * if '\0' in model.wv.vocab: # <<<<<<<<<<<<<< + * c[0].null_word_index = model.wv.vocab['\0'].index * - * # For hierarchical softmax */ - __pyx_v_result = 0; + } - /* "gensim/models/doc2vec_inner.pyx":310 + /* "gensim/models/doc2vec_inner.pyx":247 * * # default vectors, locks from syn0/doctag_syn0 * if word_vectors is None: # <<<<<<<<<<<<<< * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) + * c[0].word_vectors = (np.PyArray_DATA(word_vectors)) */ - __pyx_t_5 = (__pyx_v_word_vectors == Py_None); - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { + __pyx_t_6 = (__pyx_v_word_vectors == Py_None); + __pyx_t_5 = (__pyx_t_6 != 0); + if (__pyx_t_5) { - /* "gensim/models/doc2vec_inner.pyx":311 + /* "gensim/models/doc2vec_inner.pyx":248 * # default vectors, locks from syn0/doctag_syn0 * if word_vectors is None: * word_vectors = model.wv.vectors # <<<<<<<<<<<<<< - * _word_vectors = (np.PyArray_DATA(word_vectors)) + * c[0].word_vectors = (np.PyArray_DATA(word_vectors)) * if doctag_vectors is None: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 311, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 311, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 248, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_word_vectors, __pyx_t_1); - __pyx_t_1 = 0; + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 248, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_word_vectors, __pyx_t_3); + __pyx_t_3 = 0; - /* "gensim/models/doc2vec_inner.pyx":310 + /* "gensim/models/doc2vec_inner.pyx":247 * * # default vectors, locks from syn0/doctag_syn0 * if word_vectors is None: # <<<<<<<<<<<<<< * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) + * c[0].word_vectors = (np.PyArray_DATA(word_vectors)) */ } - /* "gensim/models/doc2vec_inner.pyx":312 + /* "gensim/models/doc2vec_inner.pyx":249 * if word_vectors is None: * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) # <<<<<<<<<<<<<< + * c[0].word_vectors = (np.PyArray_DATA(word_vectors)) # <<<<<<<<<<<<<< * if doctag_vectors is None: * doctag_vectors = model.docvecs.vectors_docs */ - if (!(likely(((__pyx_v_word_vectors) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_vectors, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 312, __pyx_L1_error) - __pyx_v__word_vectors = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_vectors))); + if (!(likely(((__pyx_v_word_vectors) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_vectors, __pyx_ptype_5numpy_ndarray))))) 
__PYX_ERR(0, 249, __pyx_L1_error) + (__pyx_v_c[0]).word_vectors = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_vectors))); - /* "gensim/models/doc2vec_inner.pyx":313 + /* "gensim/models/doc2vec_inner.pyx":250 * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) + * c[0].word_vectors = (np.PyArray_DATA(word_vectors)) * if doctag_vectors is None: # <<<<<<<<<<<<<< * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) + * c[0].doctag_vectors = (np.PyArray_DATA(doctag_vectors)) */ - __pyx_t_6 = (__pyx_v_doctag_vectors == Py_None); - __pyx_t_5 = (__pyx_t_6 != 0); - if (__pyx_t_5) { + __pyx_t_5 = (__pyx_v_doctag_vectors == Py_None); + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { - /* "gensim/models/doc2vec_inner.pyx":314 - * _word_vectors = (np.PyArray_DATA(word_vectors)) + /* "gensim/models/doc2vec_inner.pyx":251 + * c[0].word_vectors = (np.PyArray_DATA(word_vectors)) * if doctag_vectors is None: * doctag_vectors = model.docvecs.vectors_docs # <<<<<<<<<<<<<< - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) + * c[0].doctag_vectors = (np.PyArray_DATA(doctag_vectors)) * if word_locks is None: */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_docvecs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 314, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_docs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 314, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_docvecs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 251, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_doctag_vectors, __pyx_t_3); - __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_docs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 251, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_doctag_vectors, __pyx_t_1); + __pyx_t_1 = 0; - /* "gensim/models/doc2vec_inner.pyx":313 + /* "gensim/models/doc2vec_inner.pyx":250 * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) + * c[0].word_vectors = (np.PyArray_DATA(word_vectors)) * if doctag_vectors is None: # <<<<<<<<<<<<<< * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) + * c[0].doctag_vectors = (np.PyArray_DATA(doctag_vectors)) */ } - /* "gensim/models/doc2vec_inner.pyx":315 + /* "gensim/models/doc2vec_inner.pyx":252 * if doctag_vectors is None: * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) # <<<<<<<<<<<<<< + * c[0].doctag_vectors = (np.PyArray_DATA(doctag_vectors)) # <<<<<<<<<<<<<< * if word_locks is None: * word_locks = model.trainables.vectors_lockf */ - if (!(likely(((__pyx_v_doctag_vectors) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_doctag_vectors, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 315, __pyx_L1_error) - __pyx_v__doctag_vectors = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_doctag_vectors))); + if (!(likely(((__pyx_v_doctag_vectors) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_doctag_vectors, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 252, __pyx_L1_error) + (__pyx_v_c[0]).doctag_vectors = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_doctag_vectors))); - /* 
"gensim/models/doc2vec_inner.pyx":316 + /* "gensim/models/doc2vec_inner.pyx":253 * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) + * c[0].doctag_vectors = (np.PyArray_DATA(doctag_vectors)) * if word_locks is None: # <<<<<<<<<<<<<< * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) + * c[0].word_locks = (np.PyArray_DATA(word_locks)) */ - __pyx_t_5 = (__pyx_v_word_locks == Py_None); - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { + __pyx_t_6 = (__pyx_v_word_locks == Py_None); + __pyx_t_5 = (__pyx_t_6 != 0); + if (__pyx_t_5) { - /* "gensim/models/doc2vec_inner.pyx":317 - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) + /* "gensim/models/doc2vec_inner.pyx":254 + * c[0].doctag_vectors = (np.PyArray_DATA(doctag_vectors)) * if word_locks is None: * word_locks = model.trainables.vectors_lockf # <<<<<<<<<<<<<< - * _word_locks = (np.PyArray_DATA(word_locks)) + * c[0].word_locks = (np.PyArray_DATA(word_locks)) * if doctag_locks is None: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 317, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 317, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 254, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_word_locks, __pyx_t_1); - __pyx_t_1 = 0; + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_lockf); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 254, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_word_locks, __pyx_t_3); + __pyx_t_3 = 0; - /* "gensim/models/doc2vec_inner.pyx":316 + /* "gensim/models/doc2vec_inner.pyx":253 * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) + * c[0].doctag_vectors = (np.PyArray_DATA(doctag_vectors)) * if word_locks is None: # <<<<<<<<<<<<<< * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) + * c[0].word_locks = (np.PyArray_DATA(word_locks)) */ } - /* "gensim/models/doc2vec_inner.pyx":318 + /* "gensim/models/doc2vec_inner.pyx":255 * if word_locks is None: * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) # <<<<<<<<<<<<<< + * c[0].word_locks = (np.PyArray_DATA(word_locks)) # <<<<<<<<<<<<<< * if doctag_locks is None: * doctag_locks = model.trainables.vectors_docs_lockf */ - if (!(likely(((__pyx_v_word_locks) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_locks, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 318, __pyx_L1_error) - __pyx_v__word_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_locks))); + if (!(likely(((__pyx_v_word_locks) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_locks, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 255, __pyx_L1_error) + (__pyx_v_c[0]).word_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_locks))); - /* "gensim/models/doc2vec_inner.pyx":319 + /* "gensim/models/doc2vec_inner.pyx":256 * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) + * c[0].word_locks = (np.PyArray_DATA(word_locks)) * if doctag_locks is 
None: # <<<<<<<<<<<<<< * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) + * c[0].doctag_locks = (np.PyArray_DATA(doctag_locks)) */ - __pyx_t_6 = (__pyx_v_doctag_locks == Py_None); - __pyx_t_5 = (__pyx_t_6 != 0); - if (__pyx_t_5) { + __pyx_t_5 = (__pyx_v_doctag_locks == Py_None); + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { - /* "gensim/models/doc2vec_inner.pyx":320 - * _word_locks = (np.PyArray_DATA(word_locks)) + /* "gensim/models/doc2vec_inner.pyx":257 + * c[0].word_locks = (np.PyArray_DATA(word_locks)) * if doctag_locks is None: * doctag_locks = model.trainables.vectors_docs_lockf # <<<<<<<<<<<<<< - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) + * c[0].doctag_locks = (np.PyArray_DATA(doctag_locks)) * */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 320, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_docs_lockf); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 320, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 257, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_doctag_locks, __pyx_t_3); - __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_docs_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 257, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF_SET(__pyx_v_doctag_locks, __pyx_t_1); + __pyx_t_1 = 0; - /* "gensim/models/doc2vec_inner.pyx":319 + /* "gensim/models/doc2vec_inner.pyx":256 * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) + * c[0].word_locks = (np.PyArray_DATA(word_locks)) * if doctag_locks is None: # <<<<<<<<<<<<<< * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) + * c[0].doctag_locks = (np.PyArray_DATA(doctag_locks)) */ } - /* "gensim/models/doc2vec_inner.pyx":321 + /* "gensim/models/doc2vec_inner.pyx":258 * if doctag_locks is None: * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) # <<<<<<<<<<<<<< + * c[0].doctag_locks = (np.PyArray_DATA(doctag_locks)) # <<<<<<<<<<<<<< * - * if hs: + * if c[0].hs: */ - if (!(likely(((__pyx_v_doctag_locks) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_doctag_locks, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 321, __pyx_L1_error) - __pyx_v__doctag_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_doctag_locks))); + if (!(likely(((__pyx_v_doctag_locks) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_doctag_locks, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 258, __pyx_L1_error) + (__pyx_v_c[0]).doctag_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_doctag_locks))); - /* "gensim/models/doc2vec_inner.pyx":323 - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) + /* "gensim/models/doc2vec_inner.pyx":260 + * c[0].doctag_locks = (np.PyArray_DATA(doctag_locks)) * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + * if c[0].hs: # <<<<<<<<<<<<<< + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * */ - __pyx_t_5 = (__pyx_v_hs != 0); - if (__pyx_t_5) { + __pyx_t_6 = ((__pyx_v_c[0]).hs != 0); + if (__pyx_t_6) { - /* "gensim/models/doc2vec_inner.pyx":324 
+ /* "gensim/models/doc2vec_inner.pyx":261 * - * if hs: - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< + * if c[0].hs: + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< * - * if negative: + * if c[0].negative: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 324, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 324, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 261, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 324, __pyx_L1_error) - __pyx_v_syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 261, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 261, __pyx_L1_error) + (__pyx_v_c[0]).syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/doc2vec_inner.pyx":323 - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) + /* "gensim/models/doc2vec_inner.pyx":260 + * c[0].doctag_locks = (np.PyArray_DATA(doctag_locks)) * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + * if c[0].hs: # <<<<<<<<<<<<<< + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * */ } - /* "gensim/models/doc2vec_inner.pyx":326 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/doc2vec_inner.pyx":263 + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * if c[0].negative: # <<<<<<<<<<<<<< + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) */ - __pyx_t_5 = (__pyx_v_negative != 0); - if (__pyx_t_5) { + __pyx_t_6 = ((__pyx_v_c[0]).negative != 0); + if (__pyx_t_6) { - /* "gensim/models/doc2vec_inner.pyx":327 + /* "gensim/models/doc2vec_inner.pyx":264 * - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 327, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 327, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 327, __pyx_L1_error) - __pyx_v_syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "gensim/models/doc2vec_inner.pyx":328 - * if 
negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: + * if c[0].negative: + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 328, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 264, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 328, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 264, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 328, __pyx_L1_error) - __pyx_v_cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 264, __pyx_L1_error) + (__pyx_v_c[0]).syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/doc2vec_inner.pyx":329 - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/doc2vec_inner.pyx":265 + * if c[0].negative: + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 329, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 265, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 329, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 265, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_7 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_7 == ((Py_ssize_t)-1))) __PYX_ERR(0, 329, __pyx_L1_error) + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 265, __pyx_L1_error) + (__pyx_v_c[0]).cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "gensim/models/doc2vec_inner.pyx":266 + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = 
len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_cum_table_len = __pyx_t_7; + __pyx_t_7 = PyObject_Length(__pyx_t_1); if (unlikely(__pyx_t_7 == ((Py_ssize_t)-1))) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).cum_table_len = __pyx_t_7; - /* "gensim/models/doc2vec_inner.pyx":326 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/doc2vec_inner.pyx":263 + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * if c[0].negative: # <<<<<<<<<<<<<< + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) */ } - /* "gensim/models/doc2vec_inner.pyx":330 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/doc2vec_inner.pyx":267 + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: # <<<<<<<<<<<<<< + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) * */ - __pyx_t_6 = (__pyx_v_negative != 0); - if (!__pyx_t_6) { + __pyx_t_5 = ((__pyx_v_c[0]).negative != 0); + if (!__pyx_t_5) { } else { - __pyx_t_5 = __pyx_t_6; - goto __pyx_L10_bool_binop_done; + __pyx_t_6 = __pyx_t_5; + goto __pyx_L11_bool_binop_done; } - __pyx_t_6 = (__pyx_v_sample != 0); - __pyx_t_5 = __pyx_t_6; - __pyx_L10_bool_binop_done:; - if (__pyx_t_5) { + __pyx_t_5 = ((__pyx_v_c[0]).sample != 0); + __pyx_t_6 = __pyx_t_5; + __pyx_L11_bool_binop_done:; + if (__pyx_t_6) { - /* "gensim/models/doc2vec_inner.pyx":331 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":268 + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< * * # convert Python structures to primitive types, so we can release the GIL */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 331, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 331, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple_, NULL); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 331, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_randint); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 331, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 331, __pyx_L1_error) + __pyx_t_3 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 331, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 268, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 331, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 268, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 331, __pyx_L1_error) + __pyx_t_8 = PyNumber_Add(__pyx_t_3, __pyx_t_1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_9 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 331, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_9 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_next_random = __pyx_t_9; + (__pyx_v_c[0]).next_random = __pyx_t_9; - /* "gensim/models/doc2vec_inner.pyx":330 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/doc2vec_inner.pyx":267 + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: # <<<<<<<<<<<<<< + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) * */ } - /* "gensim/models/doc2vec_inner.pyx":334 + /* "gensim/models/doc2vec_inner.pyx":271 * * # convert Python structures to primitive types, so we can release the GIL * if work is None: # <<<<<<<<<<<<<< * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) + * c[0].work = 
np.PyArray_DATA(work) */ - __pyx_t_5 = (__pyx_v_work == Py_None); - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { + __pyx_t_6 = (__pyx_v_work == Py_None); + __pyx_t_5 = (__pyx_t_6 != 0); + if (__pyx_t_5) { - /* "gensim/models/doc2vec_inner.pyx":335 + /* "gensim/models/doc2vec_inner.pyx":272 * # convert Python structures to primitive types, so we can release the GIL * if work is None: * work = zeros(model.trainables.layer1_size, dtype=REAL) # <<<<<<<<<<<<<< - * _work = np.PyArray_DATA(work) - * + * c[0].work = np.PyArray_DATA(work) + * if neu1 is None: */ - __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_zeros); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 335, __pyx_L1_error) + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_zeros); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 272, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 335, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 272, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 335, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 272, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 272, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_REAL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 272, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_REAL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 272, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); - if (PyDict_SetItem(__pyx_t_1, __pyx_n_s_dtype, __pyx_t_10) < 0) __PYX_ERR(0, 335, __pyx_L1_error) + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_dtype, __pyx_t_10) < 0) __PYX_ERR(0, 272, __pyx_L1_error) __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_3, __pyx_t_1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 335, __pyx_L1_error) + __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 272, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF_SET(__pyx_v_work, __pyx_t_10); __pyx_t_10 = 0; - /* "gensim/models/doc2vec_inner.pyx":334 + /* "gensim/models/doc2vec_inner.pyx":271 * * # convert Python structures to primitive types, so we can release the GIL * if work is None: # <<<<<<<<<<<<<< * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) + * c[0].work = np.PyArray_DATA(work) */ } - /* "gensim/models/doc2vec_inner.pyx":336 + /* 
"gensim/models/doc2vec_inner.pyx":273 * if work is None: * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) # <<<<<<<<<<<<<< - * - * vlookup = model.wv.vocab + * c[0].work = np.PyArray_DATA(work) # <<<<<<<<<<<<<< + * if neu1 is None: + * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) */ - if (!(likely(((__pyx_v_work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 336, __pyx_L1_error) - __pyx_v__work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_work))); + if (!(likely(((__pyx_v_work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 273, __pyx_L1_error) + (__pyx_v_c[0]).work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_work))); - /* "gensim/models/doc2vec_inner.pyx":338 - * _work = np.PyArray_DATA(work) - * - * vlookup = model.wv.vocab # <<<<<<<<<<<<<< - * i = 0 - * for token in doc_words: + /* "gensim/models/doc2vec_inner.pyx":274 + * work = zeros(model.trainables.layer1_size, dtype=REAL) + * c[0].work = np.PyArray_DATA(work) + * if neu1 is None: # <<<<<<<<<<<<<< + * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) + * c[0].neu1 = np.PyArray_DATA(neu1) */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 338, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_vocab); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 338, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_v_vlookup = __pyx_t_1; - __pyx_t_1 = 0; + __pyx_t_5 = (__pyx_v_neu1 == Py_None); + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { - /* "gensim/models/doc2vec_inner.pyx":339 + /* "gensim/models/doc2vec_inner.pyx":275 + * c[0].work = np.PyArray_DATA(work) + * if neu1 is None: + * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) # <<<<<<<<<<<<<< + * c[0].neu1 = np.PyArray_DATA(neu1) * - * vlookup = model.wv.vocab - * i = 0 # <<<<<<<<<<<<<< - * for token in doc_words: - * predict_word = vlookup[token] if token in vlookup else None - */ - __pyx_v_i = 0; - - /* "gensim/models/doc2vec_inner.pyx":340 - * vlookup = model.wv.vocab - * i = 0 - * for token in doc_words: # <<<<<<<<<<<<<< - * predict_word = vlookup[token] if token in vlookup else None - * if predict_word is None: # shrink document to leave out word + */ + __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_zeros); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_REAL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + if (PyDict_SetItem(__pyx_t_1, 
__pyx_n_s_dtype, __pyx_t_8) < 0) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_3, __pyx_t_1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_neu1, __pyx_t_8); + __pyx_t_8 = 0; + + /* "gensim/models/doc2vec_inner.pyx":274 + * work = zeros(model.trainables.layer1_size, dtype=REAL) + * c[0].work = np.PyArray_DATA(work) + * if neu1 is None: # <<<<<<<<<<<<<< + * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) + * c[0].neu1 = np.PyArray_DATA(neu1) + */ + } + + /* "gensim/models/doc2vec_inner.pyx":276 + * if neu1 is None: + * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) + * c[0].neu1 = np.PyArray_DATA(neu1) # <<<<<<<<<<<<<< + * + * + */ + if (!(likely(((__pyx_v_neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 276, __pyx_L1_error) + (__pyx_v_c[0]).neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_neu1))); + + /* "gensim/models/doc2vec_inner.pyx":223 + * + * + * cdef init_d2v_config(Doc2VecConfig *c, model, alpha, learn_doctags, learn_words, learn_hidden, # <<<<<<<<<<<<<< + * train_words=False, work=None, neu1=None, word_vectors=None, word_locks=None, doctag_vectors=None, + * doctag_locks=None, docvecs_count=0): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_AddTraceback("gensim.models.doc2vec_inner.init_d2v_config", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_work); + __Pyx_XDECREF(__pyx_v_neu1); + __Pyx_XDECREF(__pyx_v_word_vectors); + __Pyx_XDECREF(__pyx_v_word_locks); + __Pyx_XDECREF(__pyx_v_doctag_vectors); + __Pyx_XDECREF(__pyx_v_doctag_locks); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/doc2vec_inner.pyx":280 + * + * + * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, # <<<<<<<<<<<<<< + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_1train_document_dbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_13doc2vec_inner_train_document_dbow[] = "train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None)\nUpdate distributed bag of words model (\"PV-DBOW\") by training on a single document.\n\n Called internally from :meth:`~gensim.models.doc2vec.Doc2Vec.train` and\n :meth:`~gensim.models.doc2vec.Doc2Vec.infer_vector`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.doc2vec.Doc2Vec`\n The model to train.\n doc_words : list of str\n The input document as a list of words to be used for training. 
Each word will be looked up in\n the model's vocabulary.\n doctag_indexes : list of int\n Indices into `doctag_vectors` used to obtain the tags of the document.\n alpha : float\n Learning rate.\n work : list of float, optional\n Updates to be performed on each neuron in the hidden layer of the underlying network.\n train_words : bool, optional\n Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both** `learn_words`\n and `train_words` are set to True.\n learn_doctags : bool, optional\n Whether the tag vectors should be updated.\n learn_words : bool, optional\n Word vectors will be updated exactly as per Word2Vec skip-gram training only if **both**\n `learn_words` and `train_words` are set to True.\n learn_hidden : bool, optional\n Whether or not the weights of the hidden layer will be updated.\n word_vectors : numpy.ndarray, optional\n The vector representation for each word in the vocabulary. If None, these will be retrieved from the model.\n word_locks : numpy.ndarray, optional\n A learning lock factor for each weight in the hidden layer for words, value 0 completely blocks updates,\n a value of 1 allows to update word-vectors.\n doctag_vectors : numpy.ndarray, ""optional\n Vector representations of the tags. If None, these will be retrieved from the model.\n doctag_locks : numpy.ndarray, optional\n The lock factors for each tag, same as `word_locks`, but for document-vectors.\n\n Returns\n -------\n int\n Number of words in the input document that were actually used for training.\n\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_13doc2vec_inner_1train_document_dbow = {"train_document_dbow", (PyCFunction)__pyx_pw_6gensim_6models_13doc2vec_inner_1train_document_dbow, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_13doc2vec_inner_train_document_dbow}; +static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_1train_document_dbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_doc_words = 0; + PyObject *__pyx_v_doctag_indexes = 0; + PyObject *__pyx_v_alpha = 0; + PyObject *__pyx_v_work = 0; + PyObject *__pyx_v_train_words = 0; + PyObject *__pyx_v_learn_doctags = 0; + PyObject *__pyx_v_learn_words = 0; + PyObject *__pyx_v_learn_hidden = 0; + PyObject *__pyx_v_word_vectors = 0; + PyObject *__pyx_v_word_locks = 0; + PyObject *__pyx_v_doctag_vectors = 0; + PyObject *__pyx_v_doctag_locks = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("train_document_dbow (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_doc_words,&__pyx_n_s_doctag_indexes,&__pyx_n_s_alpha,&__pyx_n_s_work,&__pyx_n_s_train_words,&__pyx_n_s_learn_doctags,&__pyx_n_s_learn_words,&__pyx_n_s_learn_hidden,&__pyx_n_s_word_vectors,&__pyx_n_s_word_locks,&__pyx_n_s_doctag_vectors,&__pyx_n_s_doctag_locks,0}; + PyObject* values[13] = {0,0,0,0,0,0,0,0,0,0,0,0,0}; + values[4] = ((PyObject *)Py_None); + + /* "gensim/models/doc2vec_inner.pyx":281 + * + * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, # <<<<<<<<<<<<<< + * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): + * """Update distributed bag of words model ("PV-DBOW") by training on a single document. 
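 * A minimal usage sketch for the entry point documented above (illustration only:
 * `model` is assumed to be an already-trained gensim Doc2Vec model, and the example
 * words and tag index are placeholders; in practice this routine is invoked internally
 * by Doc2Vec.train() and Doc2Vec.infer_vector(), and words missing from the model's
 * vocabulary are simply skipped):
 *
 *     import numpy as np
 *     from gensim.models.doc2vec_inner import train_document_dbow
 *
 *     # scratch buffer sized to the hidden layer, as the wrapper allocates itself when work is None
 *     work = np.zeros(model.trainables.layer1_size, dtype=np.float32)
 *     trained = train_document_dbow(
 *         model, doc_words=["human", "interface", "computer"],
 *         doctag_indexes=[0], alpha=0.025, work=work, train_words=False)
 *     # `trained` is the number of words from the document actually used for training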
+ */ + values[5] = ((PyObject *)Py_False); + values[6] = ((PyObject *)Py_True); + values[7] = ((PyObject *)Py_True); + values[8] = ((PyObject *)Py_True); + + /* "gensim/models/doc2vec_inner.pyx":282 + * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, + * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): # <<<<<<<<<<<<<< + * """Update distributed bag of words model ("PV-DBOW") by training on a single document. + * + */ + values[9] = ((PyObject *)Py_None); + values[10] = ((PyObject *)Py_None); + values[11] = ((PyObject *)Py_None); + values[12] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); + CYTHON_FALLTHROUGH; + case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); + CYTHON_FALLTHROUGH; + case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); + CYTHON_FALLTHROUGH; + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doc_words)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_document_dbow", 0, 4, 13, 1); __PYX_ERR(0, 280, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_indexes)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_document_dbow", 0, 4, 13, 2); __PYX_ERR(0, 280, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_document_dbow", 0, 4, 13, 3); __PYX_ERR(0, 280, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work); + if (value) { values[4] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 5: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_train_words); + if (value) { values[5] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 6: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_doctags); + if (value) { values[6] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 7: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_words); + if (value) 
{ values[7] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 8: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_learn_hidden); + if (value) { values[8] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 9: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_vectors); + if (value) { values[9] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 10: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_word_locks); + if (value) { values[10] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 11: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_vectors); + if (value) { values[11] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 12: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_locks); + if (value) { values[12] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_document_dbow") < 0)) __PYX_ERR(0, 280, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 13: values[12] = PyTuple_GET_ITEM(__pyx_args, 12); + CYTHON_FALLTHROUGH; + case 12: values[11] = PyTuple_GET_ITEM(__pyx_args, 11); + CYTHON_FALLTHROUGH; + case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); + CYTHON_FALLTHROUGH; + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_model = values[0]; + __pyx_v_doc_words = values[1]; + __pyx_v_doctag_indexes = values[2]; + __pyx_v_alpha = values[3]; + __pyx_v_work = values[4]; + __pyx_v_train_words = values[5]; + __pyx_v_learn_doctags = values[6]; + __pyx_v_learn_words = values[7]; + __pyx_v_learn_hidden = values[8]; + __pyx_v_word_vectors = values[9]; + __pyx_v_word_locks = values[10]; + __pyx_v_doctag_vectors = values[11]; + __pyx_v_doctag_locks = values[12]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("train_document_dbow", 0, 4, 13, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 280, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dbow", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(__pyx_self, __pyx_v_model, __pyx_v_doc_words, __pyx_v_doctag_indexes, __pyx_v_alpha, __pyx_v_work, __pyx_v_train_words, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); + + /* "gensim/models/doc2vec_inner.pyx":280 + * + * + * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, # <<<<<<<<<<<<<< + * 
train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, + * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_doc_words, PyObject *__pyx_v_doctag_indexes, PyObject *__pyx_v_alpha, PyObject *__pyx_v_work, PyObject *__pyx_v_train_words, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { + struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig __pyx_v_c; + int __pyx_v_i; + int __pyx_v_j; + long __pyx_v_result; + PyObject *__pyx_v_vlookup = NULL; + PyObject *__pyx_v_token = NULL; + PyObject *__pyx_v_predict_word = NULL; + PyObject *__pyx_v_item = NULL; + long __pyx_v_k; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config __pyx_t_2; + Py_ssize_t __pyx_t_3; + long __pyx_t_4; + Py_ssize_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *(*__pyx_t_7)(PyObject *); + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + __pyx_t_5numpy_uint32_t __pyx_t_12; + int __pyx_t_13; + PyObject *__pyx_t_14 = NULL; + int __pyx_t_15; + PyObject *__pyx_t_16 = NULL; + int __pyx_t_17; + int __pyx_t_18; + long __pyx_t_19; + int __pyx_t_20; + int __pyx_t_21; + __Pyx_RefNannySetupContext("train_document_dbow", 0); + + /* "gensim/models/doc2vec_inner.pyx":330 + * + * cdef int i, j + * cdef long result = 0 # <<<<<<<<<<<<<< + * + * init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=train_words, work=work, + */ + __pyx_v_result = 0; + + /* "gensim/models/doc2vec_inner.pyx":332 + * cdef long result = 0 + * + * init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=train_words, work=work, # <<<<<<<<<<<<<< + * neu1=None, word_vectors=word_vectors, word_locks=word_locks, + * doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) + */ + __pyx_t_2.__pyx_n = 7; + __pyx_t_2.train_words = __pyx_v_train_words; + __pyx_t_2.work = __pyx_v_work; + __pyx_t_2.neu1 = Py_None; + __pyx_t_2.word_vectors = __pyx_v_word_vectors; + __pyx_t_2.word_locks = __pyx_v_word_locks; + __pyx_t_2.doctag_vectors = __pyx_v_doctag_vectors; + __pyx_t_2.doctag_locks = __pyx_v_doctag_locks; + __pyx_t_1 = __pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config((&__pyx_v_c), __pyx_v_model, __pyx_v_alpha, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 332, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/doc2vec_inner.pyx":336 + * doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) + * + * c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) # <<<<<<<<<<<<<< + * + * vlookup = model.wv.vocab + */ + __pyx_t_3 = PyObject_Length(__pyx_v_doctag_indexes); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 336, __pyx_L1_error) + __pyx_t_4 = 0x2710; + if (((__pyx_t_3 < __pyx_t_4) != 0)) { + __pyx_t_5 = __pyx_t_3; + } else { + __pyx_t_5 = __pyx_t_4; + } + __pyx_v_c.doctag_len = ((int)__pyx_t_5); + + /* "gensim/models/doc2vec_inner.pyx":338 
+ * c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) + * + * vlookup = model.wv.vocab # <<<<<<<<<<<<<< + * i = 0 + * for token in doc_words: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_vlookup = __pyx_t_6; + __pyx_t_6 = 0; + + /* "gensim/models/doc2vec_inner.pyx":339 + * + * vlookup = model.wv.vocab + * i = 0 # <<<<<<<<<<<<<< + * for token in doc_words: + * predict_word = vlookup[token] if token in vlookup else None + */ + __pyx_v_i = 0; + + /* "gensim/models/doc2vec_inner.pyx":340 + * vlookup = model.wv.vocab + * i = 0 + * for token in doc_words: # <<<<<<<<<<<<<< + * predict_word = vlookup[token] if token in vlookup else None + * if predict_word is None: # shrink document to leave out word */ if (likely(PyList_CheckExact(__pyx_v_doc_words)) || PyTuple_CheckExact(__pyx_v_doc_words)) { - __pyx_t_1 = __pyx_v_doc_words; __Pyx_INCREF(__pyx_t_1); __pyx_t_7 = 0; - __pyx_t_11 = NULL; + __pyx_t_6 = __pyx_v_doc_words; __Pyx_INCREF(__pyx_t_6); __pyx_t_5 = 0; + __pyx_t_7 = NULL; } else { - __pyx_t_7 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_doc_words); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 340, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_11 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 340, __pyx_L1_error) + __pyx_t_5 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_doc_words); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 340, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 340, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_11)) { - if (likely(PyList_CheckExact(__pyx_t_1))) { - if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_1)) break; + if (likely(!__pyx_t_7)) { + if (likely(PyList_CheckExact(__pyx_t_6))) { + if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_6)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_10 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_10); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 340, __pyx_L1_error) + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_6, __pyx_t_5); __Pyx_INCREF(__pyx_t_1); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 340, __pyx_L1_error) #else - __pyx_t_10 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 340, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); + __pyx_t_1 = PySequence_ITEM(__pyx_t_6, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 340, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } else { - if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_6)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_10 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_10); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 340, __pyx_L1_error) + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_5); __Pyx_INCREF(__pyx_t_1); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 340, __pyx_L1_error) #else - __pyx_t_10 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 340, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); + __pyx_t_1 = PySequence_ITEM(__pyx_t_6, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 340, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } } else { - __pyx_t_10 = __pyx_t_11(__pyx_t_1); - if (unlikely(!__pyx_t_10)) { + __pyx_t_1 = __pyx_t_7(__pyx_t_6); + if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); @@ -4193,10 +4544,10 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY } break; } - __Pyx_GOTREF(__pyx_t_10); + __Pyx_GOTREF(__pyx_t_1); } - __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_10); - __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_1); + __pyx_t_1 = 0; /* "gensim/models/doc2vec_inner.pyx":341 * i = 0 @@ -4205,166 +4556,166 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged */ - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 341, __pyx_L1_error) - if ((__pyx_t_6 != 0)) { - __pyx_t_3 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 341, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_10 = __pyx_t_3; - __pyx_t_3 = 0; + __pyx_t_8 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 341, __pyx_L1_error) + if ((__pyx_t_8 != 0)) { + __pyx_t_9 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 341, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_1 = __pyx_t_9; + __pyx_t_9 = 0; } else { __Pyx_INCREF(Py_None); - __pyx_t_10 = Py_None; + __pyx_t_1 = Py_None; } - __Pyx_XDECREF_SET(__pyx_v_predict_word, __pyx_t_10); - __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_predict_word, __pyx_t_1); + __pyx_t_1 = 0; /* "gensim/models/doc2vec_inner.pyx":342 * for token in doc_words: * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word # <<<<<<<<<<<<<< * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): */ - __pyx_t_6 = (__pyx_v_predict_word == Py_None); - __pyx_t_5 = (__pyx_t_6 != 0); - if (__pyx_t_5) { + __pyx_t_8 = (__pyx_v_predict_word == Py_None); + __pyx_t_10 = (__pyx_t_8 != 0); + if (__pyx_t_10) { /* "gensim/models/doc2vec_inner.pyx":343 * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged # <<<<<<<<<<<<<< - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue */ - goto __pyx_L13_continue; + goto __pyx_L3_continue; /* "gensim/models/doc2vec_inner.pyx":342 * for token in doc_words: * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word # <<<<<<<<<<<<<< * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): */ } /* "gensim/models/doc2vec_inner.pyx":344 * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and 
predict_word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[i] = predict_word.index + * c.indexes[i] = predict_word.index */ - __pyx_t_6 = (__pyx_v_sample != 0); - if (__pyx_t_6) { + __pyx_t_8 = (__pyx_v_c.sample != 0); + if (__pyx_t_8) { } else { - __pyx_t_5 = __pyx_t_6; - goto __pyx_L17_bool_binop_done; + __pyx_t_10 = __pyx_t_8; + goto __pyx_L7_bool_binop_done; } - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_3 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_next_random))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = PyObject_RichCompare(__pyx_t_10, __pyx_t_3, Py_LT); __Pyx_XGOTREF(__pyx_t_8); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_5 = __pyx_t_6; - __pyx_L17_bool_binop_done:; - if (__pyx_t_5) { + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 344, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_c.next_random))); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 344, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_11 = PyObject_RichCompare(__pyx_t_1, __pyx_t_9, Py_LT); __Pyx_XGOTREF(__pyx_t_11); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 344, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_11); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 344, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_10 = __pyx_t_8; + __pyx_L7_bool_binop_done:; + if (__pyx_t_10) { /* "gensim/models/doc2vec_inner.pyx":345 * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue # <<<<<<<<<<<<<< - * indexes[i] = predict_word.index - * if hs: + * c.indexes[i] = predict_word.index + * if c.hs: */ - goto __pyx_L13_continue; + goto __pyx_L3_continue; /* "gensim/models/doc2vec_inner.pyx":344 * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[i] = predict_word.index + * c.indexes[i] = predict_word.index */ } /* "gensim/models/doc2vec_inner.pyx":346 - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue - * indexes[i] = predict_word.index # <<<<<<<<<<<<<< - * if hs: - * codelens[i] = len(predict_word.code) + * c.indexes[i] = predict_word.index # <<<<<<<<<<<<<< + * if c.hs: + * c.codelens[i] = len(predict_word.code) */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_index); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 346, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_12 = 
__Pyx_PyInt_As_npy_uint32(__pyx_t_8); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 346, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - (__pyx_v_indexes[__pyx_v_i]) = __pyx_t_12; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_index); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 346, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_11); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 346, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.indexes[__pyx_v_i]) = __pyx_t_12; /* "gensim/models/doc2vec_inner.pyx":347 * continue - * indexes[i] = predict_word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) + * c.indexes[i] = predict_word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) */ - __pyx_t_5 = (__pyx_v_hs != 0); - if (__pyx_t_5) { + __pyx_t_10 = (__pyx_v_c.hs != 0); + if (__pyx_t_10) { /* "gensim/models/doc2vec_inner.pyx":348 - * indexes[i] = predict_word.index - * if hs: - * codelens[i] = len(predict_word.code) # <<<<<<<<<<<<<< - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) - */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 348, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_13 = PyObject_Length(__pyx_t_8); if (unlikely(__pyx_t_13 == ((Py_ssize_t)-1))) __PYX_ERR(0, 348, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - (__pyx_v_codelens[__pyx_v_i]) = ((int)__pyx_t_13); + * c.indexes[i] = predict_word.index + * if c.hs: + * c.codelens[i] = len(predict_word.code) # <<<<<<<<<<<<<< + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) + */ + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 348, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_3 = PyObject_Length(__pyx_t_11); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 348, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.codelens[__pyx_v_i]) = ((int)__pyx_t_3); /* "gensim/models/doc2vec_inner.pyx":349 - * if hs: - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) # <<<<<<<<<<<<<< - * points[i] = np.PyArray_DATA(predict_word.point) + * if c.hs: + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) # <<<<<<<<<<<<<< + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 349, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 349, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_8))); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 349, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 349, __pyx_L1_error) + 
(__pyx_v_c.codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; /* "gensim/models/doc2vec_inner.pyx":350 - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) # <<<<<<<<<<<<<< + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) # <<<<<<<<<<<<<< * result += 1 * i += 1 */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_point); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 350, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 350, __pyx_L1_error) - (__pyx_v_points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_8))); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_point); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 350, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 350, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; /* "gensim/models/doc2vec_inner.pyx":347 * continue - * indexes[i] = predict_word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) + * c.indexes[i] = predict_word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) */ } /* "gensim/models/doc2vec_inner.pyx":351 - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 # <<<<<<<<<<<<<< * i += 1 * if i == MAX_DOCUMENT_LEN: @@ -4372,7 +4723,7 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY __pyx_v_result = (__pyx_v_result + 1); /* "gensim/models/doc2vec_inner.pyx":352 - * points[i] = np.PyArray_DATA(predict_word.point) + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 * i += 1 # <<<<<<<<<<<<<< * if i == MAX_DOCUMENT_LEN: @@ -4385,26 +4736,26 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY * i += 1 * if i == MAX_DOCUMENT_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? - * document_len = i + * c.document_len = i */ - __pyx_t_5 = ((__pyx_v_i == 0x2710) != 0); - if (__pyx_t_5) { + __pyx_t_10 = ((__pyx_v_i == 0x2710) != 0); + if (__pyx_t_10) { /* "gensim/models/doc2vec_inner.pyx":354 * i += 1 * if i == MAX_DOCUMENT_LEN: * break # TODO: log warning, tally overflow? # <<<<<<<<<<<<<< - * document_len = i + * c.document_len = i * */ - goto __pyx_L14_break; + goto __pyx_L4_break; /* "gensim/models/doc2vec_inner.pyx":353 * result += 1 * i += 1 * if i == MAX_DOCUMENT_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? 
- * document_len = i + * c.document_len = i */ } @@ -4415,77 +4766,77 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word */ - __pyx_L13_continue:; + __pyx_L3_continue:; } - __pyx_L14_break:; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_L4_break:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "gensim/models/doc2vec_inner.pyx":355 * if i == MAX_DOCUMENT_LEN: * break # TODO: log warning, tally overflow? - * document_len = i # <<<<<<<<<<<<<< + * c.document_len = i # <<<<<<<<<<<<<< * - * if _train_words: + * if c.train_words: */ - __pyx_v_document_len = __pyx_v_i; + __pyx_v_c.document_len = __pyx_v_i; /* "gensim/models/doc2vec_inner.pyx":357 - * document_len = i + * c.document_len = i * - * if _train_words: # <<<<<<<<<<<<<< + * if c.train_words: # <<<<<<<<<<<<<< * # single randint() call avoids a big thread-synchronization slowdown - * for i, item in enumerate(model.random.randint(0, window, document_len)): + * for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): */ - __pyx_t_5 = (__pyx_v__train_words != 0); - if (__pyx_t_5) { + __pyx_t_10 = (__pyx_v_c.train_words != 0); + if (__pyx_t_10) { /* "gensim/models/doc2vec_inner.pyx":359 - * if _train_words: + * if c.train_words: * # single randint() call avoids a big thread-synchronization slowdown - * for i, item in enumerate(model.random.randint(0, window, document_len)): # <<<<<<<<<<<<<< - * reduced_windows[i] = item - * - */ - __pyx_t_2 = 0; - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 359, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_randint); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 359, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = __Pyx_PyInt_From_int(__pyx_v_window); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 359, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_10 = __Pyx_PyInt_From_int(__pyx_v_document_len); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 359, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); + * for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): # <<<<<<<<<<<<<< + * c.reduced_windows[i] = item + * + */ + __pyx_t_13 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 359, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_11, __pyx_n_s_randint); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 359, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_PyInt_From_int(__pyx_v_c.window); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 359, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_c.document_len); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 359, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); __pyx_t_14 = NULL; __pyx_t_15 = 0; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_3); + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_9); if (likely(__pyx_t_14)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); __Pyx_INCREF(__pyx_t_14); __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); + 
__Pyx_DECREF_SET(__pyx_t_9, function); __pyx_t_15 = 1; } } #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_3)) { - PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_int_0, __pyx_t_8, __pyx_t_10}; - __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 359, __pyx_L1_error) + if (PyFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_int_0, __pyx_t_11, __pyx_t_1}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 359, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; } else #endif #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { - PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_int_0, __pyx_t_8, __pyx_t_10}; - __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 359, __pyx_L1_error) + if (__Pyx_PyFastCFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_int_0, __pyx_t_11, __pyx_t_1}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 359, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; } else #endif { @@ -4497,48 +4848,48 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY __Pyx_INCREF(__pyx_int_0); __Pyx_GIVEREF(__pyx_int_0); PyTuple_SET_ITEM(__pyx_t_16, 0+__pyx_t_15, __pyx_int_0); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_16, 1+__pyx_t_15, __pyx_t_8); - __Pyx_GIVEREF(__pyx_t_10); - PyTuple_SET_ITEM(__pyx_t_16, 2+__pyx_t_15, __pyx_t_10); - __pyx_t_8 = 0; - __pyx_t_10 = 0; - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_16, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 359, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_11); + PyTuple_SET_ITEM(__pyx_t_16, 1+__pyx_t_15, __pyx_t_11); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_16, 2+__pyx_t_15, __pyx_t_1); + __pyx_t_11 = 0; + __pyx_t_1 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_16, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 359, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (likely(PyList_CheckExact(__pyx_t_1)) || PyTuple_CheckExact(__pyx_t_1)) { - __pyx_t_3 = __pyx_t_1; __Pyx_INCREF(__pyx_t_3); __pyx_t_7 = 0; - __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (likely(PyList_CheckExact(__pyx_t_6)) || PyTuple_CheckExact(__pyx_t_6)) { + __pyx_t_9 = __pyx_t_6; __Pyx_INCREF(__pyx_t_9); __pyx_t_5 = 0; + __pyx_t_7 = NULL; } else { - __pyx_t_7 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 359, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_11 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 359, __pyx_L1_error) + __pyx_t_5 = -1; __pyx_t_9 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 359, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_9); + __pyx_t_7 = Py_TYPE(__pyx_t_9)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 359, __pyx_L1_error) } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; for (;;) { - if (likely(!__pyx_t_11)) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_3)) break; + if (likely(!__pyx_t_7)) { + if (likely(PyList_CheckExact(__pyx_t_9))) { + if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_9)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 359, __pyx_L1_error) + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_9, __pyx_t_5); __Pyx_INCREF(__pyx_t_6); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 359, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_3, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 359, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = PySequence_ITEM(__pyx_t_9, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 359, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); #endif } else { - if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_9)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 359, __pyx_L1_error) + __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_9, __pyx_t_5); __Pyx_INCREF(__pyx_t_6); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 359, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_3, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 359, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = PySequence_ITEM(__pyx_t_9, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 359, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); #endif } } else { - __pyx_t_1 = __pyx_t_11(__pyx_t_3); - if (unlikely(!__pyx_t_1)) { + __pyx_t_6 = __pyx_t_7(__pyx_t_9); + if (unlikely(!__pyx_t_6)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); @@ -4546,86 +4897,70 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY } break; } - __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); } - __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_v_i = __pyx_t_2; - __pyx_t_2 = (__pyx_t_2 + 1); + __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_v_i = __pyx_t_13; + __pyx_t_13 = (__pyx_t_13 + 1); /* "gensim/models/doc2vec_inner.pyx":360 * # single randint() call avoids a big thread-synchronization slowdown - * for i, item in enumerate(model.random.randint(0, window, document_len)): - * reduced_windows[i] = item # <<<<<<<<<<<<<< + * for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): + * c.reduced_windows[i] = item # <<<<<<<<<<<<<< * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) + * for i in range(c.doctag_len): */ __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 360, __pyx_L1_error) - (__pyx_v_reduced_windows[__pyx_v_i]) = __pyx_t_12; + (__pyx_v_c.reduced_windows[__pyx_v_i]) = __pyx_t_12; /* "gensim/models/doc2vec_inner.pyx":359 - * if _train_words: + * if c.train_words: * # single randint() call avoids a big thread-synchronization slowdown - * for i, 
item in enumerate(model.random.randint(0, window, document_len)): # <<<<<<<<<<<<<< - * reduced_windows[i] = item + * for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): # <<<<<<<<<<<<<< + * c.reduced_windows[i] = item * */ } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; /* "gensim/models/doc2vec_inner.pyx":357 - * document_len = i + * c.document_len = i * - * if _train_words: # <<<<<<<<<<<<<< + * if c.train_words: # <<<<<<<<<<<<<< * # single randint() call avoids a big thread-synchronization slowdown - * for i, item in enumerate(model.random.randint(0, window, document_len)): + * for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): */ } /* "gensim/models/doc2vec_inner.pyx":362 - * reduced_windows[i] = item + * c.reduced_windows[i] = item * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) # <<<<<<<<<<<<<< - * for i in range(doctag_len): - * _doctag_indexes[i] = doctag_indexes[i] - */ - __pyx_t_7 = PyObject_Length(__pyx_v_doctag_indexes); if (unlikely(__pyx_t_7 == ((Py_ssize_t)-1))) __PYX_ERR(0, 362, __pyx_L1_error) - __pyx_t_17 = 0x2710; - if (((__pyx_t_7 < __pyx_t_17) != 0)) { - __pyx_t_13 = __pyx_t_7; - } else { - __pyx_t_13 = __pyx_t_17; - } - __pyx_v_doctag_len = ((int)__pyx_t_13); - - /* "gensim/models/doc2vec_inner.pyx":363 - * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - * for i in range(doctag_len): # <<<<<<<<<<<<<< - * _doctag_indexes[i] = doctag_indexes[i] + * for i in range(c.doctag_len): # <<<<<<<<<<<<<< + * c.doctag_indexes[i] = doctag_indexes[i] * result += 1 */ - __pyx_t_2 = __pyx_v_doctag_len; - __pyx_t_15 = __pyx_t_2; - for (__pyx_t_18 = 0; __pyx_t_18 < __pyx_t_15; __pyx_t_18+=1) { - __pyx_v_i = __pyx_t_18; + __pyx_t_13 = __pyx_v_c.doctag_len; + __pyx_t_15 = __pyx_t_13; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_15; __pyx_t_17+=1) { + __pyx_v_i = __pyx_t_17; - /* "gensim/models/doc2vec_inner.pyx":364 - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - * for i in range(doctag_len): - * _doctag_indexes[i] = doctag_indexes[i] # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":363 + * + * for i in range(c.doctag_len): + * c.doctag_indexes[i] = doctag_indexes[i] # <<<<<<<<<<<<<< * result += 1 * */ - __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_doctag_indexes, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 0, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 364, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_3); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 364, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - (__pyx_v__doctag_indexes[__pyx_v_i]) = __pyx_t_12; + __pyx_t_9 = __Pyx_GetItemInt(__pyx_v_doctag_indexes, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 0, 0, 0); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 363, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_9); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 363, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + (__pyx_v_c.doctag_indexes[__pyx_v_i]) = __pyx_t_12; - /* "gensim/models/doc2vec_inner.pyx":365 - * for i in range(doctag_len): - * _doctag_indexes[i] = doctag_indexes[i] + /* "gensim/models/doc2vec_inner.pyx":364 + * for i in range(c.doctag_len): + * c.doctag_indexes[i] = doctag_indexes[i] * result += 1 # <<<<<<<<<<<<<< * * # release GIL & train on the document @@ -4633,12 +4968,12 @@ static PyObject 
*__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY __pyx_v_result = (__pyx_v_result + 1); } - /* "gensim/models/doc2vec_inner.pyx":368 + /* "gensim/models/doc2vec_inner.pyx":367 * * # release GIL & train on the document * with nogil: # <<<<<<<<<<<<<< - * for i in range(document_len): - * if _train_words: # simultaneous skip-gram wordvec-training + * for i in range(c.document_len): + * if c.train_words: # simultaneous skip-gram wordvec-training */ { #ifdef WITH_THREAD @@ -4648,205 +4983,206 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY #endif /*try:*/ { - /* "gensim/models/doc2vec_inner.pyx":369 + /* "gensim/models/doc2vec_inner.pyx":368 * # release GIL & train on the document * with nogil: - * for i in range(document_len): # <<<<<<<<<<<<<< - * if _train_words: # simultaneous skip-gram wordvec-training - * j = i - window + reduced_windows[i] + * for i in range(c.document_len): # <<<<<<<<<<<<<< + * if c.train_words: # simultaneous skip-gram wordvec-training + * j = i - c.window + c.reduced_windows[i] */ - __pyx_t_2 = __pyx_v_document_len; - __pyx_t_15 = __pyx_t_2; - for (__pyx_t_18 = 0; __pyx_t_18 < __pyx_t_15; __pyx_t_18+=1) { - __pyx_v_i = __pyx_t_18; + __pyx_t_13 = __pyx_v_c.document_len; + __pyx_t_15 = __pyx_t_13; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_15; __pyx_t_17+=1) { + __pyx_v_i = __pyx_t_17; - /* "gensim/models/doc2vec_inner.pyx":370 + /* "gensim/models/doc2vec_inner.pyx":369 * with nogil: - * for i in range(document_len): - * if _train_words: # simultaneous skip-gram wordvec-training # <<<<<<<<<<<<<< - * j = i - window + reduced_windows[i] + * for i in range(c.document_len): + * if c.train_words: # simultaneous skip-gram wordvec-training # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] * if j < 0: */ - __pyx_t_5 = (__pyx_v__train_words != 0); - if (__pyx_t_5) { + __pyx_t_10 = (__pyx_v_c.train_words != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":371 - * for i in range(document_len): - * if _train_words: # simultaneous skip-gram wordvec-training - * j = i - window + reduced_windows[i] # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":370 + * for i in range(c.document_len): + * if c.train_words: # simultaneous skip-gram wordvec-training + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< * if j < 0: * j = 0 */ - __pyx_v_j = ((__pyx_v_i - __pyx_v_window) + (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/doc2vec_inner.pyx":372 - * if _train_words: # simultaneous skip-gram wordvec-training - * j = i - window + reduced_windows[i] + /* "gensim/models/doc2vec_inner.pyx":371 + * if c.train_words: # simultaneous skip-gram wordvec-training + * j = i - c.window + c.reduced_windows[i] * if j < 0: # <<<<<<<<<<<<<< * j = 0 - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ - __pyx_t_5 = ((__pyx_v_j < 0) != 0); - if (__pyx_t_5) { + __pyx_t_10 = ((__pyx_v_j < 0) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":373 - * j = i - window + reduced_windows[i] + /* "gensim/models/doc2vec_inner.pyx":372 + * j = i - c.window + c.reduced_windows[i] * if j < 0: * j = 0 # <<<<<<<<<<<<<< - * k = i + window + 1 - reduced_windows[i] - * if k > document_len: + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > c.document_len: */ __pyx_v_j = 0; - /* "gensim/models/doc2vec_inner.pyx":372 - * if _train_words: # simultaneous skip-gram wordvec-training 
- * j = i - window + reduced_windows[i] + /* "gensim/models/doc2vec_inner.pyx":371 + * if c.train_words: # simultaneous skip-gram wordvec-training + * j = i - c.window + c.reduced_windows[i] * if j < 0: # <<<<<<<<<<<<<< * j = 0 - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ } - /* "gensim/models/doc2vec_inner.pyx":374 + /* "gensim/models/doc2vec_inner.pyx":373 * if j < 0: * j = 0 - * k = i + window + 1 - reduced_windows[i] # <<<<<<<<<<<<<< - * if k > document_len: - * k = document_len + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< + * if k > c.document_len: + * k = c.document_len */ - __pyx_v_k = (((__pyx_v_i + __pyx_v_window) + 1) - (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/doc2vec_inner.pyx":375 + /* "gensim/models/doc2vec_inner.pyx":374 * j = 0 - * k = i + window + 1 - reduced_windows[i] - * if k > document_len: # <<<<<<<<<<<<<< - * k = document_len + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > c.document_len: # <<<<<<<<<<<<<< + * k = c.document_len * for j in range(j, k): */ - __pyx_t_5 = ((__pyx_v_k > __pyx_v_document_len) != 0); - if (__pyx_t_5) { + __pyx_t_10 = ((__pyx_v_k > __pyx_v_c.document_len) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":376 - * k = i + window + 1 - reduced_windows[i] - * if k > document_len: - * k = document_len # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":375 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > c.document_len: + * k = c.document_len # <<<<<<<<<<<<<< * for j in range(j, k): * if j == i: */ - __pyx_v_k = __pyx_v_document_len; + __pyx_t_18 = __pyx_v_c.document_len; + __pyx_v_k = __pyx_t_18; - /* "gensim/models/doc2vec_inner.pyx":375 + /* "gensim/models/doc2vec_inner.pyx":374 * j = 0 - * k = i + window + 1 - reduced_windows[i] - * if k > document_len: # <<<<<<<<<<<<<< - * k = document_len + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > c.document_len: # <<<<<<<<<<<<<< + * k = c.document_len * for j in range(j, k): */ } - /* "gensim/models/doc2vec_inner.pyx":377 - * if k > document_len: - * k = document_len + /* "gensim/models/doc2vec_inner.pyx":376 + * if k > c.document_len: + * k = c.document_len * for j in range(j, k): # <<<<<<<<<<<<<< * if j == i: * continue */ - __pyx_t_17 = __pyx_v_k; - __pyx_t_19 = __pyx_t_17; - for (__pyx_t_20 = __pyx_v_j; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { - __pyx_v_j = __pyx_t_20; + __pyx_t_4 = __pyx_v_k; + __pyx_t_19 = __pyx_t_4; + for (__pyx_t_18 = __pyx_v_j; __pyx_t_18 < __pyx_t_19; __pyx_t_18+=1) { + __pyx_v_j = __pyx_t_18; - /* "gensim/models/doc2vec_inner.pyx":378 - * k = document_len + /* "gensim/models/doc2vec_inner.pyx":377 + * k = c.document_len * for j in range(j, k): * if j == i: # <<<<<<<<<<<<<< * continue - * if hs: + * if c.hs: */ - __pyx_t_5 = ((__pyx_v_j == __pyx_v_i) != 0); - if (__pyx_t_5) { + __pyx_t_10 = ((__pyx_v_j == __pyx_v_i) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":379 + /* "gensim/models/doc2vec_inner.pyx":378 * for j in range(j, k): * if j == i: * continue # <<<<<<<<<<<<<< - * if hs: + * if c.hs: * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose */ - goto __pyx_L34_continue; + goto __pyx_L24_continue; - /* "gensim/models/doc2vec_inner.pyx":378 - * k = document_len + /* "gensim/models/doc2vec_inner.pyx":377 + * k = c.document_len * for j in range(j, k): * if j == i: # <<<<<<<<<<<<<< * continue - 
* if hs: + * if c.hs: */ } - /* "gensim/models/doc2vec_inner.pyx":380 + /* "gensim/models/doc2vec_inner.pyx":379 * if j == i: * continue - * if hs: # <<<<<<<<<<<<<< + * if c.hs: # <<<<<<<<<<<<<< * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _word_vectors, syn1, size, indexes[j], + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.word_vectors, c.syn1, c.layer1_size, */ - __pyx_t_5 = (__pyx_v_hs != 0); - if (__pyx_t_5) { + __pyx_t_10 = (__pyx_v_c.hs != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":382 - * if hs: + /* "gensim/models/doc2vec_inner.pyx":381 + * if c.hs: * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _word_vectors, syn1, size, indexes[j], # <<<<<<<<<<<<<< - * _alpha, _work, _learn_words, _learn_hidden, _word_locks) - * if negative: + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.word_vectors, c.syn1, c.layer1_size, # <<<<<<<<<<<<<< + * c.indexes[j], c.alpha, c.work, c.learn_words, c.learn_hidden, c.word_locks) + * if c.negative: */ - __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), (__pyx_v_codelens[__pyx_v_i]), __pyx_v__word_vectors, __pyx_v_syn1, __pyx_v_size, (__pyx_v_indexes[__pyx_v_j]), __pyx_v__alpha, __pyx_v__work, __pyx_v__learn_words, __pyx_v__learn_hidden, __pyx_v__word_locks); + __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.word_vectors, __pyx_v_c.syn1, __pyx_v_c.layer1_size, (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.learn_words, __pyx_v_c.learn_hidden, __pyx_v_c.word_locks); - /* "gensim/models/doc2vec_inner.pyx":380 + /* "gensim/models/doc2vec_inner.pyx":379 * if j == i: * continue - * if hs: # <<<<<<<<<<<<<< + * if c.hs: # <<<<<<<<<<<<<< * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _word_vectors, syn1, size, indexes[j], + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.word_vectors, c.syn1, c.layer1_size, */ } - /* "gensim/models/doc2vec_inner.pyx":384 - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _word_vectors, syn1, size, indexes[j], - * _alpha, _work, _learn_words, _learn_hidden, _word_locks) - * if negative: # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":383 + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.word_vectors, c.syn1, c.layer1_size, + * c.indexes[j], c.alpha, c.work, c.learn_words, c.learn_hidden, c.word_locks) + * if c.negative: # <<<<<<<<<<<<<< * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose - * next_random = fast_document_dbow_neg(negative, cum_table, cum_table_len, _word_vectors, syn1neg, size, + * c.next_random = fast_document_dbow_neg(c.negative, c.cum_table, c.cum_table_len, c.word_vectors, */ - __pyx_t_5 = (__pyx_v_negative != 0); - if (__pyx_t_5) { + __pyx_t_10 = (__pyx_v_c.negative != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":386 - * if negative: + /* "gensim/models/doc2vec_inner.pyx":385 + * if c.negative: * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose - * next_random = fast_document_dbow_neg(negative, cum_table, cum_table_len, 
_word_vectors, syn1neg, size, # <<<<<<<<<<<<<< - * indexes[i], indexes[j], _alpha, _work, next_random, - * _learn_words, _learn_hidden, _word_locks) + * c.next_random = fast_document_dbow_neg(c.negative, c.cum_table, c.cum_table_len, c.word_vectors, # <<<<<<<<<<<<<< + * c.syn1neg, c.layer1_size, c.indexes[i], c.indexes[j], + * c.alpha, c.work, c.next_random, c.learn_words, */ - __pyx_v_next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg(__pyx_v_negative, __pyx_v_cum_table, __pyx_v_cum_table_len, __pyx_v__word_vectors, __pyx_v_syn1neg, __pyx_v_size, (__pyx_v_indexes[__pyx_v_i]), (__pyx_v_indexes[__pyx_v_j]), __pyx_v__alpha, __pyx_v__work, __pyx_v_next_random, __pyx_v__learn_words, __pyx_v__learn_hidden, __pyx_v__word_locks); + __pyx_v_c.next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.word_vectors, __pyx_v_c.syn1neg, __pyx_v_c.layer1_size, (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.next_random, __pyx_v_c.learn_words, __pyx_v_c.learn_hidden, __pyx_v_c.word_locks); - /* "gensim/models/doc2vec_inner.pyx":384 - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _word_vectors, syn1, size, indexes[j], - * _alpha, _work, _learn_words, _learn_hidden, _word_locks) - * if negative: # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":383 + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.word_vectors, c.syn1, c.layer1_size, + * c.indexes[j], c.alpha, c.work, c.learn_words, c.learn_hidden, c.word_locks) + * if c.negative: # <<<<<<<<<<<<<< * # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose - * next_random = fast_document_dbow_neg(negative, cum_table, cum_table_len, _word_vectors, syn1neg, size, + * c.next_random = fast_document_dbow_neg(c.negative, c.cum_table, c.cum_table_len, c.word_vectors, */ } - __pyx_L34_continue:; + __pyx_L24_continue:; } - /* "gensim/models/doc2vec_inner.pyx":370 + /* "gensim/models/doc2vec_inner.pyx":369 * with nogil: - * for i in range(document_len): - * if _train_words: # simultaneous skip-gram wordvec-training # <<<<<<<<<<<<<< - * j = i - window + reduced_windows[i] + * for i in range(c.document_len): + * if c.train_words: # simultaneous skip-gram wordvec-training # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] * if j < 0: */ } @@ -4854,80 +5190,80 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY /* "gensim/models/doc2vec_inner.pyx":391 * * # docvec-training - * for j in range(doctag_len): # <<<<<<<<<<<<<< - * if hs: - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _doctag_vectors, syn1, size, _doctag_indexes[j], + * for j in range(c.doctag_len): # <<<<<<<<<<<<<< + * if c.hs: + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, */ - __pyx_t_20 = __pyx_v_doctag_len; - __pyx_t_21 = __pyx_t_20; - for (__pyx_t_22 = 0; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { - __pyx_v_j = __pyx_t_22; + __pyx_t_18 = __pyx_v_c.doctag_len; + __pyx_t_20 = __pyx_t_18; + for (__pyx_t_21 = 0; __pyx_t_21 < __pyx_t_20; __pyx_t_21+=1) { + __pyx_v_j = __pyx_t_21; /* "gensim/models/doc2vec_inner.pyx":392 * # docvec-training - * for j in range(doctag_len): - * if hs: # <<<<<<<<<<<<<< - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _doctag_vectors, syn1, size, _doctag_indexes[j], - * _alpha, _work, _learn_doctags, _learn_hidden, 
_doctag_locks) + * for j in range(c.doctag_len): + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + * c.doctag_indexes[j], c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) */ - __pyx_t_5 = (__pyx_v_hs != 0); - if (__pyx_t_5) { + __pyx_t_10 = (__pyx_v_c.hs != 0); + if (__pyx_t_10) { /* "gensim/models/doc2vec_inner.pyx":393 - * for j in range(doctag_len): - * if hs: - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _doctag_vectors, syn1, size, _doctag_indexes[j], # <<<<<<<<<<<<<< - * _alpha, _work, _learn_doctags, _learn_hidden, _doctag_locks) - * if negative: + * for j in range(c.doctag_len): + * if c.hs: + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, # <<<<<<<<<<<<<< + * c.doctag_indexes[j], c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + * if c.negative: */ - __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), (__pyx_v_codelens[__pyx_v_i]), __pyx_v__doctag_vectors, __pyx_v_syn1, __pyx_v_size, (__pyx_v__doctag_indexes[__pyx_v_j]), __pyx_v__alpha, __pyx_v__work, __pyx_v__learn_doctags, __pyx_v__learn_hidden, __pyx_v__doctag_locks); + __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.doctag_vectors, __pyx_v_c.syn1, __pyx_v_c.layer1_size, (__pyx_v_c.doctag_indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.learn_doctags, __pyx_v_c.learn_hidden, __pyx_v_c.doctag_locks); /* "gensim/models/doc2vec_inner.pyx":392 * # docvec-training - * for j in range(doctag_len): - * if hs: # <<<<<<<<<<<<<< - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _doctag_vectors, syn1, size, _doctag_indexes[j], - * _alpha, _work, _learn_doctags, _learn_hidden, _doctag_locks) + * for j in range(c.doctag_len): + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + * c.doctag_indexes[j], c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) */ } /* "gensim/models/doc2vec_inner.pyx":395 - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _doctag_vectors, syn1, size, _doctag_indexes[j], - * _alpha, _work, _learn_doctags, _learn_hidden, _doctag_locks) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_document_dbow_neg(negative, cum_table, cum_table_len, _doctag_vectors, syn1neg, size, - * indexes[i], _doctag_indexes[j], _alpha, _work, next_random, + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + * c.doctag_indexes[j], c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dbow_neg(c.negative, c.cum_table, c.cum_table_len, c.doctag_vectors, + * c.syn1neg, c.layer1_size, c.indexes[i], c.doctag_indexes[j], */ - __pyx_t_5 = (__pyx_v_negative != 0); - if (__pyx_t_5) { + __pyx_t_10 = (__pyx_v_c.negative != 0); + if (__pyx_t_10) { /* "gensim/models/doc2vec_inner.pyx":396 - * _alpha, _work, _learn_doctags, _learn_hidden, _doctag_locks) - * if negative: - * next_random = fast_document_dbow_neg(negative, cum_table, cum_table_len, _doctag_vectors, syn1neg, size, # <<<<<<<<<<<<<< - * indexes[i], _doctag_indexes[j], _alpha, _work, next_random, - * _learn_doctags, _learn_hidden, _doctag_locks) + * 
c.doctag_indexes[j], c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + * if c.negative: + * c.next_random = fast_document_dbow_neg(c.negative, c.cum_table, c.cum_table_len, c.doctag_vectors, # <<<<<<<<<<<<<< + * c.syn1neg, c.layer1_size, c.indexes[i], c.doctag_indexes[j], + * c.alpha, c.work, c.next_random, c.learn_doctags, */ - __pyx_v_next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg(__pyx_v_negative, __pyx_v_cum_table, __pyx_v_cum_table_len, __pyx_v__doctag_vectors, __pyx_v_syn1neg, __pyx_v_size, (__pyx_v_indexes[__pyx_v_i]), (__pyx_v__doctag_indexes[__pyx_v_j]), __pyx_v__alpha, __pyx_v__work, __pyx_v_next_random, __pyx_v__learn_doctags, __pyx_v__learn_hidden, __pyx_v__doctag_locks); + __pyx_v_c.next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.doctag_vectors, __pyx_v_c.syn1neg, __pyx_v_c.layer1_size, (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.doctag_indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.next_random, __pyx_v_c.learn_doctags, __pyx_v_c.learn_hidden, __pyx_v_c.doctag_locks); /* "gensim/models/doc2vec_inner.pyx":395 - * fast_document_dbow_hs(points[i], codes[i], codelens[i], _doctag_vectors, syn1, size, _doctag_indexes[j], - * _alpha, _work, _learn_doctags, _learn_hidden, _doctag_locks) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_document_dbow_neg(negative, cum_table, cum_table_len, _doctag_vectors, syn1neg, size, - * indexes[i], _doctag_indexes[j], _alpha, _work, next_random, + * fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + * c.doctag_indexes[j], c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dbow_neg(c.negative, c.cum_table, c.cum_table_len, c.doctag_vectors, + * c.syn1neg, c.layer1_size, c.indexes[i], c.doctag_indexes[j], */ } } } } - /* "gensim/models/doc2vec_inner.pyx":368 + /* "gensim/models/doc2vec_inner.pyx":367 * * # release GIL & train on the document * with nogil: # <<<<<<<<<<<<<< - * for i in range(document_len): - * if _train_words: # simultaneous skip-gram wordvec-training + * for i in range(c.document_len): + * if c.train_words: # simultaneous skip-gram wordvec-training */ /*finally:*/ { /*normal exit:*/{ @@ -4935,27 +5271,27 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY __Pyx_FastGIL_Forget(); Py_BLOCK_THREADS #endif - goto __pyx_L28; + goto __pyx_L18; } - __pyx_L28:; + __pyx_L18:; } } - /* "gensim/models/doc2vec_inner.pyx":400 - * _learn_doctags, _learn_hidden, _doctag_locks) + /* "gensim/models/doc2vec_inner.pyx":401 + * c.learn_hidden, c.doctag_locks) * * return result # <<<<<<<<<<<<<< * * */ __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_PyInt_From_long(__pyx_v_result); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 400, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; + __pyx_t_9 = __Pyx_PyInt_From_long(__pyx_v_result); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_r = __pyx_t_9; + __pyx_t_9 = 0; goto __pyx_L0; - /* "gensim/models/doc2vec_inner.pyx":223 + /* "gensim/models/doc2vec_inner.pyx":280 * * * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, # <<<<<<<<<<<<<< @@ -4966,9 +5302,9 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY /* function exit code */ 
__pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_14); __Pyx_XDECREF(__pyx_t_16); __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dbow", __pyx_clineno, __pyx_lineno, __pyx_filename); @@ -4978,17 +5314,12 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_train_document_dbow(CY __Pyx_XDECREF(__pyx_v_token); __Pyx_XDECREF(__pyx_v_predict_word); __Pyx_XDECREF(__pyx_v_item); - __Pyx_XDECREF(__pyx_v_work); - __Pyx_XDECREF(__pyx_v_word_vectors); - __Pyx_XDECREF(__pyx_v_word_locks); - __Pyx_XDECREF(__pyx_v_doctag_vectors); - __Pyx_XDECREF(__pyx_v_doctag_locks); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } -/* "gensim/models/doc2vec_inner.pyx":403 +/* "gensim/models/doc2vec_inner.pyx":404 * * * def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< @@ -5023,7 +5354,7 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_3train_document_dm(PyO values[4] = ((PyObject *)Py_None); values[5] = ((PyObject *)Py_None); - /* "gensim/models/doc2vec_inner.pyx":404 + /* "gensim/models/doc2vec_inner.pyx":405 * * def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, * learn_doctags=True, learn_words=True, learn_hidden=True, # <<<<<<<<<<<<<< @@ -5034,7 +5365,7 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_3train_document_dm(PyO values[7] = ((PyObject *)Py_True); values[8] = ((PyObject *)Py_True); - /* "gensim/models/doc2vec_inner.pyx":405 + /* "gensim/models/doc2vec_inner.pyx":406 * def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, * learn_doctags=True, learn_words=True, learn_hidden=True, * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): # <<<<<<<<<<<<<< @@ -5087,19 +5418,19 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_3train_document_dm(PyO case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doc_words)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_document_dm", 0, 4, 13, 1); __PYX_ERR(0, 403, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_document_dm", 0, 4, 13, 1); __PYX_ERR(0, 404, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_indexes)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_document_dm", 0, 4, 13, 2); __PYX_ERR(0, 403, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_document_dm", 0, 4, 13, 2); __PYX_ERR(0, 404, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_document_dm", 0, 4, 13, 3); __PYX_ERR(0, 403, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_document_dm", 0, 4, 13, 3); __PYX_ERR(0, 404, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: @@ -5157,7 +5488,7 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_3train_document_dm(PyO } } if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_document_dm") < 0)) __PYX_ERR(0, 403, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_document_dm") < 0)) __PYX_ERR(0, 404, __pyx_L3_error) } } else { switch 
(PyTuple_GET_SIZE(__pyx_args)) { @@ -5203,728 +5534,133 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_3train_document_dm(PyO } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("train_document_dm", 0, 4, 13, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 403, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dm", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(__pyx_self, __pyx_v_model, __pyx_v_doc_words, __pyx_v_doctag_indexes, __pyx_v_alpha, __pyx_v_work, __pyx_v_neu1, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); - - /* "gensim/models/doc2vec_inner.pyx":403 - * - * - * def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< - * learn_doctags=True, learn_words=True, learn_hidden=True, - * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_doc_words, PyObject *__pyx_v_doctag_indexes, PyObject *__pyx_v_alpha, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { - int __pyx_v_hs; - int __pyx_v_negative; - int __pyx_v_sample; - int __pyx_v__learn_doctags; - int __pyx_v__learn_words; - int __pyx_v__learn_hidden; - int __pyx_v_cbow_mean; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_count; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_inv_count; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__word_vectors; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__doctag_vectors; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__word_locks; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__doctag_locks; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__work; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__neu1; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; - int __pyx_v_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v__doctag_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_reduced_windows[0x2710]; - int __pyx_v_document_len; - int __pyx_v_doctag_len; - int __pyx_v_window; - int __pyx_v_i; - int __pyx_v_j; - int __pyx_v_k; - int __pyx_v_m; - long __pyx_v_result; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg; - __pyx_t_5numpy_uint32_t *__pyx_v_cum_table; - unsigned PY_LONG_LONG __pyx_v_cum_table_len; - unsigned PY_LONG_LONG __pyx_v_next_random; - PyObject *__pyx_v_vlookup = NULL; - PyObject *__pyx_v_token = NULL; - PyObject *__pyx_v_predict_word = NULL; - PyObject *__pyx_v_item = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - 
PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_4; - int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - unsigned PY_LONG_LONG __pyx_t_9; - PyObject *__pyx_t_10 = NULL; - PyObject *(*__pyx_t_11)(PyObject *); - __pyx_t_5numpy_uint32_t __pyx_t_12; - Py_ssize_t __pyx_t_13; - PyObject *__pyx_t_14 = NULL; - int __pyx_t_15; - PyObject *__pyx_t_16 = NULL; - long __pyx_t_17; - int __pyx_t_18; - int __pyx_t_19; - int __pyx_t_20; - int __pyx_t_21; - __Pyx_RefNannySetupContext("train_document_dm", 0); - __Pyx_INCREF(__pyx_v_work); - __Pyx_INCREF(__pyx_v_neu1); - __Pyx_INCREF(__pyx_v_word_vectors); - __Pyx_INCREF(__pyx_v_word_locks); - __Pyx_INCREF(__pyx_v_doctag_vectors); - __Pyx_INCREF(__pyx_v_doctag_locks); - - /* "gensim/models/doc2vec_inner.pyx":451 - * - * """ - * cdef int hs = model.hs # <<<<<<<<<<<<<< - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 451, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 451, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_hs = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":452 - * """ - * cdef int hs = model.hs - * cdef int negative = model.negative # <<<<<<<<<<<<<< - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _learn_doctags = learn_doctags - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 452, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 452, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_negative = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":453 - * cdef int hs = model.hs - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< - * cdef int _learn_doctags = learn_doctags - * cdef int _learn_words = learn_words - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 453, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 453, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 453, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 453, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_sample = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":454 - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _learn_doctags = learn_doctags # <<<<<<<<<<<<<< - * cdef int _learn_words = learn_words - * cdef int _learn_hidden = learn_hidden - */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_doctags); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 454, __pyx_L1_error) - __pyx_v__learn_doctags = __pyx_t_2; - - /* 
"gensim/models/doc2vec_inner.pyx":455 - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _learn_doctags = learn_doctags - * cdef int _learn_words = learn_words # <<<<<<<<<<<<<< - * cdef int _learn_hidden = learn_hidden - * cdef int cbow_mean = model.cbow_mean - */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_words); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 455, __pyx_L1_error) - __pyx_v__learn_words = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":456 - * cdef int _learn_doctags = learn_doctags - * cdef int _learn_words = learn_words - * cdef int _learn_hidden = learn_hidden # <<<<<<<<<<<<<< - * cdef int cbow_mean = model.cbow_mean - * cdef REAL_t count, inv_count = 1.0 - */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_hidden); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 456, __pyx_L1_error) - __pyx_v__learn_hidden = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":457 - * cdef int _learn_words = learn_words - * cdef int _learn_hidden = learn_hidden - * cdef int cbow_mean = model.cbow_mean # <<<<<<<<<<<<<< - * cdef REAL_t count, inv_count = 1.0 - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_cbow_mean); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 457, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 457, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_cbow_mean = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":458 - * cdef int _learn_hidden = learn_hidden - * cdef int cbow_mean = model.cbow_mean - * cdef REAL_t count, inv_count = 1.0 # <<<<<<<<<<<<<< - * - * cdef REAL_t *_word_vectors - */ - __pyx_v_inv_count = 1.0; - - /* "gensim/models/doc2vec_inner.pyx":466 - * cdef REAL_t *_work - * cdef REAL_t *_neu1 - * cdef REAL_t _alpha = alpha # <<<<<<<<<<<<<< - * cdef int size = model.trainables.layer1_size - * - */ - __pyx_t_4 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_4 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 466, __pyx_L1_error) - __pyx_v__alpha = __pyx_t_4; - - /* "gensim/models/doc2vec_inner.pyx":467 - * cdef REAL_t *_neu1 - * cdef REAL_t _alpha = alpha - * cdef int size = model.trainables.layer1_size # <<<<<<<<<<<<<< - * - * cdef int codelens[MAX_DOCUMENT_LEN] - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 467, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 467, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 467, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_size = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":475 - * cdef int document_len - * cdef int doctag_len - * cdef int window = model.window # <<<<<<<<<<<<<< - * - * cdef int i, j, k, m - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_window = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":478 - * - * 
cdef int i, j, k, m - * cdef long result = 0 # <<<<<<<<<<<<<< - * - * # For hierarchical softmax - */ - __pyx_v_result = 0; - - /* "gensim/models/doc2vec_inner.pyx":493 - * - * # default vectors, locks from syn0/doctag_syn0 - * if word_vectors is None: # <<<<<<<<<<<<<< - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) - */ - __pyx_t_5 = (__pyx_v_word_vectors == Py_None); - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { - - /* "gensim/models/doc2vec_inner.pyx":494 - * # default vectors, locks from syn0/doctag_syn0 - * if word_vectors is None: - * word_vectors = model.wv.vectors # <<<<<<<<<<<<<< - * _word_vectors = (np.PyArray_DATA(word_vectors)) - * if doctag_vectors is None: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 494, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 494, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_word_vectors, __pyx_t_1); - __pyx_t_1 = 0; - - /* "gensim/models/doc2vec_inner.pyx":493 - * - * # default vectors, locks from syn0/doctag_syn0 - * if word_vectors is None: # <<<<<<<<<<<<<< - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":495 - * if word_vectors is None: - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) # <<<<<<<<<<<<<< - * if doctag_vectors is None: - * doctag_vectors = model.docvecs.vectors_docs - */ - if (!(likely(((__pyx_v_word_vectors) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_vectors, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 495, __pyx_L1_error) - __pyx_v__word_vectors = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_vectors))); - - /* "gensim/models/doc2vec_inner.pyx":496 - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) - * if doctag_vectors is None: # <<<<<<<<<<<<<< - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - */ - __pyx_t_6 = (__pyx_v_doctag_vectors == Py_None); - __pyx_t_5 = (__pyx_t_6 != 0); - if (__pyx_t_5) { - - /* "gensim/models/doc2vec_inner.pyx":497 - * _word_vectors = (np.PyArray_DATA(word_vectors)) - * if doctag_vectors is None: - * doctag_vectors = model.docvecs.vectors_docs # <<<<<<<<<<<<<< - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - * if word_locks is None: - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_docvecs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 497, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_docs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 497, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_doctag_vectors, __pyx_t_3); - __pyx_t_3 = 0; - - /* "gensim/models/doc2vec_inner.pyx":496 - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) - * if doctag_vectors is None: # <<<<<<<<<<<<<< - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":498 - * if doctag_vectors is None: - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) # 
<<<<<<<<<<<<<< - * if word_locks is None: - * word_locks = model.trainables.vectors_lockf - */ - if (!(likely(((__pyx_v_doctag_vectors) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_doctag_vectors, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 498, __pyx_L1_error) - __pyx_v__doctag_vectors = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_doctag_vectors))); - - /* "gensim/models/doc2vec_inner.pyx":499 - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - * if word_locks is None: # <<<<<<<<<<<<<< - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) - */ - __pyx_t_5 = (__pyx_v_word_locks == Py_None); - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { - - /* "gensim/models/doc2vec_inner.pyx":500 - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - * if word_locks is None: - * word_locks = model.trainables.vectors_lockf # <<<<<<<<<<<<<< - * _word_locks = (np.PyArray_DATA(word_locks)) - * if doctag_locks is None: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 500, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 500, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_word_locks, __pyx_t_1); - __pyx_t_1 = 0; - - /* "gensim/models/doc2vec_inner.pyx":499 - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - * if word_locks is None: # <<<<<<<<<<<<<< - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":501 - * if word_locks is None: - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) # <<<<<<<<<<<<<< - * if doctag_locks is None: - * doctag_locks = model.trainables.vectors_docs_lockf - */ - if (!(likely(((__pyx_v_word_locks) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_locks, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 501, __pyx_L1_error) - __pyx_v__word_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_locks))); - - /* "gensim/models/doc2vec_inner.pyx":502 - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) - * if doctag_locks is None: # <<<<<<<<<<<<<< - * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) - */ - __pyx_t_6 = (__pyx_v_doctag_locks == Py_None); - __pyx_t_5 = (__pyx_t_6 != 0); - if (__pyx_t_5) { - - /* "gensim/models/doc2vec_inner.pyx":503 - * _word_locks = (np.PyArray_DATA(word_locks)) - * if doctag_locks is None: - * doctag_locks = model.trainables.vectors_docs_lockf # <<<<<<<<<<<<<< - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 503, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_docs_lockf); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 503, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_doctag_locks, __pyx_t_3); - __pyx_t_3 = 0; - - /* "gensim/models/doc2vec_inner.pyx":502 - * word_locks = 
model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) - * if doctag_locks is None: # <<<<<<<<<<<<<< - * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":504 - * if doctag_locks is None: - * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) # <<<<<<<<<<<<<< - * - * if hs: - */ - if (!(likely(((__pyx_v_doctag_locks) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_doctag_locks, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 504, __pyx_L1_error) - __pyx_v__doctag_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_doctag_locks))); - - /* "gensim/models/doc2vec_inner.pyx":506 - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) - * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - */ - __pyx_t_5 = (__pyx_v_hs != 0); - if (__pyx_t_5) { - - /* "gensim/models/doc2vec_inner.pyx":507 - * - * if hs: - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< - * - * if negative: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 507, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 507, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 507, __pyx_L1_error) - __pyx_v_syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "gensim/models/doc2vec_inner.pyx":506 - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) - * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - */ - } - - /* "gensim/models/doc2vec_inner.pyx":509 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - */ - __pyx_t_5 = (__pyx_v_negative != 0); - if (__pyx_t_5) { + __Pyx_RaiseArgtupleInvalid("train_document_dm", 0, 4, 13, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 404, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dm", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(__pyx_self, __pyx_v_model, __pyx_v_doc_words, __pyx_v_doctag_indexes, __pyx_v_alpha, __pyx_v_work, __pyx_v_neu1, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); - /* "gensim/models/doc2vec_inner.pyx":510 + /* "gensim/models/doc2vec_inner.pyx":404 * - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 510, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 510, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 510, __pyx_L1_error) - __pyx_v_syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "gensim/models/doc2vec_inner.pyx":511 - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 511, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 511, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 511, __pyx_L1_error) - __pyx_v_cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "gensim/models/doc2vec_inner.pyx":512 - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 512, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 512, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_7 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_7 == ((Py_ssize_t)-1))) __PYX_ERR(0, 512, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_cum_table_len = __pyx_t_7; - - /* "gensim/models/doc2vec_inner.pyx":509 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< + * learn_doctags=True, learn_words=True, learn_hidden=True, + * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): */ - } - /* "gensim/models/doc2vec_inner.pyx":513 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - * - */ - __pyx_t_6 = (__pyx_v_negative != 0); - if (!__pyx_t_6) { - } else { - __pyx_t_5 = __pyx_t_6; - goto __pyx_L10_bool_binop_done; - } - __pyx_t_6 = (__pyx_v_sample != 0); - __pyx_t_5 = __pyx_t_6; - __pyx_L10_bool_binop_done:; - if (__pyx_t_5) { + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} - /* "gensim/models/doc2vec_inner.pyx":514 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random 
= (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< - * - * # convert Python structures to primitive types, so we can release the GIL - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_9 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_next_random = __pyx_t_9; +static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_doc_words, PyObject *__pyx_v_doctag_indexes, PyObject *__pyx_v_alpha, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { + struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig __pyx_v_c; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_count; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_inv_count; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_m; + long __pyx_v_result; + PyObject *__pyx_v_vlookup = NULL; + PyObject *__pyx_v_token = NULL; + PyObject *__pyx_v_predict_word = NULL; + PyObject *__pyx_v_item = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config __pyx_t_2; + Py_ssize_t __pyx_t_3; + long __pyx_t_4; + Py_ssize_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *(*__pyx_t_7)(PyObject *); + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + __pyx_t_5numpy_uint32_t __pyx_t_12; + int __pyx_t_13; + PyObject *__pyx_t_14 = NULL; + int __pyx_t_15; + PyObject *__pyx_t_16 = NULL; + int __pyx_t_17; + int __pyx_t_18; + int __pyx_t_19; + int __pyx_t_20; + 
__Pyx_RefNannySetupContext("train_document_dm", 0); - /* "gensim/models/doc2vec_inner.pyx":513 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/doc2vec_inner.pyx":454 + * cdef Doc2VecConfig c * + * cdef REAL_t count, inv_count = 1.0 # <<<<<<<<<<<<<< + * cdef int i, j, k, m + * cdef long result = 0 */ - } + __pyx_v_inv_count = 1.0; - /* "gensim/models/doc2vec_inner.pyx":517 + /* "gensim/models/doc2vec_inner.pyx":456 + * cdef REAL_t count, inv_count = 1.0 + * cdef int i, j, k, m + * cdef long result = 0 # <<<<<<<<<<<<<< * - * # convert Python structures to primitive types, so we can release the GIL - * if work is None: # <<<<<<<<<<<<<< - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) - */ - __pyx_t_5 = (__pyx_v_work == Py_None); - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { - - /* "gensim/models/doc2vec_inner.pyx":518 - * # convert Python structures to primitive types, so we can release the GIL - * if work is None: - * work = zeros(model.trainables.layer1_size, dtype=REAL) # <<<<<<<<<<<<<< - * _work = np.PyArray_DATA(work) - * if neu1 is None: + * init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=False, */ - __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_zeros); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 518, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 518, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 518, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 518, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 518, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_REAL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 518, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - if (PyDict_SetItem(__pyx_t_1, __pyx_n_s_dtype, __pyx_t_10) < 0) __PYX_ERR(0, 518, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_3, __pyx_t_1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 518, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_work, __pyx_t_10); - __pyx_t_10 = 0; + __pyx_v_result = 0; - /* "gensim/models/doc2vec_inner.pyx":517 + /* "gensim/models/doc2vec_inner.pyx":458 + * cdef long result = 0 * - * # convert Python structures to primitive types, so we can release the GIL - * if work is None: # <<<<<<<<<<<<<< - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":519 - * if work is None: - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) # <<<<<<<<<<<<<< - * if neu1 is None: - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - */ - if 
(!(likely(((__pyx_v_work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 519, __pyx_L1_error) - __pyx_v__work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_work))); - - /* "gensim/models/doc2vec_inner.pyx":520 - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) - * if neu1 is None: # <<<<<<<<<<<<<< - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - * _neu1 = np.PyArray_DATA(neu1) - */ - __pyx_t_6 = (__pyx_v_neu1 == Py_None); - __pyx_t_5 = (__pyx_t_6 != 0); - if (__pyx_t_5) { + * init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=False, # <<<<<<<<<<<<<< + * work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + * doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) + */ + __pyx_t_2.__pyx_n = 7; + __pyx_t_2.train_words = Py_False; + __pyx_t_2.work = __pyx_v_work; + __pyx_t_2.neu1 = __pyx_v_neu1; + __pyx_t_2.word_vectors = __pyx_v_word_vectors; + __pyx_t_2.word_locks = __pyx_v_word_locks; + __pyx_t_2.doctag_vectors = __pyx_v_doctag_vectors; + __pyx_t_2.doctag_locks = __pyx_v_doctag_locks; + __pyx_t_1 = __pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config((&__pyx_v_c), __pyx_v_model, __pyx_v_alpha, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 458, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/doc2vec_inner.pyx":521 - * _work = np.PyArray_DATA(work) - * if neu1 is None: - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) # <<<<<<<<<<<<<< - * _neu1 = np.PyArray_DATA(neu1) + /* "gensim/models/doc2vec_inner.pyx":462 + * doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) * - */ - __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_zeros); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 521, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 521, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 521, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 521, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 521, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_REAL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 521, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_dtype, __pyx_t_8) < 0) __PYX_ERR(0, 521, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 521, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_neu1, __pyx_t_8); - __pyx_t_8 = 0; - - /* "gensim/models/doc2vec_inner.pyx":520 - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) - * if neu1 is None: # <<<<<<<<<<<<<< - * neu1 = 
zeros(model.trainables.layer1_size, dtype=REAL) - * _neu1 = np.PyArray_DATA(neu1) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":522 - * if neu1 is None: - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - * _neu1 = np.PyArray_DATA(neu1) # <<<<<<<<<<<<<< + * c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) # <<<<<<<<<<<<<< * * vlookup = model.wv.vocab */ - if (!(likely(((__pyx_v_neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 522, __pyx_L1_error) - __pyx_v__neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_neu1))); + __pyx_t_3 = PyObject_Length(__pyx_v_doctag_indexes); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 462, __pyx_L1_error) + __pyx_t_4 = 0x2710; + if (((__pyx_t_3 < __pyx_t_4) != 0)) { + __pyx_t_5 = __pyx_t_3; + } else { + __pyx_t_5 = __pyx_t_4; + } + __pyx_v_c.doctag_len = ((int)__pyx_t_5); - /* "gensim/models/doc2vec_inner.pyx":524 - * _neu1 = np.PyArray_DATA(neu1) + /* "gensim/models/doc2vec_inner.pyx":464 + * c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) * * vlookup = model.wv.vocab # <<<<<<<<<<<<<< * i = 0 * for token in doc_words: */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 524, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 524, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_vlookup = __pyx_t_3; - __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 464, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 464, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_vlookup = __pyx_t_6; + __pyx_t_6 = 0; - /* "gensim/models/doc2vec_inner.pyx":525 + /* "gensim/models/doc2vec_inner.pyx":465 * * vlookup = model.wv.vocab * i = 0 # <<<<<<<<<<<<<< @@ -5933,7 +5669,7 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT */ __pyx_v_i = 0; - /* "gensim/models/doc2vec_inner.pyx":526 + /* "gensim/models/doc2vec_inner.pyx":466 * vlookup = model.wv.vocab * i = 0 * for token in doc_words: # <<<<<<<<<<<<<< @@ -5941,222 +5677,222 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT * if predict_word is None: # shrink document to leave out word */ if (likely(PyList_CheckExact(__pyx_v_doc_words)) || PyTuple_CheckExact(__pyx_v_doc_words)) { - __pyx_t_3 = __pyx_v_doc_words; __Pyx_INCREF(__pyx_t_3); __pyx_t_7 = 0; - __pyx_t_11 = NULL; + __pyx_t_6 = __pyx_v_doc_words; __Pyx_INCREF(__pyx_t_6); __pyx_t_5 = 0; + __pyx_t_7 = NULL; } else { - __pyx_t_7 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_doc_words); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 526, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_11 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 526, __pyx_L1_error) + __pyx_t_5 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_doc_words); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 466, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 466, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_11)) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_7 >= 
PyList_GET_SIZE(__pyx_t_3)) break; + if (likely(!__pyx_t_7)) { + if (likely(PyList_CheckExact(__pyx_t_6))) { + if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_6)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_8); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 526, __pyx_L1_error) + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_6, __pyx_t_5); __Pyx_INCREF(__pyx_t_1); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 466, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 526, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = PySequence_ITEM(__pyx_t_6, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 466, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } else { - if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_6)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_8); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 526, __pyx_L1_error) + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_5); __Pyx_INCREF(__pyx_t_1); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 466, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 526, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = PySequence_ITEM(__pyx_t_6, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 466, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } } else { - __pyx_t_8 = __pyx_t_11(__pyx_t_3); - if (unlikely(!__pyx_t_8)) { + __pyx_t_1 = __pyx_t_7(__pyx_t_6); + if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 526, __pyx_L1_error) + else __PYX_ERR(0, 466, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_1); } - __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_8); - __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_1); + __pyx_t_1 = 0; - /* "gensim/models/doc2vec_inner.pyx":527 + /* "gensim/models/doc2vec_inner.pyx":467 * i = 0 * for token in doc_words: * predict_word = vlookup[token] if token in vlookup else None # <<<<<<<<<<<<<< * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged */ - __pyx_t_5 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 527, __pyx_L1_error) - if ((__pyx_t_5 != 0)) { - __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 527, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_8 = __pyx_t_1; - __pyx_t_1 = 0; + __pyx_t_8 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 467, __pyx_L1_error) + if ((__pyx_t_8 != 0)) { + __pyx_t_9 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 467, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_1 = __pyx_t_9; + __pyx_t_9 = 0; } else { __Pyx_INCREF(Py_None); - __pyx_t_8 = Py_None; + __pyx_t_1 = Py_None; } - __Pyx_XDECREF_SET(__pyx_v_predict_word, __pyx_t_8); - __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_predict_word, __pyx_t_1); + __pyx_t_1 = 0; - /* "gensim/models/doc2vec_inner.pyx":528 + /* "gensim/models/doc2vec_inner.pyx":468 * for 
token in doc_words: * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word # <<<<<<<<<<<<<< * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): */ - __pyx_t_5 = (__pyx_v_predict_word == Py_None); - __pyx_t_6 = (__pyx_t_5 != 0); - if (__pyx_t_6) { + __pyx_t_8 = (__pyx_v_predict_word == Py_None); + __pyx_t_10 = (__pyx_t_8 != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":529 + /* "gensim/models/doc2vec_inner.pyx":469 * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged # <<<<<<<<<<<<<< - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue */ - goto __pyx_L14_continue; + goto __pyx_L3_continue; - /* "gensim/models/doc2vec_inner.pyx":528 + /* "gensim/models/doc2vec_inner.pyx":468 * for token in doc_words: * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word # <<<<<<<<<<<<<< * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): */ } - /* "gensim/models/doc2vec_inner.pyx":530 + /* "gensim/models/doc2vec_inner.pyx":470 * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[i] = predict_word.index + * c.indexes[i] = predict_word.index */ - __pyx_t_5 = (__pyx_v_sample != 0); - if (__pyx_t_5) { + __pyx_t_8 = (__pyx_v_c.sample != 0); + if (__pyx_t_8) { } else { - __pyx_t_6 = __pyx_t_5; - goto __pyx_L18_bool_binop_done; + __pyx_t_10 = __pyx_t_8; + goto __pyx_L7_bool_binop_done; } - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 530, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_1 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_next_random))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 530, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 470, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_10 = PyObject_RichCompare(__pyx_t_8, __pyx_t_1, Py_LT); __Pyx_XGOTREF(__pyx_t_10); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 530, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_c.next_random))); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 470, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_11 = PyObject_RichCompare(__pyx_t_1, __pyx_t_9, Py_LT); __Pyx_XGOTREF(__pyx_t_11); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 470, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_10); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 530, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_t_6 = __pyx_t_5; - __pyx_L18_bool_binop_done:; - if (__pyx_t_6) { + 
__Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_11); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 470, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_10 = __pyx_t_8; + __pyx_L7_bool_binop_done:; + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":531 + /* "gensim/models/doc2vec_inner.pyx":471 * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue # <<<<<<<<<<<<<< - * indexes[i] = predict_word.index - * if hs: + * c.indexes[i] = predict_word.index + * if c.hs: */ - goto __pyx_L14_continue; + goto __pyx_L3_continue; - /* "gensim/models/doc2vec_inner.pyx":530 + /* "gensim/models/doc2vec_inner.pyx":470 * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[i] = predict_word.index + * c.indexes[i] = predict_word.index */ } - /* "gensim/models/doc2vec_inner.pyx":532 - * if sample and predict_word.sample_int < random_int32(&next_random): + /* "gensim/models/doc2vec_inner.pyx":472 + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue - * indexes[i] = predict_word.index # <<<<<<<<<<<<<< - * if hs: - * codelens[i] = len(predict_word.code) + * c.indexes[i] = predict_word.index # <<<<<<<<<<<<<< + * if c.hs: + * c.codelens[i] = len(predict_word.code) */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_index); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 532, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_10); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 532, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - (__pyx_v_indexes[__pyx_v_i]) = __pyx_t_12; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_index); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 472, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_11); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 472, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.indexes[__pyx_v_i]) = __pyx_t_12; - /* "gensim/models/doc2vec_inner.pyx":533 + /* "gensim/models/doc2vec_inner.pyx":473 * continue - * indexes[i] = predict_word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) - */ - __pyx_t_6 = (__pyx_v_hs != 0); - if (__pyx_t_6) { - - /* "gensim/models/doc2vec_inner.pyx":534 - * indexes[i] = predict_word.index - * if hs: - * codelens[i] = len(predict_word.code) # <<<<<<<<<<<<<< - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) - */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 534, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_13 = PyObject_Length(__pyx_t_10); if (unlikely(__pyx_t_13 == ((Py_ssize_t)-1))) __PYX_ERR(0, 534, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - (__pyx_v_codelens[__pyx_v_i]) = ((int)__pyx_t_13); - - /* "gensim/models/doc2vec_inner.pyx":535 - * if hs: - * codelens[i] = len(predict_word.code) - * codes[i] = 
np.PyArray_DATA(predict_word.code) # <<<<<<<<<<<<<< - * points[i] = np.PyArray_DATA(predict_word.point) + * c.indexes[i] = predict_word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) + */ + __pyx_t_10 = (__pyx_v_c.hs != 0); + if (__pyx_t_10) { + + /* "gensim/models/doc2vec_inner.pyx":474 + * c.indexes[i] = predict_word.index + * if c.hs: + * c.codelens[i] = len(predict_word.code) # <<<<<<<<<<<<<< + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) + */ + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 474, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_3 = PyObject_Length(__pyx_t_11); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 474, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.codelens[__pyx_v_i]) = ((int)__pyx_t_3); + + /* "gensim/models/doc2vec_inner.pyx":475 + * if c.hs: + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) # <<<<<<<<<<<<<< + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 535, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - if (!(likely(((__pyx_t_10) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_10, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 535, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_10))); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 475, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 475, __pyx_L1_error) + (__pyx_v_c.codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/doc2vec_inner.pyx":536 - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":476 + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) # <<<<<<<<<<<<<< * result += 1 * i += 1 */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_point); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 536, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - if (!(likely(((__pyx_t_10) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_10, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 536, __pyx_L1_error) - (__pyx_v_points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_10))); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_point); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 476, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 476, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* 
"gensim/models/doc2vec_inner.pyx":533 + /* "gensim/models/doc2vec_inner.pyx":473 * continue - * indexes[i] = predict_word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) + * c.indexes[i] = predict_word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) */ } - /* "gensim/models/doc2vec_inner.pyx":537 - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) + /* "gensim/models/doc2vec_inner.pyx":477 + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 # <<<<<<<<<<<<<< * i += 1 * if i == MAX_DOCUMENT_LEN: */ __pyx_v_result = (__pyx_v_result + 1); - /* "gensim/models/doc2vec_inner.pyx":538 - * points[i] = np.PyArray_DATA(predict_word.point) + /* "gensim/models/doc2vec_inner.pyx":478 + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 * i += 1 # <<<<<<<<<<<<<< * if i == MAX_DOCUMENT_LEN: @@ -6164,106 +5900,106 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT */ __pyx_v_i = (__pyx_v_i + 1); - /* "gensim/models/doc2vec_inner.pyx":539 + /* "gensim/models/doc2vec_inner.pyx":479 * result += 1 * i += 1 * if i == MAX_DOCUMENT_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? - * document_len = i + * c.document_len = i */ - __pyx_t_6 = ((__pyx_v_i == 0x2710) != 0); - if (__pyx_t_6) { + __pyx_t_10 = ((__pyx_v_i == 0x2710) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":540 + /* "gensim/models/doc2vec_inner.pyx":480 * i += 1 * if i == MAX_DOCUMENT_LEN: * break # TODO: log warning, tally overflow? # <<<<<<<<<<<<<< - * document_len = i + * c.document_len = i * */ - goto __pyx_L15_break; + goto __pyx_L4_break; - /* "gensim/models/doc2vec_inner.pyx":539 + /* "gensim/models/doc2vec_inner.pyx":479 * result += 1 * i += 1 * if i == MAX_DOCUMENT_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? - * document_len = i + * c.document_len = i */ } - /* "gensim/models/doc2vec_inner.pyx":526 + /* "gensim/models/doc2vec_inner.pyx":466 * vlookup = model.wv.vocab * i = 0 * for token in doc_words: # <<<<<<<<<<<<<< * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word */ - __pyx_L14_continue:; + __pyx_L3_continue:; } - __pyx_L15_break:; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_L4_break:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - /* "gensim/models/doc2vec_inner.pyx":541 + /* "gensim/models/doc2vec_inner.pyx":481 * if i == MAX_DOCUMENT_LEN: * break # TODO: log warning, tally overflow? 
- * document_len = i # <<<<<<<<<<<<<< + * c.document_len = i # <<<<<<<<<<<<<< * * # single randint() call avoids a big thread-sync slowdown */ - __pyx_v_document_len = __pyx_v_i; + __pyx_v_c.document_len = __pyx_v_i; - /* "gensim/models/doc2vec_inner.pyx":544 + /* "gensim/models/doc2vec_inner.pyx":484 * * # single randint() call avoids a big thread-sync slowdown - * for i, item in enumerate(model.random.randint(0, window, document_len)): # <<<<<<<<<<<<<< - * reduced_windows[i] = item - * - */ - __pyx_t_2 = 0; - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 544, __pyx_L1_error) + * for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): # <<<<<<<<<<<<<< + * c.reduced_windows[i] = item + * + */ + __pyx_t_13 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_11, __pyx_n_s_randint); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_PyInt_From_int(__pyx_v_c.window); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_c.document_len); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 484, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_t_10 = __Pyx_PyInt_From_int(__pyx_v_window); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_8 = __Pyx_PyInt_From_int(__pyx_v_document_len); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); __pyx_t_14 = NULL; __pyx_t_15 = 0; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { - __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_1); + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_9); if (likely(__pyx_t_14)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); __Pyx_INCREF(__pyx_t_14); __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_1, function); + __Pyx_DECREF_SET(__pyx_t_9, function); __pyx_t_15 = 1; } } #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_1)) { - PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_int_0, __pyx_t_10, __pyx_t_8}; - __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 544, __pyx_L1_error) + if (PyFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_int_0, __pyx_t_11, __pyx_t_1}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 484, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; } else #endif #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { - PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_int_0, __pyx_t_10, __pyx_t_8}; - __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_1, 
__pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 544, __pyx_L1_error) + if (__Pyx_PyFastCFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[4] = {__pyx_t_14, __pyx_int_0, __pyx_t_11, __pyx_t_1}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 484, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; } else #endif { - __pyx_t_16 = PyTuple_New(3+__pyx_t_15); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 544, __pyx_L1_error) + __pyx_t_16 = PyTuple_New(3+__pyx_t_15); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 484, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_16); if (__pyx_t_14) { __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_14); __pyx_t_14 = NULL; @@ -6271,126 +6007,110 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT __Pyx_INCREF(__pyx_int_0); __Pyx_GIVEREF(__pyx_int_0); PyTuple_SET_ITEM(__pyx_t_16, 0+__pyx_t_15, __pyx_int_0); - __Pyx_GIVEREF(__pyx_t_10); - PyTuple_SET_ITEM(__pyx_t_16, 1+__pyx_t_15, __pyx_t_10); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_16, 2+__pyx_t_15, __pyx_t_8); - __pyx_t_10 = 0; - __pyx_t_8 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_16, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_11); + PyTuple_SET_ITEM(__pyx_t_16, 1+__pyx_t_15, __pyx_t_11); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_16, 2+__pyx_t_15, __pyx_t_1); + __pyx_t_11 = 0; + __pyx_t_1 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_16, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (likely(PyList_CheckExact(__pyx_t_3)) || PyTuple_CheckExact(__pyx_t_3)) { - __pyx_t_1 = __pyx_t_3; __Pyx_INCREF(__pyx_t_1); __pyx_t_7 = 0; - __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (likely(PyList_CheckExact(__pyx_t_6)) || PyTuple_CheckExact(__pyx_t_6)) { + __pyx_t_9 = __pyx_t_6; __Pyx_INCREF(__pyx_t_9); __pyx_t_5 = 0; + __pyx_t_7 = NULL; } else { - __pyx_t_7 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_11 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 544, __pyx_L1_error) + __pyx_t_5 = -1; __pyx_t_9 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_7 = Py_TYPE(__pyx_t_9)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 484, __pyx_L1_error) } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; for (;;) { - if (likely(!__pyx_t_11)) { - if (likely(PyList_CheckExact(__pyx_t_1))) { - if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_1)) break; + if (likely(!__pyx_t_7)) { + if (likely(PyList_CheckExact(__pyx_t_9))) { + if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_9)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 544, __pyx_L1_error) + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_9, __pyx_t_5); __Pyx_INCREF(__pyx_t_6); 
__pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 484, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = PySequence_ITEM(__pyx_t_9, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); #endif } else { - if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_9)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_3); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 544, __pyx_L1_error) + __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_9, __pyx_t_5); __Pyx_INCREF(__pyx_t_6); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 484, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = PySequence_ITEM(__pyx_t_9, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 484, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); #endif } } else { - __pyx_t_3 = __pyx_t_11(__pyx_t_1); - if (unlikely(!__pyx_t_3)) { + __pyx_t_6 = __pyx_t_7(__pyx_t_9); + if (unlikely(!__pyx_t_6)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 544, __pyx_L1_error) + else __PYX_ERR(0, 484, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_6); } - __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_v_i = __pyx_t_2; - __pyx_t_2 = (__pyx_t_2 + 1); + __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_v_i = __pyx_t_13; + __pyx_t_13 = (__pyx_t_13 + 1); - /* "gensim/models/doc2vec_inner.pyx":545 + /* "gensim/models/doc2vec_inner.pyx":485 * # single randint() call avoids a big thread-sync slowdown - * for i, item in enumerate(model.random.randint(0, window, document_len)): - * reduced_windows[i] = item # <<<<<<<<<<<<<< + * for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): + * c.reduced_windows[i] = item # <<<<<<<<<<<<<< * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) + * for i in range(c.doctag_len): */ - __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 545, __pyx_L1_error) - (__pyx_v_reduced_windows[__pyx_v_i]) = __pyx_t_12; + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 485, __pyx_L1_error) + (__pyx_v_c.reduced_windows[__pyx_v_i]) = __pyx_t_12; - /* "gensim/models/doc2vec_inner.pyx":544 + /* "gensim/models/doc2vec_inner.pyx":484 * * # single randint() call avoids a big thread-sync slowdown - * for i, item in enumerate(model.random.randint(0, window, document_len)): # <<<<<<<<<<<<<< - * reduced_windows[i] = item - * - */ - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "gensim/models/doc2vec_inner.pyx":547 - * reduced_windows[i] = item + * for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): # <<<<<<<<<<<<<< + * c.reduced_windows[i] = item * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) # <<<<<<<<<<<<<< - * for i in range(doctag_len): - * _doctag_indexes[i] = doctag_indexes[i] */ - __pyx_t_7 = PyObject_Length(__pyx_v_doctag_indexes); if (unlikely(__pyx_t_7 == 
((Py_ssize_t)-1))) __PYX_ERR(0, 547, __pyx_L1_error) - __pyx_t_17 = 0x2710; - if (((__pyx_t_7 < __pyx_t_17) != 0)) { - __pyx_t_13 = __pyx_t_7; - } else { - __pyx_t_13 = __pyx_t_17; } - __pyx_v_doctag_len = ((int)__pyx_t_13); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - /* "gensim/models/doc2vec_inner.pyx":548 + /* "gensim/models/doc2vec_inner.pyx":487 + * c.reduced_windows[i] = item * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - * for i in range(doctag_len): # <<<<<<<<<<<<<< - * _doctag_indexes[i] = doctag_indexes[i] + * for i in range(c.doctag_len): # <<<<<<<<<<<<<< + * c.doctag_indexes[i] = doctag_indexes[i] * result += 1 */ - __pyx_t_2 = __pyx_v_doctag_len; - __pyx_t_15 = __pyx_t_2; - for (__pyx_t_18 = 0; __pyx_t_18 < __pyx_t_15; __pyx_t_18+=1) { - __pyx_v_i = __pyx_t_18; + __pyx_t_13 = __pyx_v_c.doctag_len; + __pyx_t_15 = __pyx_t_13; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_15; __pyx_t_17+=1) { + __pyx_v_i = __pyx_t_17; - /* "gensim/models/doc2vec_inner.pyx":549 - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - * for i in range(doctag_len): - * _doctag_indexes[i] = doctag_indexes[i] # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":488 + * + * for i in range(c.doctag_len): + * c.doctag_indexes[i] = doctag_indexes[i] # <<<<<<<<<<<<<< * result += 1 * */ - __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_doctag_indexes, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 549, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_1); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 549, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - (__pyx_v__doctag_indexes[__pyx_v_i]) = __pyx_t_12; + __pyx_t_9 = __Pyx_GetItemInt(__pyx_v_doctag_indexes, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 0, 0, 0); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 488, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_9); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 488, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + (__pyx_v_c.doctag_indexes[__pyx_v_i]) = __pyx_t_12; - /* "gensim/models/doc2vec_inner.pyx":550 - * for i in range(doctag_len): - * _doctag_indexes[i] = doctag_indexes[i] + /* "gensim/models/doc2vec_inner.pyx":489 + * for i in range(c.doctag_len): + * c.doctag_indexes[i] = doctag_indexes[i] * result += 1 # <<<<<<<<<<<<<< * * # release GIL & train on the document @@ -6398,12 +6118,12 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT __pyx_v_result = (__pyx_v_result + 1); } - /* "gensim/models/doc2vec_inner.pyx":553 + /* "gensim/models/doc2vec_inner.pyx":492 * * # release GIL & train on the document * with nogil: # <<<<<<<<<<<<<< - * for i in range(document_len): - * j = i - window + reduced_windows[i] + * for i in range(c.document_len): + * j = i - c.window + c.reduced_windows[i] */ { #ifdef WITH_THREAD @@ -6413,142 +6133,143 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT #endif /*try:*/ { - /* "gensim/models/doc2vec_inner.pyx":554 + /* "gensim/models/doc2vec_inner.pyx":493 * # release GIL & train on the document * with nogil: - * for i in range(document_len): # <<<<<<<<<<<<<< - * j = i - window + reduced_windows[i] + * for i in range(c.document_len): # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] * if j < 0: */ - __pyx_t_2 = __pyx_v_document_len; - __pyx_t_15 = __pyx_t_2; - for (__pyx_t_18 = 0; 
__pyx_t_18 < __pyx_t_15; __pyx_t_18+=1) { - __pyx_v_i = __pyx_t_18; + __pyx_t_13 = __pyx_v_c.document_len; + __pyx_t_15 = __pyx_t_13; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_15; __pyx_t_17+=1) { + __pyx_v_i = __pyx_t_17; - /* "gensim/models/doc2vec_inner.pyx":555 + /* "gensim/models/doc2vec_inner.pyx":494 * with nogil: - * for i in range(document_len): - * j = i - window + reduced_windows[i] # <<<<<<<<<<<<<< + * for i in range(c.document_len): + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< * if j < 0: * j = 0 */ - __pyx_v_j = ((__pyx_v_i - __pyx_v_window) + (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/doc2vec_inner.pyx":556 - * for i in range(document_len): - * j = i - window + reduced_windows[i] + /* "gensim/models/doc2vec_inner.pyx":495 + * for i in range(c.document_len): + * j = i - c.window + c.reduced_windows[i] * if j < 0: # <<<<<<<<<<<<<< * j = 0 - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ - __pyx_t_6 = ((__pyx_v_j < 0) != 0); - if (__pyx_t_6) { + __pyx_t_10 = ((__pyx_v_j < 0) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":557 - * j = i - window + reduced_windows[i] + /* "gensim/models/doc2vec_inner.pyx":496 + * j = i - c.window + c.reduced_windows[i] * if j < 0: * j = 0 # <<<<<<<<<<<<<< - * k = i + window + 1 - reduced_windows[i] - * if k > document_len: + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > c.document_len: */ __pyx_v_j = 0; - /* "gensim/models/doc2vec_inner.pyx":556 - * for i in range(document_len): - * j = i - window + reduced_windows[i] + /* "gensim/models/doc2vec_inner.pyx":495 + * for i in range(c.document_len): + * j = i - c.window + c.reduced_windows[i] * if j < 0: # <<<<<<<<<<<<<< * j = 0 - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ } - /* "gensim/models/doc2vec_inner.pyx":558 + /* "gensim/models/doc2vec_inner.pyx":497 * if j < 0: * j = 0 - * k = i + window + 1 - reduced_windows[i] # <<<<<<<<<<<<<< - * if k > document_len: - * k = document_len + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< + * if k > c.document_len: + * k = c.document_len */ - __pyx_v_k = (((__pyx_v_i + __pyx_v_window) + 1) - (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/doc2vec_inner.pyx":559 + /* "gensim/models/doc2vec_inner.pyx":498 * j = 0 - * k = i + window + 1 - reduced_windows[i] - * if k > document_len: # <<<<<<<<<<<<<< - * k = document_len + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > c.document_len: # <<<<<<<<<<<<<< + * k = c.document_len * */ - __pyx_t_6 = ((__pyx_v_k > __pyx_v_document_len) != 0); - if (__pyx_t_6) { + __pyx_t_10 = ((__pyx_v_k > __pyx_v_c.document_len) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":560 - * k = i + window + 1 - reduced_windows[i] - * if k > document_len: - * k = document_len # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":499 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > c.document_len: + * k = c.document_len # <<<<<<<<<<<<<< * * # compose l1 (in _neu1) & clear _work */ - __pyx_v_k = __pyx_v_document_len; + __pyx_t_18 = __pyx_v_c.document_len; + __pyx_v_k = __pyx_t_18; - /* "gensim/models/doc2vec_inner.pyx":559 + /* "gensim/models/doc2vec_inner.pyx":498 * j = 0 - * k = i + window + 1 - reduced_windows[i] - * if k > document_len: # 
<<<<<<<<<<<<<< - * k = document_len + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > c.document_len: # <<<<<<<<<<<<<< + * k = c.document_len * */ } - /* "gensim/models/doc2vec_inner.pyx":563 + /* "gensim/models/doc2vec_inner.pyx":502 * * # compose l1 (in _neu1) & clear _work - * memset(_neu1, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< + * memset(c.neu1, 0, c.layer1_size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< * count = 0.0 * for m in range(j, k): */ - (void)(memset(__pyx_v__neu1, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + (void)(memset(__pyx_v_c.neu1, 0, (__pyx_v_c.layer1_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/doc2vec_inner.pyx":564 + /* "gensim/models/doc2vec_inner.pyx":503 * # compose l1 (in _neu1) & clear _work - * memset(_neu1, 0, size * cython.sizeof(REAL_t)) + * memset(c.neu1, 0, c.layer1_size * cython.sizeof(REAL_t)) * count = 0.0 # <<<<<<<<<<<<<< * for m in range(j, k): * if m == i: */ __pyx_v_count = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.0); - /* "gensim/models/doc2vec_inner.pyx":565 - * memset(_neu1, 0, size * cython.sizeof(REAL_t)) + /* "gensim/models/doc2vec_inner.pyx":504 + * memset(c.neu1, 0, c.layer1_size * cython.sizeof(REAL_t)) * count = 0.0 * for m in range(j, k): # <<<<<<<<<<<<<< * if m == i: * continue */ - __pyx_t_19 = __pyx_v_k; - __pyx_t_20 = __pyx_t_19; - for (__pyx_t_21 = __pyx_v_j; __pyx_t_21 < __pyx_t_20; __pyx_t_21+=1) { - __pyx_v_m = __pyx_t_21; + __pyx_t_18 = __pyx_v_k; + __pyx_t_19 = __pyx_t_18; + for (__pyx_t_20 = __pyx_v_j; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { + __pyx_v_m = __pyx_t_20; - /* "gensim/models/doc2vec_inner.pyx":566 + /* "gensim/models/doc2vec_inner.pyx":505 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< * continue * else: */ - __pyx_t_6 = ((__pyx_v_m == __pyx_v_i) != 0); - if (__pyx_t_6) { + __pyx_t_10 = ((__pyx_v_m == __pyx_v_i) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":567 + /* "gensim/models/doc2vec_inner.pyx":506 * for m in range(j, k): * if m == i: * continue # <<<<<<<<<<<<<< * else: * count += ONEF */ - goto __pyx_L33_continue; + goto __pyx_L22_continue; - /* "gensim/models/doc2vec_inner.pyx":566 + /* "gensim/models/doc2vec_inner.pyx":505 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -6557,292 +6278,292 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT */ } - /* "gensim/models/doc2vec_inner.pyx":569 + /* "gensim/models/doc2vec_inner.pyx":508 * continue * else: * count += ONEF # <<<<<<<<<<<<<< - * our_saxpy(&size, &ONEF, &_word_vectors[indexes[m] * size], &ONE, _neu1, &ONE) - * for m in range(doctag_len): + * our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) + * for m in range(c.doctag_len): */ /*else*/ { __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_13doc2vec_inner_ONEF); - /* "gensim/models/doc2vec_inner.pyx":570 + /* "gensim/models/doc2vec_inner.pyx":509 * else: * count += ONEF - * our_saxpy(&size, &ONEF, &_word_vectors[indexes[m] * size], &ONE, _neu1, &ONE) # <<<<<<<<<<<<<< - * for m in range(doctag_len): + * our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) # <<<<<<<<<<<<<< + * for m in range(c.doctag_len): * count += ONEF */ - __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONEF), (&(__pyx_v__word_vectors[((__pyx_v_indexes[__pyx_v_m]) * 
__pyx_v_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), __pyx_v__neu1, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.layer1_size), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONEF), (&(__pyx_v_c.word_vectors[((__pyx_v_c.indexes[__pyx_v_m]) * __pyx_v_c.layer1_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), __pyx_v_c.neu1, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); } - __pyx_L33_continue:; + __pyx_L22_continue:; } - /* "gensim/models/doc2vec_inner.pyx":571 + /* "gensim/models/doc2vec_inner.pyx":510 * count += ONEF - * our_saxpy(&size, &ONEF, &_word_vectors[indexes[m] * size], &ONE, _neu1, &ONE) - * for m in range(doctag_len): # <<<<<<<<<<<<<< + * our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) + * for m in range(c.doctag_len): # <<<<<<<<<<<<<< * count += ONEF - * our_saxpy(&size, &ONEF, &_doctag_vectors[_doctag_indexes[m] * size], &ONE, _neu1, &ONE) + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) */ - __pyx_t_19 = __pyx_v_doctag_len; - __pyx_t_20 = __pyx_t_19; - for (__pyx_t_21 = 0; __pyx_t_21 < __pyx_t_20; __pyx_t_21+=1) { - __pyx_v_m = __pyx_t_21; + __pyx_t_18 = __pyx_v_c.doctag_len; + __pyx_t_19 = __pyx_t_18; + for (__pyx_t_20 = 0; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { + __pyx_v_m = __pyx_t_20; - /* "gensim/models/doc2vec_inner.pyx":572 - * our_saxpy(&size, &ONEF, &_word_vectors[indexes[m] * size], &ONE, _neu1, &ONE) - * for m in range(doctag_len): + /* "gensim/models/doc2vec_inner.pyx":511 + * our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) + * for m in range(c.doctag_len): * count += ONEF # <<<<<<<<<<<<<< - * our_saxpy(&size, &ONEF, &_doctag_vectors[_doctag_indexes[m] * size], &ONE, _neu1, &ONE) + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) * if count > (0.5): */ __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_13doc2vec_inner_ONEF); - /* "gensim/models/doc2vec_inner.pyx":573 - * for m in range(doctag_len): + /* "gensim/models/doc2vec_inner.pyx":512 + * for m in range(c.doctag_len): * count += ONEF - * our_saxpy(&size, &ONEF, &_doctag_vectors[_doctag_indexes[m] * size], &ONE, _neu1, &ONE) # <<<<<<<<<<<<<< + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) # <<<<<<<<<<<<<< * if count > (0.5): * inv_count = ONEF/count */ - __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONEF), (&(__pyx_v__doctag_vectors[((__pyx_v__doctag_indexes[__pyx_v_m]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), __pyx_v__neu1, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.layer1_size), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONEF), (&(__pyx_v_c.doctag_vectors[((__pyx_v_c.doctag_indexes[__pyx_v_m]) * __pyx_v_c.layer1_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), __pyx_v_c.neu1, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); } - /* "gensim/models/doc2vec_inner.pyx":574 + /* "gensim/models/doc2vec_inner.pyx":513 * count += ONEF - * our_saxpy(&size, &ONEF, &_doctag_vectors[_doctag_indexes[m] * size], &ONE, _neu1, &ONE) + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) * if count > (0.5): # 
<<<<<<<<<<<<<< * inv_count = ONEF/count - * if cbow_mean: + * if c.cbow_mean: */ - __pyx_t_6 = ((__pyx_v_count > ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.5)) != 0); - if (__pyx_t_6) { + __pyx_t_10 = ((__pyx_v_count > ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.5)) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":575 - * our_saxpy(&size, &ONEF, &_doctag_vectors[_doctag_indexes[m] * size], &ONE, _neu1, &ONE) + /* "gensim/models/doc2vec_inner.pyx":514 + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) * if count > (0.5): * inv_count = ONEF/count # <<<<<<<<<<<<<< - * if cbow_mean: - * sscal(&size, &inv_count, _neu1, &ONE) # (does this need BLAS-variants like saxpy?) + * if c.cbow_mean: + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) */ __pyx_v_inv_count = (__pyx_v_6gensim_6models_13doc2vec_inner_ONEF / __pyx_v_count); - /* "gensim/models/doc2vec_inner.pyx":574 + /* "gensim/models/doc2vec_inner.pyx":513 * count += ONEF - * our_saxpy(&size, &ONEF, &_doctag_vectors[_doctag_indexes[m] * size], &ONE, _neu1, &ONE) + * our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< * inv_count = ONEF/count - * if cbow_mean: + * if c.cbow_mean: */ } - /* "gensim/models/doc2vec_inner.pyx":576 + /* "gensim/models/doc2vec_inner.pyx":515 * if count > (0.5): * inv_count = ONEF/count - * if cbow_mean: # <<<<<<<<<<<<<< - * sscal(&size, &inv_count, _neu1, &ONE) # (does this need BLAS-variants like saxpy?) - * memset(_work, 0, size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.cbow_mean: # <<<<<<<<<<<<<< + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error */ - __pyx_t_6 = (__pyx_v_cbow_mean != 0); - if (__pyx_t_6) { + __pyx_t_10 = (__pyx_v_c.cbow_mean != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":577 + /* "gensim/models/doc2vec_inner.pyx":516 * inv_count = ONEF/count - * if cbow_mean: - * sscal(&size, &inv_count, _neu1, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< - * memset(_work, 0, size * cython.sizeof(REAL_t)) # work to accumulate l1 error - * if hs: + * if c.cbow_mean: + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.hs: */ - __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v__neu1, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); + __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_c.layer1_size), (&__pyx_v_inv_count), __pyx_v_c.neu1, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); - /* "gensim/models/doc2vec_inner.pyx":576 + /* "gensim/models/doc2vec_inner.pyx":515 * if count > (0.5): * inv_count = ONEF/count - * if cbow_mean: # <<<<<<<<<<<<<< - * sscal(&size, &inv_count, _neu1, &ONE) # (does this need BLAS-variants like saxpy?) - * memset(_work, 0, size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.cbow_mean: # <<<<<<<<<<<<<< + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) 
+ * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error */ } - /* "gensim/models/doc2vec_inner.pyx":578 - * if cbow_mean: - * sscal(&size, &inv_count, _neu1, &ONE) # (does this need BLAS-variants like saxpy?) - * memset(_work, 0, size * cython.sizeof(REAL_t)) # work to accumulate l1 error # <<<<<<<<<<<<<< - * if hs: - * fast_document_dm_hs(points[i], codes[i], codelens[i], - */ - (void)(memset(__pyx_v__work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - - /* "gensim/models/doc2vec_inner.pyx":579 - * sscal(&size, &inv_count, _neu1, &ONE) # (does this need BLAS-variants like saxpy?) - * memset(_work, 0, size * cython.sizeof(REAL_t)) # work to accumulate l1 error - * if hs: # <<<<<<<<<<<<<< - * fast_document_dm_hs(points[i], codes[i], codelens[i], - * _neu1, syn1, _alpha, _work, - */ - __pyx_t_6 = (__pyx_v_hs != 0); - if (__pyx_t_6) { - - /* "gensim/models/doc2vec_inner.pyx":580 - * memset(_work, 0, size * cython.sizeof(REAL_t)) # work to accumulate l1 error - * if hs: - * fast_document_dm_hs(points[i], codes[i], codelens[i], # <<<<<<<<<<<<<< - * _neu1, syn1, _alpha, _work, - * size, _learn_hidden) - */ - __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), (__pyx_v_codelens[__pyx_v_i]), __pyx_v__neu1, __pyx_v_syn1, __pyx_v__alpha, __pyx_v__work, __pyx_v_size, __pyx_v__learn_hidden); - - /* "gensim/models/doc2vec_inner.pyx":579 - * sscal(&size, &inv_count, _neu1, &ONE) # (does this need BLAS-variants like saxpy?) - * memset(_work, 0, size * cython.sizeof(REAL_t)) # work to accumulate l1 error - * if hs: # <<<<<<<<<<<<<< - * fast_document_dm_hs(points[i], codes[i], codelens[i], - * _neu1, syn1, _alpha, _work, + /* "gensim/models/doc2vec_inner.pyx":517 + * if c.cbow_mean: + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error # <<<<<<<<<<<<<< + * if c.hs: + * fast_document_dm_hs(c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, c.alpha, c.work, + */ + (void)(memset(__pyx_v_c.work, 0, (__pyx_v_c.layer1_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + + /* "gensim/models/doc2vec_inner.pyx":518 + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dm_hs(c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, c.alpha, c.work, + * c.layer1_size, c.learn_hidden) + */ + __pyx_t_10 = (__pyx_v_c.hs != 0); + if (__pyx_t_10) { + + /* "gensim/models/doc2vec_inner.pyx":519 + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.hs: + * fast_document_dm_hs(c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, c.alpha, c.work, # <<<<<<<<<<<<<< + * c.layer1_size, c.learn_hidden) + * if c.negative: + */ + __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.neu1, __pyx_v_c.syn1, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.layer1_size, __pyx_v_c.learn_hidden); + + /* "gensim/models/doc2vec_inner.pyx":518 + * sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) 
+ * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dm_hs(c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, c.alpha, c.work, + * c.layer1_size, c.learn_hidden) */ } - /* "gensim/models/doc2vec_inner.pyx":583 - * _neu1, syn1, _alpha, _work, - * size, _learn_hidden) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_document_dm_neg(negative, cum_table, cum_table_len, next_random, - * _neu1, syn1neg, indexes[i], _alpha, _work, + /* "gensim/models/doc2vec_inner.pyx":521 + * fast_document_dm_hs(c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, c.alpha, c.work, + * c.layer1_size, c.learn_hidden) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dm_neg(c.negative, c.cum_table, c.cum_table_len, c.next_random, + * c.neu1, c.syn1neg, c.indexes[i], c.alpha, c.work, c.layer1_size, */ - __pyx_t_6 = (__pyx_v_negative != 0); - if (__pyx_t_6) { + __pyx_t_10 = (__pyx_v_c.negative != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":584 - * size, _learn_hidden) - * if negative: - * next_random = fast_document_dm_neg(negative, cum_table, cum_table_len, next_random, # <<<<<<<<<<<<<< - * _neu1, syn1neg, indexes[i], _alpha, _work, - * size, _learn_hidden) + /* "gensim/models/doc2vec_inner.pyx":522 + * c.layer1_size, c.learn_hidden) + * if c.negative: + * c.next_random = fast_document_dm_neg(c.negative, c.cum_table, c.cum_table_len, c.next_random, # <<<<<<<<<<<<<< + * c.neu1, c.syn1neg, c.indexes[i], c.alpha, c.work, c.layer1_size, + * c.learn_hidden) */ - __pyx_v_next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_neg(__pyx_v_negative, __pyx_v_cum_table, __pyx_v_cum_table_len, __pyx_v_next_random, __pyx_v__neu1, __pyx_v_syn1neg, (__pyx_v_indexes[__pyx_v_i]), __pyx_v__alpha, __pyx_v__work, __pyx_v_size, __pyx_v__learn_hidden); + __pyx_v_c.next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.next_random, __pyx_v_c.neu1, __pyx_v_c.syn1neg, (__pyx_v_c.indexes[__pyx_v_i]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.layer1_size, __pyx_v_c.learn_hidden); - /* "gensim/models/doc2vec_inner.pyx":583 - * _neu1, syn1, _alpha, _work, - * size, _learn_hidden) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_document_dm_neg(negative, cum_table, cum_table_len, next_random, - * _neu1, syn1neg, indexes[i], _alpha, _work, + /* "gensim/models/doc2vec_inner.pyx":521 + * fast_document_dm_hs(c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, c.alpha, c.work, + * c.layer1_size, c.learn_hidden) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dm_neg(c.negative, c.cum_table, c.cum_table_len, c.next_random, + * c.neu1, c.syn1neg, c.indexes[i], c.alpha, c.work, c.layer1_size, */ } - /* "gensim/models/doc2vec_inner.pyx":588 - * size, _learn_hidden) + /* "gensim/models/doc2vec_inner.pyx":526 + * c.learn_hidden) * - * if not cbow_mean: # <<<<<<<<<<<<<< - * sscal(&size, &inv_count, _work, &ONE) # (does this need BLAS-variants like saxpy?) + * if not c.cbow_mean: # <<<<<<<<<<<<<< + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) 
* # apply accumulated error in work */ - __pyx_t_6 = ((!(__pyx_v_cbow_mean != 0)) != 0); - if (__pyx_t_6) { + __pyx_t_10 = ((!(__pyx_v_c.cbow_mean != 0)) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":589 + /* "gensim/models/doc2vec_inner.pyx":527 * - * if not cbow_mean: - * sscal(&size, &inv_count, _work, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< + * if not c.cbow_mean: + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< * # apply accumulated error in work - * if _learn_doctags: + * if c.learn_doctags: */ - __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v__work, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); + __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_c.layer1_size), (&__pyx_v_inv_count), __pyx_v_c.work, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); - /* "gensim/models/doc2vec_inner.pyx":588 - * size, _learn_hidden) + /* "gensim/models/doc2vec_inner.pyx":526 + * c.learn_hidden) * - * if not cbow_mean: # <<<<<<<<<<<<<< - * sscal(&size, &inv_count, _work, &ONE) # (does this need BLAS-variants like saxpy?) + * if not c.cbow_mean: # <<<<<<<<<<<<<< + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) * # apply accumulated error in work */ } - /* "gensim/models/doc2vec_inner.pyx":591 - * sscal(&size, &inv_count, _work, &ONE) # (does this need BLAS-variants like saxpy?) + /* "gensim/models/doc2vec_inner.pyx":529 + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) * # apply accumulated error in work - * if _learn_doctags: # <<<<<<<<<<<<<< - * for m in range(doctag_len): - * our_saxpy(&size, &_doctag_locks[_doctag_indexes[m]], _work, + * if c.learn_doctags: # <<<<<<<<<<<<<< + * for m in range(c.doctag_len): + * our_saxpy(&c.layer1_size, &c.doctag_locks[c.doctag_indexes[m]], c.work, */ - __pyx_t_6 = (__pyx_v__learn_doctags != 0); - if (__pyx_t_6) { + __pyx_t_10 = (__pyx_v_c.learn_doctags != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":592 + /* "gensim/models/doc2vec_inner.pyx":530 * # apply accumulated error in work - * if _learn_doctags: - * for m in range(doctag_len): # <<<<<<<<<<<<<< - * our_saxpy(&size, &_doctag_locks[_doctag_indexes[m]], _work, - * &ONE, &_doctag_vectors[_doctag_indexes[m] * size], &ONE) - */ - __pyx_t_19 = __pyx_v_doctag_len; - __pyx_t_20 = __pyx_t_19; - for (__pyx_t_21 = 0; __pyx_t_21 < __pyx_t_20; __pyx_t_21+=1) { - __pyx_v_m = __pyx_t_21; - - /* "gensim/models/doc2vec_inner.pyx":593 - * if _learn_doctags: - * for m in range(doctag_len): - * our_saxpy(&size, &_doctag_locks[_doctag_indexes[m]], _work, # <<<<<<<<<<<<<< - * &ONE, &_doctag_vectors[_doctag_indexes[m] * size], &ONE) - * if _learn_words: - */ - __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v__doctag_locks[(__pyx_v__doctag_indexes[__pyx_v_m])])), __pyx_v__work, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), (&(__pyx_v__doctag_vectors[((__pyx_v__doctag_indexes[__pyx_v_m]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); + * if c.learn_doctags: + * for m in range(c.doctag_len): # <<<<<<<<<<<<<< + * our_saxpy(&c.layer1_size, &c.doctag_locks[c.doctag_indexes[m]], c.work, + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE) + */ + __pyx_t_18 = __pyx_v_c.doctag_len; + __pyx_t_19 = __pyx_t_18; + for (__pyx_t_20 = 0; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { + __pyx_v_m = 
__pyx_t_20; + + /* "gensim/models/doc2vec_inner.pyx":531 + * if c.learn_doctags: + * for m in range(c.doctag_len): + * our_saxpy(&c.layer1_size, &c.doctag_locks[c.doctag_indexes[m]], c.work, # <<<<<<<<<<<<<< + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE) + * if c.learn_words: + */ + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.layer1_size), (&(__pyx_v_c.doctag_locks[(__pyx_v_c.doctag_indexes[__pyx_v_m])])), __pyx_v_c.work, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), (&(__pyx_v_c.doctag_vectors[((__pyx_v_c.doctag_indexes[__pyx_v_m]) * __pyx_v_c.layer1_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); } - /* "gensim/models/doc2vec_inner.pyx":591 - * sscal(&size, &inv_count, _work, &ONE) # (does this need BLAS-variants like saxpy?) + /* "gensim/models/doc2vec_inner.pyx":529 + * sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) * # apply accumulated error in work - * if _learn_doctags: # <<<<<<<<<<<<<< - * for m in range(doctag_len): - * our_saxpy(&size, &_doctag_locks[_doctag_indexes[m]], _work, + * if c.learn_doctags: # <<<<<<<<<<<<<< + * for m in range(c.doctag_len): + * our_saxpy(&c.layer1_size, &c.doctag_locks[c.doctag_indexes[m]], c.work, */ } - /* "gensim/models/doc2vec_inner.pyx":595 - * our_saxpy(&size, &_doctag_locks[_doctag_indexes[m]], _work, - * &ONE, &_doctag_vectors[_doctag_indexes[m] * size], &ONE) - * if _learn_words: # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":533 + * our_saxpy(&c.layer1_size, &c.doctag_locks[c.doctag_indexes[m]], c.work, + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE) + * if c.learn_words: # <<<<<<<<<<<<<< * for m in range(j, k): * if m == i: */ - __pyx_t_6 = (__pyx_v__learn_words != 0); - if (__pyx_t_6) { + __pyx_t_10 = (__pyx_v_c.learn_words != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":596 - * &ONE, &_doctag_vectors[_doctag_indexes[m] * size], &ONE) - * if _learn_words: + /* "gensim/models/doc2vec_inner.pyx":534 + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE) + * if c.learn_words: * for m in range(j, k): # <<<<<<<<<<<<<< * if m == i: * continue */ - __pyx_t_19 = __pyx_v_k; - __pyx_t_20 = __pyx_t_19; - for (__pyx_t_21 = __pyx_v_j; __pyx_t_21 < __pyx_t_20; __pyx_t_21+=1) { - __pyx_v_m = __pyx_t_21; + __pyx_t_18 = __pyx_v_k; + __pyx_t_19 = __pyx_t_18; + for (__pyx_t_20 = __pyx_v_j; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { + __pyx_v_m = __pyx_t_20; - /* "gensim/models/doc2vec_inner.pyx":597 - * if _learn_words: + /* "gensim/models/doc2vec_inner.pyx":535 + * if c.learn_words: * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< * continue * else: */ - __pyx_t_6 = ((__pyx_v_m == __pyx_v_i) != 0); - if (__pyx_t_6) { + __pyx_t_10 = ((__pyx_v_m == __pyx_v_i) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":598 + /* "gensim/models/doc2vec_inner.pyx":536 * for m in range(j, k): * if m == i: * continue # <<<<<<<<<<<<<< * else: - * our_saxpy(&size, &_word_locks[indexes[m]], _work, &ONE, + * our_saxpy(&c.layer1_size, &c.word_locks[c.indexes[m]], c.work, &ONE, */ - goto __pyx_L47_continue; + goto __pyx_L36_continue; - /* "gensim/models/doc2vec_inner.pyx":597 - * if _learn_words: + /* "gensim/models/doc2vec_inner.pyx":535 + * if c.learn_words: * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< * continue @@ -6850,31 +6571,31 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT */ } - /* "gensim/models/doc2vec_inner.pyx":600 + /* 
"gensim/models/doc2vec_inner.pyx":538 * continue * else: - * our_saxpy(&size, &_word_locks[indexes[m]], _work, &ONE, # <<<<<<<<<<<<<< - * &_word_vectors[indexes[m] * size], &ONE) + * our_saxpy(&c.layer1_size, &c.word_locks[c.indexes[m]], c.work, &ONE, # <<<<<<<<<<<<<< + * &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE) * */ /*else*/ { - /* "gensim/models/doc2vec_inner.pyx":601 + /* "gensim/models/doc2vec_inner.pyx":539 * else: - * our_saxpy(&size, &_word_locks[indexes[m]], _work, &ONE, - * &_word_vectors[indexes[m] * size], &ONE) # <<<<<<<<<<<<<< + * our_saxpy(&c.layer1_size, &c.word_locks[c.indexes[m]], c.work, &ONE, + * &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE) # <<<<<<<<<<<<<< * * return result */ - __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v__word_locks[(__pyx_v_indexes[__pyx_v_m])])), __pyx_v__work, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), (&(__pyx_v__word_vectors[((__pyx_v_indexes[__pyx_v_m]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.layer1_size), (&(__pyx_v_c.word_locks[(__pyx_v_c.indexes[__pyx_v_m])])), __pyx_v_c.work, (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), (&(__pyx_v_c.word_vectors[((__pyx_v_c.indexes[__pyx_v_m]) * __pyx_v_c.layer1_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); } - __pyx_L47_continue:; + __pyx_L36_continue:; } - /* "gensim/models/doc2vec_inner.pyx":595 - * our_saxpy(&size, &_doctag_locks[_doctag_indexes[m]], _work, - * &ONE, &_doctag_vectors[_doctag_indexes[m] * size], &ONE) - * if _learn_words: # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":533 + * our_saxpy(&c.layer1_size, &c.doctag_locks[c.doctag_indexes[m]], c.work, + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE) + * if c.learn_words: # <<<<<<<<<<<<<< * for m in range(j, k): * if m == i: */ @@ -6882,12 +6603,12 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT } } - /* "gensim/models/doc2vec_inner.pyx":553 + /* "gensim/models/doc2vec_inner.pyx":492 * * # release GIL & train on the document * with nogil: # <<<<<<<<<<<<<< - * for i in range(document_len): - * j = i - window + reduced_windows[i] + * for i in range(c.document_len): + * j = i - c.window + c.reduced_windows[i] */ /*finally:*/ { /*normal exit:*/{ @@ -6895,27 +6616,27 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT __Pyx_FastGIL_Forget(); Py_BLOCK_THREADS #endif - goto __pyx_L28; + goto __pyx_L17; } - __pyx_L28:; + __pyx_L17:; } } - /* "gensim/models/doc2vec_inner.pyx":603 - * &_word_vectors[indexes[m] * size], &ONE) + /* "gensim/models/doc2vec_inner.pyx":541 + * &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE) * * return result # <<<<<<<<<<<<<< * * */ __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v_result); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 603, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; + __pyx_t_9 = __Pyx_PyInt_From_long(__pyx_v_result); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 541, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_r = __pyx_t_9; + __pyx_t_9 = 0; goto __pyx_L0; - /* "gensim/models/doc2vec_inner.pyx":403 + /* "gensim/models/doc2vec_inner.pyx":404 * * * def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< @@ -6926,9 +6647,9 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT /* function exit code */ __pyx_L1_error:; 
__Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_14); __Pyx_XDECREF(__pyx_t_16); __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dm", __pyx_clineno, __pyx_lineno, __pyx_filename); @@ -6938,18 +6659,12 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_2train_document_dm(CYT __Pyx_XDECREF(__pyx_v_token); __Pyx_XDECREF(__pyx_v_predict_word); __Pyx_XDECREF(__pyx_v_item); - __Pyx_XDECREF(__pyx_v_work); - __Pyx_XDECREF(__pyx_v_neu1); - __Pyx_XDECREF(__pyx_v_word_vectors); - __Pyx_XDECREF(__pyx_v_word_locks); - __Pyx_XDECREF(__pyx_v_doctag_vectors); - __Pyx_XDECREF(__pyx_v_doctag_locks); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } -/* "gensim/models/doc2vec_inner.pyx":606 +/* "gensim/models/doc2vec_inner.pyx":544 * * * def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< @@ -6984,7 +6699,7 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_5train_document_dm_con values[4] = ((PyObject *)Py_None); values[5] = ((PyObject *)Py_None); - /* "gensim/models/doc2vec_inner.pyx":607 + /* "gensim/models/doc2vec_inner.pyx":545 * * def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, * learn_doctags=True, learn_words=True, learn_hidden=True, # <<<<<<<<<<<<<< @@ -6995,7 +6710,7 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_5train_document_dm_con values[7] = ((PyObject *)Py_True); values[8] = ((PyObject *)Py_True); - /* "gensim/models/doc2vec_inner.pyx":608 + /* "gensim/models/doc2vec_inner.pyx":546 * def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, * learn_doctags=True, learn_words=True, learn_hidden=True, * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): # <<<<<<<<<<<<<< @@ -7048,19 +6763,19 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_5train_document_dm_con case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doc_words)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_document_dm_concat", 0, 4, 13, 1); __PYX_ERR(0, 606, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_document_dm_concat", 0, 4, 13, 1); __PYX_ERR(0, 544, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_doctag_indexes)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_document_dm_concat", 0, 4, 13, 2); __PYX_ERR(0, 606, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_document_dm_concat", 0, 4, 13, 2); __PYX_ERR(0, 544, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_document_dm_concat", 0, 4, 13, 3); __PYX_ERR(0, 606, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_document_dm_concat", 0, 4, 13, 3); __PYX_ERR(0, 544, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: @@ -7118,7 +6833,7 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_5train_document_dm_con } } if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_document_dm_concat") < 0)) __PYX_ERR(0, 606, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, 
"train_document_dm_concat") < 0)) __PYX_ERR(0, 544, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { @@ -7146,821 +6861,171 @@ static PyObject *__pyx_pw_6gensim_6models_13doc2vec_inner_5train_document_dm_con values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; - } - } - __pyx_v_model = values[0]; - __pyx_v_doc_words = values[1]; - __pyx_v_doctag_indexes = values[2]; - __pyx_v_alpha = values[3]; - __pyx_v_work = values[4]; - __pyx_v_neu1 = values[5]; - __pyx_v_learn_doctags = values[6]; - __pyx_v_learn_words = values[7]; - __pyx_v_learn_hidden = values[8]; - __pyx_v_word_vectors = values[9]; - __pyx_v_word_locks = values[10]; - __pyx_v_doctag_vectors = values[11]; - __pyx_v_doctag_locks = values[12]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("train_document_dm_concat", 0, 4, 13, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 606, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dm_concat", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_concat(__pyx_self, __pyx_v_model, __pyx_v_doc_words, __pyx_v_doctag_indexes, __pyx_v_alpha, __pyx_v_work, __pyx_v_neu1, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); - - /* "gensim/models/doc2vec_inner.pyx":606 - * - * - * def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< - * learn_doctags=True, learn_words=True, learn_hidden=True, - * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): - */ - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_concat(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_doc_words, PyObject *__pyx_v_doctag_indexes, PyObject *__pyx_v_alpha, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { - int __pyx_v_hs; - int __pyx_v_negative; - int __pyx_v_sample; - int __pyx_v__learn_doctags; - int __pyx_v__learn_words; - int __pyx_v__learn_hidden; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__word_vectors; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__doctag_vectors; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__word_locks; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__doctag_locks; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__work; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__neu1; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; - int __pyx_v_layer1_size; - int __pyx_v_vector_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v__doctag_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_window_indexes[0x2710]; - int __pyx_v_document_len; - int __pyx_v_doctag_len; - int __pyx_v_window; - int __pyx_v_expected_doctag_len; - int __pyx_v_i; - int __pyx_v_j; - int __pyx_v_k; - int __pyx_v_m; - int __pyx_v_n; - long 
__pyx_v_result; - int __pyx_v_null_word_index; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg; - __pyx_t_5numpy_uint32_t *__pyx_v_cum_table; - unsigned PY_LONG_LONG __pyx_v_cum_table_len; - unsigned PY_LONG_LONG __pyx_v_next_random; - PyObject *__pyx_v_vlookup = NULL; - PyObject *__pyx_v_token = NULL; - PyObject *__pyx_v_predict_word = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_4; - Py_ssize_t __pyx_t_5; - long __pyx_t_6; - Py_ssize_t __pyx_t_7; - int __pyx_t_8; - int __pyx_t_9; - PyObject *__pyx_t_10 = NULL; - unsigned PY_LONG_LONG __pyx_t_11; - PyObject *__pyx_t_12 = NULL; - PyObject *(*__pyx_t_13)(PyObject *); - __pyx_t_5numpy_uint32_t __pyx_t_14; - int __pyx_t_15; - int __pyx_t_16; - int __pyx_t_17; - int __pyx_t_18; - int __pyx_t_19; - long __pyx_t_20; - __Pyx_RefNannySetupContext("train_document_dm_concat", 0); - __Pyx_INCREF(__pyx_v_work); - __Pyx_INCREF(__pyx_v_neu1); - __Pyx_INCREF(__pyx_v_word_vectors); - __Pyx_INCREF(__pyx_v_word_locks); - __Pyx_INCREF(__pyx_v_doctag_vectors); - __Pyx_INCREF(__pyx_v_doctag_locks); - - /* "gensim/models/doc2vec_inner.pyx":654 - * - * """ - * cdef int hs = model.hs # <<<<<<<<<<<<<< - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 654, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 654, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_hs = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":655 - * """ - * cdef int hs = model.hs - * cdef int negative = model.negative # <<<<<<<<<<<<<< - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _learn_doctags = learn_doctags - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 655, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 655, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_negative = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":656 - * cdef int hs = model.hs - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< - * cdef int _learn_doctags = learn_doctags - * cdef int _learn_words = learn_words - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 656, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 656, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 656, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 656, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); 
__pyx_t_1 = 0; - __pyx_v_sample = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":657 - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _learn_doctags = learn_doctags # <<<<<<<<<<<<<< - * cdef int _learn_words = learn_words - * cdef int _learn_hidden = learn_hidden - */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_doctags); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 657, __pyx_L1_error) - __pyx_v__learn_doctags = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":658 - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int _learn_doctags = learn_doctags - * cdef int _learn_words = learn_words # <<<<<<<<<<<<<< - * cdef int _learn_hidden = learn_hidden - * - */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_words); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 658, __pyx_L1_error) - __pyx_v__learn_words = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":659 - * cdef int _learn_doctags = learn_doctags - * cdef int _learn_words = learn_words - * cdef int _learn_hidden = learn_hidden # <<<<<<<<<<<<<< - * - * cdef REAL_t *_word_vectors - */ - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_v_learn_hidden); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 659, __pyx_L1_error) - __pyx_v__learn_hidden = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":667 - * cdef REAL_t *_work - * cdef REAL_t *_neu1 - * cdef REAL_t _alpha = alpha # <<<<<<<<<<<<<< - * cdef int layer1_size = model.trainables.layer1_size - * cdef int vector_size = model.docvecs.vector_size - */ - __pyx_t_4 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_4 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 667, __pyx_L1_error) - __pyx_v__alpha = __pyx_t_4; - - /* "gensim/models/doc2vec_inner.pyx":668 - * cdef REAL_t *_neu1 - * cdef REAL_t _alpha = alpha - * cdef int layer1_size = model.trainables.layer1_size # <<<<<<<<<<<<<< - * cdef int vector_size = model.docvecs.vector_size - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 668, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 668, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 668, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_layer1_size = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":669 - * cdef REAL_t _alpha = alpha - * cdef int layer1_size = model.trainables.layer1_size - * cdef int vector_size = model.docvecs.vector_size # <<<<<<<<<<<<<< - * - * cdef int codelens[MAX_DOCUMENT_LEN] - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_docvecs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 669, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 669, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 669, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_vector_size = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":677 - * cdef int document_len - * cdef int doctag_len - * cdef 
int window = model.window # <<<<<<<<<<<<<< - * cdef int expected_doctag_len = model.dm_tag_count - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 677, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 677, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_window = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":678 - * cdef int doctag_len - * cdef int window = model.window - * cdef int expected_doctag_len = model.dm_tag_count # <<<<<<<<<<<<<< - * - * cdef int i, j, k, m, n - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_dm_tag_count); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 678, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 678, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_expected_doctag_len = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":681 - * - * cdef int i, j, k, m, n - * cdef long result = 0 # <<<<<<<<<<<<<< - * cdef int null_word_index = model.wv.vocab['\0'].index - * - */ - __pyx_v_result = 0; - - /* "gensim/models/doc2vec_inner.pyx":682 - * cdef int i, j, k, m, n - * cdef long result = 0 - * cdef int null_word_index = model.wv.vocab['\0'].index # <<<<<<<<<<<<<< - * - * # For hierarchical softmax - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_Dict_GetItem(__pyx_t_3, __pyx_kp_s__5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_null_word_index = __pyx_t_2; - - /* "gensim/models/doc2vec_inner.pyx":695 - * cdef unsigned long long next_random - * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) # <<<<<<<<<<<<<< - * if doctag_len != expected_doctag_len: - * return 0 # skip doc without expected number of tags - */ - __pyx_t_5 = PyObject_Length(__pyx_v_doctag_indexes); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 695, __pyx_L1_error) - __pyx_t_6 = 0x2710; - if (((__pyx_t_5 < __pyx_t_6) != 0)) { - __pyx_t_7 = __pyx_t_5; - } else { - __pyx_t_7 = __pyx_t_6; - } - __pyx_v_doctag_len = ((int)__pyx_t_7); - - /* "gensim/models/doc2vec_inner.pyx":696 - * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - * if doctag_len != expected_doctag_len: # <<<<<<<<<<<<<< - * return 0 # skip doc without expected number of tags - * - */ - __pyx_t_8 = ((__pyx_v_doctag_len != __pyx_v_expected_doctag_len) != 0); - if (__pyx_t_8) { - - /* "gensim/models/doc2vec_inner.pyx":697 - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - * if doctag_len != expected_doctag_len: - * return 0 # skip doc without expected 
number of tags # <<<<<<<<<<<<<< - * - * # default vectors, locks from syn0/doctag_syn0 - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_int_0); - __pyx_r = __pyx_int_0; - goto __pyx_L0; - - /* "gensim/models/doc2vec_inner.pyx":696 - * - * doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - * if doctag_len != expected_doctag_len: # <<<<<<<<<<<<<< - * return 0 # skip doc without expected number of tags - * - */ - } - - /* "gensim/models/doc2vec_inner.pyx":700 - * - * # default vectors, locks from syn0/doctag_syn0 - * if word_vectors is None: # <<<<<<<<<<<<<< - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) - */ - __pyx_t_8 = (__pyx_v_word_vectors == Py_None); - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { - - /* "gensim/models/doc2vec_inner.pyx":701 - * # default vectors, locks from syn0/doctag_syn0 - * if word_vectors is None: - * word_vectors = model.wv.vectors # <<<<<<<<<<<<<< - * _word_vectors = (np.PyArray_DATA(word_vectors)) - * if doctag_vectors is None: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 701, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 701, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_word_vectors, __pyx_t_1); - __pyx_t_1 = 0; - - /* "gensim/models/doc2vec_inner.pyx":700 - * - * # default vectors, locks from syn0/doctag_syn0 - * if word_vectors is None: # <<<<<<<<<<<<<< - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":702 - * if word_vectors is None: - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) # <<<<<<<<<<<<<< - * if doctag_vectors is None: - * doctag_vectors = model.docvecs.vectors_docs - */ - if (!(likely(((__pyx_v_word_vectors) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_vectors, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 702, __pyx_L1_error) - __pyx_v__word_vectors = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_vectors))); - - /* "gensim/models/doc2vec_inner.pyx":703 - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) - * if doctag_vectors is None: # <<<<<<<<<<<<<< - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - */ - __pyx_t_9 = (__pyx_v_doctag_vectors == Py_None); - __pyx_t_8 = (__pyx_t_9 != 0); - if (__pyx_t_8) { - - /* "gensim/models/doc2vec_inner.pyx":704 - * _word_vectors = (np.PyArray_DATA(word_vectors)) - * if doctag_vectors is None: - * doctag_vectors = model.docvecs.vectors_docs # <<<<<<<<<<<<<< - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - * if word_locks is None: - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_docvecs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 704, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_docs); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 704, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_doctag_vectors, __pyx_t_3); - __pyx_t_3 = 0; - - /* "gensim/models/doc2vec_inner.pyx":703 - * word_vectors = model.wv.vectors - * _word_vectors = (np.PyArray_DATA(word_vectors)) - * if doctag_vectors is None: # <<<<<<<<<<<<<< 
- * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":705 - * if doctag_vectors is None: - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) # <<<<<<<<<<<<<< - * if word_locks is None: - * word_locks = model.trainables.vectors_lockf - */ - if (!(likely(((__pyx_v_doctag_vectors) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_doctag_vectors, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 705, __pyx_L1_error) - __pyx_v__doctag_vectors = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_doctag_vectors))); - - /* "gensim/models/doc2vec_inner.pyx":706 - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - * if word_locks is None: # <<<<<<<<<<<<<< - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) - */ - __pyx_t_8 = (__pyx_v_word_locks == Py_None); - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { - - /* "gensim/models/doc2vec_inner.pyx":707 - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - * if word_locks is None: - * word_locks = model.trainables.vectors_lockf # <<<<<<<<<<<<<< - * _word_locks = (np.PyArray_DATA(word_locks)) - * if doctag_locks is None: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 707, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 707, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_word_locks, __pyx_t_1); - __pyx_t_1 = 0; - - /* "gensim/models/doc2vec_inner.pyx":706 - * doctag_vectors = model.docvecs.vectors_docs - * _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - * if word_locks is None: # <<<<<<<<<<<<<< - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) - */ + } + } + __pyx_v_model = values[0]; + __pyx_v_doc_words = values[1]; + __pyx_v_doctag_indexes = values[2]; + __pyx_v_alpha = values[3]; + __pyx_v_work = values[4]; + __pyx_v_neu1 = values[5]; + __pyx_v_learn_doctags = values[6]; + __pyx_v_learn_words = values[7]; + __pyx_v_learn_hidden = values[8]; + __pyx_v_word_vectors = values[9]; + __pyx_v_word_locks = values[10]; + __pyx_v_doctag_vectors = values[11]; + __pyx_v_doctag_locks = values[12]; } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("train_document_dm_concat", 0, 4, 13, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 544, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dm_concat", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_concat(__pyx_self, __pyx_v_model, __pyx_v_doc_words, __pyx_v_doctag_indexes, __pyx_v_alpha, __pyx_v_work, __pyx_v_neu1, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, __pyx_v_word_vectors, __pyx_v_word_locks, __pyx_v_doctag_vectors, __pyx_v_doctag_locks); - /* "gensim/models/doc2vec_inner.pyx":708 - * if word_locks is None: - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) # <<<<<<<<<<<<<< - * if doctag_locks is None: - * doctag_locks = 
model.trainables.vectors_docs_lockf - */ - if (!(likely(((__pyx_v_word_locks) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_locks, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 708, __pyx_L1_error) - __pyx_v__word_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_locks))); - - /* "gensim/models/doc2vec_inner.pyx":709 - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) - * if doctag_locks is None: # <<<<<<<<<<<<<< - * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) - */ - __pyx_t_9 = (__pyx_v_doctag_locks == Py_None); - __pyx_t_8 = (__pyx_t_9 != 0); - if (__pyx_t_8) { - - /* "gensim/models/doc2vec_inner.pyx":710 - * _word_locks = (np.PyArray_DATA(word_locks)) - * if doctag_locks is None: - * doctag_locks = model.trainables.vectors_docs_lockf # <<<<<<<<<<<<<< - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) + /* "gensim/models/doc2vec_inner.pyx":544 * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 710, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_docs_lockf); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 710, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_doctag_locks, __pyx_t_3); - __pyx_t_3 = 0; - - /* "gensim/models/doc2vec_inner.pyx":709 - * word_locks = model.trainables.vectors_lockf - * _word_locks = (np.PyArray_DATA(word_locks)) - * if doctag_locks is None: # <<<<<<<<<<<<<< - * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":711 - * if doctag_locks is None: - * doctag_locks = model.trainables.vectors_docs_lockf - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) # <<<<<<<<<<<<<< * - * if hs: + * def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< + * learn_doctags=True, learn_words=True, learn_hidden=True, + * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): */ - if (!(likely(((__pyx_v_doctag_locks) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_doctag_locks, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 711, __pyx_L1_error) - __pyx_v__doctag_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_doctag_locks))); - /* "gensim/models/doc2vec_inner.pyx":713 - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) - * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - */ - __pyx_t_8 = (__pyx_v_hs != 0); - if (__pyx_t_8) { + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} - /* "gensim/models/doc2vec_inner.pyx":714 - * - * if hs: - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< - * - * if negative: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 714, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 714, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 714, __pyx_L1_error) - __pyx_v_syn1 = 
((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; +static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_concat(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_doc_words, PyObject *__pyx_v_doctag_indexes, PyObject *__pyx_v_alpha, PyObject *__pyx_v_work, PyObject *__pyx_v_neu1, PyObject *__pyx_v_learn_doctags, PyObject *__pyx_v_learn_words, PyObject *__pyx_v_learn_hidden, PyObject *__pyx_v_word_vectors, PyObject *__pyx_v_word_locks, PyObject *__pyx_v_doctag_vectors, PyObject *__pyx_v_doctag_locks) { + struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig __pyx_v_c; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_m; + int __pyx_v_n; + long __pyx_v_result; + PyObject *__pyx_v_vlookup = NULL; + PyObject *__pyx_v_token = NULL; + PyObject *__pyx_v_predict_word = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config __pyx_t_2; + Py_ssize_t __pyx_t_3; + long __pyx_t_4; + Py_ssize_t __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *(*__pyx_t_8)(PyObject *); + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + __pyx_t_5numpy_uint32_t __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + int __pyx_t_15; + int __pyx_t_16; + int __pyx_t_17; + int __pyx_t_18; + int __pyx_t_19; + long __pyx_t_20; + __Pyx_RefNannySetupContext("train_document_dm_concat", 0); - /* "gensim/models/doc2vec_inner.pyx":713 - * _doctag_locks = (np.PyArray_DATA(doctag_locks)) + /* "gensim/models/doc2vec_inner.pyx":595 * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + * cdef int i, j, k, m, n + * cdef long result = 0 # <<<<<<<<<<<<<< * + * init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=False, work=work, neu1=neu1, */ - } + __pyx_v_result = 0; - /* "gensim/models/doc2vec_inner.pyx":716 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/doc2vec_inner.pyx":597 + * cdef long result = 0 * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - */ - __pyx_t_8 = (__pyx_v_negative != 0); - if (__pyx_t_8) { - - /* "gensim/models/doc2vec_inner.pyx":717 + * init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=False, work=work, neu1=neu1, # <<<<<<<<<<<<<< + * word_vectors=word_vectors, word_locks=word_locks, doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) * - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 717, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 717, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 717, __pyx_L1_error) - __pyx_v_syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); - 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "gensim/models/doc2vec_inner.pyx":718 - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 718, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 718, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 718, __pyx_L1_error) - __pyx_v_cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "gensim/models/doc2vec_inner.pyx":719 - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 719, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 719, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_7 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_7 == ((Py_ssize_t)-1))) __PYX_ERR(0, 719, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_cum_table_len = __pyx_t_7; + __pyx_t_2.__pyx_n = 7; + __pyx_t_2.train_words = Py_False; + __pyx_t_2.work = __pyx_v_work; + __pyx_t_2.neu1 = __pyx_v_neu1; + __pyx_t_2.word_vectors = __pyx_v_word_vectors; + __pyx_t_2.word_locks = __pyx_v_word_locks; + __pyx_t_2.doctag_vectors = __pyx_v_doctag_vectors; + __pyx_t_2.doctag_locks = __pyx_v_doctag_locks; + __pyx_t_1 = __pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config((&__pyx_v_c), __pyx_v_model, __pyx_v_alpha, __pyx_v_learn_doctags, __pyx_v_learn_words, __pyx_v_learn_hidden, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 597, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/doc2vec_inner.pyx":716 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/doc2vec_inner.pyx":600 + * word_vectors=word_vectors, word_locks=word_locks, doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":720 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + * c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) # <<<<<<<<<<<<<< * + * if c.doctag_len != c.expected_doctag_len: */ - __pyx_t_9 = (__pyx_v_negative != 0); - if (!__pyx_t_9) { + __pyx_t_3 = PyObject_Length(__pyx_v_doctag_indexes); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 600, 
__pyx_L1_error) + __pyx_t_4 = 0x2710; + if (((__pyx_t_3 < __pyx_t_4) != 0)) { + __pyx_t_5 = __pyx_t_3; } else { - __pyx_t_8 = __pyx_t_9; - goto __pyx_L11_bool_binop_done; + __pyx_t_5 = __pyx_t_4; } - __pyx_t_9 = (__pyx_v_sample != 0); - __pyx_t_8 = __pyx_t_9; - __pyx_L11_bool_binop_done:; - if (__pyx_t_8) { + __pyx_v_c.doctag_len = ((int)__pyx_t_5); - /* "gensim/models/doc2vec_inner.pyx":721 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":602 + * c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) * - * # convert Python structures to primitive types, so we can release the GIL - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_t_10 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_11 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_10); if (unlikely((__pyx_t_11 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_v_next_random = __pyx_t_11; - - /* "gensim/models/doc2vec_inner.pyx":720 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + * if c.doctag_len != c.expected_doctag_len: # <<<<<<<<<<<<<< + * return 0 # skip doc without expected number of tags * */ - } + __pyx_t_6 = ((__pyx_v_c.doctag_len != __pyx_v_c.expected_doctag_len) != 0); + if (__pyx_t_6) { - /* "gensim/models/doc2vec_inner.pyx":724 + /* "gensim/models/doc2vec_inner.pyx":603 * - * # convert Python structures to primitive types, so we can release the GIL - * if work is None: # <<<<<<<<<<<<<< - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) - */ - __pyx_t_8 = (__pyx_v_work == Py_None); - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { - - /* "gensim/models/doc2vec_inner.pyx":725 - * # convert Python 
structures to primitive types, so we can release the GIL - * if work is None: - * work = zeros(model.trainables.layer1_size, dtype=REAL) # <<<<<<<<<<<<<< - * _work = np.PyArray_DATA(work) - * if neu1 is None: - */ - __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_zeros); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 725, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 725, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 725, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 725, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 725, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_12 = __Pyx_GetModuleGlobalName(__pyx_n_s_REAL); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 725, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - if (PyDict_SetItem(__pyx_t_1, __pyx_n_s_dtype, __pyx_t_12) < 0) __PYX_ERR(0, 725, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - __pyx_t_12 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_3, __pyx_t_1); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 725, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF_SET(__pyx_v_work, __pyx_t_12); - __pyx_t_12 = 0; - - /* "gensim/models/doc2vec_inner.pyx":724 + * if c.doctag_len != c.expected_doctag_len: + * return 0 # skip doc without expected number of tags # <<<<<<<<<<<<<< * - * # convert Python structures to primitive types, so we can release the GIL - * if work is None: # <<<<<<<<<<<<<< - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":726 - * if work is None: - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) # <<<<<<<<<<<<<< - * if neu1 is None: - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - */ - if (!(likely(((__pyx_v_work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 726, __pyx_L1_error) - __pyx_v__work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_work))); - - /* "gensim/models/doc2vec_inner.pyx":727 - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) - * if neu1 is None: # <<<<<<<<<<<<<< - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - * _neu1 = np.PyArray_DATA(neu1) + * vlookup = model.wv.vocab */ - __pyx_t_9 = (__pyx_v_neu1 == Py_None); - __pyx_t_8 = (__pyx_t_9 != 0); - if (__pyx_t_8) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_int_0); + __pyx_r = __pyx_int_0; + goto __pyx_L0; - /* "gensim/models/doc2vec_inner.pyx":728 - * _work = np.PyArray_DATA(work) - * if neu1 is None: - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) # <<<<<<<<<<<<<< - * _neu1 = np.PyArray_DATA(neu1) + /* "gensim/models/doc2vec_inner.pyx":602 + * c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) * - */ - __pyx_t_12 = __Pyx_GetModuleGlobalName(__pyx_n_s_zeros); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 728, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_12); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 728, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_layer1_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 728, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 728, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 728, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_REAL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 728, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_dtype, __pyx_t_10) < 0) __PYX_ERR(0, 728, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_12, __pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 728, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF_SET(__pyx_v_neu1, __pyx_t_10); - __pyx_t_10 = 0; - - /* "gensim/models/doc2vec_inner.pyx":727 - * work = zeros(model.trainables.layer1_size, dtype=REAL) - * _work = np.PyArray_DATA(work) - * if neu1 is None: # <<<<<<<<<<<<<< - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - * _neu1 = np.PyArray_DATA(neu1) - */ - } - - /* "gensim/models/doc2vec_inner.pyx":729 - * if neu1 is None: - * neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - * _neu1 = np.PyArray_DATA(neu1) # <<<<<<<<<<<<<< + * if c.doctag_len != c.expected_doctag_len: # <<<<<<<<<<<<<< + * return 0 # skip doc without expected number of tags * - * vlookup = model.wv.vocab */ - if (!(likely(((__pyx_v_neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 729, __pyx_L1_error) - __pyx_v__neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_neu1))); + } - /* "gensim/models/doc2vec_inner.pyx":731 - * _neu1 = np.PyArray_DATA(neu1) + /* "gensim/models/doc2vec_inner.pyx":605 + * return 0 # skip doc without expected number of tags * * vlookup = model.wv.vocab # <<<<<<<<<<<<<< * i = 0 * for token in doc_words: */ - __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 731, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 731, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - __pyx_v_vlookup = __pyx_t_3; - __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 605, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 605, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_vlookup = __pyx_t_7; + __pyx_t_7 = 0; - /* "gensim/models/doc2vec_inner.pyx":732 + /* "gensim/models/doc2vec_inner.pyx":606 * * vlookup = model.wv.vocab * i = 0 # <<<<<<<<<<<<<< @@ -7969,7 +7034,7 @@ static PyObject 
*__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_con */ __pyx_v_i = 0; - /* "gensim/models/doc2vec_inner.pyx":733 + /* "gensim/models/doc2vec_inner.pyx":607 * vlookup = model.wv.vocab * i = 0 * for token in doc_words: # <<<<<<<<<<<<<< @@ -7977,222 +7042,222 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_con * if predict_word is None: # shrink document to leave out word */ if (likely(PyList_CheckExact(__pyx_v_doc_words)) || PyTuple_CheckExact(__pyx_v_doc_words)) { - __pyx_t_3 = __pyx_v_doc_words; __Pyx_INCREF(__pyx_t_3); __pyx_t_7 = 0; - __pyx_t_13 = NULL; + __pyx_t_7 = __pyx_v_doc_words; __Pyx_INCREF(__pyx_t_7); __pyx_t_5 = 0; + __pyx_t_8 = NULL; } else { - __pyx_t_7 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_doc_words); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 733, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_13 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 733, __pyx_L1_error) + __pyx_t_5 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_v_doc_words); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 607, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = Py_TYPE(__pyx_t_7)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 607, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_13)) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_3)) break; + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_7))) { + if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_7)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_10 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_10); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 733, __pyx_L1_error) + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_5); __Pyx_INCREF(__pyx_t_1); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 607, __pyx_L1_error) #else - __pyx_t_10 = PySequence_ITEM(__pyx_t_3, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 733, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); + __pyx_t_1 = PySequence_ITEM(__pyx_t_7, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 607, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } else { - if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_7)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_10 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_10); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 733, __pyx_L1_error) + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_5); __Pyx_INCREF(__pyx_t_1); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 607, __pyx_L1_error) #else - __pyx_t_10 = PySequence_ITEM(__pyx_t_3, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 733, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); + __pyx_t_1 = PySequence_ITEM(__pyx_t_7, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 607, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } } else { - __pyx_t_10 = __pyx_t_13(__pyx_t_3); - if (unlikely(!__pyx_t_10)) { + __pyx_t_1 = __pyx_t_8(__pyx_t_7); + if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 733, __pyx_L1_error) + else __PYX_ERR(0, 607, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_10); + __Pyx_GOTREF(__pyx_t_1); } - __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_10); - __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_1); + __pyx_t_1 = 0; - 
/* "gensim/models/doc2vec_inner.pyx":734 + /* "gensim/models/doc2vec_inner.pyx":608 * i = 0 * for token in doc_words: * predict_word = vlookup[token] if token in vlookup else None # <<<<<<<<<<<<<< * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged */ - __pyx_t_8 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 734, __pyx_L1_error) - if ((__pyx_t_8 != 0)) { - __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 734, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_10 = __pyx_t_1; - __pyx_t_1 = 0; + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 608, __pyx_L1_error) + if ((__pyx_t_6 != 0)) { + __pyx_t_9 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 608, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_1 = __pyx_t_9; + __pyx_t_9 = 0; } else { __Pyx_INCREF(Py_None); - __pyx_t_10 = Py_None; + __pyx_t_1 = Py_None; } - __Pyx_XDECREF_SET(__pyx_v_predict_word, __pyx_t_10); - __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_predict_word, __pyx_t_1); + __pyx_t_1 = 0; - /* "gensim/models/doc2vec_inner.pyx":735 + /* "gensim/models/doc2vec_inner.pyx":609 * for token in doc_words: * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word # <<<<<<<<<<<<<< * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): */ - __pyx_t_8 = (__pyx_v_predict_word == Py_None); - __pyx_t_9 = (__pyx_t_8 != 0); - if (__pyx_t_9) { + __pyx_t_6 = (__pyx_v_predict_word == Py_None); + __pyx_t_10 = (__pyx_t_6 != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":736 + /* "gensim/models/doc2vec_inner.pyx":610 * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged # <<<<<<<<<<<<<< - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue */ - goto __pyx_L15_continue; + goto __pyx_L4_continue; - /* "gensim/models/doc2vec_inner.pyx":735 + /* "gensim/models/doc2vec_inner.pyx":609 * for token in doc_words: * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word # <<<<<<<<<<<<<< * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): */ } - /* "gensim/models/doc2vec_inner.pyx":737 + /* "gensim/models/doc2vec_inner.pyx":611 * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[i] = predict_word.index + * c.indexes[i] = predict_word.index */ - __pyx_t_8 = (__pyx_v_sample != 0); - if (__pyx_t_8) { + __pyx_t_6 = (__pyx_v_c.sample != 0); + if (__pyx_t_6) { } else { - __pyx_t_9 = __pyx_t_8; - goto __pyx_L19_bool_binop_done; + __pyx_t_10 = __pyx_t_6; + goto __pyx_L8_bool_binop_done; } - __pyx_t_10 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 737, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_10); - __pyx_t_1 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_next_random))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 737, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 611, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_12 = PyObject_RichCompare(__pyx_t_10, __pyx_t_1, Py_LT); __Pyx_XGOTREF(__pyx_t_12); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 737, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_9 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_c.next_random))); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 611, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_11 = PyObject_RichCompare(__pyx_t_1, __pyx_t_9, Py_LT); __Pyx_XGOTREF(__pyx_t_11); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 611, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_12); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 737, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - __pyx_t_9 = __pyx_t_8; - __pyx_L19_bool_binop_done:; - if (__pyx_t_9) { + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_11); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 611, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_10 = __pyx_t_6; + __pyx_L8_bool_binop_done:; + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":738 + /* "gensim/models/doc2vec_inner.pyx":612 * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue # <<<<<<<<<<<<<< - * indexes[i] = predict_word.index - * if hs: + * c.indexes[i] = predict_word.index + * if c.hs: */ - goto __pyx_L15_continue; + goto __pyx_L4_continue; - /* "gensim/models/doc2vec_inner.pyx":737 + /* "gensim/models/doc2vec_inner.pyx":611 * if predict_word is None: # shrink document to leave out word * continue # leaving i unchanged - * if sample and predict_word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[i] = predict_word.index + * c.indexes[i] = predict_word.index */ } - /* "gensim/models/doc2vec_inner.pyx":739 - * if sample and predict_word.sample_int < random_int32(&next_random): + /* "gensim/models/doc2vec_inner.pyx":613 + * if c.sample and predict_word.sample_int < random_int32(&c.next_random): * continue - * indexes[i] = predict_word.index # <<<<<<<<<<<<<< - * if hs: - * codelens[i] = len(predict_word.code) + * c.indexes[i] = predict_word.index # <<<<<<<<<<<<<< + * if c.hs: + * c.codelens[i] = len(predict_word.code) */ - __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_index); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 739, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - __pyx_t_14 = __Pyx_PyInt_As_npy_uint32(__pyx_t_12); if (unlikely((__pyx_t_14 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 739, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - (__pyx_v_indexes[__pyx_v_i]) = __pyx_t_14; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_index); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 613, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_11); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 613, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.indexes[__pyx_v_i]) = __pyx_t_12; - /* "gensim/models/doc2vec_inner.pyx":740 + /* "gensim/models/doc2vec_inner.pyx":614 * continue - * indexes[i] = predict_word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) - */ - __pyx_t_9 = (__pyx_v_hs != 0); - if (__pyx_t_9) { - - /* "gensim/models/doc2vec_inner.pyx":741 - * indexes[i] = predict_word.index - * if hs: - * codelens[i] = len(predict_word.code) # <<<<<<<<<<<<<< - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) - */ - __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 741, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - __pyx_t_5 = PyObject_Length(__pyx_t_12); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 741, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - (__pyx_v_codelens[__pyx_v_i]) = ((int)__pyx_t_5); - - /* "gensim/models/doc2vec_inner.pyx":742 - * if hs: - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) # <<<<<<<<<<<<<< - * points[i] = np.PyArray_DATA(predict_word.point) + * c.indexes[i] = predict_word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) + */ + __pyx_t_10 = (__pyx_v_c.hs != 0); + if (__pyx_t_10) { + + /* "gensim/models/doc2vec_inner.pyx":615 + * c.indexes[i] = predict_word.index + * if c.hs: + * c.codelens[i] = len(predict_word.code) # <<<<<<<<<<<<<< + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) + */ + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 615, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_3 = PyObject_Length(__pyx_t_11); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 615, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.codelens[__pyx_v_i]) = ((int)__pyx_t_3); + + /* "gensim/models/doc2vec_inner.pyx":616 + * if c.hs: + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) # <<<<<<<<<<<<<< + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 */ - __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 742, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - if (!(likely(((__pyx_t_12) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_12, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 742, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_12))); - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 616, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 616, __pyx_L1_error) + (__pyx_v_c.codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/doc2vec_inner.pyx":743 - * 
codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":617 + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) # <<<<<<<<<<<<<< * result += 1 * i += 1 */ - __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_point); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 743, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_12); - if (!(likely(((__pyx_t_12) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_12, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 743, __pyx_L1_error) - (__pyx_v_points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_12))); - __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_predict_word, __pyx_n_s_point); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 617, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 617, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/doc2vec_inner.pyx":740 + /* "gensim/models/doc2vec_inner.pyx":614 * continue - * indexes[i] = predict_word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[i] = len(predict_word.code) - * codes[i] = np.PyArray_DATA(predict_word.code) + * c.indexes[i] = predict_word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[i] = len(predict_word.code) + * c.codes[i] = np.PyArray_DATA(predict_word.code) */ } - /* "gensim/models/doc2vec_inner.pyx":744 - * codes[i] = np.PyArray_DATA(predict_word.code) - * points[i] = np.PyArray_DATA(predict_word.point) + /* "gensim/models/doc2vec_inner.pyx":618 + * c.codes[i] = np.PyArray_DATA(predict_word.code) + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 # <<<<<<<<<<<<<< * i += 1 * if i == MAX_DOCUMENT_LEN: */ __pyx_v_result = (__pyx_v_result + 1); - /* "gensim/models/doc2vec_inner.pyx":745 - * points[i] = np.PyArray_DATA(predict_word.point) + /* "gensim/models/doc2vec_inner.pyx":619 + * c.points[i] = np.PyArray_DATA(predict_word.point) * result += 1 * i += 1 # <<<<<<<<<<<<<< * if i == MAX_DOCUMENT_LEN: @@ -8200,83 +7265,83 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_con */ __pyx_v_i = (__pyx_v_i + 1); - /* "gensim/models/doc2vec_inner.pyx":746 + /* "gensim/models/doc2vec_inner.pyx":620 * result += 1 * i += 1 * if i == MAX_DOCUMENT_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? - * document_len = i + * c.document_len = i */ - __pyx_t_9 = ((__pyx_v_i == 0x2710) != 0); - if (__pyx_t_9) { + __pyx_t_10 = ((__pyx_v_i == 0x2710) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":747 + /* "gensim/models/doc2vec_inner.pyx":621 * i += 1 * if i == MAX_DOCUMENT_LEN: * break # TODO: log warning, tally overflow? # <<<<<<<<<<<<<< - * document_len = i + * c.document_len = i * */ - goto __pyx_L16_break; + goto __pyx_L5_break; - /* "gensim/models/doc2vec_inner.pyx":746 + /* "gensim/models/doc2vec_inner.pyx":620 * result += 1 * i += 1 * if i == MAX_DOCUMENT_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? 
- * document_len = i + * c.document_len = i */ } - /* "gensim/models/doc2vec_inner.pyx":733 + /* "gensim/models/doc2vec_inner.pyx":607 * vlookup = model.wv.vocab * i = 0 * for token in doc_words: # <<<<<<<<<<<<<< * predict_word = vlookup[token] if token in vlookup else None * if predict_word is None: # shrink document to leave out word */ - __pyx_L15_continue:; + __pyx_L4_continue:; } - __pyx_L16_break:; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_L5_break:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "gensim/models/doc2vec_inner.pyx":748 + /* "gensim/models/doc2vec_inner.pyx":622 * if i == MAX_DOCUMENT_LEN: * break # TODO: log warning, tally overflow? - * document_len = i # <<<<<<<<<<<<<< + * c.document_len = i # <<<<<<<<<<<<<< * - * for i in range(doctag_len): + * for i in range(c.doctag_len): */ - __pyx_v_document_len = __pyx_v_i; + __pyx_v_c.document_len = __pyx_v_i; - /* "gensim/models/doc2vec_inner.pyx":750 - * document_len = i + /* "gensim/models/doc2vec_inner.pyx":624 + * c.document_len = i * - * for i in range(doctag_len): # <<<<<<<<<<<<<< - * _doctag_indexes[i] = doctag_indexes[i] + * for i in range(c.doctag_len): # <<<<<<<<<<<<<< + * c.doctag_indexes[i] = doctag_indexes[i] * result += 1 */ - __pyx_t_2 = __pyx_v_doctag_len; - __pyx_t_15 = __pyx_t_2; - for (__pyx_t_16 = 0; __pyx_t_16 < __pyx_t_15; __pyx_t_16+=1) { - __pyx_v_i = __pyx_t_16; + __pyx_t_13 = __pyx_v_c.doctag_len; + __pyx_t_14 = __pyx_t_13; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_i = __pyx_t_15; - /* "gensim/models/doc2vec_inner.pyx":751 + /* "gensim/models/doc2vec_inner.pyx":625 * - * for i in range(doctag_len): - * _doctag_indexes[i] = doctag_indexes[i] # <<<<<<<<<<<<<< + * for i in range(c.doctag_len): + * c.doctag_indexes[i] = doctag_indexes[i] # <<<<<<<<<<<<<< * result += 1 * */ - __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_doctag_indexes, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 0, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 751, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_14 = __Pyx_PyInt_As_npy_uint32(__pyx_t_3); if (unlikely((__pyx_t_14 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 751, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - (__pyx_v__doctag_indexes[__pyx_v_i]) = __pyx_t_14; + __pyx_t_7 = __Pyx_GetItemInt(__pyx_v_doctag_indexes, __pyx_v_i, int, 1, __Pyx_PyInt_From_int, 0, 0, 0); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 625, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_7); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 625, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + (__pyx_v_c.doctag_indexes[__pyx_v_i]) = __pyx_t_12; - /* "gensim/models/doc2vec_inner.pyx":752 - * for i in range(doctag_len): - * _doctag_indexes[i] = doctag_indexes[i] + /* "gensim/models/doc2vec_inner.pyx":626 + * for i in range(c.doctag_len): + * c.doctag_indexes[i] = doctag_indexes[i] * result += 1 # <<<<<<<<<<<<<< * * # release GIL & train on the document @@ -8284,12 +7349,12 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_con __pyx_v_result = (__pyx_v_result + 1); } - /* "gensim/models/doc2vec_inner.pyx":755 + /* "gensim/models/doc2vec_inner.pyx":629 * * # release GIL & train on the document * with nogil: # <<<<<<<<<<<<<< - * for i in range(document_len): - * j = i - window # negative OK: will pad with null word + * for i in range(c.document_len): + * j = i - c.window # negative OK: will pad with null word */ { #ifdef WITH_THREAD @@ 
-8299,344 +7364,345 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_con #endif /*try:*/ { - /* "gensim/models/doc2vec_inner.pyx":756 + /* "gensim/models/doc2vec_inner.pyx":630 * # release GIL & train on the document * with nogil: - * for i in range(document_len): # <<<<<<<<<<<<<< - * j = i - window # negative OK: will pad with null word - * k = i + window + 1 # past document end OK: will pad with null word + * for i in range(c.document_len): # <<<<<<<<<<<<<< + * j = i - c.window # negative OK: will pad with null word + * k = i + c.window + 1 # past document end OK: will pad with null word */ - __pyx_t_2 = __pyx_v_document_len; - __pyx_t_15 = __pyx_t_2; - for (__pyx_t_16 = 0; __pyx_t_16 < __pyx_t_15; __pyx_t_16+=1) { - __pyx_v_i = __pyx_t_16; + __pyx_t_13 = __pyx_v_c.document_len; + __pyx_t_14 = __pyx_t_13; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_i = __pyx_t_15; - /* "gensim/models/doc2vec_inner.pyx":757 + /* "gensim/models/doc2vec_inner.pyx":631 * with nogil: - * for i in range(document_len): - * j = i - window # negative OK: will pad with null word # <<<<<<<<<<<<<< - * k = i + window + 1 # past document end OK: will pad with null word + * for i in range(c.document_len): + * j = i - c.window # negative OK: will pad with null word # <<<<<<<<<<<<<< + * k = i + c.window + 1 # past document end OK: will pad with null word * */ - __pyx_v_j = (__pyx_v_i - __pyx_v_window); + __pyx_v_j = (__pyx_v_i - __pyx_v_c.window); - /* "gensim/models/doc2vec_inner.pyx":758 - * for i in range(document_len): - * j = i - window # negative OK: will pad with null word - * k = i + window + 1 # past document end OK: will pad with null word # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":632 + * for i in range(c.document_len): + * j = i - c.window # negative OK: will pad with null word + * k = i + c.window + 1 # past document end OK: will pad with null word # <<<<<<<<<<<<<< * * # compose l1 & clear work */ - __pyx_v_k = ((__pyx_v_i + __pyx_v_window) + 1); + __pyx_v_k = ((__pyx_v_i + __pyx_v_c.window) + 1); - /* "gensim/models/doc2vec_inner.pyx":761 + /* "gensim/models/doc2vec_inner.pyx":635 * * # compose l1 & clear work - * for m in range(doctag_len): # <<<<<<<<<<<<<< + * for m in range(c.doctag_len): # <<<<<<<<<<<<<< * # doc vector(s) - * memcpy(&_neu1[m * vector_size], &_doctag_vectors[_doctag_indexes[m] * vector_size], + * memcpy(&c.neu1[m * c.vector_size], &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], */ - __pyx_t_17 = __pyx_v_doctag_len; - __pyx_t_18 = __pyx_t_17; - for (__pyx_t_19 = 0; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) { - __pyx_v_m = __pyx_t_19; + __pyx_t_16 = __pyx_v_c.doctag_len; + __pyx_t_17 = __pyx_t_16; + for (__pyx_t_18 = 0; __pyx_t_18 < __pyx_t_17; __pyx_t_18+=1) { + __pyx_v_m = __pyx_t_18; - /* "gensim/models/doc2vec_inner.pyx":763 - * for m in range(doctag_len): + /* "gensim/models/doc2vec_inner.pyx":637 + * for m in range(c.doctag_len): * # doc vector(s) - * memcpy(&_neu1[m * vector_size], &_doctag_vectors[_doctag_indexes[m] * vector_size], # <<<<<<<<<<<<<< - * vector_size * cython.sizeof(REAL_t)) + * memcpy(&c.neu1[m * c.vector_size], &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], # <<<<<<<<<<<<<< + * c.vector_size * cython.sizeof(REAL_t)) * n = 0 */ - (void)(memcpy((&(__pyx_v__neu1[(__pyx_v_m * __pyx_v_vector_size)])), (&(__pyx_v__doctag_vectors[((__pyx_v__doctag_indexes[__pyx_v_m]) * __pyx_v_vector_size)])), (__pyx_v_vector_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + 
(void)(memcpy((&(__pyx_v_c.neu1[(__pyx_v_m * __pyx_v_c.vector_size)])), (&(__pyx_v_c.doctag_vectors[((__pyx_v_c.doctag_indexes[__pyx_v_m]) * __pyx_v_c.vector_size)])), (__pyx_v_c.vector_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); } - /* "gensim/models/doc2vec_inner.pyx":765 - * memcpy(&_neu1[m * vector_size], &_doctag_vectors[_doctag_indexes[m] * vector_size], - * vector_size * cython.sizeof(REAL_t)) + /* "gensim/models/doc2vec_inner.pyx":639 + * memcpy(&c.neu1[m * c.vector_size], &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], + * c.vector_size * cython.sizeof(REAL_t)) * n = 0 # <<<<<<<<<<<<<< * for m in range(j, k): * # word vectors in window */ __pyx_v_n = 0; - /* "gensim/models/doc2vec_inner.pyx":766 - * vector_size * cython.sizeof(REAL_t)) + /* "gensim/models/doc2vec_inner.pyx":640 + * c.vector_size * cython.sizeof(REAL_t)) * n = 0 * for m in range(j, k): # <<<<<<<<<<<<<< * # word vectors in window * if m == i: */ - __pyx_t_17 = __pyx_v_k; - __pyx_t_18 = __pyx_t_17; - for (__pyx_t_19 = __pyx_v_j; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) { - __pyx_v_m = __pyx_t_19; + __pyx_t_16 = __pyx_v_k; + __pyx_t_17 = __pyx_t_16; + for (__pyx_t_18 = __pyx_v_j; __pyx_t_18 < __pyx_t_17; __pyx_t_18+=1) { + __pyx_v_m = __pyx_t_18; - /* "gensim/models/doc2vec_inner.pyx":768 + /* "gensim/models/doc2vec_inner.pyx":642 * for m in range(j, k): * # word vectors in window * if m == i: # <<<<<<<<<<<<<< * continue - * if m < 0 or m >= document_len: + * if m < 0 or m >= c.document_len: */ - __pyx_t_9 = ((__pyx_v_m == __pyx_v_i) != 0); - if (__pyx_t_9) { + __pyx_t_10 = ((__pyx_v_m == __pyx_v_i) != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":769 + /* "gensim/models/doc2vec_inner.pyx":643 * # word vectors in window * if m == i: * continue # <<<<<<<<<<<<<< - * if m < 0 or m >= document_len: - * window_indexes[n] = null_word_index + * if m < 0 or m >= c.document_len: + * c.window_indexes[n] = c.null_word_index */ - goto __pyx_L32_continue; + goto __pyx_L21_continue; - /* "gensim/models/doc2vec_inner.pyx":768 + /* "gensim/models/doc2vec_inner.pyx":642 * for m in range(j, k): * # word vectors in window * if m == i: # <<<<<<<<<<<<<< * continue - * if m < 0 or m >= document_len: + * if m < 0 or m >= c.document_len: */ } - /* "gensim/models/doc2vec_inner.pyx":770 + /* "gensim/models/doc2vec_inner.pyx":644 * if m == i: * continue - * if m < 0 or m >= document_len: # <<<<<<<<<<<<<< - * window_indexes[n] = null_word_index + * if m < 0 or m >= c.document_len: # <<<<<<<<<<<<<< + * c.window_indexes[n] = c.null_word_index * else: */ - __pyx_t_8 = ((__pyx_v_m < 0) != 0); - if (!__pyx_t_8) { + __pyx_t_6 = ((__pyx_v_m < 0) != 0); + if (!__pyx_t_6) { } else { - __pyx_t_9 = __pyx_t_8; - goto __pyx_L36_bool_binop_done; + __pyx_t_10 = __pyx_t_6; + goto __pyx_L25_bool_binop_done; } - __pyx_t_8 = ((__pyx_v_m >= __pyx_v_document_len) != 0); - __pyx_t_9 = __pyx_t_8; - __pyx_L36_bool_binop_done:; - if (__pyx_t_9) { + __pyx_t_6 = ((__pyx_v_m >= __pyx_v_c.document_len) != 0); + __pyx_t_10 = __pyx_t_6; + __pyx_L25_bool_binop_done:; + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":771 + /* "gensim/models/doc2vec_inner.pyx":645 * continue - * if m < 0 or m >= document_len: - * window_indexes[n] = null_word_index # <<<<<<<<<<<<<< + * if m < 0 or m >= c.document_len: + * c.window_indexes[n] = c.null_word_index # <<<<<<<<<<<<<< * else: - * window_indexes[n] = indexes[m] + * c.window_indexes[n] = c.indexes[m] */ - (__pyx_v_window_indexes[__pyx_v_n]) = __pyx_v_null_word_index; + __pyx_t_19 
= __pyx_v_c.null_word_index; + (__pyx_v_c.window_indexes[__pyx_v_n]) = __pyx_t_19; - /* "gensim/models/doc2vec_inner.pyx":770 + /* "gensim/models/doc2vec_inner.pyx":644 * if m == i: * continue - * if m < 0 or m >= document_len: # <<<<<<<<<<<<<< - * window_indexes[n] = null_word_index + * if m < 0 or m >= c.document_len: # <<<<<<<<<<<<<< + * c.window_indexes[n] = c.null_word_index * else: */ - goto __pyx_L35; + goto __pyx_L24; } - /* "gensim/models/doc2vec_inner.pyx":773 - * window_indexes[n] = null_word_index + /* "gensim/models/doc2vec_inner.pyx":647 + * c.window_indexes[n] = c.null_word_index * else: - * window_indexes[n] = indexes[m] # <<<<<<<<<<<<<< + * c.window_indexes[n] = c.indexes[m] # <<<<<<<<<<<<<< * n += 1 - * for m in range(2 * window): + * for m in range(2 * c.window): */ /*else*/ { - (__pyx_v_window_indexes[__pyx_v_n]) = (__pyx_v_indexes[__pyx_v_m]); + (__pyx_v_c.window_indexes[__pyx_v_n]) = (__pyx_v_c.indexes[__pyx_v_m]); } - __pyx_L35:; + __pyx_L24:; - /* "gensim/models/doc2vec_inner.pyx":774 + /* "gensim/models/doc2vec_inner.pyx":648 * else: - * window_indexes[n] = indexes[m] + * c.window_indexes[n] = c.indexes[m] * n += 1 # <<<<<<<<<<<<<< - * for m in range(2 * window): - * memcpy(&_neu1[(doctag_len + m) * vector_size], &_word_vectors[window_indexes[m] * vector_size], + * for m in range(2 * c.window): + * memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], */ __pyx_v_n = (__pyx_v_n + 1); - __pyx_L32_continue:; + __pyx_L21_continue:; } - /* "gensim/models/doc2vec_inner.pyx":775 - * window_indexes[n] = indexes[m] + /* "gensim/models/doc2vec_inner.pyx":649 + * c.window_indexes[n] = c.indexes[m] * n += 1 - * for m in range(2 * window): # <<<<<<<<<<<<<< - * memcpy(&_neu1[(doctag_len + m) * vector_size], &_word_vectors[window_indexes[m] * vector_size], - * vector_size * cython.sizeof(REAL_t)) + * for m in range(2 * c.window): # <<<<<<<<<<<<<< + * memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], + * c.vector_size * cython.sizeof(REAL_t)) */ - __pyx_t_6 = (2 * __pyx_v_window); - __pyx_t_20 = __pyx_t_6; - for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_20; __pyx_t_17+=1) { - __pyx_v_m = __pyx_t_17; + __pyx_t_4 = (2 * __pyx_v_c.window); + __pyx_t_20 = __pyx_t_4; + for (__pyx_t_16 = 0; __pyx_t_16 < __pyx_t_20; __pyx_t_16+=1) { + __pyx_v_m = __pyx_t_16; - /* "gensim/models/doc2vec_inner.pyx":776 + /* "gensim/models/doc2vec_inner.pyx":650 * n += 1 - * for m in range(2 * window): - * memcpy(&_neu1[(doctag_len + m) * vector_size], &_word_vectors[window_indexes[m] * vector_size], # <<<<<<<<<<<<<< - * vector_size * cython.sizeof(REAL_t)) - * memset(_work, 0, layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + * for m in range(2 * c.window): + * memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], # <<<<<<<<<<<<<< + * c.vector_size * cython.sizeof(REAL_t)) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error */ - (void)(memcpy((&(__pyx_v__neu1[((__pyx_v_doctag_len + __pyx_v_m) * __pyx_v_vector_size)])), (&(__pyx_v__word_vectors[((__pyx_v_window_indexes[__pyx_v_m]) * __pyx_v_vector_size)])), (__pyx_v_vector_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + (void)(memcpy((&(__pyx_v_c.neu1[((__pyx_v_c.doctag_len + __pyx_v_m) * __pyx_v_c.vector_size)])), (&(__pyx_v_c.word_vectors[((__pyx_v_c.window_indexes[__pyx_v_m]) * __pyx_v_c.vector_size)])), 
(__pyx_v_c.vector_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); } - /* "gensim/models/doc2vec_inner.pyx":778 - * memcpy(&_neu1[(doctag_len + m) * vector_size], &_word_vectors[window_indexes[m] * vector_size], - * vector_size * cython.sizeof(REAL_t)) - * memset(_work, 0, layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":652 + * memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], + * c.vector_size * cython.sizeof(REAL_t)) + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error # <<<<<<<<<<<<<< * - * if hs: + * if c.hs: */ - (void)(memset(__pyx_v__work, 0, (__pyx_v_layer1_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); + (void)(memset(__pyx_v_c.work, 0, (__pyx_v_c.layer1_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/doc2vec_inner.pyx":780 - * memset(_work, 0, layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + /* "gensim/models/doc2vec_inner.pyx":654 + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error * - * if hs: # <<<<<<<<<<<<<< - * fast_document_dmc_hs(points[i], codes[i], codelens[i], - * _neu1, syn1, _alpha, _work, + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dmc_hs(c.points[i], c.codes[i], c.codelens[i], + * c.neu1, c.syn1, c.alpha, c.work, */ - __pyx_t_9 = (__pyx_v_hs != 0); - if (__pyx_t_9) { + __pyx_t_10 = (__pyx_v_c.hs != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":781 + /* "gensim/models/doc2vec_inner.pyx":655 * - * if hs: - * fast_document_dmc_hs(points[i], codes[i], codelens[i], # <<<<<<<<<<<<<< - * _neu1, syn1, _alpha, _work, - * layer1_size, vector_size, _learn_hidden) + * if c.hs: + * fast_document_dmc_hs(c.points[i], c.codes[i], c.codelens[i], # <<<<<<<<<<<<<< + * c.neu1, c.syn1, c.alpha, c.work, + * c.layer1_size, c.vector_size, c.learn_hidden) */ - __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), (__pyx_v_codelens[__pyx_v_i]), __pyx_v__neu1, __pyx_v_syn1, __pyx_v__alpha, __pyx_v__work, __pyx_v_layer1_size, __pyx_v_vector_size, __pyx_v__learn_hidden); + __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.neu1, __pyx_v_c.syn1, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.layer1_size, __pyx_v_c.vector_size, __pyx_v_c.learn_hidden); - /* "gensim/models/doc2vec_inner.pyx":780 - * memset(_work, 0, layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + /* "gensim/models/doc2vec_inner.pyx":654 + * memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error * - * if hs: # <<<<<<<<<<<<<< - * fast_document_dmc_hs(points[i], codes[i], codelens[i], - * _neu1, syn1, _alpha, _work, + * if c.hs: # <<<<<<<<<<<<<< + * fast_document_dmc_hs(c.points[i], c.codes[i], c.codelens[i], + * c.neu1, c.syn1, c.alpha, c.work, */ } - /* "gensim/models/doc2vec_inner.pyx":784 - * _neu1, syn1, _alpha, _work, - * layer1_size, vector_size, _learn_hidden) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_document_dmc_neg(negative, cum_table, cum_table_len, next_random, - * _neu1, syn1neg, indexes[i], _alpha, _work, + /* "gensim/models/doc2vec_inner.pyx":658 + * c.neu1, c.syn1, c.alpha, c.work, + * c.layer1_size, c.vector_size, c.learn_hidden) + * if c.negative: # 
<<<<<<<<<<<<<< + * c.next_random = fast_document_dmc_neg(c.negative, c.cum_table, c.cum_table_len, c.next_random, + * c.neu1, c.syn1neg, c.indexes[i], c.alpha, c.work, */ - __pyx_t_9 = (__pyx_v_negative != 0); - if (__pyx_t_9) { + __pyx_t_10 = (__pyx_v_c.negative != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":785 - * layer1_size, vector_size, _learn_hidden) - * if negative: - * next_random = fast_document_dmc_neg(negative, cum_table, cum_table_len, next_random, # <<<<<<<<<<<<<< - * _neu1, syn1neg, indexes[i], _alpha, _work, - * layer1_size, vector_size, _learn_hidden) + /* "gensim/models/doc2vec_inner.pyx":659 + * c.layer1_size, c.vector_size, c.learn_hidden) + * if c.negative: + * c.next_random = fast_document_dmc_neg(c.negative, c.cum_table, c.cum_table_len, c.next_random, # <<<<<<<<<<<<<< + * c.neu1, c.syn1neg, c.indexes[i], c.alpha, c.work, + * c.layer1_size, c.vector_size, c.learn_hidden) */ - __pyx_v_next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_neg(__pyx_v_negative, __pyx_v_cum_table, __pyx_v_cum_table_len, __pyx_v_next_random, __pyx_v__neu1, __pyx_v_syn1neg, (__pyx_v_indexes[__pyx_v_i]), __pyx_v__alpha, __pyx_v__work, __pyx_v_layer1_size, __pyx_v_vector_size, __pyx_v__learn_hidden); + __pyx_v_c.next_random = __pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.next_random, __pyx_v_c.neu1, __pyx_v_c.syn1neg, (__pyx_v_c.indexes[__pyx_v_i]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.layer1_size, __pyx_v_c.vector_size, __pyx_v_c.learn_hidden); - /* "gensim/models/doc2vec_inner.pyx":784 - * _neu1, syn1, _alpha, _work, - * layer1_size, vector_size, _learn_hidden) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_document_dmc_neg(negative, cum_table, cum_table_len, next_random, - * _neu1, syn1neg, indexes[i], _alpha, _work, + /* "gensim/models/doc2vec_inner.pyx":658 + * c.neu1, c.syn1, c.alpha, c.work, + * c.layer1_size, c.vector_size, c.learn_hidden) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fast_document_dmc_neg(c.negative, c.cum_table, c.cum_table_len, c.next_random, + * c.neu1, c.syn1neg, c.indexes[i], c.alpha, c.work, */ } - /* "gensim/models/doc2vec_inner.pyx":789 - * layer1_size, vector_size, _learn_hidden) + /* "gensim/models/doc2vec_inner.pyx":663 + * c.layer1_size, c.vector_size, c.learn_hidden) * - * if _learn_doctags: # <<<<<<<<<<<<<< - * for m in range(doctag_len): - * our_saxpy(&vector_size, &_doctag_locks[_doctag_indexes[m]], &_work[m * vector_size], + * if c.learn_doctags: # <<<<<<<<<<<<<< + * for m in range(c.doctag_len): + * our_saxpy(&c.vector_size, &c.doctag_locks[c.doctag_indexes[m]], &c.work[m * c.vector_size], */ - __pyx_t_9 = (__pyx_v__learn_doctags != 0); - if (__pyx_t_9) { + __pyx_t_10 = (__pyx_v_c.learn_doctags != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":790 + /* "gensim/models/doc2vec_inner.pyx":664 * - * if _learn_doctags: - * for m in range(doctag_len): # <<<<<<<<<<<<<< - * our_saxpy(&vector_size, &_doctag_locks[_doctag_indexes[m]], &_work[m * vector_size], - * &ONE, &_doctag_vectors[_doctag_indexes[m] * vector_size], &ONE) + * if c.learn_doctags: + * for m in range(c.doctag_len): # <<<<<<<<<<<<<< + * our_saxpy(&c.vector_size, &c.doctag_locks[c.doctag_indexes[m]], &c.work[m * c.vector_size], + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], &ONE) */ - __pyx_t_17 = __pyx_v_doctag_len; - __pyx_t_18 = __pyx_t_17; - for (__pyx_t_19 = 0; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) 
{ - __pyx_v_m = __pyx_t_19; + __pyx_t_16 = __pyx_v_c.doctag_len; + __pyx_t_17 = __pyx_t_16; + for (__pyx_t_18 = 0; __pyx_t_18 < __pyx_t_17; __pyx_t_18+=1) { + __pyx_v_m = __pyx_t_18; - /* "gensim/models/doc2vec_inner.pyx":791 - * if _learn_doctags: - * for m in range(doctag_len): - * our_saxpy(&vector_size, &_doctag_locks[_doctag_indexes[m]], &_work[m * vector_size], # <<<<<<<<<<<<<< - * &ONE, &_doctag_vectors[_doctag_indexes[m] * vector_size], &ONE) - * if _learn_words: + /* "gensim/models/doc2vec_inner.pyx":665 + * if c.learn_doctags: + * for m in range(c.doctag_len): + * our_saxpy(&c.vector_size, &c.doctag_locks[c.doctag_indexes[m]], &c.work[m * c.vector_size], # <<<<<<<<<<<<<< + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], &ONE) + * if c.learn_words: */ - __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_vector_size), (&(__pyx_v__doctag_locks[(__pyx_v__doctag_indexes[__pyx_v_m])])), (&(__pyx_v__work[(__pyx_v_m * __pyx_v_vector_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), (&(__pyx_v__doctag_vectors[((__pyx_v__doctag_indexes[__pyx_v_m]) * __pyx_v_vector_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.vector_size), (&(__pyx_v_c.doctag_locks[(__pyx_v_c.doctag_indexes[__pyx_v_m])])), (&(__pyx_v_c.work[(__pyx_v_m * __pyx_v_c.vector_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), (&(__pyx_v_c.doctag_vectors[((__pyx_v_c.doctag_indexes[__pyx_v_m]) * __pyx_v_c.vector_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); } - /* "gensim/models/doc2vec_inner.pyx":789 - * layer1_size, vector_size, _learn_hidden) + /* "gensim/models/doc2vec_inner.pyx":663 + * c.layer1_size, c.vector_size, c.learn_hidden) * - * if _learn_doctags: # <<<<<<<<<<<<<< - * for m in range(doctag_len): - * our_saxpy(&vector_size, &_doctag_locks[_doctag_indexes[m]], &_work[m * vector_size], + * if c.learn_doctags: # <<<<<<<<<<<<<< + * for m in range(c.doctag_len): + * our_saxpy(&c.vector_size, &c.doctag_locks[c.doctag_indexes[m]], &c.work[m * c.vector_size], */ } - /* "gensim/models/doc2vec_inner.pyx":793 - * our_saxpy(&vector_size, &_doctag_locks[_doctag_indexes[m]], &_work[m * vector_size], - * &ONE, &_doctag_vectors[_doctag_indexes[m] * vector_size], &ONE) - * if _learn_words: # <<<<<<<<<<<<<< - * for m in range(2 * window): - * our_saxpy(&vector_size, &_word_locks[window_indexes[m]], &_work[(doctag_len + m) * vector_size], + /* "gensim/models/doc2vec_inner.pyx":667 + * our_saxpy(&c.vector_size, &c.doctag_locks[c.doctag_indexes[m]], &c.work[m * c.vector_size], + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], &ONE) + * if c.learn_words: # <<<<<<<<<<<<<< + * for m in range(2 * c.window): + * our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], */ - __pyx_t_9 = (__pyx_v__learn_words != 0); - if (__pyx_t_9) { + __pyx_t_10 = (__pyx_v_c.learn_words != 0); + if (__pyx_t_10) { - /* "gensim/models/doc2vec_inner.pyx":794 - * &ONE, &_doctag_vectors[_doctag_indexes[m] * vector_size], &ONE) - * if _learn_words: - * for m in range(2 * window): # <<<<<<<<<<<<<< - * our_saxpy(&vector_size, &_word_locks[window_indexes[m]], &_work[(doctag_len + m) * vector_size], - * &ONE, &_word_vectors[window_indexes[m] * vector_size], &ONE) + /* "gensim/models/doc2vec_inner.pyx":668 + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], &ONE) + * if c.learn_words: + * for m in range(2 * c.window): # <<<<<<<<<<<<<< + * our_saxpy(&c.vector_size, 
&c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], + * &ONE, &c.word_vectors[c.window_indexes[m] * c.vector_size], &ONE) */ - __pyx_t_6 = (2 * __pyx_v_window); - __pyx_t_20 = __pyx_t_6; - for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_20; __pyx_t_17+=1) { - __pyx_v_m = __pyx_t_17; + __pyx_t_4 = (2 * __pyx_v_c.window); + __pyx_t_20 = __pyx_t_4; + for (__pyx_t_16 = 0; __pyx_t_16 < __pyx_t_20; __pyx_t_16+=1) { + __pyx_v_m = __pyx_t_16; - /* "gensim/models/doc2vec_inner.pyx":795 - * if _learn_words: - * for m in range(2 * window): - * our_saxpy(&vector_size, &_word_locks[window_indexes[m]], &_work[(doctag_len + m) * vector_size], # <<<<<<<<<<<<<< - * &ONE, &_word_vectors[window_indexes[m] * vector_size], &ONE) + /* "gensim/models/doc2vec_inner.pyx":669 + * if c.learn_words: + * for m in range(2 * c.window): + * our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], # <<<<<<<<<<<<<< + * &ONE, &c.word_vectors[c.window_indexes[m] * c.vector_size], &ONE) * */ - __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_vector_size), (&(__pyx_v__word_locks[(__pyx_v_window_indexes[__pyx_v_m])])), (&(__pyx_v__work[((__pyx_v_doctag_len + __pyx_v_m) * __pyx_v_vector_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), (&(__pyx_v__word_vectors[((__pyx_v_window_indexes[__pyx_v_m]) * __pyx_v_vector_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); + __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_c.vector_size), (&(__pyx_v_c.word_locks[(__pyx_v_c.window_indexes[__pyx_v_m])])), (&(__pyx_v_c.work[((__pyx_v_c.doctag_len + __pyx_v_m) * __pyx_v_c.vector_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE), (&(__pyx_v_c.word_vectors[((__pyx_v_c.window_indexes[__pyx_v_m]) * __pyx_v_c.vector_size)])), (&__pyx_v_6gensim_6models_13doc2vec_inner_ONE)); } - /* "gensim/models/doc2vec_inner.pyx":793 - * our_saxpy(&vector_size, &_doctag_locks[_doctag_indexes[m]], &_work[m * vector_size], - * &ONE, &_doctag_vectors[_doctag_indexes[m] * vector_size], &ONE) - * if _learn_words: # <<<<<<<<<<<<<< - * for m in range(2 * window): - * our_saxpy(&vector_size, &_word_locks[window_indexes[m]], &_work[(doctag_len + m) * vector_size], + /* "gensim/models/doc2vec_inner.pyx":667 + * our_saxpy(&c.vector_size, &c.doctag_locks[c.doctag_indexes[m]], &c.work[m * c.vector_size], + * &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], &ONE) + * if c.learn_words: # <<<<<<<<<<<<<< + * for m in range(2 * c.window): + * our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], */ } } } - /* "gensim/models/doc2vec_inner.pyx":755 + /* "gensim/models/doc2vec_inner.pyx":629 * * # release GIL & train on the document * with nogil: # <<<<<<<<<<<<<< - * for i in range(document_len): - * j = i - window # negative OK: will pad with null word + * for i in range(c.document_len): + * j = i - c.window # negative OK: will pad with null word */ /*finally:*/ { /*normal exit:*/{ @@ -8644,25 +7710,25 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_con __Pyx_FastGIL_Forget(); Py_BLOCK_THREADS #endif - goto __pyx_L27; + goto __pyx_L16; } - __pyx_L27:; + __pyx_L16:; } } - /* "gensim/models/doc2vec_inner.pyx":798 - * &ONE, &_word_vectors[window_indexes[m] * vector_size], &ONE) + /* "gensim/models/doc2vec_inner.pyx":672 + * &ONE, &c.word_vectors[c.window_indexes[m] * c.vector_size], &ONE) * * return result # <<<<<<<<<<<<<< */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = 
__Pyx_PyInt_From_long(__pyx_v_result); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 798, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = __Pyx_PyInt_From_long(__pyx_v_result); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 672, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; goto __pyx_L0; - /* "gensim/models/doc2vec_inner.pyx":606 + /* "gensim/models/doc2vec_inner.pyx":544 * * * def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< @@ -8673,27 +7739,21 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_con /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_10); - __Pyx_XDECREF(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_11); __Pyx_AddTraceback("gensim.models.doc2vec_inner.train_document_dm_concat", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_vlookup); __Pyx_XDECREF(__pyx_v_token); __Pyx_XDECREF(__pyx_v_predict_word); - __Pyx_XDECREF(__pyx_v_work); - __Pyx_XDECREF(__pyx_v_neu1); - __Pyx_XDECREF(__pyx_v_word_vectors); - __Pyx_XDECREF(__pyx_v_word_locks); - __Pyx_XDECREF(__pyx_v_doctag_vectors); - __Pyx_XDECREF(__pyx_v_doctag_locks); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -8741,7 +7801,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -8750,7 +7810,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -8759,7 +7819,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -8768,7 +7828,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8782,7 +7842,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -8793,7 +7853,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8802,20 +7862,20 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 229, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 229, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 229, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8824,7 +7884,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8838,7 +7898,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -8849,7 +7909,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8858,20 +7918,20 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< * * info.buf = PyArray_DATA(self) */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 233, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 233, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 233, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8880,7 +7940,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -8889,7 +7949,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim = ndim # <<<<<<<<<<<<<< @@ -8898,7 +7958,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8908,7 +7968,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. 
* info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -8917,7 +7977,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -8926,7 +7986,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -8938,7 +7998,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -8947,7 +8007,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -8957,7 +8017,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8967,7 +8027,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -8977,7 +8037,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -8988,7 +8048,7 @@ 
static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -8997,7 +8057,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -9006,7 +8066,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -9015,7 +8075,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -9024,7 +8084,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -9036,7 +8096,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -9049,7 +8109,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -9059,7 +8119,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -9069,7 +8129,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject 
*__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9089,7 +8149,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -9106,7 +8166,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9115,20 +8175,20 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 263, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 263, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 263, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9137,7 +8197,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -9149,7 +8209,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # 
<<<<<<<<<<<<<< @@ -9160,7 +8220,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -9171,7 +8231,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -9182,7 +8242,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -9193,7 +8253,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -9204,7 +8264,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -9215,7 +8275,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -9226,7 +8286,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -9237,7 +8297,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -9248,7 +8308,7 @@ static int 
__pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -9259,7 +8319,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"f"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< @@ -9270,7 +8330,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -9281,7 +8341,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -9292,7 +8352,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -9303,7 +8363,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -9314,7 +8374,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -9326,7 +8386,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -9347,7 +8407,7 @@ static 
int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -9356,7 +8416,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -9366,7 +8426,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -9375,7 +8435,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -9385,7 +8445,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -9394,7 +8454,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -9403,7 +8463,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -9413,7 +8473,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -9423,7 +8483,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -9455,7 +8515,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -9479,7 +8539,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -9489,7 +8549,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -9498,7 +8558,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -9507,7 +8567,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -9517,7 +8577,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -9526,7 +8586,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ 
PyObject_Free(__pyx_v_info->strides); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -9535,7 +8595,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -9547,7 +8607,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -9561,7 +8621,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -9575,7 +8635,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -9594,7 +8654,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -9608,7 +8668,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -9622,7 +8682,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -9641,7 +8701,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -9655,7 +8715,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -9669,7 +8729,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -9688,7 +8748,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -9702,7 +8762,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -9716,7 +8776,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -9735,7 +8795,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -9749,7 +8809,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -9763,7 +8823,7 @@ static 
CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -9782,7 +8842,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -9796,7 +8856,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -9806,7 +8866,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -9818,7 +8878,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -9827,7 +8887,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -9841,7 +8901,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -9856,7 +8916,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -9885,7 +8945,7 @@ static CYTHON_INLINE char 
*__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -9894,7 +8954,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_endian_detector = 1; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -9903,7 +8963,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -9926,7 +8986,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -9943,7 +9003,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -9978,7 +9038,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -9995,20 +9055,20 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< * * if ((child.byteorder == c'>' and little_endian) or */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 810, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 810, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 810, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -10017,7 +9077,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -10037,7 +9097,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -10054,7 +9114,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -10063,20 +9123,20 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< * # One could encode it in the format string and have Cython * # complain instead, BUT: < and > in format strings also imply */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 814, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 814, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 814, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -10085,7 +9145,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * 
while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -10101,7 +9161,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -10110,7 +9170,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -10119,7 +9179,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -10130,7 +9190,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -10140,7 +9200,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -10150,7 +9210,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -10162,7 +9222,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -10172,20 +9232,20 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise 
RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< * * # Until ticket #99 is fixed, use integers to avoid warnings */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__13, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 834, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 834, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(1, 834, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -10194,7 +9254,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -10212,7 +9272,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -10230,7 +9290,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -10248,7 +9308,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -10266,7 +9326,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -10284,7 +9344,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -10302,7 +9362,7 @@ static CYTHON_INLINE char 
*__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -10320,7 +9380,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -10338,7 +9398,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -10356,7 +9416,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -10374,7 +9434,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -10392,7 +9452,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -10410,7 +9470,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -10428,7 +9488,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # 
<<<<<<<<<<<<<< @@ -10448,7 +9508,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -10468,7 +9528,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -10488,7 +9548,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -10506,7 +9566,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -10525,7 +9585,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -10534,7 +9594,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -10544,7 +9604,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -10557,7 +9617,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef 
tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -10567,7 +9627,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = _util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -10577,7 +9637,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -10602,7 +9662,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -10617,7 +9677,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -10628,7 +9688,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -10637,7 +9697,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -10647,7 +9707,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -10657,7 +9717,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base # <<<<<<<<<<<<<< @@ -10668,7 +9728,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! * baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -10677,7 +9737,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -10686,7 +9746,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -10698,7 +9758,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -10712,7 +9772,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -10722,7 +9782,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -10733,7 +9793,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -10742,7 +9802,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -10756,7 +9816,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -10771,7 +9831,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -10792,7 +9852,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10808,7 +9868,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -10817,7 +9877,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. 
* cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10831,7 +9891,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -10846,14 +9906,14 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< * * cdef inline int import_umath() except -1: */ - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__14, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1000, __pyx_L5_except_error) + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1000, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_Raise(__pyx_t_8, 0, 0, 0); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; @@ -10862,7 +9922,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10877,7 +9937,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. 
* cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -10900,7 +9960,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -10921,7 +9981,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -10937,7 +9997,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -10946,7 +10006,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -10960,7 +10020,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -10975,14 +10035,14 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< * * cdef inline int import_ufunc() except -1: */ - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1006, __pyx_L5_except_error) + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1006, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_Raise(__pyx_t_8, 0, 0, 0); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; @@ -10991,7 +10051,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -11006,7 +10066,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -11029,7 +10089,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -11050,7 +10110,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -11066,7 +10126,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -11075,7 +10135,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -11089,7 +10149,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -11103,12 +10163,12 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< */ - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__16, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1012, __pyx_L5_except_error) + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1012, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_Raise(__pyx_t_8, 0, 0, 0); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; @@ -11117,7 +10177,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -11132,7 +10192,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -11192,6 +10252,7 @@ static struct PyModuleDef __pyx_moduledef = { #endif static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_, __pyx_k_, sizeof(__pyx_k_), 0, 0, 1, 0}, {&__pyx_kp_u_Format_string_allocated_too_shor, __pyx_k_Format_string_allocated_too_shor, sizeof(__pyx_k_Format_string_allocated_too_shor), 0, 1, 0, 0}, {&__pyx_kp_u_Format_string_allocated_too_shor_2, __pyx_k_Format_string_allocated_too_shor_2, sizeof(__pyx_k_Format_string_allocated_too_shor_2), 0, 1, 0, 0}, {&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1}, @@ -11199,32 +10260,22 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_REAL, __pyx_k_REAL, sizeof(__pyx_k_REAL), 0, 0, 1, 1}, {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1}, - {&__pyx_n_s__17, __pyx_k__17, sizeof(__pyx_k__17), 0, 0, 1, 1}, - {&__pyx_kp_s__5, __pyx_k__5, sizeof(__pyx_k__5), 0, 0, 1, 0}, + {&__pyx_n_s__13, __pyx_k__13, sizeof(__pyx_k__13), 0, 0, 1, 1}, {&__pyx_n_s_alpha, __pyx_k_alpha, sizeof(__pyx_k_alpha), 0, 0, 1, 1}, - {&__pyx_n_s_alpha_2, __pyx_k_alpha_2, sizeof(__pyx_k_alpha_2), 0, 0, 1, 1}, + {&__pyx_n_s_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 0, 1, 1}, {&__pyx_n_s_cbow_mean, __pyx_k_cbow_mean, sizeof(__pyx_k_cbow_mean), 0, 0, 1, 1}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 0, 1, 1}, - {&__pyx_n_s_codelens, __pyx_k_codelens, sizeof(__pyx_k_codelens), 0, 0, 1, 1}, - {&__pyx_n_s_codes, __pyx_k_codes, sizeof(__pyx_k_codes), 0, 0, 1, 1}, {&__pyx_n_s_count, __pyx_k_count, sizeof(__pyx_k_count), 0, 0, 1, 1}, {&__pyx_n_s_cum_table, __pyx_k_cum_table, sizeof(__pyx_k_cum_table), 0, 0, 1, 1}, - {&__pyx_n_s_cum_table_len, __pyx_k_cum_table_len, sizeof(__pyx_k_cum_table_len), 0, 0, 1, 1}, {&__pyx_n_s_dm_tag_count, __pyx_k_dm_tag_count, sizeof(__pyx_k_dm_tag_count), 0, 0, 1, 1}, {&__pyx_n_s_doc_words, __pyx_k_doc_words, sizeof(__pyx_k_doc_words), 0, 0, 1, 1}, {&__pyx_n_s_doctag_indexes, __pyx_k_doctag_indexes, sizeof(__pyx_k_doctag_indexes), 0, 0, 1, 1}, - {&__pyx_n_s_doctag_indexes_2, __pyx_k_doctag_indexes_2, sizeof(__pyx_k_doctag_indexes_2), 0, 0, 1, 1}, - {&__pyx_n_s_doctag_len, __pyx_k_doctag_len, sizeof(__pyx_k_doctag_len), 0, 0, 1, 1}, {&__pyx_n_s_doctag_locks, __pyx_k_doctag_locks, sizeof(__pyx_k_doctag_locks), 0, 0, 1, 1}, - {&__pyx_n_s_doctag_locks_2, __pyx_k_doctag_locks_2, sizeof(__pyx_k_doctag_locks_2), 0, 0, 1, 1}, {&__pyx_n_s_doctag_vectors, __pyx_k_doctag_vectors, sizeof(__pyx_k_doctag_vectors), 0, 0, 1, 1}, - {&__pyx_n_s_doctag_vectors_2, __pyx_k_doctag_vectors_2, sizeof(__pyx_k_doctag_vectors_2), 0, 0, 1, 1}, - {&__pyx_n_s_document_len, __pyx_k_document_len, sizeof(__pyx_k_document_len), 0, 0, 1, 1}, {&__pyx_n_s_docvecs, __pyx_k_docvecs, sizeof(__pyx_k_docvecs), 0, 0, 
1, 1}, {&__pyx_n_s_dtype, __pyx_k_dtype, sizeof(__pyx_k_dtype), 0, 0, 1, 1}, {&__pyx_n_s_enumerate, __pyx_k_enumerate, sizeof(__pyx_k_enumerate), 0, 0, 1, 1}, - {&__pyx_n_s_expected_doctag_len, __pyx_k_expected_doctag_len, sizeof(__pyx_k_expected_doctag_len), 0, 0, 1, 1}, {&__pyx_n_s_fblas, __pyx_k_fblas, sizeof(__pyx_k_fblas), 0, 0, 1, 1}, {&__pyx_n_s_float32, __pyx_k_float32, sizeof(__pyx_k_float32), 0, 0, 1, 1}, {&__pyx_n_s_gensim_models_doc2vec_inner, __pyx_k_gensim_models_doc2vec_inner, sizeof(__pyx_k_gensim_models_doc2vec_inner), 0, 0, 1, 1}, @@ -11233,18 +10284,14 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1}, - {&__pyx_n_s_indexes, __pyx_k_indexes, sizeof(__pyx_k_indexes), 0, 0, 1, 1}, {&__pyx_n_s_inv_count, __pyx_k_inv_count, sizeof(__pyx_k_inv_count), 0, 0, 1, 1}, {&__pyx_n_s_item, __pyx_k_item, sizeof(__pyx_k_item), 0, 0, 1, 1}, {&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1}, {&__pyx_n_s_k, __pyx_k_k, sizeof(__pyx_k_k), 0, 0, 1, 1}, {&__pyx_n_s_layer1_size, __pyx_k_layer1_size, sizeof(__pyx_k_layer1_size), 0, 0, 1, 1}, {&__pyx_n_s_learn_doctags, __pyx_k_learn_doctags, sizeof(__pyx_k_learn_doctags), 0, 0, 1, 1}, - {&__pyx_n_s_learn_doctags_2, __pyx_k_learn_doctags_2, sizeof(__pyx_k_learn_doctags_2), 0, 0, 1, 1}, {&__pyx_n_s_learn_hidden, __pyx_k_learn_hidden, sizeof(__pyx_k_learn_hidden), 0, 0, 1, 1}, - {&__pyx_n_s_learn_hidden_2, __pyx_k_learn_hidden_2, sizeof(__pyx_k_learn_hidden_2), 0, 0, 1, 1}, {&__pyx_n_s_learn_words, __pyx_k_learn_words, sizeof(__pyx_k_learn_words), 0, 0, 1, 1}, - {&__pyx_n_s_learn_words_2, __pyx_k_learn_words_2, sizeof(__pyx_k_learn_words_2), 0, 0, 1, 1}, {&__pyx_n_s_m, __pyx_k_m, sizeof(__pyx_k_m), 0, 0, 1, 1}, {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, {&__pyx_n_s_model, __pyx_k_model, sizeof(__pyx_k_model), 0, 0, 1, 1}, @@ -11253,26 +10300,19 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0}, {&__pyx_n_s_negative, __pyx_k_negative, sizeof(__pyx_k_negative), 0, 0, 1, 1}, {&__pyx_n_s_neu1, __pyx_k_neu1, sizeof(__pyx_k_neu1), 0, 0, 1, 1}, - {&__pyx_n_s_neu1_2, __pyx_k_neu1_2, sizeof(__pyx_k_neu1_2), 0, 0, 1, 1}, - {&__pyx_n_s_next_random, __pyx_k_next_random, sizeof(__pyx_k_next_random), 0, 0, 1, 1}, {&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1}, - {&__pyx_n_s_null_word_index, __pyx_k_null_word_index, sizeof(__pyx_k_null_word_index), 0, 0, 1, 1}, {&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1}, {&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0}, {&__pyx_kp_s_numpy_core_umath_failed_to_impor, __pyx_k_numpy_core_umath_failed_to_impor, sizeof(__pyx_k_numpy_core_umath_failed_to_impor), 0, 0, 1, 0}, {&__pyx_n_s_point, __pyx_k_point, sizeof(__pyx_k_point), 0, 0, 1, 1}, - {&__pyx_n_s_points, __pyx_k_points, sizeof(__pyx_k_points), 0, 0, 1, 1}, {&__pyx_n_s_predict_word, __pyx_k_predict_word, sizeof(__pyx_k_predict_word), 0, 0, 1, 1}, - {&__pyx_n_s_r, __pyx_k_r, sizeof(__pyx_k_r), 0, 0, 1, 1}, {&__pyx_n_s_randint, __pyx_k_randint, sizeof(__pyx_k_randint), 0, 0, 1, 1}, {&__pyx_n_s_random, __pyx_k_random, sizeof(__pyx_k_random), 0, 0, 1, 1}, 
{&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, - {&__pyx_n_s_reduced_windows, __pyx_k_reduced_windows, sizeof(__pyx_k_reduced_windows), 0, 0, 1, 1}, {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, {&__pyx_n_s_sample, __pyx_k_sample, sizeof(__pyx_k_sample), 0, 0, 1, 1}, {&__pyx_n_s_sample_int, __pyx_k_sample_int, sizeof(__pyx_k_sample_int), 0, 0, 1, 1}, {&__pyx_n_s_scipy_linalg_blas, __pyx_k_scipy_linalg_blas, sizeof(__pyx_k_scipy_linalg_blas), 0, 0, 1, 1}, - {&__pyx_n_s_size, __pyx_k_size, sizeof(__pyx_k_size), 0, 0, 1, 1}, {&__pyx_n_s_syn1, __pyx_k_syn1, sizeof(__pyx_k_syn1), 0, 0, 1, 1}, {&__pyx_n_s_syn1neg, __pyx_k_syn1neg, sizeof(__pyx_k_syn1neg), 0, 0, 1, 1}, {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, @@ -11281,7 +10321,6 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_train_document_dm, __pyx_k_train_document_dm, sizeof(__pyx_k_train_document_dm), 0, 0, 1, 1}, {&__pyx_n_s_train_document_dm_concat, __pyx_k_train_document_dm_concat, sizeof(__pyx_k_train_document_dm_concat), 0, 0, 1, 1}, {&__pyx_n_s_train_words, __pyx_k_train_words, sizeof(__pyx_k_train_words), 0, 0, 1, 1}, - {&__pyx_n_s_train_words_2, __pyx_k_train_words_2, sizeof(__pyx_k_train_words_2), 0, 0, 1, 1}, {&__pyx_n_s_trainables, __pyx_k_trainables, sizeof(__pyx_k_trainables), 0, 0, 1, 1}, {&__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_k_unknown_dtype_code_in_numpy_pxd, sizeof(__pyx_k_unknown_dtype_code_in_numpy_pxd), 0, 1, 0, 0}, {&__pyx_n_s_vector_size, __pyx_k_vector_size, sizeof(__pyx_k_vector_size), 0, 0, 1, 1}, @@ -11293,13 +10332,10 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_vocab, __pyx_k_vocab, sizeof(__pyx_k_vocab), 0, 0, 1, 1}, {&__pyx_n_s_vocabulary, __pyx_k_vocabulary, sizeof(__pyx_k_vocabulary), 0, 0, 1, 1}, {&__pyx_n_s_window, __pyx_k_window, sizeof(__pyx_k_window), 0, 0, 1, 1}, - {&__pyx_n_s_window_indexes, __pyx_k_window_indexes, sizeof(__pyx_k_window_indexes), 0, 0, 1, 1}, {&__pyx_n_s_word_locks, __pyx_k_word_locks, sizeof(__pyx_k_word_locks), 0, 0, 1, 1}, - {&__pyx_n_s_word_locks_2, __pyx_k_word_locks_2, sizeof(__pyx_k_word_locks_2), 0, 0, 1, 1}, {&__pyx_n_s_word_vectors, __pyx_k_word_vectors, sizeof(__pyx_k_word_vectors), 0, 0, 1, 1}, - {&__pyx_n_s_word_vectors_2, __pyx_k_word_vectors_2, sizeof(__pyx_k_word_vectors_2), 0, 0, 1, 1}, {&__pyx_n_s_work, __pyx_k_work, sizeof(__pyx_k_work), 0, 0, 1, 1}, - {&__pyx_n_s_work_2, __pyx_k_work_2, sizeof(__pyx_k_work_2), 0, 0, 1, 1}, + {&__pyx_n_s_workers, __pyx_k_workers, sizeof(__pyx_k_workers), 0, 0, 1, 1}, {&__pyx_n_s_wv, __pyx_k_wv, sizeof(__pyx_k_wv), 0, 0, 1, 1}, {&__pyx_n_s_zeros, __pyx_k_zeros, sizeof(__pyx_k_zeros), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} @@ -11319,180 +10355,152 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - /* "gensim/models/doc2vec_inner.pyx":331 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/doc2vec_inner.pyx":268 + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< * * # convert Python structures to primitive types, so we can release the GIL */ - __pyx_tuple_ = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if 
(unlikely(!__pyx_tuple_)) __PYX_ERR(0, 331, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple_); - __Pyx_GIVEREF(__pyx_tuple_); - __pyx_tuple__2 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 331, __pyx_L1_error) + __pyx_tuple__2 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__2); __Pyx_GIVEREF(__pyx_tuple__2); - - /* "gensim/models/doc2vec_inner.pyx":514 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< - * - * # convert Python structures to primitive types, so we can release the GIL - */ - __pyx_tuple__3 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 514, __pyx_L1_error) + __pyx_tuple__3 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3); - __pyx_tuple__4 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 514, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__4); - __Pyx_GIVEREF(__pyx_tuple__4); - - /* "gensim/models/doc2vec_inner.pyx":721 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< - * - * # convert Python structures to primitive types, so we can release the GIL - */ - __pyx_tuple__6 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__6); - __Pyx_GIVEREF(__pyx_tuple__6); - __pyx_tuple__7 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(0, 721, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__7); - __Pyx_GIVEREF(__pyx_tuple__7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) */ - __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 229, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__8); - __Pyx_GIVEREF(__pyx_tuple__8); + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< * * info.buf = PyArray_DATA(self) */ - __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 233, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__9); - __Pyx_GIVEREF(__pyx_tuple__9); + __pyx_tuple__5 = PyTuple_Pack(1, 
__pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" */ - __pyx_tuple__10 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(1, 263, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__10); - __Pyx_GIVEREF(__pyx_tuple__10); + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< * * if ((child.byteorder == c'>' and little_endian) or */ - __pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(1, 810, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__11); - __Pyx_GIVEREF(__pyx_tuple__11); + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 810, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< * # One could encode it in the format string and have Cython * # complain instead, BUT: < and > in format strings also imply */ - __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(1, 814, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__12); - __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 814, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< * * # Until ticket #99 is fixed, use integers to avoid warnings */ - __pyx_tuple__13 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(1, 834, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__13); - __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 834, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< * * cdef inline int import_umath() except -1: */ - __pyx_tuple__14 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(1, 1000, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__14); - __Pyx_GIVEREF(__pyx_tuple__14); + __pyx_tuple__10 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(1, 1000, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< * * cdef inline int import_ufunc() except -1: */ - __pyx_tuple__15 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(1, 1006, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__15); - __Pyx_GIVEREF(__pyx_tuple__15); + __pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(1, 1006, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< */ - __pyx_tuple__16 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(1, 1012, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__16); - __Pyx_GIVEREF(__pyx_tuple__16); + __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(1, 1012, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); - /* "gensim/models/doc2vec_inner.pyx":223 + /* "gensim/models/doc2vec_inner.pyx":280 * * * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, # <<<<<<<<<<<<<< * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): */ - __pyx_tuple__18 = PyTuple_Pack(50, __pyx_n_s_model, __pyx_n_s_doc_words, __pyx_n_s_doctag_indexes, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_train_words, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_hs, __pyx_n_s_negative, __pyx_n_s_sample, __pyx_n_s_train_words_2, __pyx_n_s_learn_words_2, __pyx_n_s_learn_hidden_2, __pyx_n_s_learn_doctags_2, __pyx_n_s_word_vectors_2, __pyx_n_s_doctag_vectors_2, __pyx_n_s_word_locks_2, __pyx_n_s_doctag_locks_2, __pyx_n_s_work_2, __pyx_n_s_alpha_2, __pyx_n_s_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_doctag_indexes_2, __pyx_n_s_reduced_windows, __pyx_n_s_document_len, __pyx_n_s_doctag_len, __pyx_n_s_window, __pyx_n_s_i, __pyx_n_s_j, 
__pyx_n_s_r, __pyx_n_s_result, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_syn1neg, __pyx_n_s_cum_table, __pyx_n_s_cum_table_len, __pyx_n_s_next_random, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_predict_word, __pyx_n_s_item, __pyx_n_s_k); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(0, 223, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__18); - __Pyx_GIVEREF(__pyx_tuple__18); - __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(13, 0, 50, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__18, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_doc2vec_inner_pyx, __pyx_n_s_train_document_dbow, 223, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 223, __pyx_L1_error) + __pyx_tuple__14 = PyTuple_Pack(22, __pyx_n_s_model, __pyx_n_s_doc_words, __pyx_n_s_doctag_indexes, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_train_words, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_c, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_result, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_predict_word, __pyx_n_s_item, __pyx_n_s_k); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 280, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__14); + __Pyx_GIVEREF(__pyx_tuple__14); + __pyx_codeobj__15 = (PyObject*)__Pyx_PyCode_New(13, 0, 22, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__14, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_doc2vec_inner_pyx, __pyx_n_s_train_document_dbow, 280, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__15)) __PYX_ERR(0, 280, __pyx_L1_error) - /* "gensim/models/doc2vec_inner.pyx":403 + /* "gensim/models/doc2vec_inner.pyx":404 * * * def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< * learn_doctags=True, learn_words=True, learn_hidden=True, * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): */ - __pyx_tuple__20 = PyTuple_Pack(53, __pyx_n_s_model, __pyx_n_s_doc_words, __pyx_n_s_doctag_indexes, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_hs, __pyx_n_s_negative, __pyx_n_s_sample, __pyx_n_s_learn_doctags_2, __pyx_n_s_learn_words_2, __pyx_n_s_learn_hidden_2, __pyx_n_s_cbow_mean, __pyx_n_s_count, __pyx_n_s_inv_count, __pyx_n_s_word_vectors_2, __pyx_n_s_doctag_vectors_2, __pyx_n_s_word_locks_2, __pyx_n_s_doctag_locks_2, __pyx_n_s_work_2, __pyx_n_s_neu1_2, __pyx_n_s_alpha_2, __pyx_n_s_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_doctag_indexes_2, __pyx_n_s_reduced_windows, __pyx_n_s_document_len, __pyx_n_s_doctag_len, __pyx_n_s_window, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_m, __pyx_n_s_result, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_syn1neg, __pyx_n_s_cum_table, __pyx_n_s_cum_table_len, __pyx_n_s_next_random, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_predict_word, __pyx_n_s_item); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 403, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__20); - __Pyx_GIVEREF(__pyx_tuple__20); - __pyx_codeobj__21 = (PyObject*)__Pyx_PyCode_New(13, 0, 53, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__20, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_gensim_models_doc2vec_inner_pyx, __pyx_n_s_train_document_dm, 403, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__21)) __PYX_ERR(0, 403, __pyx_L1_error) + __pyx_tuple__16 = PyTuple_Pack(25, __pyx_n_s_model, __pyx_n_s_doc_words, __pyx_n_s_doctag_indexes, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_c, __pyx_n_s_count, __pyx_n_s_inv_count, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_m, __pyx_n_s_result, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_predict_word, __pyx_n_s_item); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 404, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__16); + __Pyx_GIVEREF(__pyx_tuple__16); + __pyx_codeobj__17 = (PyObject*)__Pyx_PyCode_New(13, 0, 25, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__16, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_doc2vec_inner_pyx, __pyx_n_s_train_document_dm, 404, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__17)) __PYX_ERR(0, 404, __pyx_L1_error) - /* "gensim/models/doc2vec_inner.pyx":606 + /* "gensim/models/doc2vec_inner.pyx":544 * * * def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< * learn_doctags=True, learn_words=True, learn_hidden=True, * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): */ - __pyx_tuple__22 = PyTuple_Pack(53, __pyx_n_s_model, __pyx_n_s_doc_words, __pyx_n_s_doctag_indexes, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_hs, __pyx_n_s_negative, __pyx_n_s_sample, __pyx_n_s_learn_doctags_2, __pyx_n_s_learn_words_2, __pyx_n_s_learn_hidden_2, __pyx_n_s_word_vectors_2, __pyx_n_s_doctag_vectors_2, __pyx_n_s_word_locks_2, __pyx_n_s_doctag_locks_2, __pyx_n_s_work_2, __pyx_n_s_neu1_2, __pyx_n_s_alpha_2, __pyx_n_s_layer1_size, __pyx_n_s_vector_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_doctag_indexes_2, __pyx_n_s_window_indexes, __pyx_n_s_document_len, __pyx_n_s_doctag_len, __pyx_n_s_window, __pyx_n_s_expected_doctag_len, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_m, __pyx_n_s_n, __pyx_n_s_result, __pyx_n_s_null_word_index, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_syn1neg, __pyx_n_s_cum_table, __pyx_n_s_cum_table_len, __pyx_n_s_next_random, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_predict_word); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 606, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__22); - __Pyx_GIVEREF(__pyx_tuple__22); - __pyx_codeobj__23 = (PyObject*)__Pyx_PyCode_New(13, 0, 53, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_doc2vec_inner_pyx, __pyx_n_s_train_document_dm_concat, 606, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__23)) __PYX_ERR(0, 606, __pyx_L1_error) + __pyx_tuple__18 = PyTuple_Pack(23, __pyx_n_s_model, __pyx_n_s_doc_words, __pyx_n_s_doctag_indexes, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_learn_doctags, __pyx_n_s_learn_words, __pyx_n_s_learn_hidden, __pyx_n_s_word_vectors, __pyx_n_s_word_locks, __pyx_n_s_doctag_vectors, __pyx_n_s_doctag_locks, __pyx_n_s_c, __pyx_n_s_i, __pyx_n_s_j, 
__pyx_n_s_k, __pyx_n_s_m, __pyx_n_s_n, __pyx_n_s_result, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_predict_word); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(0, 544, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__18); + __Pyx_GIVEREF(__pyx_tuple__18); + __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(13, 0, 23, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__18, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_doc2vec_inner_pyx, __pyx_n_s_train_document_dm_concat, 544, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 544, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; @@ -11537,8 +10545,18 @@ static int __Pyx_modinit_function_export_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); /*--- Function export code ---*/ + if (__Pyx_ExportFunction("fast_document_dbow_hs", (void (*)(void))__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("fast_document_dbow_neg", (void (*)(void))__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dbow_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("fast_document_dm_hs", (void (*)(void))__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("fast_document_dm_neg", (void (*)(void))__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dm_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("fast_document_dmc_hs", (void (*)(void))__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int const , int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) 
+ if (__Pyx_ExportFunction("fast_document_dmc_neg", (void (*)(void))__pyx_f_6gensim_6models_13doc2vec_inner_fast_document_dmc_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , int const , int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("init_d2v_config", (void (*)(void))__pyx_f_6gensim_6models_13doc2vec_inner_init_d2v_config, "PyObject *(struct __pyx_t_6gensim_6models_13doc2vec_inner_Doc2VecConfig *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, struct __pyx_opt_args_6gensim_6models_13doc2vec_inner_init_d2v_config *__pyx_optional_args)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; } static int __Pyx_modinit_type_init_code(void) { @@ -11630,7 +10648,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) +#elif defined(__GNUC__) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -11784,7 +10802,7 @@ if (!__Pyx_RefNanny) { /*--- Global type/function init code ---*/ (void)__Pyx_modinit_global_init_code(); (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); + if (unlikely(__Pyx_modinit_function_export_code() != 0)) goto __pyx_L1_error; (void)__Pyx_modinit_type_init_code(); if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error; if (unlikely(__Pyx_modinit_variable_import_code() != 0)) goto __pyx_L1_error; @@ -11911,9 +10929,9 @@ if (!__Pyx_RefNanny) { */ __pyx_t_8 = PyList_New(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 25, __pyx_L4_except_error) __Pyx_GOTREF(__pyx_t_8); - __Pyx_INCREF(__pyx_n_s__17); - __Pyx_GIVEREF(__pyx_n_s__17); - PyList_SET_ITEM(__pyx_t_8, 0, __pyx_n_s__17); + __Pyx_INCREF(__pyx_n_s__13); + __Pyx_GIVEREF(__pyx_n_s__13); + PyList_SET_ITEM(__pyx_t_8, 0, __pyx_n_s__13); __pyx_t_9 = __Pyx_Import(__pyx_n_s_scipy_linalg_blas, __pyx_t_8, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 25, __pyx_L4_except_error) __Pyx_GOTREF(__pyx_t_9); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; @@ -11965,40 +10983,40 @@ if (!__Pyx_RefNanny) { */ __pyx_v_6gensim_6models_13doc2vec_inner_ONEF = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)1.0); - /* "gensim/models/doc2vec_inner.pyx":223 + /* "gensim/models/doc2vec_inner.pyx":280 * * * def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, # <<<<<<<<<<<<<< * train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): */ - __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_13doc2vec_inner_1train_document_dbow, NULL, __pyx_n_s_gensim_models_doc2vec_inner); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 223, __pyx_L1_error) + __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_13doc2vec_inner_1train_document_dbow, NULL, __pyx_n_s_gensim_models_doc2vec_inner); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 280, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_document_dbow, __pyx_t_7) < 0) 
__PYX_ERR(0, 223, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_document_dbow, __pyx_t_7) < 0) __PYX_ERR(0, 280, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "gensim/models/doc2vec_inner.pyx":403 + /* "gensim/models/doc2vec_inner.pyx":404 * * * def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< * learn_doctags=True, learn_words=True, learn_hidden=True, * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): */ - __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_13doc2vec_inner_3train_document_dm, NULL, __pyx_n_s_gensim_models_doc2vec_inner); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 403, __pyx_L1_error) + __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_13doc2vec_inner_3train_document_dm, NULL, __pyx_n_s_gensim_models_doc2vec_inner); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 404, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_document_dm, __pyx_t_7) < 0) __PYX_ERR(0, 403, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_document_dm, __pyx_t_7) < 0) __PYX_ERR(0, 404, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "gensim/models/doc2vec_inner.pyx":606 + /* "gensim/models/doc2vec_inner.pyx":544 * * * def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, neu1=None, # <<<<<<<<<<<<<< * learn_doctags=True, learn_words=True, learn_hidden=True, * word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): */ - __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_13doc2vec_inner_5train_document_dm_concat, NULL, __pyx_n_s_gensim_models_doc2vec_inner); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 606, __pyx_L1_error) + __pyx_t_7 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_13doc2vec_inner_5train_document_dm_concat, NULL, __pyx_n_s_gensim_models_doc2vec_inner); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 544, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_document_dm_concat, __pyx_t_7) < 0) __PYX_ERR(0, 606, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_document_dm_concat, __pyx_t_7) < 0) __PYX_ERR(0, 544, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; /* "gensim/models/doc2vec_inner.pyx":1 @@ -12011,7 +11029,7 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_7) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -12093,8 +11111,87 @@ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { return result; } +/* DictGetItem */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) + PyErr_SetObject(PyExc_KeyError, args); + Py_XDECREF(args); + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if 
(likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* GetModuleGlobalName */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + if (likely(result)) { + Py_INCREF(result); + } else if (unlikely(PyErr_Occurred())) { + result = NULL; + } else { +#else + result = PyDict_GetItem(__pyx_d, name); + if (likely(result)) { + Py_INCREF(result); + } else { +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + if (!result) { + PyErr_Clear(); +#endif + result = __Pyx_GetBuiltinName(name); + } + return result; +} + /* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( + static void __Pyx_RaiseArgtupleInvalid( const char* func_name, int exact, Py_ssize_t num_min, @@ -12120,7 +11217,7 @@ static void __Pyx_RaiseArgtupleInvalid( } /* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( + static void __Pyx_RaiseDoubleKeywordsError( const char* func_name, PyObject* kw_name) { @@ -12134,7 +11231,7 @@ static void __Pyx_RaiseDoubleKeywordsError( } /* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( + static int __Pyx_ParseOptionalKeywords( PyObject *kwds, PyObject **argnames[], PyObject *kwds2, @@ -12235,66 +11332,6 @@ static int __Pyx_ParseOptionalKeywords( return -1; } -/* ExtTypeTest */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - if (likely(__Pyx_TypeCheck(obj, type))) - return 1; - PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", - Py_TYPE(obj)->tp_name, type->tp_name); - return 0; -} - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* GetModuleGlobalName */ -static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - if (likely(result)) { - Py_INCREF(result); - } else if 
(unlikely(PyErr_Occurred())) { - result = NULL; - } else { -#else - result = PyDict_GetItem(__pyx_d, name); - if (likely(result)) { - Py_INCREF(result); - } else { -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - if (!result) { - PyErr_Clear(); -#endif - result = __Pyx_GetBuiltinName(name); - } - return result; -} - /* GetItemInt */ static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { PyObject *r; @@ -12554,25 +11591,6 @@ static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, P } #endif -/* DictGetItem */ - #if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY -static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { - PyObject *value; - value = PyDict_GetItemWithError(d, key); - if (unlikely(!value)) { - if (!PyErr_Occurred()) { - PyObject* args = PyTuple_Pack(1, key); - if (likely(args)) - PyErr_SetObject(PyExc_KeyError, args); - Py_XDECREF(args); - } - return NULL; - } - Py_INCREF(value); - return value; -} -#endif - /* PyErrFetchRestore */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { @@ -13055,9 +12073,6 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -14741,6 +13756,43 @@ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObj return 0; } +/* FunctionExport */ + static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + d = PyObject_GetAttrString(__pyx_m, (char *)"__pyx_capi__"); + if (!d) { + PyErr_Clear(); + d = PyDict_New(); + if (!d) + goto bad; + Py_INCREF(d); + if (PyModule_AddObject(__pyx_m, (char *)"__pyx_capi__", d) < 0) + goto bad; + } + tmp.fp = f; +#if PY_VERSION_HEX >= 0x02070000 + cobj = PyCapsule_New(tmp.p, sig, 0); +#else + cobj = PyCObject_FromVoidPtrAndDesc(tmp.p, (void *)sig, 0); +#endif + if (!cobj) + goto bad; + if (PyDict_SetItemString(d, name, cobj) < 0) + goto bad; + Py_DECREF(cobj); + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(cobj); + Py_XDECREF(d); + return -1; +} + /* ModuleImport */ #ifndef __PYX_HAVE_RT_ImportModule #define __PYX_HAVE_RT_ImportModule @@ -15168,9 +14220,6 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_DECREF(x); return ival; } -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } diff --git a/gensim/models/doc2vec_inner.pxd b/gensim/models/doc2vec_inner.pxd new file mode 100644 index 0000000000..4ba020fd1c --- /dev/null +++ b/gensim/models/doc2vec_inner.pxd @@ -0,0 +1,92 @@ +#!/usr/bin/env cython +# distutils: language = c++ +# cython: boundscheck=False +# cython: wraparound=False +# cython: cdivision=True +# cython: embedsignature=True +# coding: utf-8 +# +# shared type definitions for doc2vec_inner +# used from doc2vec_corpusfile +# +# Copyright (C) 2018 Dmitry Persiyanov +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +import numpy as np +cimport numpy as np + +from word2vec_inner cimport REAL_t + +DEF MAX_DOCUMENT_LEN = 10000 + + +cdef struct Doc2VecConfig: + int hs, negative, sample, learn_doctags, learn_words, learn_hidden, train_words, cbow_mean + int document_len, doctag_len, window, expected_doctag_len, null_word_index, workers, docvecs_count + + REAL_t *word_vectors + REAL_t *doctag_vectors + REAL_t *word_locks + REAL_t *doctag_locks + REAL_t *work + REAL_t *neu1 + REAL_t alpha + int layer1_size, vector_size + + int codelens[MAX_DOCUMENT_LEN] + np.uint32_t indexes[MAX_DOCUMENT_LEN] + np.uint32_t doctag_indexes[MAX_DOCUMENT_LEN] + np.uint32_t window_indexes[MAX_DOCUMENT_LEN] + np.uint32_t reduced_windows[MAX_DOCUMENT_LEN] + + # For hierarchical softmax + REAL_t *syn1 + np.uint32_t *points[MAX_DOCUMENT_LEN] + np.uint8_t *codes[MAX_DOCUMENT_LEN] + + # For negative sampling + REAL_t *syn1neg + np.uint32_t *cum_table + unsigned long long cum_table_len, next_random + + +cdef void fast_document_dbow_hs( + const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, + REAL_t *context_vectors, REAL_t *syn1, const int size, + const np.uint32_t context_index, const REAL_t alpha, REAL_t *work, int learn_context, int learn_hidden, + REAL_t *context_locks) nogil + + +cdef unsigned long long fast_document_dbow_neg( + const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, + REAL_t *context_vectors, REAL_t *syn1neg, const int size, const np.uint32_t word_index, + const np.uint32_t context_index, const REAL_t alpha, REAL_t *work, + unsigned long long next_random, int learn_context, int learn_hidden, REAL_t *context_locks) nogil + + +cdef void fast_document_dm_hs( + const np.uint32_t *word_point, const np.uint8_t *word_code, int word_code_len, + REAL_t *neu1, REAL_t *syn1, const REAL_t alpha, REAL_t *work, + const int size, int learn_hidden) nogil + + +cdef unsigned long long fast_document_dm_neg( + const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, unsigned long long next_random, + REAL_t *neu1, REAL_t *syn1neg, const int predict_word_index, const REAL_t alpha, REAL_t *work, + const int size, int learn_hidden) nogil + + +cdef void fast_document_dmc_hs( + const np.uint32_t *word_point, const np.uint8_t *word_code, int word_code_len, + REAL_t *neu1, REAL_t *syn1, const REAL_t alpha, REAL_t *work, + const int layer1_size, const int vector_size, int learn_hidden) nogil + + +cdef unsigned long long fast_document_dmc_neg( + const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, unsigned long long next_random, + REAL_t *neu1, REAL_t *syn1neg, const int predict_word_index, const REAL_t alpha, REAL_t *work, + const int layer1_size, const int vector_size, int learn_hidden) nogil + + +cdef 
init_d2v_config(Doc2VecConfig *c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=*, work=*, + neu1=*, word_vectors=*, word_locks=*, doctag_vectors=*, doctag_locks=*, docvecs_count=*) \ No newline at end of file diff --git a/gensim/models/doc2vec_inner.pyx b/gensim/models/doc2vec_inner.pyx index e0226e3dc0..8d9ca4862f 100644 --- a/gensim/models/doc2vec_inner.pyx +++ b/gensim/models/doc2vec_inner.pyx @@ -220,6 +220,63 @@ cdef unsigned long long fast_document_dmc_neg( return next_random +cdef init_d2v_config(Doc2VecConfig *c, model, alpha, learn_doctags, learn_words, learn_hidden, + train_words=False, work=None, neu1=None, word_vectors=None, word_locks=None, doctag_vectors=None, + doctag_locks=None, docvecs_count=0): + c[0].hs = model.hs + c[0].negative = model.negative + c[0].sample = (model.vocabulary.sample != 0) + c[0].cbow_mean = model.cbow_mean + c[0].train_words = train_words + c[0].learn_doctags = learn_doctags + c[0].learn_words = learn_words + c[0].learn_hidden = learn_hidden + c[0].alpha = alpha + c[0].layer1_size = model.trainables.layer1_size + c[0].vector_size = model.docvecs.vector_size + c[0].workers = model.workers + c[0].docvecs_count = docvecs_count + + c[0].window = model.window + c[0].expected_doctag_len = model.dm_tag_count + + if '\0' in model.wv.vocab: + c[0].null_word_index = model.wv.vocab['\0'].index + + # default vectors, locks from syn0/doctag_syn0 + if word_vectors is None: + word_vectors = model.wv.vectors + c[0].word_vectors = (np.PyArray_DATA(word_vectors)) + if doctag_vectors is None: + doctag_vectors = model.docvecs.vectors_docs + c[0].doctag_vectors = (np.PyArray_DATA(doctag_vectors)) + if word_locks is None: + word_locks = model.trainables.vectors_lockf + c[0].word_locks = (np.PyArray_DATA(word_locks)) + if doctag_locks is None: + doctag_locks = model.trainables.vectors_docs_lockf + c[0].doctag_locks = (np.PyArray_DATA(doctag_locks)) + + if c[0].hs: + c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) + + if c[0].negative: + c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + c[0].cum_table_len = len(model.vocabulary.cum_table) + if c[0].negative or c[0].sample: + c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + + # convert Python structures to primitive types, so we can release the GIL + if work is None: + work = zeros(model.trainables.layer1_size, dtype=REAL) + c[0].work = np.PyArray_DATA(work) + if neu1 is None: + neu1 = zeros(model.trainables.layer1_size, dtype=REAL) + c[0].neu1 = np.PyArray_DATA(neu1) + + + def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, train_words=False, learn_doctags=True, learn_words=True, learn_hidden=True, word_vectors=None, word_locks=None, doctag_vectors=None, doctag_locks=None): @@ -267,73 +324,16 @@ def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, Number of words in the input document that were actually used for training. 
""" - cdef int hs = model.hs - cdef int negative = model.negative - cdef int sample = (model.vocabulary.sample != 0) - cdef int _train_words = train_words - cdef int _learn_words = learn_words - cdef int _learn_hidden = learn_hidden - cdef int _learn_doctags = learn_doctags - - cdef REAL_t *_word_vectors - cdef REAL_t *_doctag_vectors - cdef REAL_t *_word_locks - cdef REAL_t *_doctag_locks - cdef REAL_t *_work - cdef REAL_t _alpha = alpha - cdef int size = model.trainables.layer1_size - - cdef int codelens[MAX_DOCUMENT_LEN] - cdef np.uint32_t indexes[MAX_DOCUMENT_LEN] - cdef np.uint32_t _doctag_indexes[MAX_DOCUMENT_LEN] - cdef np.uint32_t reduced_windows[MAX_DOCUMENT_LEN] - cdef int document_len - cdef int doctag_len - cdef int window = model.window + cdef Doc2VecConfig c cdef int i, j - cdef unsigned long long r cdef long result = 0 - # For hierarchical softmax - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_DOCUMENT_LEN] - cdef np.uint8_t *codes[MAX_DOCUMENT_LEN] - - # For negative sampling - cdef REAL_t *syn1neg - cdef np.uint32_t *cum_table - cdef unsigned long long cum_table_len - cdef unsigned long long next_random - - # default vectors, locks from syn0/doctag_syn0 - if word_vectors is None: - word_vectors = model.wv.vectors - _word_vectors = (np.PyArray_DATA(word_vectors)) - if doctag_vectors is None: - doctag_vectors = model.docvecs.vectors_docs - _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - if word_locks is None: - word_locks = model.trainables.vectors_lockf - _word_locks = (np.PyArray_DATA(word_locks)) - if doctag_locks is None: - doctag_locks = model.trainables.vectors_docs_lockf - _doctag_locks = (np.PyArray_DATA(doctag_locks)) - - if hs: - syn1 = (np.PyArray_DATA(model.trainables.syn1)) + init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=train_words, work=work, + neu1=None, word_vectors=word_vectors, word_locks=word_locks, + doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) - if negative: - syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - cum_table_len = len(model.vocabulary.cum_table) - if negative or sample: - next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - - # convert Python structures to primitive types, so we can release the GIL - if work is None: - work = zeros(model.trainables.layer1_size, dtype=REAL) - _work = np.PyArray_DATA(work) + c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) vlookup = model.wv.vocab i = 0 @@ -341,61 +341,62 @@ def train_document_dbow(model, doc_words, doctag_indexes, alpha, work=None, predict_word = vlookup[token] if token in vlookup else None if predict_word is None: # shrink document to leave out word continue # leaving i unchanged - if sample and predict_word.sample_int < random_int32(&next_random): + if c.sample and predict_word.sample_int < random_int32(&c.next_random): continue - indexes[i] = predict_word.index - if hs: - codelens[i] = len(predict_word.code) - codes[i] = np.PyArray_DATA(predict_word.code) - points[i] = np.PyArray_DATA(predict_word.point) + c.indexes[i] = predict_word.index + if c.hs: + c.codelens[i] = len(predict_word.code) + c.codes[i] = np.PyArray_DATA(predict_word.code) + c.points[i] = np.PyArray_DATA(predict_word.point) result += 1 i += 1 if i == MAX_DOCUMENT_LEN: break # TODO: log warning, tally overflow? 
- document_len = i + c.document_len = i - if _train_words: + if c.train_words: # single randint() call avoids a big thread-synchronization slowdown - for i, item in enumerate(model.random.randint(0, window, document_len)): - reduced_windows[i] = item + for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): + c.reduced_windows[i] = item - doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - for i in range(doctag_len): - _doctag_indexes[i] = doctag_indexes[i] + for i in range(c.doctag_len): + c.doctag_indexes[i] = doctag_indexes[i] result += 1 # release GIL & train on the document with nogil: - for i in range(document_len): - if _train_words: # simultaneous skip-gram wordvec-training - j = i - window + reduced_windows[i] + for i in range(c.document_len): + if c.train_words: # simultaneous skip-gram wordvec-training + j = i - c.window + c.reduced_windows[i] if j < 0: j = 0 - k = i + window + 1 - reduced_windows[i] - if k > document_len: - k = document_len + k = i + c.window + 1 - c.reduced_windows[i] + if k > c.document_len: + k = c.document_len for j in range(j, k): if j == i: continue - if hs: + if c.hs: # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose - fast_document_dbow_hs(points[i], codes[i], codelens[i], _word_vectors, syn1, size, indexes[j], - _alpha, _work, _learn_words, _learn_hidden, _word_locks) - if negative: + fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.word_vectors, c.syn1, c.layer1_size, + c.indexes[j], c.alpha, c.work, c.learn_words, c.learn_hidden, c.word_locks) + if c.negative: # we reuse the DBOW function, as it is equivalent to skip-gram for this purpose - next_random = fast_document_dbow_neg(negative, cum_table, cum_table_len, _word_vectors, syn1neg, size, - indexes[i], indexes[j], _alpha, _work, next_random, - _learn_words, _learn_hidden, _word_locks) + c.next_random = fast_document_dbow_neg(c.negative, c.cum_table, c.cum_table_len, c.word_vectors, + c.syn1neg, c.layer1_size, c.indexes[i], c.indexes[j], + c.alpha, c.work, c.next_random, c.learn_words, + c.learn_hidden, c.word_locks) # docvec-training - for j in range(doctag_len): - if hs: - fast_document_dbow_hs(points[i], codes[i], codelens[i], _doctag_vectors, syn1, size, _doctag_indexes[j], - _alpha, _work, _learn_doctags, _learn_hidden, _doctag_locks) - if negative: - next_random = fast_document_dbow_neg(negative, cum_table, cum_table_len, _doctag_vectors, syn1neg, size, - indexes[i], _doctag_indexes[j], _alpha, _work, next_random, - _learn_doctags, _learn_hidden, _doctag_locks) + for j in range(c.doctag_len): + if c.hs: + fast_document_dbow_hs(c.points[i], c.codes[i], c.codelens[i], c.doctag_vectors, c.syn1, c.layer1_size, + c.doctag_indexes[j], c.alpha, c.work, c.learn_doctags, c.learn_hidden, c.doctag_locks) + if c.negative: + c.next_random = fast_document_dbow_neg(c.negative, c.cum_table, c.cum_table_len, c.doctag_vectors, + c.syn1neg, c.layer1_size, c.indexes[i], c.doctag_indexes[j], + c.alpha, c.work, c.next_random, c.learn_doctags, + c.learn_hidden, c.doctag_locks) return result @@ -448,78 +449,17 @@ def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=N Number of words in the input document that were actually used for training. 
""" - cdef int hs = model.hs - cdef int negative = model.negative - cdef int sample = (model.vocabulary.sample != 0) - cdef int _learn_doctags = learn_doctags - cdef int _learn_words = learn_words - cdef int _learn_hidden = learn_hidden - cdef int cbow_mean = model.cbow_mean - cdef REAL_t count, inv_count = 1.0 - - cdef REAL_t *_word_vectors - cdef REAL_t *_doctag_vectors - cdef REAL_t *_word_locks - cdef REAL_t *_doctag_locks - cdef REAL_t *_work - cdef REAL_t *_neu1 - cdef REAL_t _alpha = alpha - cdef int size = model.trainables.layer1_size - - cdef int codelens[MAX_DOCUMENT_LEN] - cdef np.uint32_t indexes[MAX_DOCUMENT_LEN] - cdef np.uint32_t _doctag_indexes[MAX_DOCUMENT_LEN] - cdef np.uint32_t reduced_windows[MAX_DOCUMENT_LEN] - cdef int document_len - cdef int doctag_len - cdef int window = model.window + cdef Doc2VecConfig c + cdef REAL_t count, inv_count = 1.0 cdef int i, j, k, m cdef long result = 0 - # For hierarchical softmax - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_DOCUMENT_LEN] - cdef np.uint8_t *codes[MAX_DOCUMENT_LEN] - - # For negative sampling - cdef REAL_t *syn1neg - cdef np.uint32_t *cum_table - cdef unsigned long long cum_table_len - cdef unsigned long long next_random - - - # default vectors, locks from syn0/doctag_syn0 - if word_vectors is None: - word_vectors = model.wv.vectors - _word_vectors = (np.PyArray_DATA(word_vectors)) - if doctag_vectors is None: - doctag_vectors = model.docvecs.vectors_docs - _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - if word_locks is None: - word_locks = model.trainables.vectors_lockf - _word_locks = (np.PyArray_DATA(word_locks)) - if doctag_locks is None: - doctag_locks = model.trainables.vectors_docs_lockf - _doctag_locks = (np.PyArray_DATA(doctag_locks)) - - if hs: - syn1 = (np.PyArray_DATA(model.trainables.syn1)) + init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=False, + work=work, neu1=neu1, word_vectors=word_vectors, word_locks=word_locks, + doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) - if negative: - syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - cum_table_len = len(model.vocabulary.cum_table) - if negative or sample: - next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - - # convert Python structures to primitive types, so we can release the GIL - if work is None: - work = zeros(model.trainables.layer1_size, dtype=REAL) - _work = np.PyArray_DATA(work) - if neu1 is None: - neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - _neu1 = np.PyArray_DATA(neu1) + c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) vlookup = model.wv.vocab i = 0 @@ -527,78 +467,76 @@ def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=N predict_word = vlookup[token] if token in vlookup else None if predict_word is None: # shrink document to leave out word continue # leaving i unchanged - if sample and predict_word.sample_int < random_int32(&next_random): + if c.sample and predict_word.sample_int < random_int32(&c.next_random): continue - indexes[i] = predict_word.index - if hs: - codelens[i] = len(predict_word.code) - codes[i] = np.PyArray_DATA(predict_word.code) - points[i] = np.PyArray_DATA(predict_word.point) + c.indexes[i] = predict_word.index + if c.hs: + c.codelens[i] = len(predict_word.code) + c.codes[i] = np.PyArray_DATA(predict_word.code) + c.points[i] = np.PyArray_DATA(predict_word.point) result += 1 i += 1 if i == MAX_DOCUMENT_LEN: 
break # TODO: log warning, tally overflow? - document_len = i + c.document_len = i # single randint() call avoids a big thread-sync slowdown - for i, item in enumerate(model.random.randint(0, window, document_len)): - reduced_windows[i] = item + for i, item in enumerate(model.random.randint(0, c.window, c.document_len)): + c.reduced_windows[i] = item - doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - for i in range(doctag_len): - _doctag_indexes[i] = doctag_indexes[i] + for i in range(c.doctag_len): + c.doctag_indexes[i] = doctag_indexes[i] result += 1 # release GIL & train on the document with nogil: - for i in range(document_len): - j = i - window + reduced_windows[i] + for i in range(c.document_len): + j = i - c.window + c.reduced_windows[i] if j < 0: j = 0 - k = i + window + 1 - reduced_windows[i] - if k > document_len: - k = document_len + k = i + c.window + 1 - c.reduced_windows[i] + if k > c.document_len: + k = c.document_len # compose l1 (in _neu1) & clear _work - memset(_neu1, 0, size * cython.sizeof(REAL_t)) + memset(c.neu1, 0, c.layer1_size * cython.sizeof(REAL_t)) count = 0.0 for m in range(j, k): if m == i: continue else: count += ONEF - our_saxpy(&size, &ONEF, &_word_vectors[indexes[m] * size], &ONE, _neu1, &ONE) - for m in range(doctag_len): + our_saxpy(&c.layer1_size, &ONEF, &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) + for m in range(c.doctag_len): count += ONEF - our_saxpy(&size, &ONEF, &_doctag_vectors[_doctag_indexes[m] * size], &ONE, _neu1, &ONE) + our_saxpy(&c.layer1_size, &ONEF, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE, c.neu1, &ONE) if count > (0.5): inv_count = ONEF/count - if cbow_mean: - sscal(&size, &inv_count, _neu1, &ONE) # (does this need BLAS-variants like saxpy?) - memset(_work, 0, size * cython.sizeof(REAL_t)) # work to accumulate l1 error - if hs: - fast_document_dm_hs(points[i], codes[i], codelens[i], - _neu1, syn1, _alpha, _work, - size, _learn_hidden) - if negative: - next_random = fast_document_dm_neg(negative, cum_table, cum_table_len, next_random, - _neu1, syn1neg, indexes[i], _alpha, _work, - size, _learn_hidden) - - if not cbow_mean: - sscal(&size, &inv_count, _work, &ONE) # (does this need BLAS-variants like saxpy?) + if c.cbow_mean: + sscal(&c.layer1_size, &inv_count, c.neu1, &ONE) # (does this need BLAS-variants like saxpy?) + memset(c.work, 0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + if c.hs: + fast_document_dm_hs(c.points[i], c.codes[i], c.codelens[i], c.neu1, c.syn1, c.alpha, c.work, + c.layer1_size, c.learn_hidden) + if c.negative: + c.next_random = fast_document_dm_neg(c.negative, c.cum_table, c.cum_table_len, c.next_random, + c.neu1, c.syn1neg, c.indexes[i], c.alpha, c.work, c.layer1_size, + c.learn_hidden) + + if not c.cbow_mean: + sscal(&c.layer1_size, &inv_count, c.work, &ONE) # (does this need BLAS-variants like saxpy?) 
# apply accumulated error in work - if _learn_doctags: - for m in range(doctag_len): - our_saxpy(&size, &_doctag_locks[_doctag_indexes[m]], _work, - &ONE, &_doctag_vectors[_doctag_indexes[m] * size], &ONE) - if _learn_words: + if c.learn_doctags: + for m in range(c.doctag_len): + our_saxpy(&c.layer1_size, &c.doctag_locks[c.doctag_indexes[m]], c.work, + &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.layer1_size], &ONE) + if c.learn_words: for m in range(j, k): if m == i: continue else: - our_saxpy(&size, &_word_locks[indexes[m]], _work, &ONE, - &_word_vectors[indexes[m] * size], &ONE) + our_saxpy(&c.layer1_size, &c.word_locks[c.indexes[m]], c.work, &ONE, + &c.word_vectors[c.indexes[m] * c.layer1_size], &ONE) return result @@ -651,148 +589,84 @@ def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, Number of words in the input document that were actually used for training. """ - cdef int hs = model.hs - cdef int negative = model.negative - cdef int sample = (model.vocabulary.sample != 0) - cdef int _learn_doctags = learn_doctags - cdef int _learn_words = learn_words - cdef int _learn_hidden = learn_hidden - - cdef REAL_t *_word_vectors - cdef REAL_t *_doctag_vectors - cdef REAL_t *_word_locks - cdef REAL_t *_doctag_locks - cdef REAL_t *_work - cdef REAL_t *_neu1 - cdef REAL_t _alpha = alpha - cdef int layer1_size = model.trainables.layer1_size - cdef int vector_size = model.docvecs.vector_size - - cdef int codelens[MAX_DOCUMENT_LEN] - cdef np.uint32_t indexes[MAX_DOCUMENT_LEN] - cdef np.uint32_t _doctag_indexes[MAX_DOCUMENT_LEN] - cdef np.uint32_t window_indexes[MAX_DOCUMENT_LEN] - cdef int document_len - cdef int doctag_len - cdef int window = model.window - cdef int expected_doctag_len = model.dm_tag_count + cdef Doc2VecConfig c cdef int i, j, k, m, n cdef long result = 0 - cdef int null_word_index = model.wv.vocab['\0'].index - # For hierarchical softmax - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_DOCUMENT_LEN] - cdef np.uint8_t *codes[MAX_DOCUMENT_LEN] + init_d2v_config(&c, model, alpha, learn_doctags, learn_words, learn_hidden, train_words=False, work=work, neu1=neu1, + word_vectors=word_vectors, word_locks=word_locks, doctag_vectors=doctag_vectors, doctag_locks=doctag_locks) - # For negative sampling - cdef REAL_t *syn1neg - cdef np.uint32_t *cum_table - cdef unsigned long long cum_table_len - cdef unsigned long long next_random + c.doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - doctag_len = min(MAX_DOCUMENT_LEN, len(doctag_indexes)) - if doctag_len != expected_doctag_len: + if c.doctag_len != c.expected_doctag_len: return 0 # skip doc without expected number of tags - # default vectors, locks from syn0/doctag_syn0 - if word_vectors is None: - word_vectors = model.wv.vectors - _word_vectors = (np.PyArray_DATA(word_vectors)) - if doctag_vectors is None: - doctag_vectors = model.docvecs.vectors_docs - _doctag_vectors = (np.PyArray_DATA(doctag_vectors)) - if word_locks is None: - word_locks = model.trainables.vectors_lockf - _word_locks = (np.PyArray_DATA(word_locks)) - if doctag_locks is None: - doctag_locks = model.trainables.vectors_docs_lockf - _doctag_locks = (np.PyArray_DATA(doctag_locks)) - - if hs: - syn1 = (np.PyArray_DATA(model.trainables.syn1)) - - if negative: - syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - cum_table_len = len(model.vocabulary.cum_table) - if negative or sample: - next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 
2**24) - - # convert Python structures to primitive types, so we can release the GIL - if work is None: - work = zeros(model.trainables.layer1_size, dtype=REAL) - _work = np.PyArray_DATA(work) - if neu1 is None: - neu1 = zeros(model.trainables.layer1_size, dtype=REAL) - _neu1 = np.PyArray_DATA(neu1) - vlookup = model.wv.vocab i = 0 for token in doc_words: predict_word = vlookup[token] if token in vlookup else None if predict_word is None: # shrink document to leave out word continue # leaving i unchanged - if sample and predict_word.sample_int < random_int32(&next_random): + if c.sample and predict_word.sample_int < random_int32(&c.next_random): continue - indexes[i] = predict_word.index - if hs: - codelens[i] = len(predict_word.code) - codes[i] = np.PyArray_DATA(predict_word.code) - points[i] = np.PyArray_DATA(predict_word.point) + c.indexes[i] = predict_word.index + if c.hs: + c.codelens[i] = len(predict_word.code) + c.codes[i] = np.PyArray_DATA(predict_word.code) + c.points[i] = np.PyArray_DATA(predict_word.point) result += 1 i += 1 if i == MAX_DOCUMENT_LEN: break # TODO: log warning, tally overflow? - document_len = i + c.document_len = i - for i in range(doctag_len): - _doctag_indexes[i] = doctag_indexes[i] + for i in range(c.doctag_len): + c.doctag_indexes[i] = doctag_indexes[i] result += 1 # release GIL & train on the document with nogil: - for i in range(document_len): - j = i - window # negative OK: will pad with null word - k = i + window + 1 # past document end OK: will pad with null word + for i in range(c.document_len): + j = i - c.window # negative OK: will pad with null word + k = i + c.window + 1 # past document end OK: will pad with null word # compose l1 & clear work - for m in range(doctag_len): + for m in range(c.doctag_len): # doc vector(s) - memcpy(&_neu1[m * vector_size], &_doctag_vectors[_doctag_indexes[m] * vector_size], - vector_size * cython.sizeof(REAL_t)) + memcpy(&c.neu1[m * c.vector_size], &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], + c.vector_size * cython.sizeof(REAL_t)) n = 0 for m in range(j, k): # word vectors in window if m == i: continue - if m < 0 or m >= document_len: - window_indexes[n] = null_word_index + if m < 0 or m >= c.document_len: + c.window_indexes[n] = c.null_word_index else: - window_indexes[n] = indexes[m] + c.window_indexes[n] = c.indexes[m] n += 1 - for m in range(2 * window): - memcpy(&_neu1[(doctag_len + m) * vector_size], &_word_vectors[window_indexes[m] * vector_size], - vector_size * cython.sizeof(REAL_t)) - memset(_work, 0, layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error - - if hs: - fast_document_dmc_hs(points[i], codes[i], codelens[i], - _neu1, syn1, _alpha, _work, - layer1_size, vector_size, _learn_hidden) - if negative: - next_random = fast_document_dmc_neg(negative, cum_table, cum_table_len, next_random, - _neu1, syn1neg, indexes[i], _alpha, _work, - layer1_size, vector_size, _learn_hidden) - - if _learn_doctags: - for m in range(doctag_len): - our_saxpy(&vector_size, &_doctag_locks[_doctag_indexes[m]], &_work[m * vector_size], - &ONE, &_doctag_vectors[_doctag_indexes[m] * vector_size], &ONE) - if _learn_words: - for m in range(2 * window): - our_saxpy(&vector_size, &_word_locks[window_indexes[m]], &_work[(doctag_len + m) * vector_size], - &ONE, &_word_vectors[window_indexes[m] * vector_size], &ONE) + for m in range(2 * c.window): + memcpy(&c.neu1[(c.doctag_len + m) * c.vector_size], &c.word_vectors[c.window_indexes[m] * c.vector_size], + c.vector_size * cython.sizeof(REAL_t)) + memset(c.work, 
0, c.layer1_size * cython.sizeof(REAL_t)) # work to accumulate l1 error + + if c.hs: + fast_document_dmc_hs(c.points[i], c.codes[i], c.codelens[i], + c.neu1, c.syn1, c.alpha, c.work, + c.layer1_size, c.vector_size, c.learn_hidden) + if c.negative: + c.next_random = fast_document_dmc_neg(c.negative, c.cum_table, c.cum_table_len, c.next_random, + c.neu1, c.syn1neg, c.indexes[i], c.alpha, c.work, + c.layer1_size, c.vector_size, c.learn_hidden) + + if c.learn_doctags: + for m in range(c.doctag_len): + our_saxpy(&c.vector_size, &c.doctag_locks[c.doctag_indexes[m]], &c.work[m * c.vector_size], + &ONE, &c.doctag_vectors[c.doctag_indexes[m] * c.vector_size], &ONE) + if c.learn_words: + for m in range(2 * c.window): + our_saxpy(&c.vector_size, &c.word_locks[c.window_indexes[m]], &c.work[(c.doctag_len + m) * c.vector_size], + &ONE, &c.word_vectors[c.window_indexes[m] * c.vector_size], &ONE) return result diff --git a/gensim/models/fast_line_sentence.h b/gensim/models/fast_line_sentence.h new file mode 100644 index 0000000000..cd242d75b3 --- /dev/null +++ b/gensim/models/fast_line_sentence.h @@ -0,0 +1,45 @@ +#pragma once + +#include +#include +#include + + +class FastLineSentence { +public: + explicit FastLineSentence() : is_eof_(false) { } + explicit FastLineSentence(const std::string& filename, size_t offset = 0) : filename_(filename), + fs_(filename), + offset_(offset), + is_eof_(false) { + fs_.seekg(offset_); + } + + std::vector ReadSentence() { + if (is_eof_) { + return {}; + } + std::string line, word; + std::getline(fs_, line); + std::vector res; + + std::istringstream iss(line); + while (iss >> word) { + res.push_back(word); + } + + if (fs_.eof()) { + is_eof_ = true; + } + return res; + } + + inline bool IsEof() const { return is_eof_; } + inline void Reset() { fs_.clear(); fs_.seekg(offset_); is_eof_ = false; } + +private: + std::string filename_; + std::ifstream fs_; + size_t offset_; + bool is_eof_; +}; diff --git a/gensim/models/fasttext.py b/gensim/models/fasttext.py index 7460aeaa8f..5bc1109cce 100644 --- a/gensim/models/fasttext.py +++ b/gensim/models/fasttext.py @@ -196,7 +196,7 @@ def train_batch_sg(model, sentences, alpha, work=None, neu1=None): start = max(0, pos - model.window + reduced_window) subwords_indices = (word.index,) - subwords_indices += model.wv.buckets_word[word.index] + subwords_indices += tuple(model.wv.buckets_word[word.index]) for pos2, word2 in enumerate(word_vocabs[start:(pos + model.window + 1 - reduced_window)], start): if pos2 != pos: # don't train on the `word` itself @@ -205,6 +205,21 @@ def train_batch_sg(model, sentences, alpha, work=None, neu1=None): result += len(word_vocabs) return result +try: + from gensim.models.fasttext_corpusfile import train_epoch_sg, train_epoch_cbow, CORPUSFILE_VERSION +except ImportError: + # file-based fasttext is not supported + CORPUSFILE_VERSION = -1 + + def train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, + _work, _neu1): + raise RuntimeError("Training with corpus_file argument is not supported") + + def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, + _work, _neu1): + raise RuntimeError("Training with corpus_file argument is not supported") + + FASTTEXT_FILEFORMAT_MAGIC = 793712314 @@ -241,7 +256,7 @@ class FastText(BaseWordEmbeddingsModel): for the internal structure of words, besides their concurrence counts. 
""" - def __init__(self, sentences=None, input_streams=None, sg=0, hs=0, size=100, alpha=0.025, window=5, min_count=5, + def __init__(self, sentences=None, corpus_file=None, sg=0, hs=0, size=100, alpha=0.025, window=5, min_count=5, max_vocab_size=None, word_ngrams=1, sample=1e-3, seed=1, workers=3, min_alpha=0.0001, negative=5, ns_exponent=0.75, cbow_mean=1, hashfxn=hash, iter=5, null_word=0, min_n=3, max_n=6, sorted_vocab=1, bucket=2000000, trim_rule=None, batch_words=MAX_WORDS_IN_BATCH, callbacks=()): @@ -256,9 +271,10 @@ def __init__(self, sentences=None, input_streams=None, sg=0, hs=0, size=100, alp or :class:`~gensim.models.word2vec.LineSentence` in :mod:`~gensim.models.word2vec` module for such examples. If you don't supply `sentences`, the model is left uninitialized -- use if you plan to initialize it in some other way. - input_streams : list or tuple of iterable of iterables - The tuple or list of `sentences`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (or none of them). min_count : int, optional The model ignores all words with total frequency lower than this. size : int, optional @@ -344,9 +360,9 @@ def __init__(self, sentences=None, input_streams=None, sg=0, hs=0, size=100, alp Initialize and train a `FastText` model:: >>> from gensim.models import FastText - >>> input_streams = [[["cat", "say", "meow"], ["dog", "say", "woof"]]] + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] >>> - >>> model = FastText(input_streams=input_streams, min_count=1) + >>> model = FastText(sentences, min_count=1) >>> say_vector = model['say'] # get vector for word >>> of_vector = model['of'] # get vector for out-of-vocab word @@ -367,7 +383,7 @@ def __init__(self, sentences=None, input_streams=None, sg=0, hs=0, size=100, alp self.wv.bucket = self.bucket super(FastText, self).__init__( - sentences=sentences, input_streams=input_streams, workers=workers, vector_size=size, epochs=iter, + sentences=sentences, corpus_file=corpus_file, workers=workers, vector_size=size, epochs=iter, callbacks=callbacks, batch_words=batch_words, trim_rule=trim_rule, sg=sg, alpha=alpha, window=window, seed=seed, hs=hs, negative=negative, cbow_mean=cbow_mean, min_alpha=min_alpha, fast_version=FAST_VERSION) @@ -421,21 +437,22 @@ def syn0_ngrams_lockf(self): def num_ngram_vectors(self): return self.wv.num_ngram_vectors - def build_vocab(self, sentences=None, input_streams=None, update=False, progress_per=10000, keep_raw_vocab=False, - trim_rule=None, workers=None, **kwargs): + def build_vocab(self, sentences=None, corpus_file=None, update=False, progress_per=10000, keep_raw_vocab=False, + trim_rule=None, **kwargs): """Build vocabulary from a sequence of sentences (can be a once-only generator stream). Each sentence must be a list of unicode strings. Parameters ---------- - sentences : iterable of list of str + sentences : iterable of list of str, optional Can be simply a list of lists of tokens, but for larger corpora, consider an iterable that streams the sentences directly from disk/network. 
See :class:`~gensim.models.word2vec.BrownCorpus`, :class:`~gensim.models.word2vec.Text8Corpus` or :class:`~gensim.models.word2vec.LineSentence` in :mod:`~gensim.models.word2vec` module for such examples. - input_streams : list or tuple of iterable of iterables - The tuple or list of `sentences`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (not both of them). update : bool If true, the new words in `sentences` will be added to model's vocab. progress_per : int @@ -456,9 +473,6 @@ def build_vocab(self, sentences=None, input_streams=None, update=False, progress * `count` (int) - the word's frequency count in the corpus * `min_count` (int) - the minimum count threshold. - workers : int - Used if `input_streams` is passed. Determines how many processes to use for vocab building. - Actual number of workers is determined by `min(len(input_streams), workers)`. **kwargs Additional key word parameters passed to :meth:`~gensim.models.base_any2vec.BaseWordEmbeddingsModel.build_vocab`. @@ -489,8 +503,8 @@ def build_vocab(self, sentences=None, input_streams=None, update=False, progress self.trainables.old_hash2index_len = len(self.wv.hash2index) return super(FastText, self).build_vocab( - sentences=sentences, input_streams=input_streams, update=update, progress_per=progress_per, - keep_raw_vocab=keep_raw_vocab, trim_rule=trim_rule, workers=workers, **kwargs) + sentences=sentences, corpus_file=corpus_file, update=update, progress_per=progress_per, + keep_raw_vocab=keep_raw_vocab, trim_rule=trim_rule, **kwargs) def _set_train_params(self, **kwargs): pass @@ -518,9 +532,9 @@ def estimate_memory(self, vocab_size=None, report=None): buckets = set() num_ngrams = 0 for word in self.wv.vocab: - ngrams = _compute_ngrams(word, self.min_n, self.max_n) + ngrams = _compute_ngrams(word, self.wv.min_n, self.wv.max_n) num_ngrams += len(ngrams) - buckets.update(_ft_hash(ng) % self.bucket for ng in ngrams) + buckets.update(_ft_hash(ng) % self.trainables.bucket for ng in ngrams) num_buckets = len(buckets) report['syn0_ngrams'] = len(buckets) * vec_size # A tuple (48 bytes) with num_ngrams_word ints (8 bytes) for each word @@ -538,6 +552,19 @@ def estimate_memory(self, vocab_size=None, report=None): ) return report + def _do_train_epoch(self, corpus_file, thread_id, offset, cython_vocab, thread_private_mem, cur_epoch, + total_examples=None, total_words=None, **kwargs): + work, neu1 = thread_private_mem + + if self.sg: + examples, tally, raw_tally = train_epoch_sg(self, corpus_file, offset, cython_vocab, cur_epoch, + total_examples, total_words, work, neu1) + else: + examples, tally, raw_tally = train_epoch_cbow(self, corpus_file, offset, cython_vocab, cur_epoch, + total_examples, total_words, work, neu1) + + return examples, tally, raw_tally + def _do_train_job(self, sentences, alpha, inits): """Train a single batch of sentences. Return 2-tuple `(effective word count after ignoring unknown words and sentence length trimming, total word count)`. 
@@ -569,7 +596,7 @@ def _do_train_job(self, sentences, alpha, inits): return tally, self._raw_word_count(sentences) - def train(self, sentences=None, input_streams=None, total_examples=None, total_words=None, + def train(self, sentences=None, corpus_file=None, total_examples=None, total_words=None, epochs=None, start_alpha=None, end_alpha=None, word_count=0, queue_factor=2, report_delay=1.0, callbacks=(), **kwargs): """Update the model's neural weights from a sequence of sentences (can be a once-only generator stream). @@ -587,14 +614,15 @@ def train(self, sentences=None, input_streams=None, total_examples=None, total_w Parameters ---------- - sentences : {iterable of iterables, list or tuple of iterable of iterables} + sentences : iterable of list of str, optional The `sentences` iterable can be simply a list of lists of tokens, but for larger corpora, consider an iterable that streams the sentences directly from disk/network. See :class:`~gensim.models.word2vec.BrownCorpus`, :class:`~gensim.models.word2vec.Text8Corpus` or :class:`~gensim.models.word2vec.LineSentence` in :mod:`~gensim.models.word2vec` module for such examples. - input_streams : list or tuple of iterable of iterables - The tuple or list of `sentences`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + If you use this argument instead of `sentences`, you must provide `total_words` argument as well. Only one + of `sentences` or `corpus_file` arguments need to be passed (not both of them). total_examples : int Count of sentences. total_words : int @@ -633,7 +661,7 @@ def train(self, sentences=None, input_streams=None, total_examples=None, total_w """ super(FastText, self).train( - sentences=sentences, input_streams=input_streams, total_examples=total_examples, total_words=total_words, + sentences=sentences, corpus_file=corpus_file, total_examples=total_examples, total_words=total_words, epochs=epochs, start_alpha=start_alpha, end_alpha=end_alpha, word_count=word_count, queue_factor=queue_factor, report_delay=report_delay, callbacks=callbacks) self.trainables.get_vocab_word_vecs(self.wv) @@ -977,7 +1005,7 @@ def init_ngrams_weights(self, wv, update=False, vocabulary=None): wv.hash2index[ngram_hash] = len(ngram_indices) ngram_indices.append(ngram_hash) buckets.append(wv.hash2index[ngram_hash]) - wv.buckets_word[vocab.index] = tuple(buckets) + wv.buckets_word[vocab.index] = np.array(buckets, dtype=np.uint32) wv.num_ngram_vectors = len(ngram_indices) logger.info("Total number of ngrams is %d", wv.num_ngram_vectors) @@ -996,7 +1024,7 @@ def init_ngrams_weights(self, wv, update=False, vocabulary=None): wv.hash2index[ngram_hash] = num_new_ngrams + self.old_hash2index_len num_new_ngrams += 1 buckets.append(wv.hash2index[ngram_hash]) - wv.buckets_word[vocab.index] = tuple(buckets) + wv.buckets_word[vocab.index] = np.array(buckets, dtype=np.uint32) wv.num_ngram_vectors += num_new_ngrams logger.info("Number of new ngrams is %d", num_new_ngrams) diff --git a/gensim/models/fasttext_corpusfile.cpp b/gensim/models/fasttext_corpusfile.cpp new file mode 100644 index 0000000000..861a7df26f --- /dev/null +++ b/gensim/models/fasttext_corpusfile.cpp @@ -0,0 +1,9370 @@ +/* Generated by Cython 0.28.2 */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development 
version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. +#else +#define CYTHON_ABI "0_28_2" +#define CYTHON_FUTURE_DIVISION 0 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + 
#if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (0 && PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif 
__has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef __cplusplus + #error "Cython files generated with the C++ option must be compiled with a C++ compiler." +#endif +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #else + #define CYTHON_INLINE inline + #endif +#endif +template +void __Pyx_call_destructor(T& x) { + x.~T(); +} +template +class __Pyx_FakeReference { + public: + __Pyx_FakeReference() : ptr(NULL) { } + __Pyx_FakeReference(const T& ref) : ptr(const_cast(&ref)) { } + T *operator->() { return ptr; } + T *operator&() { return ptr; } + operator T&() { return *ptr; } + template bool operator ==(U other) { return *ptr == other; } + template bool operator !=(U other) { return *ptr != other; } + private: + T *ptr; +}; + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + 
#define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; // PyThread_create_key reports success always +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif // TSS (Thread Specific Storage) API +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? 
PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : (Py_INCREF(func), func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__gensim__models__fasttext_corpusfile +#define __PYX_HAVE_API__gensim__models__fasttext_corpusfile +/* Early includes */ +#include +#include +#include "numpy/arrayobject.h" +#include "numpy/ufuncobject.h" +#include "ios" +#include "new" +#include "stdexcept" +#include "typeinfo" +#include +#include +#include "voidptr.h" +#include +#include +#include "fast_line_sentence.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_empty_tuple; +static PyObject 
*__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + +/* Header.proto */ +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include + #else + #include + #endif +#endif +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +static const char *__pyx_f[] = { + "gensim/models/fasttext_corpusfile.pyx", + "__init__.pxd", + "type.pxd", + "gensim/models/word2vec_corpusfile.pxd", +}; +/* NoFastGil.proto */ +#define __Pyx_PyGILState_Ensure PyGILState_Ensure +#define __Pyx_PyGILState_Release PyGILState_Release +#define __Pyx_FastGIL_Remember() +#define __Pyx_FastGIL_Forget() +#define __Pyx_FastGilFuncInit() + +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 + * # in Cython to enable them only on the right systems. + * + * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + */ +typedef npy_int8 __pyx_t_5numpy_int8_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 + * + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t + */ +typedef npy_int16 __pyx_t_5numpy_int16_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< + * ctypedef npy_int64 int64_t + * #ctypedef npy_int96 int96_t + */ +typedef npy_int32 __pyx_t_5numpy_int32_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< + * #ctypedef npy_int96 int96_t + * #ctypedef npy_int128 int128_t + */ +typedef npy_int64 __pyx_t_5numpy_int64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 + * #ctypedef npy_int128 int128_t + * + * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + */ +typedef npy_uint8 __pyx_t_5numpy_uint8_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 + * + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t + */ +typedef npy_uint16 __pyx_t_5numpy_uint16_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< + * ctypedef npy_uint64 uint64_t + * #ctypedef npy_uint96 uint96_t + */ +typedef npy_uint32 __pyx_t_5numpy_uint32_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< + * #ctypedef npy_uint96 uint96_t + * #ctypedef npy_uint128 uint128_t + */ +typedef npy_uint64 
__pyx_t_5numpy_uint64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 + * #ctypedef npy_uint128 uint128_t + * + * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< + * ctypedef npy_float64 float64_t + * #ctypedef npy_float80 float80_t + */ +typedef npy_float32 __pyx_t_5numpy_float32_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 + * + * ctypedef npy_float32 float32_t + * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< + * #ctypedef npy_float80 float80_t + * #ctypedef npy_float128 float128_t + */ +typedef npy_float64 __pyx_t_5numpy_float64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 + * # The int types are mapped a bit surprising -- + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t + */ +typedef npy_long __pyx_t_5numpy_int_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong longlong_t + * + */ +typedef npy_longlong __pyx_t_5numpy_long_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_ulong uint_t + */ +typedef npy_longlong __pyx_t_5numpy_longlong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 + * ctypedef npy_longlong longlong_t + * + * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t + */ +typedef npy_ulong __pyx_t_5numpy_uint_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 + * + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulonglong_t + * + */ +typedef npy_ulonglong __pyx_t_5numpy_ulong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_intp intp_t + */ +typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 + * ctypedef npy_ulonglong ulonglong_t + * + * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< + * ctypedef npy_uintp uintp_t + * + */ +typedef npy_intp __pyx_t_5numpy_intp_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 + * + * ctypedef npy_intp intp_t + * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< + * + * ctypedef npy_double float_t + */ +typedef npy_uintp __pyx_t_5numpy_uintp_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 + * ctypedef npy_uintp uintp_t + * + * ctypedef npy_double float_t # <<<<<<<<<<<<<< + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t + */ +typedef npy_double __pyx_t_5numpy_float_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 + * + * ctypedef npy_double float_t + * ctypedef npy_double double_t # <<<<<<<<<<<<<< + * ctypedef npy_longdouble longdouble_t + * + */ +typedef npy_double __pyx_t_5numpy_double_t; + +/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 + * ctypedef npy_double float_t + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cfloat cfloat_t + */ +typedef npy_longdouble __pyx_t_5numpy_longdouble_t; + +/* "word2vec_inner.pxd":19 + * void* PyCObject_AsVoidPtr(object obj) + * + * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< + * + * # BLAS routine signatures + */ +typedef __pyx_t_5numpy_float32_t __pyx_t_6gensim_6models_14word2vec_inner_REAL_t; + +/* "gensim/models/word2vec_corpusfile.pxd":21 + * cimport numpy as np + * + * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< + * + * + */ +typedef __pyx_t_5numpy_float32_t __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t; + +/* "gensim/models/fasttext_corpusfile.pyx":40 + * ) + * + * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< + * DEF MAX_SENTENCE_LEN = 10000 + * DEF MAX_SUBWORDS = 1000 + */ +typedef __pyx_t_5numpy_float32_t __pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t; +/* Declarations.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< float > __pyx_t_float_complex; + #else + typedef float _Complex __pyx_t_float_complex; + #endif +#else + typedef struct { float real, imag; } __pyx_t_float_complex; +#endif +static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float); + +/* Declarations.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< double > __pyx_t_double_complex; + #else + typedef double _Complex __pyx_t_double_complex; + #endif +#else + typedef struct { double real, imag; } __pyx_t_double_complex; +#endif +static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double); + + +/*--- Type declarations ---*/ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 + * ctypedef npy_longdouble longdouble_t + * + * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t + */ +typedef npy_cfloat __pyx_t_5numpy_cfloat_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 + * + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< + * ctypedef npy_clongdouble clongdouble_t + * + */ +typedef npy_cdouble __pyx_t_5numpy_cdouble_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cdouble complex_t + */ +typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 + * ctypedef npy_clongdouble clongdouble_t + * + * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew1(a): + */ +typedef npy_cdouble __pyx_t_5numpy_complex_t; +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig; +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config; + +/* "word2vec_inner.pxd":22 + * + * # BLAS routine signatures + * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float 
*X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const *, float const *, int const *, float *, int const *); + +/* "word2vec_inner.pxd":23 + * # BLAS routine signatures + * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); + +/* "word2vec_inner.pxd":24 + * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil + */ +typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "word2vec_inner.pxd":25 + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil + */ +typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "word2vec_inner.pxd":26 + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil + * + */ +typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const *, float const *, int const *); + +/* "word2vec_inner.pxd":27 + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< + * + * cdef scopy_ptr scopy + */ +typedef void 
(*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, float const *, float const *, int const *); + +/* "word2vec_inner.pxd":44 + * + * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() + * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * + */ +typedef __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "word2vec_inner.pxd":45 + * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() + * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * + * cdef our_dot_ptr our_dot + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); + +/* "word2vec_inner.pxd":51 + * + * + * cdef struct Word2VecConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, compute_loss, size, window, cbow_mean, workers + * REAL_t running_training_loss, alpha + */ +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig { + int hs; + int negative; + int sample; + int compute_loss; + int size; + int window; + int cbow_mean; + int workers; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t running_training_loss; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + int sentence_idx[(0x2710 + 1)]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; +}; + +/* "word2vec_inner.pxd":125 + * + * + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=*) # <<<<<<<<<<<<<< + */ +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config { + int __pyx_n; + PyObject *_neu1; +}; +struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig; + +/* "gensim/models/fasttext_inner.pxd":22 + * + * + * cdef struct FastTextConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, size, window, cbow_mean, workers + * REAL_t alpha + */ +struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig { + int hs; + int negative; + int sample; + int size; + int window; + int cbow_mean; + int workers; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0_vocab; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks_vocab; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0_ngrams; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks_ngrams; + 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + int sentence_idx[(0x2710 + 1)]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; + int subwords_idx_len[0x2710]; + __pyx_t_5numpy_uint32_t *subwords_idx[0x2710]; +}; +struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem; + +/* "gensim/models/word2vec_corpusfile.pxd":47 + * + * + * cdef struct VocabItem: # <<<<<<<<<<<<<< + * long long sample_int + * np.uint32_t index + */ +struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem { + PY_LONG_LONG sample_int; + __pyx_t_5numpy_uint32_t index; + __pyx_t_5numpy_uint8_t *code; + int code_len; + __pyx_t_5numpy_uint32_t *point; + int subword_idx_len; + __pyx_t_5numpy_uint32_t *subword_idx; +}; + +/* "gensim/models/word2vec_corpusfile.pxd":59 + * + * + * ctypedef unordered_map[string, VocabItem] cvocab_t # <<<<<<<<<<<<<< + * + * cdef class CythonVocab: + */ +typedef std::unordered_map __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t; + +/* "gensim/models/word2vec_corpusfile.pxd":33 + * + * + * cdef class CythonLineSentence: # <<<<<<<<<<<<<< + * cdef FastLineSentence* _thisptr + * cdef public bytes source + */ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence { + PyObject_HEAD + struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_vtab; + FastLineSentence *_thisptr; + PyObject *source; + size_t max_sentence_length; + size_t max_words_in_batch; + size_t offset; + std::vector > buf_data; +}; + + +/* "gensim/models/word2vec_corpusfile.pxd":61 + * ctypedef unordered_map[string, VocabItem] cvocab_t + * + * cdef class CythonVocab: # <<<<<<<<<<<<<< + * cdef cvocab_t vocab + * cdef subword_arrays + */ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab { + PyObject_HEAD + struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_vtab; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t vocab; + PyObject *subword_arrays; +}; + + + +/* "gensim/models/word2vec_corpusfile.pxd":33 + * + * + * cdef class CythonLineSentence: # <<<<<<<<<<<<<< + * cdef FastLineSentence* _thisptr + * cdef public bytes source + */ + +struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence { + bool (*is_eof)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector (*read_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector > (*_read_chunked_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector > (*_chunk_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, std::vector , int __pyx_skip_dispatch); + void (*reset)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector > (*next_batch)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); +}; +static struct 
__pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; + + +/* "gensim/models/word2vec_corpusfile.pxd":61 + * ctypedef unordered_map[string, VocabItem] cvocab_t + * + * cdef class CythonVocab: # <<<<<<<<<<<<<< + * cdef cvocab_t vocab + * cdef subword_arrays + */ + +struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab { + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *(*get_vocab_ptr)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *); +}; +static struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} 
while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* 
PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* RaiseNoneIterError.proto */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* GetVTable.proto */ +static void* __Pyx_GetVtable(PyObject *dict); + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* None.proto */ +static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void); /* proto */ + +/* RealImag.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #define __Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif +#if defined(__cplusplus) && CYTHON_CCOMPLEX\ + && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103) + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + +/* Arithmetic.proto */ +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq_float(a, b) ((a)==(b)) + #define __Pyx_c_sum_float(a, b) ((a)+(b)) + #define __Pyx_c_diff_float(a, b) ((a)-(b)) + #define __Pyx_c_prod_float(a, b) ((a)*(b)) + #define __Pyx_c_quot_float(a, b) ((a)/(b)) + #define __Pyx_c_neg_float(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero_float(z) ((z)==(float)0) + #define __Pyx_c_conj_float(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs_float(z) (::std::abs(z)) + #define __Pyx_c_pow_float(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero_float(z) ((z)==0) + #define __Pyx_c_conj_float(z) (conjf(z)) + #if 1 + #define __Pyx_c_abs_float(z) (cabsf(z)) + #define __Pyx_c_pow_float(a, b) (cpowf(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex); + static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex); + #if 1 + static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex, __pyx_t_float_complex); + #endif +#endif + +/* Arithmetic.proto */ +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq_double(a, b) ((a)==(b)) + #define 
__Pyx_c_sum_double(a, b) ((a)+(b)) + #define __Pyx_c_diff_double(a, b) ((a)-(b)) + #define __Pyx_c_prod_double(a, b) ((a)*(b)) + #define __Pyx_c_quot_double(a, b) ((a)/(b)) + #define __Pyx_c_neg_double(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero_double(z) ((z)==(double)0) + #define __Pyx_c_conj_double(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs_double(z) (::std::abs(z)) + #define __Pyx_c_pow_double(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero_double(z) ((z)==0) + #define __Pyx_c_conj_double(z) (conj(z)) + #if 1 + #define __Pyx_c_abs_double(z) (cabs(z)) + #define __Pyx_c_pow_double(a, b) (cpow(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex); + static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex); + #if 1 + static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex, __pyx_t_double_complex); + #endif +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value); + +/* None.proto */ +#include + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* PyIdentifierFromString.proto */ +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +/* ModuleImport.proto */ +static PyObject *__Pyx_ImportModule(const char *name); + +/* TypeImport.proto */ +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); + +/* VoidPtrImport.proto */ +static int 
__Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig); + +/* FunctionImport.proto */ +static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from 'numpy' */ + +/* Module declarations from 'numpy' */ +static PyTypeObject *__pyx_ptype_5numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0; +static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ + +/* Module declarations from 'libcpp.string' */ + +/* Module declarations from 'libcpp.vector' */ + +/* Module declarations from 'gensim.models.word2vec_inner' */ +static __pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_scopy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_scopy (*__pyx_vp_6gensim_6models_14word2vec_inner_scopy) +static __pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_saxpy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_saxpy (*__pyx_vp_6gensim_6models_14word2vec_inner_saxpy) +static __pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_sdot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_sdot (*__pyx_vp_6gensim_6models_14word2vec_inner_sdot) +static __pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_dsdot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_dsdot (*__pyx_vp_6gensim_6models_14word2vec_inner_dsdot) +static __pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_snrm2 = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_snrm2 (*__pyx_vp_6gensim_6models_14word2vec_inner_snrm2) +static __pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_sscal = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_sscal (*__pyx_vp_6gensim_6models_14word2vec_inner_sscal) +static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE)[0x3E8] = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE (*__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE) +static __pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_our_dot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_our_dot (*__pyx_vp_6gensim_6models_14word2vec_inner_our_dot) +static __pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy (*__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy) +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_random_int32)(unsigned 
PY_LONG_LONG *); /*proto*/ + +/* Module declarations from 'gensim.models.fasttext_inner' */ +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_neg)(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static void (*__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_hs)(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_neg)(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static void (*__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_hs)(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static PyObject *(*__pyx_f_6gensim_6models_14fasttext_inner_init_ft_config)(struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig *, PyObject *, PyObject *, PyObject *, PyObject *); /*proto*/ + +/* Module declarations from 'libcpp.utility' */ + +/* Module declarations from 'libcpp.unordered_map' */ + +/* Module declarations from 'libcpp' */ + +/* Module declarations from 'gensim.models.word2vec_corpusfile' */ +static PyTypeObject *__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = 0; +static PyTypeObject 
*__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab = 0; +static __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (*__pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha)(__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int); /*proto*/ +static __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (*__pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha)(__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int, int, int, int, int); /*proto*/ + +/* Module declarations from 'gensim.models.fasttext_corpusfile' */ +static void __pyx_f_6gensim_6models_19fasttext_corpusfile_prepare_c_structures_for_batch(std::vector > &, int, int, int, int *, int *, int *, unsigned PY_LONG_LONG *, __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *, int *, __pyx_t_5numpy_uint32_t *, int *, __pyx_t_5numpy_uint8_t **, __pyx_t_5numpy_uint32_t **, __pyx_t_5numpy_uint32_t *, int *, __pyx_t_5numpy_uint32_t **); /*proto*/ +#define __Pyx_MODULE_NAME "gensim.models.fasttext_corpusfile" +extern int __pyx_module_is_main_gensim__models__fasttext_corpusfile; +int __pyx_module_is_main_gensim__models__fasttext_corpusfile = 0; + +/* Implementation of 'gensim.models.fasttext_corpusfile' */ +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_ValueError; +static PyObject *__pyx_builtin_RuntimeError; +static PyObject *__pyx_builtin_ImportError; +static const char __pyx_k_c[] = "c"; +static const char __pyx_k_i[] = "i"; +static const char __pyx_k_j[] = "j"; +static const char __pyx_k_k[] = "k"; +static const char __pyx_k_l1[] = "_l1"; +static const char __pyx_k_np[] = "np"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_neu1[] = "_neu1"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_work[] = "_work"; +static const char __pyx_k_alpha[] = "alpha"; +static const char __pyx_k_model[] = "model"; +static const char __pyx_k_numpy[] = "numpy"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_vocab[] = "vocab"; +static const char __pyx_k_epochs[] = "epochs"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_offset[] = "offset"; +static const char __pyx_k_alpha_2[] = "_alpha"; +static const char __pyx_k_idx_end[] = "idx_end"; +static const char __pyx_k_sent_idx[] = "sent_idx"; +static const char __pyx_k_cur_epoch[] = "_cur_epoch"; +static const char __pyx_k_end_alpha[] = "end_alpha"; +static const char __pyx_k_idx_start[] = "idx_start"; +static const char __pyx_k_min_alpha[] = "min_alpha"; +static const char __pyx_k_sentences[] = "sentences"; +static const char __pyx_k_ValueError[] = "ValueError"; +static const char __pyx_k_num_epochs[] = "num_epochs"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_ImportError[] = "ImportError"; +static const char __pyx_k_corpus_file[] = "corpus_file"; +static const char __pyx_k_cur_epoch_2[] = "cur_epoch"; +static const char __pyx_k_start_alpha[] = "start_alpha"; +static const char __pyx_k_total_words[] = "total_words"; +static const char __pyx_k_RuntimeError[] = "RuntimeError"; +static const char __pyx_k_cython_vocab[] = "_cython_vocab"; +static const char __pyx_k_input_stream[] = "input_stream"; +static const char __pyx_k_expected_words[] = "_expected_words"; +static const char __pyx_k_train_epoch_sg[] = "train_epoch_sg"; +static const char __pyx_k_effective_words[] = "effective_words"; 
+static const char __pyx_k_total_sentences[] = "total_sentences"; +static const char __pyx_k_expected_words_2[] = "expected_words"; +static const char __pyx_k_train_epoch_cbow[] = "train_epoch_cbow"; +static const char __pyx_k_expected_examples[] = "_expected_examples"; +static const char __pyx_k_CORPUSFILE_VERSION[] = "CORPUSFILE_VERSION"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_effective_sentences[] = "effective_sentences"; +static const char __pyx_k_expected_examples_2[] = "expected_examples"; +static const char __pyx_k_total_effective_words[] = "total_effective_words"; +static const char __pyx_k_ndarray_is_not_C_contiguous[] = "ndarray is not C contiguous"; +static const char __pyx_k_numpy_core_multiarray_failed_to[] = "numpy.core.multiarray failed to import"; +static const char __pyx_k_unknown_dtype_code_in_numpy_pxd[] = "unknown dtype code in numpy.pxd (%d)"; +static const char __pyx_k_Format_string_allocated_too_shor[] = "Format string allocated too short, see comment in numpy.pxd"; +static const char __pyx_k_Non_native_byte_order_not_suppor[] = "Non-native byte order not supported"; +static const char __pyx_k_Optimized_cython_functions_for_f[] = "Optimized cython functions for file-based training :class:`~gensim.models.fasttext.FastText` model."; +static const char __pyx_k_gensim_models_fasttext_corpusfil[] = "gensim/models/fasttext_corpusfile.pyx"; +static const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = "ndarray is not Fortran contiguous"; +static const char __pyx_k_numpy_core_umath_failed_to_impor[] = "numpy.core.umath failed to import"; +static const char __pyx_k_Format_string_allocated_too_shor_2[] = "Format string allocated too short."; +static const char __pyx_k_gensim_models_fasttext_corpusfil_2[] = "gensim.models.fasttext_corpusfile"; +static PyObject *__pyx_n_s_CORPUSFILE_VERSION; +static PyObject *__pyx_kp_u_Format_string_allocated_too_shor; +static PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2; +static PyObject *__pyx_n_s_ImportError; +static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor; +static PyObject *__pyx_n_s_RuntimeError; +static PyObject *__pyx_n_s_ValueError; +static PyObject *__pyx_n_s_alpha; +static PyObject *__pyx_n_s_alpha_2; +static PyObject *__pyx_n_s_c; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_corpus_file; +static PyObject *__pyx_n_s_cur_epoch; +static PyObject *__pyx_n_s_cur_epoch_2; +static PyObject *__pyx_n_s_cython_vocab; +static PyObject *__pyx_n_s_effective_sentences; +static PyObject *__pyx_n_s_effective_words; +static PyObject *__pyx_n_s_end_alpha; +static PyObject *__pyx_n_s_epochs; +static PyObject *__pyx_n_s_expected_examples; +static PyObject *__pyx_n_s_expected_examples_2; +static PyObject *__pyx_n_s_expected_words; +static PyObject *__pyx_n_s_expected_words_2; +static PyObject *__pyx_kp_s_gensim_models_fasttext_corpusfil; +static PyObject *__pyx_n_s_gensim_models_fasttext_corpusfil_2; +static PyObject *__pyx_n_s_i; +static PyObject *__pyx_n_s_idx_end; +static PyObject *__pyx_n_s_idx_start; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_input_stream; +static PyObject *__pyx_n_s_j; +static PyObject *__pyx_n_s_k; +static PyObject *__pyx_n_s_l1; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_min_alpha; +static PyObject *__pyx_n_s_model; +static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous; +static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou; +static PyObject *__pyx_n_s_neu1; 
+static PyObject *__pyx_n_s_np; +static PyObject *__pyx_n_s_num_epochs; +static PyObject *__pyx_n_s_numpy; +static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to; +static PyObject *__pyx_kp_s_numpy_core_umath_failed_to_impor; +static PyObject *__pyx_n_s_offset; +static PyObject *__pyx_n_s_pyx_vtable; +static PyObject *__pyx_n_s_range; +static PyObject *__pyx_n_s_sent_idx; +static PyObject *__pyx_n_s_sentences; +static PyObject *__pyx_n_s_start_alpha; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_total_effective_words; +static PyObject *__pyx_n_s_total_sentences; +static PyObject *__pyx_n_s_total_words; +static PyObject *__pyx_n_s_train_epoch_cbow; +static PyObject *__pyx_n_s_train_epoch_sg; +static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd; +static PyObject *__pyx_n_s_vocab; +static PyObject *__pyx_n_s_work; +static PyObject *__pyx_pf_6gensim_6models_19fasttext_corpusfile_train_epoch_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v__work, PyObject *__pyx_v__l1); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19fasttext_corpusfile_2train_epoch_cbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v__work, PyObject *__pyx_v__neu1); /* proto */ +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */ +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ +static PyObject *__pyx_int_1; +static PyObject *__pyx_tuple_; +static PyObject *__pyx_tuple__2; +static PyObject *__pyx_tuple__3; +static PyObject *__pyx_tuple__4; +static PyObject *__pyx_tuple__5; +static PyObject *__pyx_tuple__6; +static PyObject *__pyx_tuple__7; +static PyObject *__pyx_tuple__8; +static PyObject *__pyx_tuple__9; +static PyObject *__pyx_tuple__10; +static PyObject *__pyx_tuple__12; +static PyObject *__pyx_codeobj__11; +static PyObject *__pyx_codeobj__13; +/* Late includes */ + +/* "gensim/models/fasttext_corpusfile.pyx":45 + * + * + * cdef void prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * vector[vector[string]] &sentences, int sample, int hs, int window, int *total_words, + * int *effective_words, int *effective_sentences, unsigned long long *next_random, cvocab_t *vocab, + */ + +static void __pyx_f_6gensim_6models_19fasttext_corpusfile_prepare_c_structures_for_batch(std::vector > &__pyx_v_sentences, int __pyx_v_sample, int __pyx_v_hs, int __pyx_v_window, int *__pyx_v_total_words, int *__pyx_v_effective_words, int *__pyx_v_effective_sentences, unsigned PY_LONG_LONG *__pyx_v_next_random, __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_v_vocab, int *__pyx_v_sentence_idx, __pyx_t_5numpy_uint32_t *__pyx_v_indexes, int *__pyx_v_codelens, __pyx_t_5numpy_uint8_t **__pyx_v_codes, __pyx_t_5numpy_uint32_t **__pyx_v_points, __pyx_t_5numpy_uint32_t *__pyx_v_reduced_windows, int *__pyx_v_subwords_idx_len, __pyx_t_5numpy_uint32_t **__pyx_v_subwords_idx) { + struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem __pyx_v_word; + std::string __pyx_v_token; + std::vector __pyx_v_sent; + 
int __pyx_v_i; + std::vector > ::iterator __pyx_t_1; + std::vector __pyx_t_2; + int __pyx_t_3; + long __pyx_t_4; + std::vector ::iterator __pyx_t_5; + std::string __pyx_t_6; + int __pyx_t_7; + __pyx_t_5numpy_uint32_t __pyx_t_8; + int __pyx_t_9; + __pyx_t_5numpy_uint32_t *__pyx_t_10; + __pyx_t_5numpy_uint8_t *__pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + + /* "gensim/models/fasttext_corpusfile.pyx":54 + * cdef vector[string] sent + * + * sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< + * for sent in sentences: + * if sent.empty(): + */ + (__pyx_v_sentence_idx[0]) = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":55 + * + * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * for sent in sentences: # <<<<<<<<<<<<<< + * if sent.empty(): + * continue # ignore empty sentences; leave effective_sentences unchanged + */ + __pyx_t_1 = __pyx_v_sentences.begin(); + for (;;) { + if (!(__pyx_t_1 != __pyx_v_sentences.end())) break; + __pyx_t_2 = *__pyx_t_1; + ++__pyx_t_1; + __pyx_v_sent = __pyx_t_2; + + /* "gensim/models/fasttext_corpusfile.pyx":56 + * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * for sent in sentences: + * if sent.empty(): # <<<<<<<<<<<<<< + * continue # ignore empty sentences; leave effective_sentences unchanged + * total_words[0] += sent.size() + */ + __pyx_t_3 = (__pyx_v_sent.empty() != 0); + if (__pyx_t_3) { + + /* "gensim/models/fasttext_corpusfile.pyx":57 + * for sent in sentences: + * if sent.empty(): + * continue # ignore empty sentences; leave effective_sentences unchanged # <<<<<<<<<<<<<< + * total_words[0] += sent.size() + * + */ + goto __pyx_L3_continue; + + /* "gensim/models/fasttext_corpusfile.pyx":56 + * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * for sent in sentences: + * if sent.empty(): # <<<<<<<<<<<<<< + * continue # ignore empty sentences; leave effective_sentences unchanged + * total_words[0] += sent.size() + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":58 + * if sent.empty(): + * continue # ignore empty sentences; leave effective_sentences unchanged + * total_words[0] += sent.size() # <<<<<<<<<<<<<< + * + * for token in sent: + */ + __pyx_t_4 = 0; + (__pyx_v_total_words[__pyx_t_4]) = ((__pyx_v_total_words[__pyx_t_4]) + __pyx_v_sent.size()); + + /* "gensim/models/fasttext_corpusfile.pyx":60 + * total_words[0] += sent.size() + * + * for token in sent: # <<<<<<<<<<<<<< + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): + */ + __pyx_t_5 = __pyx_v_sent.begin(); + for (;;) { + if (!(__pyx_t_5 != __pyx_v_sent.end())) break; + __pyx_t_6 = *__pyx_t_5; + ++__pyx_t_5; + __pyx_v_token = __pyx_t_6; + + /* "gensim/models/fasttext_corpusfile.pyx":62 + * for token in sent: + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): # <<<<<<<<<<<<<< + * continue + * + */ + __pyx_t_3 = (((__pyx_v_vocab[0]).find(__pyx_v_token) == (__pyx_v_vocab[0]).end()) != 0); + if (__pyx_t_3) { + + /* "gensim/models/fasttext_corpusfile.pyx":63 + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): + * continue # <<<<<<<<<<<<<< + * + * word = vocab[0][token] + */ + goto __pyx_L6_continue; + + /* "gensim/models/fasttext_corpusfile.pyx":62 + * for token in sent: + * # leaving `effective_words` unchanged = shortening the 
sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): # <<<<<<<<<<<<<< + * continue + * + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":65 + * continue + * + * word = vocab[0][token] # <<<<<<<<<<<<<< + * if sample and word.sample_int < random_int32(next_random): + * continue + */ + __pyx_v_word = ((__pyx_v_vocab[0])[__pyx_v_token]); + + /* "gensim/models/fasttext_corpusfile.pyx":66 + * + * word = vocab[0][token] + * if sample and word.sample_int < random_int32(next_random): # <<<<<<<<<<<<<< + * continue + * indexes[effective_words[0]] = word.index + */ + __pyx_t_7 = (__pyx_v_sample != 0); + if (__pyx_t_7) { + } else { + __pyx_t_3 = __pyx_t_7; + goto __pyx_L10_bool_binop_done; + } + __pyx_t_7 = ((__pyx_v_word.sample_int < __pyx_f_6gensim_6models_14word2vec_inner_random_int32(__pyx_v_next_random)) != 0); + __pyx_t_3 = __pyx_t_7; + __pyx_L10_bool_binop_done:; + if (__pyx_t_3) { + + /* "gensim/models/fasttext_corpusfile.pyx":67 + * word = vocab[0][token] + * if sample and word.sample_int < random_int32(next_random): + * continue # <<<<<<<<<<<<<< + * indexes[effective_words[0]] = word.index + * subwords_idx_len[effective_words[0]] = word.subword_idx_len + */ + goto __pyx_L6_continue; + + /* "gensim/models/fasttext_corpusfile.pyx":66 + * + * word = vocab[0][token] + * if sample and word.sample_int < random_int32(next_random): # <<<<<<<<<<<<<< + * continue + * indexes[effective_words[0]] = word.index + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":68 + * if sample and word.sample_int < random_int32(next_random): + * continue + * indexes[effective_words[0]] = word.index # <<<<<<<<<<<<<< + * subwords_idx_len[effective_words[0]] = word.subword_idx_len + * subwords_idx[effective_words[0]] = word.subword_idx + */ + __pyx_t_8 = __pyx_v_word.index; + (__pyx_v_indexes[(__pyx_v_effective_words[0])]) = __pyx_t_8; + + /* "gensim/models/fasttext_corpusfile.pyx":69 + * continue + * indexes[effective_words[0]] = word.index + * subwords_idx_len[effective_words[0]] = word.subword_idx_len # <<<<<<<<<<<<<< + * subwords_idx[effective_words[0]] = word.subword_idx + * + */ + __pyx_t_9 = __pyx_v_word.subword_idx_len; + (__pyx_v_subwords_idx_len[(__pyx_v_effective_words[0])]) = __pyx_t_9; + + /* "gensim/models/fasttext_corpusfile.pyx":70 + * indexes[effective_words[0]] = word.index + * subwords_idx_len[effective_words[0]] = word.subword_idx_len + * subwords_idx[effective_words[0]] = word.subword_idx # <<<<<<<<<<<<<< + * + * if hs: + */ + __pyx_t_10 = __pyx_v_word.subword_idx; + (__pyx_v_subwords_idx[(__pyx_v_effective_words[0])]) = __pyx_t_10; + + /* "gensim/models/fasttext_corpusfile.pyx":72 + * subwords_idx[effective_words[0]] = word.subword_idx + * + * if hs: # <<<<<<<<<<<<<< + * codelens[effective_words[0]] = word.code_len + * codes[effective_words[0]] = word.code + */ + __pyx_t_3 = (__pyx_v_hs != 0); + if (__pyx_t_3) { + + /* "gensim/models/fasttext_corpusfile.pyx":73 + * + * if hs: + * codelens[effective_words[0]] = word.code_len # <<<<<<<<<<<<<< + * codes[effective_words[0]] = word.code + * points[effective_words[0]] = word.point + */ + __pyx_t_9 = __pyx_v_word.code_len; + (__pyx_v_codelens[(__pyx_v_effective_words[0])]) = __pyx_t_9; + + /* "gensim/models/fasttext_corpusfile.pyx":74 + * if hs: + * codelens[effective_words[0]] = word.code_len + * codes[effective_words[0]] = word.code # <<<<<<<<<<<<<< + * points[effective_words[0]] = word.point + * + */ + __pyx_t_11 = __pyx_v_word.code; + (__pyx_v_codes[(__pyx_v_effective_words[0])]) = __pyx_t_11; + + /* 
"gensim/models/fasttext_corpusfile.pyx":75 + * codelens[effective_words[0]] = word.code_len + * codes[effective_words[0]] = word.code + * points[effective_words[0]] = word.point # <<<<<<<<<<<<<< + * + * effective_words[0] += 1 + */ + __pyx_t_10 = __pyx_v_word.point; + (__pyx_v_points[(__pyx_v_effective_words[0])]) = __pyx_t_10; + + /* "gensim/models/fasttext_corpusfile.pyx":72 + * subwords_idx[effective_words[0]] = word.subword_idx + * + * if hs: # <<<<<<<<<<<<<< + * codelens[effective_words[0]] = word.code_len + * codes[effective_words[0]] = word.code + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":77 + * points[effective_words[0]] = word.point + * + * effective_words[0] += 1 # <<<<<<<<<<<<<< + * if effective_words[0] == MAX_SENTENCE_LEN: + * break + */ + __pyx_t_4 = 0; + (__pyx_v_effective_words[__pyx_t_4]) = ((__pyx_v_effective_words[__pyx_t_4]) + 1); + + /* "gensim/models/fasttext_corpusfile.pyx":78 + * + * effective_words[0] += 1 + * if effective_words[0] == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< + * break + * + */ + __pyx_t_3 = (((__pyx_v_effective_words[0]) == 0x2710) != 0); + if (__pyx_t_3) { + + /* "gensim/models/fasttext_corpusfile.pyx":79 + * effective_words[0] += 1 + * if effective_words[0] == MAX_SENTENCE_LEN: + * break # <<<<<<<<<<<<<< + * + * # keep track of which words go into which sentence, so we don't train + */ + goto __pyx_L7_break; + + /* "gensim/models/fasttext_corpusfile.pyx":78 + * + * effective_words[0] += 1 + * if effective_words[0] == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< + * break + * + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":60 + * total_words[0] += sent.size() + * + * for token in sent: # <<<<<<<<<<<<<< + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): + */ + __pyx_L6_continue:; + } + __pyx_L7_break:; + + /* "gensim/models/fasttext_corpusfile.pyx":84 + * # across sentence boundaries. 
+ * # indices of sentence number X are between 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_epoch_sg") < 0)) __PYX_ERR(0, 95, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 9) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + } + __pyx_v_model = values[0]; + __pyx_v_corpus_file = values[1]; + __pyx_v_offset = values[2]; + __pyx_v__cython_vocab = values[3]; + __pyx_v__cur_epoch = values[4]; + __pyx_v__expected_examples = values[5]; + __pyx_v__expected_words = values[6]; + __pyx_v__work = values[7]; + __pyx_v__l1 = values[8]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 9, 9, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 95, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.fasttext_corpusfile.train_epoch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_19fasttext_corpusfile_train_epoch_sg(__pyx_self, __pyx_v_model, __pyx_v_corpus_file, __pyx_v_offset, __pyx_v__cython_vocab, __pyx_v__cur_epoch, __pyx_v__expected_examples, __pyx_v__expected_words, __pyx_v__work, __pyx_v__l1); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19fasttext_corpusfile_train_epoch_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v__work, PyObject *__pyx_v__l1) { + struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig __pyx_v_c; + int __pyx_v_cur_epoch; + int __pyx_v_num_epochs; + int __pyx_v_expected_examples; + int __pyx_v_expected_words; + __pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t __pyx_v_start_alpha; + __pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t __pyx_v_end_alpha; + __pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t __pyx_v__alpha; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_input_stream = 0; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_vocab = 0; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_effective_words; + int __pyx_v_effective_sentences; + int __pyx_v_total_effective_words; + int __pyx_v_total_sentences; + int __pyx_v_total_words; + int __pyx_v_sent_idx; + int __pyx_v_idx_start; + int __pyx_v_idx_end; + std::vector > __pyx_v_sentences; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + __pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t __pyx_t_5; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + std::vector > __pyx_t_9; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_t_10; + int __pyx_t_11; + int __pyx_t_12; + 
int __pyx_t_13; + int __pyx_t_14; + int __pyx_t_15; + int __pyx_t_16; + int __pyx_t_17; + PyObject *__pyx_t_18 = NULL; + PyObject *__pyx_t_19 = NULL; + __Pyx_RefNannySetupContext("train_epoch_sg", 0); + + /* "gensim/models/fasttext_corpusfile.pyx":123 + * + * # For learning rate updates + * cdef int cur_epoch = _cur_epoch # <<<<<<<<<<<<<< + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v__cur_epoch); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 123, __pyx_L1_error) + __pyx_v_cur_epoch = __pyx_t_1; + + /* "gensim/models/fasttext_corpusfile.pyx":124 + * # For learning rate updates + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs # <<<<<<<<<<<<<< + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_epochs); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 124, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 124, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_num_epochs = __pyx_t_1; + + /* "gensim/models/fasttext_corpusfile.pyx":125 + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) # <<<<<<<<<<<<<< + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + */ + __pyx_t_3 = (__pyx_v__expected_examples == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_examples); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 125, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_examples = __pyx_t_1; + + /* "gensim/models/fasttext_corpusfile.pyx":126 + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) # <<<<<<<<<<<<<< + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + */ + __pyx_t_3 = (__pyx_v__expected_words == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_words); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 126, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_words = __pyx_t_1; + + /* "gensim/models/fasttext_corpusfile.pyx":127 + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha # <<<<<<<<<<<<<< + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + 
__pyx_v_start_alpha = __pyx_t_5; + + /* "gensim/models/fasttext_corpusfile.pyx":128 + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha # <<<<<<<<<<<<<< + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_min_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 128, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 128, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_end_alpha = __pyx_t_5; + + /* "gensim/models/fasttext_corpusfile.pyx":129 + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) # <<<<<<<<<<<<<< + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_6 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v__alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_6, __pyx_v_end_alpha, __pyx_v_cur_epoch, __pyx_v_num_epochs); + + /* "gensim/models/fasttext_corpusfile.pyx":131 + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) # <<<<<<<<<<<<<< + * cdef CythonVocab vocab = _cython_vocab + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_corpus_file); + __Pyx_GIVEREF(__pyx_v_corpus_file); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_corpus_file); + __Pyx_INCREF(__pyx_v_offset); + __Pyx_GIVEREF(__pyx_v_offset); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_offset); + __pyx_t_7 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), __pyx_t_2, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_input_stream = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":132 + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + * cdef CythonVocab vocab = _cython_vocab # <<<<<<<<<<<<<< + * + * cdef int i, j, k + */ + if (!(likely(((__pyx_v__cython_vocab) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__cython_vocab, __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab))))) __PYX_ERR(0, 132, __pyx_L1_error) + __pyx_t_7 = __pyx_v__cython_vocab; + __Pyx_INCREF(__pyx_t_7); + __pyx_v_vocab = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":135 + * + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< + * cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 + * cdef int sent_idx, idx_start, idx_end + */ + __pyx_v_effective_words = 0; + 
__pyx_v_effective_sentences = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":136 + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 + * cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * + */ + __pyx_v_total_effective_words = 0; + __pyx_v_total_sentences = 0; + __pyx_v_total_words = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":139 + * cdef int sent_idx, idx_start, idx_end + * + * init_ft_config(&c, model, _alpha, _work, _l1) # <<<<<<<<<<<<<< + * + * # for preparing batches & training + */ + __pyx_t_7 = PyFloat_FromDouble(__pyx_v__alpha); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_2 = __pyx_f_6gensim_6models_14fasttext_inner_init_ft_config((&__pyx_v_c), __pyx_v_model, __pyx_t_7, __pyx_v__work, __pyx_v__l1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":144 + * cdef vector[vector[string]] sentences + * + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + { + #ifdef WITH_THREAD + PyThreadState *_save; + Py_UNBLOCK_THREADS + __Pyx_FastGIL_Remember(); + #endif + /*try:*/ { + + /* "gensim/models/fasttext_corpusfile.pyx":145 + * + * with nogil: + * input_stream.reset() # <<<<<<<<<<<<<< + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 + */ + ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->reset(__pyx_v_input_stream, 0); + + /* "gensim/models/fasttext_corpusfile.pyx":146 + * with nogil: + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): # <<<<<<<<<<<<<< + * effective_sentences = 0 + * effective_words = 0 + */ + while (1) { + __pyx_t_8 = (((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->is_eof(__pyx_v_input_stream, 0) != 0); + if (!__pyx_t_8) { + } else { + __pyx_t_3 = __pyx_t_8; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_8 = ((__pyx_v_total_words > (__pyx_v_expected_words / __pyx_v_c.workers)) != 0); + __pyx_t_3 = __pyx_t_8; + __pyx_L8_bool_binop_done:; + __pyx_t_8 = ((!__pyx_t_3) != 0); + if (!__pyx_t_8) break; + + /* "gensim/models/fasttext_corpusfile.pyx":147 + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 # <<<<<<<<<<<<<< + * effective_words = 0 + * + */ + __pyx_v_effective_sentences = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":148 + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 + * effective_words = 0 # <<<<<<<<<<<<<< + * + * sentences = input_stream.next_batch() + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":150 + * effective_words = 0 + * + * sentences = input_stream.next_batch() # <<<<<<<<<<<<<< + * + * prepare_c_structures_for_batch( + */ + __pyx_t_9 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->next_batch(__pyx_v_input_stream, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 150, __pyx_L4_error) + __pyx_v_sentences = 
__pyx_t_9; + + /* "gensim/models/fasttext_corpusfile.pyx":154 + * prepare_c_structures_for_batch( + * sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences, + * &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, c.codelens, # <<<<<<<<<<<<<< + * c.codes, c.points, c.reduced_windows, c.subwords_idx_len, c.subwords_idx) + * + */ + __pyx_t_10 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_v_vocab->__pyx_vtab)->get_vocab_ptr(__pyx_v_vocab); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 154, __pyx_L4_error) + + /* "gensim/models/fasttext_corpusfile.pyx":152 + * sentences = input_stream.next_batch() + * + * prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences, + * &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, c.codelens, + */ + __pyx_f_6gensim_6models_19fasttext_corpusfile_prepare_c_structures_for_batch(__pyx_v_sentences, __pyx_v_c.sample, __pyx_v_c.hs, __pyx_v_c.window, (&__pyx_v_total_words), (&__pyx_v_effective_words), (&__pyx_v_effective_sentences), (&__pyx_v_c.next_random), __pyx_t_10, __pyx_v_c.sentence_idx, __pyx_v_c.indexes, __pyx_v_c.codelens, __pyx_v_c.codes, __pyx_v_c.points, __pyx_v_c.reduced_windows, __pyx_v_c.subwords_idx_len, __pyx_v_c.subwords_idx); + + /* "gensim/models/fasttext_corpusfile.pyx":157 + * c.codes, c.points, c.reduced_windows, c.subwords_idx_len, c.subwords_idx) + * + * for sent_idx in range(effective_sentences): # <<<<<<<<<<<<<< + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] + */ + __pyx_t_1 = __pyx_v_effective_sentences; + __pyx_t_4 = __pyx_t_1; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_4; __pyx_t_11+=1) { + __pyx_v_sent_idx = __pyx_t_11; + + /* "gensim/models/fasttext_corpusfile.pyx":158 + * + * for sent_idx in range(effective_sentences): + * idx_start = c.sentence_idx[sent_idx] # <<<<<<<<<<<<<< + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): + */ + __pyx_v_idx_start = (__pyx_v_c.sentence_idx[__pyx_v_sent_idx]); + + /* "gensim/models/fasttext_corpusfile.pyx":159 + * for sent_idx in range(effective_sentences): + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + */ + __pyx_v_idx_end = (__pyx_v_c.sentence_idx[(__pyx_v_sent_idx + 1)]); + + /* "gensim/models/fasttext_corpusfile.pyx":160 + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: + */ + __pyx_t_12 = __pyx_v_idx_end; + __pyx_t_13 = __pyx_t_12; + for (__pyx_t_14 = __pyx_v_idx_start; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_i = __pyx_t_14; + + /* "gensim/models/fasttext_corpusfile.pyx":161 + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< + * if j < idx_start: + * j = idx_start + */ + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/fasttext_corpusfile.pyx":162 + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: # <<<<<<<<<<<<<< + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + */ + __pyx_t_8 = ((__pyx_v_j < 
__pyx_v_idx_start) != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":163 + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: + * j = idx_start # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: + */ + __pyx_v_j = __pyx_v_idx_start; + + /* "gensim/models/fasttext_corpusfile.pyx":162 + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: # <<<<<<<<<<<<<< + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":164 + * if j < idx_start: + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< + * if k > idx_end: + * k = idx_end + */ + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/fasttext_corpusfile.pyx":165 + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: # <<<<<<<<<<<<<< + * k = idx_end + * for j in range(j, k): + */ + __pyx_t_8 = ((__pyx_v_k > __pyx_v_idx_end) != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":166 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: + * k = idx_end # <<<<<<<<<<<<<< + * for j in range(j, k): + * if j == i: + */ + __pyx_v_k = __pyx_v_idx_end; + + /* "gensim/models/fasttext_corpusfile.pyx":165 + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: # <<<<<<<<<<<<<< + * k = idx_end + * for j in range(j, k): + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":167 + * if k > idx_end: + * k = idx_end + * for j in range(j, k): # <<<<<<<<<<<<<< + * if j == i: + * continue + */ + __pyx_t_15 = __pyx_v_k; + __pyx_t_16 = __pyx_t_15; + for (__pyx_t_17 = __pyx_v_j; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_j = __pyx_t_17; + + /* "gensim/models/fasttext_corpusfile.pyx":168 + * k = idx_end + * for j in range(j, k): + * if j == i: # <<<<<<<<<<<<<< + * continue + * if c.hs: + */ + __pyx_t_8 = ((__pyx_v_j == __pyx_v_i) != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":169 + * for j in range(j, k): + * if j == i: + * continue # <<<<<<<<<<<<<< + * if c.hs: + * fasttext_fast_sentence_sg_hs( + */ + goto __pyx_L16_continue; + + /* "gensim/models/fasttext_corpusfile.pyx":168 + * k = idx_end + * for j in range(j, k): + * if j == i: # <<<<<<<<<<<<<< + * continue + * if c.hs: + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":170 + * if j == i: + * continue + * if c.hs: # <<<<<<<<<<<<<< + * fasttext_fast_sentence_sg_hs( + * c.points[j], c.codes[j], c.codelens[j], c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + */ + __pyx_t_8 = (__pyx_v_c.hs != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":171 + * continue + * if c.hs: + * fasttext_fast_sentence_sg_hs( # <<<<<<<<<<<<<< + * c.points[j], c.codes[j], c.codelens[j], c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + * c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, c.neu1, + */ + __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_hs((__pyx_v_c.points[__pyx_v_j]), (__pyx_v_c.codes[__pyx_v_j]), (__pyx_v_c.codelens[__pyx_v_j]), __pyx_v_c.syn0_vocab, __pyx_v_c.syn0_ngrams, __pyx_v_c.syn1, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.subwords_idx[__pyx_v_i]), (__pyx_v_c.subwords_idx_len[__pyx_v_i]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.neu1, __pyx_v_c.word_locks_vocab, __pyx_v_c.word_locks_ngrams); + + /* 
"gensim/models/fasttext_corpusfile.pyx":170 + * if j == i: + * continue + * if c.hs: # <<<<<<<<<<<<<< + * fasttext_fast_sentence_sg_hs( + * c.points[j], c.codes[j], c.codelens[j], c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":175 + * c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, c.neu1, + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fasttext_fast_sentence_sg_neg( + * c.negative, c.cum_table, c.cum_table_len, c.syn0_vocab, c.syn0_ngrams, c.syn1neg, c.size, + */ + __pyx_t_8 = (__pyx_v_c.negative != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":176 + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: + * c.next_random = fasttext_fast_sentence_sg_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.syn0_vocab, c.syn0_ngrams, c.syn1neg, c.size, + * c.indexes[j], c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, + */ + __pyx_v_c.next_random = __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.syn0_vocab, __pyx_v_c.syn0_ngrams, __pyx_v_c.syn1neg, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_j]), (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.subwords_idx[__pyx_v_i]), (__pyx_v_c.subwords_idx_len[__pyx_v_i]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.neu1, __pyx_v_c.next_random, __pyx_v_c.word_locks_vocab, __pyx_v_c.word_locks_ngrams); + + /* "gensim/models/fasttext_corpusfile.pyx":175 + * c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, c.neu1, + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fasttext_fast_sentence_sg_neg( + * c.negative, c.cum_table, c.cum_table_len, c.syn0_vocab, c.syn0_ngrams, c.syn1neg, c.size, + */ + } + __pyx_L16_continue:; + } + } + } + + /* "gensim/models/fasttext_corpusfile.pyx":181 + * c.neu1, c.next_random, c.word_locks_vocab, c.word_locks_ngrams) + * + * total_sentences += sentences.size() # <<<<<<<<<<<<<< + * total_effective_words += effective_words + * + */ + __pyx_v_total_sentences = (__pyx_v_total_sentences + __pyx_v_sentences.size()); + + /* "gensim/models/fasttext_corpusfile.pyx":182 + * + * total_sentences += sentences.size() + * total_effective_words += effective_words # <<<<<<<<<<<<<< + * + * c.alpha = get_next_alpha(start_alpha, end_alpha, total_sentences, total_words, + */ + __pyx_v_total_effective_words = (__pyx_v_total_effective_words + __pyx_v_effective_words); + + /* "gensim/models/fasttext_corpusfile.pyx":184 + * total_effective_words += effective_words + * + * c.alpha = get_next_alpha(start_alpha, end_alpha, total_sentences, total_words, # <<<<<<<<<<<<<< + * expected_examples, expected_words, cur_epoch, num_epochs) + * + */ + __pyx_v_c.alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_v_start_alpha, __pyx_v_end_alpha, __pyx_v_total_sentences, __pyx_v_total_words, __pyx_v_expected_examples, __pyx_v_expected_words, __pyx_v_cur_epoch, __pyx_v_num_epochs); + } + } + + /* "gensim/models/fasttext_corpusfile.pyx":144 + * cdef vector[vector[string]] sentences + * + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + /*finally:*/ { + /*normal exit:*/{ + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L5; + } + __pyx_L4_error: { + #ifdef 
WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L1_error; + } + __pyx_L5:; + } + } + + /* "gensim/models/fasttext_corpusfile.pyx":187 + * expected_examples, expected_words, cur_epoch, num_epochs) + * + * return total_sentences, total_effective_words, total_words # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_total_sentences); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_total_effective_words); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_18 = __Pyx_PyInt_From_int(__pyx_v_total_words); if (unlikely(!__pyx_t_18)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_18); + __pyx_t_19 = PyTuple_New(3); if (unlikely(!__pyx_t_19)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_19); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_19, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_19, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_18); + PyTuple_SET_ITEM(__pyx_t_19, 2, __pyx_t_18); + __pyx_t_2 = 0; + __pyx_t_7 = 0; + __pyx_t_18 = 0; + __pyx_r = __pyx_t_19; + __pyx_t_19 = 0; + goto __pyx_L0; + + /* "gensim/models/fasttext_corpusfile.pyx":95 + * + * + * def train_epoch_sg( # <<<<<<<<<<<<<< + * model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, _l1): + * """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_18); + __Pyx_XDECREF(__pyx_t_19); + __Pyx_AddTraceback("gensim.models.fasttext_corpusfile.train_epoch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_input_stream); + __Pyx_XDECREF((PyObject *)__pyx_v_vocab); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/fasttext_corpusfile.pyx":190 + * + * + * def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1): + * """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19fasttext_corpusfile_3train_epoch_cbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_19fasttext_corpusfile_2train_epoch_cbow[] = "train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, _neu1)\nTrain CBOW model for one epoch by training on an input stream. This function is used only in multistream mode.\n\n Called internally from :meth:`~gensim.models.fasttext.FastText.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.fasttext.FastText`\n The FastText model instance to train.\n corpus_file : str\n Path to a corpus file.\n _cur_epoch : int\n Current epoch number. 
Used for calculating and decaying learning rate.\n _work : np.ndarray\n Private working memory for each worker.\n _neu1 : np.ndarray\n Private working memory for each worker.\n\n Returns\n -------\n int\n Number of words in the vocabulary actually used for training (They already existed in the vocabulary\n and were not discarded by negative sampling).\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_19fasttext_corpusfile_3train_epoch_cbow = {"train_epoch_cbow", (PyCFunction)__pyx_pw_6gensim_6models_19fasttext_corpusfile_3train_epoch_cbow, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_19fasttext_corpusfile_2train_epoch_cbow}; +static PyObject *__pyx_pw_6gensim_6models_19fasttext_corpusfile_3train_epoch_cbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_corpus_file = 0; + PyObject *__pyx_v_offset = 0; + PyObject *__pyx_v__cython_vocab = 0; + PyObject *__pyx_v__cur_epoch = 0; + PyObject *__pyx_v__expected_examples = 0; + PyObject *__pyx_v__expected_words = 0; + PyObject *__pyx_v__work = 0; + PyObject *__pyx_v__neu1 = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("train_epoch_cbow (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_corpus_file,&__pyx_n_s_offset,&__pyx_n_s_cython_vocab,&__pyx_n_s_cur_epoch,&__pyx_n_s_expected_examples,&__pyx_n_s_expected_words,&__pyx_n_s_work,&__pyx_n_s_neu1,0}; + PyObject* values[9] = {0,0,0,0,0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_corpus_file)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, 1); __PYX_ERR(0, 190, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, 2); __PYX_ERR(0, 190, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cython_vocab)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, 3); __PYX_ERR(0, 190, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cur_epoch)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, 4); 
__PYX_ERR(0, 190, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_examples)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, 5); __PYX_ERR(0, 190, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_words)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, 6); __PYX_ERR(0, 190, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, 7); __PYX_ERR(0, 190, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, 8); __PYX_ERR(0, 190, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_epoch_cbow") < 0)) __PYX_ERR(0, 190, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 9) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + } + __pyx_v_model = values[0]; + __pyx_v_corpus_file = values[1]; + __pyx_v_offset = values[2]; + __pyx_v__cython_vocab = values[3]; + __pyx_v__cur_epoch = values[4]; + __pyx_v__expected_examples = values[5]; + __pyx_v__expected_words = values[6]; + __pyx_v__work = values[7]; + __pyx_v__neu1 = values[8]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 9, 9, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 190, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.fasttext_corpusfile.train_epoch_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_19fasttext_corpusfile_2train_epoch_cbow(__pyx_self, __pyx_v_model, __pyx_v_corpus_file, __pyx_v_offset, __pyx_v__cython_vocab, __pyx_v__cur_epoch, __pyx_v__expected_examples, __pyx_v__expected_words, __pyx_v__work, __pyx_v__neu1); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19fasttext_corpusfile_2train_epoch_cbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v__work, PyObject *__pyx_v__neu1) { + struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig __pyx_v_c; + int __pyx_v_cur_epoch; + int __pyx_v_num_epochs; + int __pyx_v_expected_examples; + int __pyx_v_expected_words; + __pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t __pyx_v_start_alpha; + __pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t __pyx_v_end_alpha; + 
__pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t __pyx_v__alpha; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_input_stream = 0; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_vocab = 0; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_effective_words; + int __pyx_v_effective_sentences; + int __pyx_v_total_effective_words; + int __pyx_v_total_sentences; + int __pyx_v_total_words; + int __pyx_v_sent_idx; + int __pyx_v_idx_start; + int __pyx_v_idx_end; + std::vector > __pyx_v_sentences; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + __pyx_t_6gensim_6models_19fasttext_corpusfile_REAL_t __pyx_t_5; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + std::vector > __pyx_t_9; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_t_10; + int __pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + __Pyx_RefNannySetupContext("train_epoch_cbow", 0); + + /* "gensim/models/fasttext_corpusfile.pyx":218 + * + * # For learning rate updates + * cdef int cur_epoch = _cur_epoch # <<<<<<<<<<<<<< + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v__cur_epoch); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 218, __pyx_L1_error) + __pyx_v_cur_epoch = __pyx_t_1; + + /* "gensim/models/fasttext_corpusfile.pyx":219 + * # For learning rate updates + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs # <<<<<<<<<<<<<< + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_epochs); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 219, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 219, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_num_epochs = __pyx_t_1; + + /* "gensim/models/fasttext_corpusfile.pyx":220 + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) # <<<<<<<<<<<<<< + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + */ + __pyx_t_3 = (__pyx_v__expected_examples == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_examples); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 220, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_examples = __pyx_t_1; + + /* "gensim/models/fasttext_corpusfile.pyx":221 + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) # <<<<<<<<<<<<<< + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + */ + __pyx_t_3 = (__pyx_v__expected_words == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + 
__pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_words); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 221, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_words = __pyx_t_1; + + /* "gensim/models/fasttext_corpusfile.pyx":222 + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha # <<<<<<<<<<<<<< + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 222, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 222, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_start_alpha = __pyx_t_5; + + /* "gensim/models/fasttext_corpusfile.pyx":223 + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha # <<<<<<<<<<<<<< + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_min_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 223, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 223, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_end_alpha = __pyx_t_5; + + /* "gensim/models/fasttext_corpusfile.pyx":224 + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) # <<<<<<<<<<<<<< + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 224, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_6 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 224, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v__alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_6, __pyx_v_end_alpha, __pyx_v_cur_epoch, __pyx_v_num_epochs); + + /* "gensim/models/fasttext_corpusfile.pyx":226 + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) # <<<<<<<<<<<<<< + * cdef CythonVocab vocab = _cython_vocab + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_corpus_file); + __Pyx_GIVEREF(__pyx_v_corpus_file); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_corpus_file); + __Pyx_INCREF(__pyx_v_offset); + __Pyx_GIVEREF(__pyx_v_offset); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_offset); + __pyx_t_7 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), __pyx_t_2, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_input_stream = ((struct 
__pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":227 + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + * cdef CythonVocab vocab = _cython_vocab # <<<<<<<<<<<<<< + * + * cdef int i, j, k + */ + if (!(likely(((__pyx_v__cython_vocab) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__cython_vocab, __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab))))) __PYX_ERR(0, 227, __pyx_L1_error) + __pyx_t_7 = __pyx_v__cython_vocab; + __Pyx_INCREF(__pyx_t_7); + __pyx_v_vocab = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":230 + * + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< + * cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 + * cdef int sent_idx, idx_start, idx_end + */ + __pyx_v_effective_words = 0; + __pyx_v_effective_sentences = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":231 + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 + * cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * + */ + __pyx_v_total_effective_words = 0; + __pyx_v_total_sentences = 0; + __pyx_v_total_words = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":234 + * cdef int sent_idx, idx_start, idx_end + * + * init_ft_config(&c, model, _alpha, _work, _neu1) # <<<<<<<<<<<<<< + * + * # for preparing batches & training + */ + __pyx_t_7 = PyFloat_FromDouble(__pyx_v__alpha); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_2 = __pyx_f_6gensim_6models_14fasttext_inner_init_ft_config((&__pyx_v_c), __pyx_v_model, __pyx_t_7, __pyx_v__work, __pyx_v__neu1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":239 + * cdef vector[vector[string]] sentences + * + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + { + #ifdef WITH_THREAD + PyThreadState *_save; + Py_UNBLOCK_THREADS + __Pyx_FastGIL_Remember(); + #endif + /*try:*/ { + + /* "gensim/models/fasttext_corpusfile.pyx":240 + * + * with nogil: + * input_stream.reset() # <<<<<<<<<<<<<< + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 + */ + ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->reset(__pyx_v_input_stream, 0); + + /* "gensim/models/fasttext_corpusfile.pyx":241 + * with nogil: + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): # <<<<<<<<<<<<<< + * effective_sentences = 0 + * effective_words = 0 + */ + while (1) { + __pyx_t_8 = (((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->is_eof(__pyx_v_input_stream, 0) != 0); + if (!__pyx_t_8) { + } else { + __pyx_t_3 = __pyx_t_8; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_8 = ((__pyx_v_total_words > (__pyx_v_expected_words / __pyx_v_c.workers)) != 0); + __pyx_t_3 = __pyx_t_8; + __pyx_L8_bool_binop_done:; + __pyx_t_8 = ((!__pyx_t_3) != 0); + if 
(!__pyx_t_8) break; + + /* "gensim/models/fasttext_corpusfile.pyx":242 + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 # <<<<<<<<<<<<<< + * effective_words = 0 + * + */ + __pyx_v_effective_sentences = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":243 + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 + * effective_words = 0 # <<<<<<<<<<<<<< + * + * sentences = input_stream.next_batch() + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":245 + * effective_words = 0 + * + * sentences = input_stream.next_batch() # <<<<<<<<<<<<<< + * + * prepare_c_structures_for_batch( + */ + __pyx_t_9 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_input_stream->__pyx_vtab)->next_batch(__pyx_v_input_stream, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 245, __pyx_L4_error) + __pyx_v_sentences = __pyx_t_9; + + /* "gensim/models/fasttext_corpusfile.pyx":249 + * prepare_c_structures_for_batch( + * sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences, + * &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, c.codelens, # <<<<<<<<<<<<<< + * c.codes, c.points, c.reduced_windows, c.subwords_idx_len, c.subwords_idx) + * + */ + __pyx_t_10 = ((struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_v_vocab->__pyx_vtab)->get_vocab_ptr(__pyx_v_vocab); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(0, 249, __pyx_L4_error) + + /* "gensim/models/fasttext_corpusfile.pyx":247 + * sentences = input_stream.next_batch() + * + * prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences, + * &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, c.codelens, + */ + __pyx_f_6gensim_6models_19fasttext_corpusfile_prepare_c_structures_for_batch(__pyx_v_sentences, __pyx_v_c.sample, __pyx_v_c.hs, __pyx_v_c.window, (&__pyx_v_total_words), (&__pyx_v_effective_words), (&__pyx_v_effective_sentences), (&__pyx_v_c.next_random), __pyx_t_10, __pyx_v_c.sentence_idx, __pyx_v_c.indexes, __pyx_v_c.codelens, __pyx_v_c.codes, __pyx_v_c.points, __pyx_v_c.reduced_windows, __pyx_v_c.subwords_idx_len, __pyx_v_c.subwords_idx); + + /* "gensim/models/fasttext_corpusfile.pyx":252 + * c.codes, c.points, c.reduced_windows, c.subwords_idx_len, c.subwords_idx) + * + * for sent_idx in range(effective_sentences): # <<<<<<<<<<<<<< + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] + */ + __pyx_t_1 = __pyx_v_effective_sentences; + __pyx_t_4 = __pyx_t_1; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_4; __pyx_t_11+=1) { + __pyx_v_sent_idx = __pyx_t_11; + + /* "gensim/models/fasttext_corpusfile.pyx":253 + * + * for sent_idx in range(effective_sentences): + * idx_start = c.sentence_idx[sent_idx] # <<<<<<<<<<<<<< + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): + */ + __pyx_v_idx_start = (__pyx_v_c.sentence_idx[__pyx_v_sent_idx]); + + /* "gensim/models/fasttext_corpusfile.pyx":254 + * for sent_idx in range(effective_sentences): + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + */ + __pyx_v_idx_end = (__pyx_v_c.sentence_idx[(__pyx_v_sent_idx + 1)]); + + /* 
"gensim/models/fasttext_corpusfile.pyx":255 + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: + */ + __pyx_t_12 = __pyx_v_idx_end; + __pyx_t_13 = __pyx_t_12; + for (__pyx_t_14 = __pyx_v_idx_start; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_i = __pyx_t_14; + + /* "gensim/models/fasttext_corpusfile.pyx":256 + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< + * if j < idx_start: + * j = idx_start + */ + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/fasttext_corpusfile.pyx":257 + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: # <<<<<<<<<<<<<< + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + */ + __pyx_t_8 = ((__pyx_v_j < __pyx_v_idx_start) != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":258 + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: + * j = idx_start # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: + */ + __pyx_v_j = __pyx_v_idx_start; + + /* "gensim/models/fasttext_corpusfile.pyx":257 + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: # <<<<<<<<<<<<<< + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":259 + * if j < idx_start: + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< + * if k > idx_end: + * k = idx_end + */ + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/fasttext_corpusfile.pyx":260 + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: # <<<<<<<<<<<<<< + * k = idx_end + * + */ + __pyx_t_8 = ((__pyx_v_k > __pyx_v_idx_end) != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":261 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: + * k = idx_end # <<<<<<<<<<<<<< + * + * if c.hs: + */ + __pyx_v_k = __pyx_v_idx_end; + + /* "gensim/models/fasttext_corpusfile.pyx":260 + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: # <<<<<<<<<<<<<< + * k = idx_end + * + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":263 + * k = idx_end + * + * if c.hs: # <<<<<<<<<<<<<< + * fasttext_fast_sentence_cbow_hs( + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + */ + __pyx_t_8 = (__pyx_v_c.hs != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":264 + * + * if c.hs: + * fasttext_fast_sentence_cbow_hs( # <<<<<<<<<<<<<< + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + * c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, c.cbow_mean, + */ + __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0_vocab, __pyx_v_c.syn0_ngrams, __pyx_v_c.syn1, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.subwords_idx, __pyx_v_c.subwords_idx_len, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean, __pyx_v_c.word_locks_vocab, 
__pyx_v_c.word_locks_ngrams); + + /* "gensim/models/fasttext_corpusfile.pyx":263 + * k = idx_end + * + * if c.hs: # <<<<<<<<<<<<<< + * fasttext_fast_sentence_cbow_hs( + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + */ + } + + /* "gensim/models/fasttext_corpusfile.pyx":268 + * c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, c.cbow_mean, + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fasttext_fast_sentence_cbow_neg( + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, + */ + __pyx_t_8 = (__pyx_v_c.negative != 0); + if (__pyx_t_8) { + + /* "gensim/models/fasttext_corpusfile.pyx":269 + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: + * c.next_random = fasttext_fast_sentence_cbow_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, + * c.syn1neg, c.size, c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, + */ + __pyx_v_c.next_random = __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0_vocab, __pyx_v_c.syn0_ngrams, __pyx_v_c.syn1neg, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.subwords_idx, __pyx_v_c.subwords_idx_len, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean, __pyx_v_c.next_random, __pyx_v_c.word_locks_vocab, __pyx_v_c.word_locks_ngrams); + + /* "gensim/models/fasttext_corpusfile.pyx":268 + * c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, c.cbow_mean, + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fasttext_fast_sentence_cbow_neg( + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, + */ + } + } + } + + /* "gensim/models/fasttext_corpusfile.pyx":274 + * c.cbow_mean, c.next_random, c.word_locks_vocab, c.word_locks_ngrams) + * + * total_sentences += sentences.size() # <<<<<<<<<<<<<< + * total_effective_words += effective_words + * + */ + __pyx_v_total_sentences = (__pyx_v_total_sentences + __pyx_v_sentences.size()); + + /* "gensim/models/fasttext_corpusfile.pyx":275 + * + * total_sentences += sentences.size() + * total_effective_words += effective_words # <<<<<<<<<<<<<< + * + * c.alpha = get_next_alpha(start_alpha, end_alpha, total_sentences, total_words, + */ + __pyx_v_total_effective_words = (__pyx_v_total_effective_words + __pyx_v_effective_words); + + /* "gensim/models/fasttext_corpusfile.pyx":277 + * total_effective_words += effective_words + * + * c.alpha = get_next_alpha(start_alpha, end_alpha, total_sentences, total_words, # <<<<<<<<<<<<<< + * expected_examples, expected_words, cur_epoch, num_epochs) + * + */ + __pyx_v_c.alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_v_start_alpha, __pyx_v_end_alpha, __pyx_v_total_sentences, __pyx_v_total_words, __pyx_v_expected_examples, __pyx_v_expected_words, __pyx_v_cur_epoch, __pyx_v_num_epochs); + } + } + + /* "gensim/models/fasttext_corpusfile.pyx":239 + * cdef vector[vector[string]] sentences + * + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + /*finally:*/ { + /*normal exit:*/{ + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto 
__pyx_L5; + } + __pyx_L4_error: { + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L1_error; + } + __pyx_L5:; + } + } + + /* "gensim/models/fasttext_corpusfile.pyx":280 + * expected_examples, expected_words, cur_epoch, num_epochs) + * + * return total_sentences, total_effective_words, total_words # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_total_sentences); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 280, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_total_effective_words); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 280, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_15 = __Pyx_PyInt_From_int(__pyx_v_total_words); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 280, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_16 = PyTuple_New(3); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 280, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_16, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_15); + PyTuple_SET_ITEM(__pyx_t_16, 2, __pyx_t_15); + __pyx_t_2 = 0; + __pyx_t_7 = 0; + __pyx_t_15 = 0; + __pyx_r = __pyx_t_16; + __pyx_t_16 = 0; + goto __pyx_L0; + + /* "gensim/models/fasttext_corpusfile.pyx":190 + * + * + * def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1): + * """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_15); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_AddTraceback("gensim.models.fasttext_corpusfile.train_epoch_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_input_stream); + __Pyx_XDECREF((PyObject *)__pyx_v_vocab); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fulfill the PEP. 
+ */ + +/* Python wrapper */ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); + __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_v_i; + int __pyx_v_ndim; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + int __pyx_v_t; + char *__pyx_v_f; + PyArray_Descr *__pyx_v_descr = 0; + int __pyx_v_offset; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + char *__pyx_t_8; + if (__pyx_v_info == NULL) { + PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete"); + return -1; + } + __Pyx_RefNannySetupContext("__getbuffer__", 0); + __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(__pyx_v_info->obj); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + * + * cdef int i, ndim + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + */ + __pyx_v_endian_detector = 1; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + * cdef int i, ndim + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * + * ndim = PyArray_NDIM(self) + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + */ + __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + __pyx_t_2 = (((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not C contiguous") + * + */ + __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_C_CONTIGUOUS) != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, 
NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 229, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + __pyx_t_2 = (((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L7_bool_binop_done; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not Fortran contiguous") + * + */ + __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_F_CONTIGUOUS) != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L7_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 233, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + * raise ValueError(u"ndarray is not Fortran contiguous") + 
* + * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + * + * info.buf = PyArray_DATA(self) + * info.ndim = ndim # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * # Allocate new buffer for strides and shape info. + */ + __pyx_v_info->ndim = __pyx_v_ndim; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< + * info.shape = info.strides + ndim + * for i in range(ndim): + */ + __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + * # This is allocated as one block, strides first. + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) + * info.shape = info.strides + ndim # <<<<<<<<<<<<<< + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + */ + __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) + * info.shape = info.strides + ndim + * for i in range(ndim): # <<<<<<<<<<<<<< + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] + */ + __pyx_t_4 = __pyx_v_ndim; + __pyx_t_5 = __pyx_t_4; + for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { + __pyx_v_i = __pyx_t_6; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + * info.shape = info.strides + ndim + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + */ + (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< + * else: + * info.strides = PyArray_STRIDES(self) + */ + (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. 
+ */ + goto __pyx_L9; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + */ + /*else*/ { + __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + * else: + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + */ + __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self)); + } + __pyx_L9:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL # <<<<<<<<<<<<<< + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) + */ + __pyx_v_info->suboffsets = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< + * info.readonly = not PyArray_ISWRITEABLE(self) + * + */ + __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< + * + * cdef int t + */ + __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + * + * cdef int t + * cdef char* f = NULL # <<<<<<<<<<<<<< + * cdef dtype descr = self.descr + * cdef int offset + */ + __pyx_v_f = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + * cdef int t + * cdef char* f = NULL + * cdef dtype descr = self.descr # <<<<<<<<<<<<<< + * cdef int offset + * + */ + __pyx_t_3 = ((PyObject *)__pyx_v_self->descr); + __Pyx_INCREF(__pyx_t_3); + __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + * cdef int offset + * + * info.obj = self # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(descr): + */ + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = ((PyObject *)__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + * info.obj = self + * + * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + * + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num # <<<<<<<<<<<<<< + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + */ + __pyx_t_4 = __pyx_v_descr->type_num; + __pyx_v_t = __pyx_t_4; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num 
+ * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_2 = ((__pyx_v_descr->byteorder == '>') != 0); + if (!__pyx_t_2) { + goto __pyx_L15_next_or; + } else { + } + __pyx_t_2 = (__pyx_v_little_endian != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L14_bool_binop_done; + } + __pyx_L15_next_or:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + */ + __pyx_t_2 = ((__pyx_v_descr->byteorder == '<') != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L14_bool_binop_done; + } + __pyx_t_2 = ((!(__pyx_v_little_endian != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L14_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 263, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + */ + switch (__pyx_v_t) { + case NPY_BYTE: + __pyx_v_f = ((char *)"b"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + */ + case NPY_UBYTE: + __pyx_v_f = ((char *)"B"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + */ + case NPY_SHORT: + __pyx_v_f = ((char *)"h"); + break; + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + */ + case NPY_USHORT: + __pyx_v_f = ((char *)"H"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + */ + case NPY_INT: + __pyx_v_f = ((char *)"i"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + */ + case NPY_UINT: + __pyx_v_f = ((char *)"I"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + */ + case NPY_LONG: + __pyx_v_f = ((char *)"l"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + */ + case NPY_ULONG: + __pyx_v_f = ((char *)"L"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + */ + case NPY_LONGLONG: + __pyx_v_f = ((char *)"q"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + */ + case NPY_ULONGLONG: + __pyx_v_f = ((char *)"Q"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + */ + case NPY_FLOAT: + __pyx_v_f = ((char *)"f"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + */ + case NPY_DOUBLE: + __pyx_v_f = ((char *)"d"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + */ + case NPY_LONGDOUBLE: + __pyx_v_f = ((char *)"g"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == 
NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + */ + case NPY_CFLOAT: + __pyx_v_f = ((char *)"Zf"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" + */ + case NPY_CDOUBLE: + __pyx_v_f = ((char *)"Zd"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f = "O" + * else: + */ + case NPY_CLONGDOUBLE: + __pyx_v_f = ((char *)"Zg"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + case NPY_OBJECT: + __pyx_v_f = ((char *)"O"); + break; + default: + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + * elif t == NPY_OBJECT: f = "O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * info.format = f + * return + */ + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_7); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 282, __pyx_L1_error) + break; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f # <<<<<<<<<<<<<< + * return + * else: + */ + __pyx_v_info->format = __pyx_v_f; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f + * return # <<<<<<<<<<<<<< + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + * info.obj = self + * + * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + * return + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + */ + /*else*/ { + __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual 
alignment # <<<<<<<<<<<<<< + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, + */ + (__pyx_v_info->format[0]) = '^'; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + * info.format = PyObject_Malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 # <<<<<<<<<<<<<< + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + */ + __pyx_v_offset = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< + * info.format + _buffer_format_string_len, + * &offset) + */ + __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) + __pyx_v_f = __pyx_t_8; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + * info.format + _buffer_format_string_len, + * &offset) + * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + */ + (__pyx_v_f[0]) = '\x00'; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fulfill the PEP. + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (__pyx_v_info->obj != NULL) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0; + } + goto __pyx_L2; + __pyx_L0:; + if (__pyx_v_info->obj == Py_None) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0; + } + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_descr); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + */ + +/* Python wrapper */ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); + __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__releasebuffer__", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + * + * def 
__releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * PyObject_Free(info.strides) + */ + PyObject_Free(__pyx_v_info->format); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * PyObject_Free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * PyObject_Free(info.strides) # <<<<<<<<<<<<<< + * # info.shape was stored after info.strides in the same block + * + */ + PyObject_Free(__pyx_v_info->strides); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * PyObject_Free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + * + * cdef inline object PyArray_MultiIterNew1(a): + * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew2(a, b): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 776, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + + /* function exit code */ + __pyx_L1_error:; + 
__Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + * + * cdef inline object PyArray_MultiIterNew2(a, b): + * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 779, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 782, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + * return PyArray_MultiIterNew(3, a, b, c) 
+ * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 785, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 788, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< + * if 
PyDataType_HASSUBARRAY(d): + * return d.subarray.shape + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< + * return d.subarray.shape + * else: + */ + __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape # <<<<<<<<<<<<<< + * else: + * return () + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape)); + __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< + * return d.subarray.shape + * else: + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + * return d.subarray.shape + * else: + * return () # <<<<<<<<<<<<<< + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_empty_tuple); + __pyx_r = __pyx_empty_tuple; + goto __pyx_L0; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + * return () + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. 
+ */ + +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) { + PyArray_Descr *__pyx_v_child = 0; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + PyObject *__pyx_v_fields = 0; + PyObject *__pyx_v_childname = NULL; + PyObject *__pyx_v_new_offset = NULL; + PyObject *__pyx_v_t = NULL; + char *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + long __pyx_t_8; + char *__pyx_t_9; + __Pyx_RefNannySetupContext("_util_dtypestring", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + * + * cdef dtype child + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * cdef tuple fields + */ + __pyx_v_endian_detector = 1; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + * cdef dtype child + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * cdef tuple fields + * + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + if (unlikely(__pyx_v_descr->names == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(1, 805, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_descr->names; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(1, 805, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 805, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + * + * for childname in descr.names: + * fields = descr.fields[childname] # <<<<<<<<<<<<<< + * child, new_offset = fields + * + */ + if (unlikely(__pyx_v_descr->fields == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 806, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_PyDict_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 806, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(1, 806, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + * for childname in descr.names: + * fields = descr.fields[childname] + * child, new_offset = fields # <<<<<<<<<<<<<< + * + * if (end - f) - (new_offset - offset[0]) < 15: + */ + if (likely(__pyx_v_fields != Py_None)) { + PyObject* sequence = __pyx_v_fields; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if 
(unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(1, 807, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 807, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 807, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(1, 807, __pyx_L1_error) + } + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) __PYX_ERR(1, 807, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_child, ((PyArray_Descr *)__pyx_t_3)); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); + __pyx_t_4 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + __pyx_t_4 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 809, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 809, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 809, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 810, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 810, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_7 = ((__pyx_v_child->byteorder == '>') != 0); + if (!__pyx_t_7) { + goto __pyx_L8_next_or; + } else { + } + __pyx_t_7 = (__pyx_v_little_endian != 0); + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L7_bool_binop_done; + } + __pyx_L8_next_or:; + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + * + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * # One could encode it in the format string and have Cython + */ + __pyx_t_7 = ((__pyx_v_child->byteorder == '<') != 0); + if (__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_7 = ((!(__pyx_v_little_endian != 0)) != 0); + __pyx_t_6 = __pyx_t_7; + __pyx_L7_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 814, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 814, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + * + * # Output padding bytes + * while offset[0] < new_offset: # <<<<<<<<<<<<<< + * f[0] = 120 # "x"; pad byte + * f += 1 + */ + while (1) { + __pyx_t_3 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 824, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_3, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 824, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 824, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!__pyx_t_6) break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + * # Output padding bytes + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< + * f += 1 + * offset[0] += 1 + */ + (__pyx_v_f[0]) = 0x78; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte + * f += 1 # <<<<<<<<<<<<<< + * offset[0] += 1 + * + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + * f[0] = 120 # "x"; pad byte + * f += 1 + * offset[0] += 1 # <<<<<<<<<<<<<< + * + * offset[0] += child.itemsize + */ + __pyx_t_8 = 0; + 
(__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + * offset[0] += 1 + * + * offset[0] += child.itemsize # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(child): + */ + __pyx_t_8 = 0; + (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); + if (__pyx_t_6) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + * + * if not PyDataType_HASFIELDS(child): + * t = child.type_num # <<<<<<<<<<<<<< + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") + */ + __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_child->type_num); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 832, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); + __pyx_t_4 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 834, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 834, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + * + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_BYTE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 837, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 837, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 837, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 98; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + */ + __pyx_t_3 = 
__Pyx_PyInt_From_enum__NPY_TYPES(NPY_UBYTE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 838, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 838, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 838, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 66; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_SHORT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 839, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 839, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 839, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x68; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_USHORT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 840, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 840, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 840, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 72; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_INT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 841, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 841, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 841, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x69; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UINT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 
842, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 842, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 842, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 73; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 843, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 843, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 843, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x6C; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 844, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 844, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 844, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 76; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGLONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 845, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 845, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 845, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x71; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONGLONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 846, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); 
+ __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 846, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 846, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 81; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_FLOAT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 847, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 847, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 847, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x66; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_DOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 848, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 848, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 848, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x64; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 849, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 849, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 849, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x67; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + */ + __pyx_t_3 = 
__Pyx_PyInt_From_enum__NPY_TYPES(NPY_CFLOAT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 850, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 850, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 850, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x66; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 851, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 851, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 851, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x64; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CLONGDOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 852, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 852, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 852, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x67; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_OBJECT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (likely(__pyx_t_6)) { + (__pyx_v_f[0]) = 79; + goto __pyx_L15; + } + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * f += 1 + * else: + */ + /*else*/ { + __pyx_t_3 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 855, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 855, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 855, __pyx_L1_error) + } + __pyx_L15:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * f += 1 # <<<<<<<<<<<<<< + * else: + * # Cython ignores struct boundary information ("T{...}"), + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + goto __pyx_L13; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + * # Cython ignores struct boundary information ("T{...}"), + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< + * return f + * + */ + /*else*/ { + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 860, __pyx_L1_error) + __pyx_v_f = __pyx_t_9; + } + __pyx_L13:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) + * return f # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + * return () + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. 
+ */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_child); + __Pyx_XDECREF(__pyx_v_fields); + __Pyx_XDECREF(__pyx_v_childname); + __Pyx_XDECREF(__pyx_v_new_offset); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + +static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) { + PyObject *__pyx_v_baseptr; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + __Pyx_RefNannySetupContext("set_array_base", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + __pyx_t_1 = (__pyx_v_base == Py_None); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + * cdef PyObject* baseptr + * if base is None: + * baseptr = NULL # <<<<<<<<<<<<<< + * else: + * Py_INCREF(base) # important to do this before decref below! + */ + __pyx_v_baseptr = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + goto __pyx_L3; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + * baseptr = NULL + * else: + * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< + * baseptr = base + * Py_XDECREF(arr.base) + */ + /*else*/ { + Py_INCREF(__pyx_v_base); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + * else: + * Py_INCREF(base) # important to do this before decref below! + * baseptr = base # <<<<<<<<<<<<<< + * Py_XDECREF(arr.base) + * arr.base = baseptr + */ + __pyx_v_baseptr = ((PyObject *)__pyx_v_base); + } + __pyx_L3:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + * Py_INCREF(base) # important to do this before decref below! 
+ * baseptr = base + * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< + * arr.base = baseptr + * + */ + Py_XDECREF(__pyx_v_arr->base); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + * baseptr = base + * Py_XDECREF(arr.base) + * arr.base = baseptr # <<<<<<<<<<<<<< + * + * cdef inline object get_array_base(ndarray arr): + */ + __pyx_v_arr->base = __pyx_v_baseptr; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("get_array_base", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: + * return None # <<<<<<<<<<<<<< + * else: + * return arr.base + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + * return None + * else: + * return arr.base # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_arr->base)); + __pyx_r = ((PyObject *)__pyx_v_arr->base); + goto __pyx_L0; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + * # Versions of the import_* functions which are more suitable for + * # Cython code. + * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< + * try: + * _import_array() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_array", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. 
+ * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + * cdef inline int import_array() except -1: + * try: + * _import_array() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") + */ + __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. + * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + * try: + * _import_array() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.multiarray failed to import") + * + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 999, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + * _import_array() + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_umath() except -1: + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1000, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1000, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. + * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + * # Versions of the import_* functions which are more suitable for + * # Cython code. 
+ * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< + * try: + * _import_array() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + * raise ImportError("numpy.core.multiarray failed to import") + * + * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_umath", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + * cdef inline int import_umath() except -1: + * try: + * _import_umath() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + * try: + * _import_umath() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.umath failed to import") + * + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1005, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_ufunc() except -1: + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1006, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1006, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + * raise ImportError("numpy.core.multiarray failed to import") + * + * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_ufunc", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + * cdef inline int import_ufunc() except -1: + * try: + * _import_umath() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + * try: + * _import_umath() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1011, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + * 
_import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1012, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1012, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_fasttext_corpusfile(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_fasttext_corpusfile}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "fasttext_corpusfile", + __pyx_k_Optimized_cython_functions_for_f, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_n_s_CORPUSFILE_VERSION, __pyx_k_CORPUSFILE_VERSION, sizeof(__pyx_k_CORPUSFILE_VERSION), 0, 0, 1, 1}, + {&__pyx_kp_u_Format_string_allocated_too_shor, __pyx_k_Format_string_allocated_too_shor, sizeof(__pyx_k_Format_string_allocated_too_shor), 0, 1, 0, 0}, + {&__pyx_kp_u_Format_string_allocated_too_shor_2, __pyx_k_Format_string_allocated_too_shor_2, sizeof(__pyx_k_Format_string_allocated_too_shor_2), 0, 1, 0, 0}, + {&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1}, + {&__pyx_kp_u_Non_native_byte_order_not_suppor, __pyx_k_Non_native_byte_order_not_suppor, sizeof(__pyx_k_Non_native_byte_order_not_suppor), 0, 1, 0, 0}, + {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1}, + {&__pyx_n_s_alpha, __pyx_k_alpha, sizeof(__pyx_k_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_alpha_2, __pyx_k_alpha_2, sizeof(__pyx_k_alpha_2), 0, 0, 1, 1}, + {&__pyx_n_s_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 0, 1, 
1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_corpus_file, __pyx_k_corpus_file, sizeof(__pyx_k_corpus_file), 0, 0, 1, 1}, + {&__pyx_n_s_cur_epoch, __pyx_k_cur_epoch, sizeof(__pyx_k_cur_epoch), 0, 0, 1, 1}, + {&__pyx_n_s_cur_epoch_2, __pyx_k_cur_epoch_2, sizeof(__pyx_k_cur_epoch_2), 0, 0, 1, 1}, + {&__pyx_n_s_cython_vocab, __pyx_k_cython_vocab, sizeof(__pyx_k_cython_vocab), 0, 0, 1, 1}, + {&__pyx_n_s_effective_sentences, __pyx_k_effective_sentences, sizeof(__pyx_k_effective_sentences), 0, 0, 1, 1}, + {&__pyx_n_s_effective_words, __pyx_k_effective_words, sizeof(__pyx_k_effective_words), 0, 0, 1, 1}, + {&__pyx_n_s_end_alpha, __pyx_k_end_alpha, sizeof(__pyx_k_end_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_epochs, __pyx_k_epochs, sizeof(__pyx_k_epochs), 0, 0, 1, 1}, + {&__pyx_n_s_expected_examples, __pyx_k_expected_examples, sizeof(__pyx_k_expected_examples), 0, 0, 1, 1}, + {&__pyx_n_s_expected_examples_2, __pyx_k_expected_examples_2, sizeof(__pyx_k_expected_examples_2), 0, 0, 1, 1}, + {&__pyx_n_s_expected_words, __pyx_k_expected_words, sizeof(__pyx_k_expected_words), 0, 0, 1, 1}, + {&__pyx_n_s_expected_words_2, __pyx_k_expected_words_2, sizeof(__pyx_k_expected_words_2), 0, 0, 1, 1}, + {&__pyx_kp_s_gensim_models_fasttext_corpusfil, __pyx_k_gensim_models_fasttext_corpusfil, sizeof(__pyx_k_gensim_models_fasttext_corpusfil), 0, 0, 1, 0}, + {&__pyx_n_s_gensim_models_fasttext_corpusfil_2, __pyx_k_gensim_models_fasttext_corpusfil_2, sizeof(__pyx_k_gensim_models_fasttext_corpusfil_2), 0, 0, 1, 1}, + {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, + {&__pyx_n_s_idx_end, __pyx_k_idx_end, sizeof(__pyx_k_idx_end), 0, 0, 1, 1}, + {&__pyx_n_s_idx_start, __pyx_k_idx_start, sizeof(__pyx_k_idx_start), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_input_stream, __pyx_k_input_stream, sizeof(__pyx_k_input_stream), 0, 0, 1, 1}, + {&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1}, + {&__pyx_n_s_k, __pyx_k_k, sizeof(__pyx_k_k), 0, 0, 1, 1}, + {&__pyx_n_s_l1, __pyx_k_l1, sizeof(__pyx_k_l1), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_min_alpha, __pyx_k_min_alpha, sizeof(__pyx_k_min_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_model, __pyx_k_model, sizeof(__pyx_k_model), 0, 0, 1, 1}, + {&__pyx_kp_u_ndarray_is_not_C_contiguous, __pyx_k_ndarray_is_not_C_contiguous, sizeof(__pyx_k_ndarray_is_not_C_contiguous), 0, 1, 0, 0}, + {&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0}, + {&__pyx_n_s_neu1, __pyx_k_neu1, sizeof(__pyx_k_neu1), 0, 0, 1, 1}, + {&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1}, + {&__pyx_n_s_num_epochs, __pyx_k_num_epochs, sizeof(__pyx_k_num_epochs), 0, 0, 1, 1}, + {&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1}, + {&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0}, + {&__pyx_kp_s_numpy_core_umath_failed_to_impor, __pyx_k_numpy_core_umath_failed_to_impor, sizeof(__pyx_k_numpy_core_umath_failed_to_impor), 0, 0, 1, 0}, + {&__pyx_n_s_offset, __pyx_k_offset, sizeof(__pyx_k_offset), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + {&__pyx_n_s_sent_idx, __pyx_k_sent_idx, 
sizeof(__pyx_k_sent_idx), 0, 0, 1, 1}, + {&__pyx_n_s_sentences, __pyx_k_sentences, sizeof(__pyx_k_sentences), 0, 0, 1, 1}, + {&__pyx_n_s_start_alpha, __pyx_k_start_alpha, sizeof(__pyx_k_start_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_total_effective_words, __pyx_k_total_effective_words, sizeof(__pyx_k_total_effective_words), 0, 0, 1, 1}, + {&__pyx_n_s_total_sentences, __pyx_k_total_sentences, sizeof(__pyx_k_total_sentences), 0, 0, 1, 1}, + {&__pyx_n_s_total_words, __pyx_k_total_words, sizeof(__pyx_k_total_words), 0, 0, 1, 1}, + {&__pyx_n_s_train_epoch_cbow, __pyx_k_train_epoch_cbow, sizeof(__pyx_k_train_epoch_cbow), 0, 0, 1, 1}, + {&__pyx_n_s_train_epoch_sg, __pyx_k_train_epoch_sg, sizeof(__pyx_k_train_epoch_sg), 0, 0, 1, 1}, + {&__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_k_unknown_dtype_code_in_numpy_pxd, sizeof(__pyx_k_unknown_dtype_code_in_numpy_pxd), 0, 1, 0, 0}, + {&__pyx_n_s_vocab, __pyx_k_vocab, sizeof(__pyx_k_vocab), 0, 0, 1, 1}, + {&__pyx_n_s_work, __pyx_k_work, sizeof(__pyx_k_work), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 91, __pyx_L1_error) + __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(1, 229, __pyx_L1_error) + __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(1, 810, __pyx_L1_error) + __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(1, 1000, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple_)) __PYX_ERR(1, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + 
__Pyx_GIVEREF(__pyx_tuple__3); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 810, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 814, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 834, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + * _import_array() + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_umath() except -1: + */ + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 1000, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_ufunc() except -1: + */ + __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 1006, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + */ + __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 1012, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + + /* "gensim/models/fasttext_corpusfile.pyx":95 + * + * + * def train_epoch_sg( # <<<<<<<<<<<<<< + * model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, _l1): + * """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode. 
+ */ + __pyx_tuple__10 = PyTuple_Pack(31, __pyx_n_s_model, __pyx_n_s_corpus_file, __pyx_n_s_offset, __pyx_n_s_cython_vocab, __pyx_n_s_cur_epoch, __pyx_n_s_expected_examples, __pyx_n_s_expected_words, __pyx_n_s_work, __pyx_n_s_l1, __pyx_n_s_c, __pyx_n_s_cur_epoch_2, __pyx_n_s_num_epochs, __pyx_n_s_expected_examples_2, __pyx_n_s_expected_words_2, __pyx_n_s_start_alpha, __pyx_n_s_end_alpha, __pyx_n_s_alpha_2, __pyx_n_s_input_stream, __pyx_n_s_vocab, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_total_effective_words, __pyx_n_s_total_sentences, __pyx_n_s_total_words, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_sentences); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); + __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(9, 0, 31, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__10, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_fasttext_corpusfil, __pyx_n_s_train_epoch_sg, 95, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 95, __pyx_L1_error) + + /* "gensim/models/fasttext_corpusfile.pyx":190 + * + * + * def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1): + * """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + __pyx_tuple__12 = PyTuple_Pack(31, __pyx_n_s_model, __pyx_n_s_corpus_file, __pyx_n_s_offset, __pyx_n_s_cython_vocab, __pyx_n_s_cur_epoch, __pyx_n_s_expected_examples, __pyx_n_s_expected_words, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_c, __pyx_n_s_cur_epoch_2, __pyx_n_s_num_epochs, __pyx_n_s_expected_examples_2, __pyx_n_s_expected_words_2, __pyx_n_s_start_alpha, __pyx_n_s_end_alpha, __pyx_n_s_alpha_2, __pyx_n_s_input_stream, __pyx_n_s_vocab, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_total_effective_words, __pyx_n_s_total_sentences, __pyx_n_s_total_words, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_sentences); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 190, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(9, 0, 31, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_fasttext_corpusfil, __pyx_n_s_train_epoch_cbow, 190, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 190, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_modinit_global_init_code(void); /*proto*/ +static int __Pyx_modinit_variable_export_code(void); /*proto*/ +static int __Pyx_modinit_function_export_code(void); /*proto*/ +static int __Pyx_modinit_type_init_code(void); /*proto*/ +static int __Pyx_modinit_type_import_code(void); /*proto*/ +static int __Pyx_modinit_variable_import_code(void); /*proto*/ +static int 
__Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) __PYX_ERR(2, 9, __pyx_L1_error) + __pyx_ptype_5numpy_dtype = __Pyx_ImportType("numpy", "dtype", sizeof(PyArray_Descr), 0); if (unlikely(!__pyx_ptype_5numpy_dtype)) __PYX_ERR(1, 164, __pyx_L1_error) + __pyx_ptype_5numpy_flatiter = __Pyx_ImportType("numpy", "flatiter", sizeof(PyArrayIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_flatiter)) __PYX_ERR(1, 186, __pyx_L1_error) + __pyx_ptype_5numpy_broadcast = __Pyx_ImportType("numpy", "broadcast", sizeof(PyArrayMultiIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_broadcast)) __PYX_ERR(1, 190, __pyx_L1_error) + __pyx_ptype_5numpy_ndarray = __Pyx_ImportType("numpy", "ndarray", sizeof(PyArrayObject), 0); if (unlikely(!__pyx_ptype_5numpy_ndarray)) __PYX_ERR(1, 199, __pyx_L1_error) + __pyx_ptype_5numpy_ufunc = __Pyx_ImportType("numpy", "ufunc", sizeof(PyUFuncObject), 0); if (unlikely(!__pyx_ptype_5numpy_ufunc)) __PYX_ERR(1, 872, __pyx_L1_error) + __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = __Pyx_ImportType("gensim.models.word2vec_corpusfile", "CythonLineSentence", sizeof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), 1); if (unlikely(!__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence)) __PYX_ERR(3, 33, __pyx_L1_error) + __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = (struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence*)__Pyx_GetVtable(__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence->tp_dict); if (unlikely(!__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence)) __PYX_ERR(3, 33, __pyx_L1_error) + __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab = __Pyx_ImportType("gensim.models.word2vec_corpusfile", "CythonVocab", sizeof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab), 1); if (unlikely(!__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab)) __PYX_ERR(3, 61, __pyx_L1_error) + __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab = (struct 
__pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab*)__Pyx_GetVtable(__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab->tp_dict); if (unlikely(!__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab)) __PYX_ERR(3, 61, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __pyx_t_1 = __Pyx_ImportModule("gensim.models.word2vec_inner"); if (!__pyx_t_1) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "scopy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_scopy, "__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "saxpy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_saxpy, "__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "sdot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_sdot, "__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "dsdot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_dsdot, "__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "snrm2", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_snrm2, "__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "sscal", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_sscal, "__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "EXP_TABLE", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE, "__pyx_t_6gensim_6models_14word2vec_inner_REAL_t [0x3E8]") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "our_dot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_our_dot, "__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "our_saxpy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy, "__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __pyx_t_1 = __Pyx_ImportModule("gensim.models.word2vec_inner"); if (!__pyx_t_1) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_1, "random_int32", (void (**)(void))&__pyx_f_6gensim_6models_14word2vec_inner_random_int32, "unsigned PY_LONG_LONG (unsigned PY_LONG_LONG *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = __Pyx_ImportModule("gensim.models.fasttext_inner"); if (!__pyx_t_2) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fasttext_fast_sentence_sg_neg", (void 
(**)(void))&__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fasttext_fast_sentence_sg_hs", (void (**)(void))&__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fasttext_fast_sentence_cbow_neg", (void (**)(void))&__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "fasttext_fast_sentence_cbow_hs", (void (**)(void))&__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_2, "init_ft_config", (void (**)(void))&__pyx_f_6gensim_6models_14fasttext_inner_init_ft_config, "PyObject *(struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig *, PyObject *, PyObject *, PyObject *, PyObject *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_2); 
__pyx_t_2 = 0; + __pyx_t_3 = __Pyx_ImportModule("gensim.models.word2vec_corpusfile"); if (!__pyx_t_3) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_3, "get_alpha", (void (**)(void))&__pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha, "__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_3, "get_next_alpha", (void (**)(void))&__pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha, "__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int, int, int, int, int)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_RefNannyFinishContext(); + return -1; +} + + +#if PY_MAJOR_VERSION < 3 +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) + #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initfasttext_corpusfile(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initfasttext_corpusfile(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_fasttext_corpusfile(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_fasttext_corpusfile(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + result = PyDict_SetItemString(moddict, to_name, value); + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static int __pyx_pymod_exec_fasttext_corpusfile(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m && __pyx_m == 
__pyx_pyinit_module) return 0; + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_fasttext_corpusfile(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("fasttext_corpusfile", __pyx_methods, __pyx_k_Optimized_cython_functions_for_f, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_gensim__models__fasttext_corpusfile) { + if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "gensim.models.fasttext_corpusfile")) { + if (unlikely(PyDict_SetItemString(modules, "gensim.models.fasttext_corpusfile", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + (void)__Pyx_modinit_type_init_code(); + if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error; + if (unlikely(__Pyx_modinit_variable_import_code() != 0)) goto __pyx_L1_error; + if (unlikely(__Pyx_modinit_function_import_code() != 0)) goto __pyx_L1_error; + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "gensim/models/fasttext_corpusfile.pyx":14 + * """Optimized cython functions for file-based training :class:`~gensim.models.fasttext.FastText` model.""" + * + * import numpy as np # <<<<<<<<<<<<<< + * cimport numpy as np + * + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_numpy, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_np, __pyx_t_1) < 0) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":95 + * + * + * def train_epoch_sg( # <<<<<<<<<<<<<< + * model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, _l1): + * """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_19fasttext_corpusfile_1train_epoch_sg, NULL, __pyx_n_s_gensim_models_fasttext_corpusfil_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_epoch_sg, __pyx_t_1) < 0) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":190 + * + * + * def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1): + * """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. 
+ */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_19fasttext_corpusfile_3train_epoch_cbow, NULL, __pyx_n_s_gensim_models_fasttext_corpusfil_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 190, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_epoch_cbow, __pyx_t_1) < 0) __PYX_ERR(0, 190, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/fasttext_corpusfile.pyx":283 + * + * + * CORPUSFILE_VERSION = 1 # <<<<<<<<<<<<<< + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CORPUSFILE_VERSION, __pyx_int_1) < 0) __PYX_ERR(0, 283, __pyx_L1_error) + + /* "gensim/models/fasttext_corpusfile.pyx":1 + * #!/usr/bin/env cython # <<<<<<<<<<<<<< + * # distutils: language = c++ + * # cython: boundscheck=False + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init gensim.models.fasttext_corpusfile", 0, __pyx_lineno, __pyx_filename); + } + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init gensim.models.fasttext_corpusfile"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if 
(!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))); + 
assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs); + } +} +#endif + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +#include "frameobject.h" +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = f->f_localsplus; + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + 
Py_DECREF(args);
+    return result;
+}
+#endif
+
+/* DictGetItem */
+#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
+static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) {
+    PyObject *value;
+    value = PyDict_GetItemWithError(d, key);
+    if (unlikely(!value)) {
+        if (!PyErr_Occurred()) {
+            PyObject* args = PyTuple_Pack(1, key);
+            if (likely(args))
+                PyErr_SetObject(PyExc_KeyError, args);
+            Py_XDECREF(args);
+        }
+        return NULL;
+    }
+    Py_INCREF(value);
+    return value;
+}
+#endif
+
+/* RaiseTooManyValuesToUnpack */
+static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {
+    PyErr_Format(PyExc_ValueError,
+                 "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected);
+}
+
+/* RaiseNeedMoreValuesToUnpack */
+static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {
+    PyErr_Format(PyExc_ValueError,
+                 "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack",
+                 index, (index == 1) ? "" : "s");
+}
+
+/* RaiseNoneIterError */
+static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) {
+    PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable");
+}
+
+/* SaveResetException */
+#if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+    #if PY_VERSION_HEX >= 0x030700A2
+    *type = tstate->exc_state.exc_type;
+    *value = tstate->exc_state.exc_value;
+    *tb = tstate->exc_state.exc_traceback;
+    #else
+    *type = tstate->exc_type;
+    *value = tstate->exc_value;
+    *tb = tstate->exc_traceback;
+    #endif
+    Py_XINCREF(*type);
+    Py_XINCREF(*value);
+    Py_XINCREF(*tb);
+}
+static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+    PyObject *tmp_type, *tmp_value, *tmp_tb;
+    #if PY_VERSION_HEX >= 0x030700A2
+    tmp_type = tstate->exc_state.exc_type;
+    tmp_value = tstate->exc_state.exc_value;
+    tmp_tb = tstate->exc_state.exc_traceback;
+    tstate->exc_state.exc_type = type;
+    tstate->exc_state.exc_value = value;
+    tstate->exc_state.exc_traceback = tb;
+    #else
+    tmp_type = tstate->exc_type;
+    tmp_value = tstate->exc_value;
+    tmp_tb = tstate->exc_traceback;
+    tstate->exc_type = type;
+    tstate->exc_value = value;
+    tstate->exc_traceback = tb;
+    #endif
+    Py_XDECREF(tmp_type);
+    Py_XDECREF(tmp_value);
+    Py_XDECREF(tmp_tb);
+}
+#endif
+
+/* PyErrExceptionMatches */
+#if CYTHON_FAST_THREAD_STATE
+static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+    Py_ssize_t i, n;
+    n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+    for (i=0; i<n; i++) {
+        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+    }
+#endif
+    for (i=0; i<n; i++) {
+        if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
+    }
+    return 0;
+}
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
+    PyObject *exc_type = tstate->curexc_type;
+    if (exc_type == err) return 1;
+    if (unlikely(!exc_type)) return 0;
+    if (unlikely(PyTuple_Check(err)))
+        return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
+    return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
+}
+#endif
+
+/* GetException */
+#if CYTHON_FAST_THREAD_STATE
+static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+#else
+static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) {
+#endif
+    PyObject *local_type, *local_value, *local_tb;
+#if CYTHON_FAST_THREAD_STATE
+    PyObject *tmp_type, *tmp_value, *tmp_tb;
+    local_type = tstate->curexc_type;
+    local_value = tstate->curexc_value;
+    local_tb = tstate->curexc_traceback;
+    tstate->curexc_type = 0;
+    tstate->curexc_value = 0;
+    tstate->curexc_traceback = 0;
+#else
+    PyErr_Fetch(&local_type, &local_value, &local_tb);
+#endif
+ 
PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if PY_VERSION_HEX >= 0x030700A2 + tmp_type = tstate->exc_state.exc_type; + tmp_value = tstate->exc_state.exc_value; + tmp_tb = tstate->exc_state.exc_traceback; + tstate->exc_state.exc_type = local_type; + tstate->exc_state.exc_value = local_value; + tstate->exc_state.exc_traceback = local_tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* GetVTable */ + static void* __Pyx_GetVtable(PyObject *dict) { + void* ptr; + PyObject *ob = PyObject_GetItem(dict, __pyx_n_s_pyx_vtable); + if (!ob) + goto bad; +#if PY_VERSION_HEX >= 0x02070000 + ptr = PyCapsule_GetPointer(ob, 0); +#else + ptr = PyCObject_AsVoidPtr(ob); +#endif + if (!ptr && !PyErr_Occurred()) + PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type"); + Py_DECREF(ob); + return ptr; +bad: + Py_XDECREF(ob); + return NULL; +} + +/* Import */ + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* CLineInTraceback */ + #ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + 
__Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + use_cline = __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback); + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (PyObject_Not(use_cline) != 0) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ + static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ + #include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static 
PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ + #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* None */ + static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void) { + int err; + #ifdef WITH_THREAD + PyGILState_STATE _save = PyGILState_Ensure(); + #endif + err = !!PyErr_Occurred(); + #ifdef WITH_THREAD + PyGILState_Release(_save); + #endif + return err; +} + +/* Declarations */ + #if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return ::std::complex< float >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return x + y*(__pyx_t_float_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + __pyx_t_float_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +/* Arithmetic */ + #if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real - b.real; + 
z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + #if 1 + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + if (b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabsf(b.real) >= fabsf(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.imag); + } else { + float r = b.imag / b.real; + float s = 1.0 / (b.real + b.imag * r); + return __pyx_t_float_complex_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + float r = b.real / b.imag; + float s = 1.0 / (b.imag + b.real * r); + return __pyx_t_float_complex_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + if (b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real); + } else { + float denom = b.real * b.real + b.imag * b.imag; + return __pyx_t_float_complex_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + (a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrtf(z.real*z.real + z.imag*z.imag); + #else + return hypotf(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + float denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(a, a); + case 3: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(z, a); + case 4: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = powf(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2f(0, -1); + } + } else { + r = __Pyx_c_abs_float(a); + theta = atan2f(a.imag, a.real); + } + lnr = logf(r); + z_r = expf(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cosf(z_theta); + z.imag = z_r * sinf(z_theta); + return z; + } + #endif +#endif + +/* Declarations */ + #if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_double_complex 
__pyx_t_double_complex_from_parts(double x, double y) { + return ::std::complex< double >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return x + y*(__pyx_t_double_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + __pyx_t_double_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +/* Arithmetic */ + #if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + #if 1 + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + if (b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabs(b.real) >= fabs(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.imag); + } else { + double r = b.imag / b.real; + double s = 1.0 / (b.real + b.imag * r); + return __pyx_t_double_complex_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + double r = b.real / b.imag; + double s = 1.0 / (b.imag + b.real * r); + return __pyx_t_double_complex_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + if (b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real); + } else { + double denom = b.real * b.real + b.imag * b.imag; + return __pyx_t_double_complex_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + (a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt(z.real*z.real + z.imag*z.imag); + #else + return hypot(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + double denom = 
a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(a, a); + case 3: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(z, a); + case 4: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = pow(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2(0, -1); + } + } else { + r = __Pyx_c_abs_double(a); + theta = atan2(a.imag, a.real); + } + lnr = log(r); + z_r = exp(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos(z_theta); + z.imag = z_r * sin(z_theta); + return z; + } + #endif +#endif + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value) { + const enum NPY_TYPES neg_one = (enum NPY_TYPES) -1, const_zero = (enum NPY_TYPES) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(enum NPY_TYPES) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(enum NPY_TYPES) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum NPY_TYPES) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(enum NPY_TYPES), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) 
(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned 
long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if 
(is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + 
__PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* FastTypeChecks */ + #if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == 
&PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { + if (likely(err == exc_type)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); + } + return PyErr_GivenExceptionMatches(err, exc_type); +} +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { + if (likely(err == exc_type1 || err == exc_type2)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); + } + return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); +} +#endif + +/* CheckBinaryVersion */ + static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* ModuleImport */ + #ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +/* TypeImport */ + #ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = 
__Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s has the wrong size, try recompiling. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +/* VoidPtrImport */ + #ifndef __PYX_HAVE_RT_ImportVoidPtr +#define __PYX_HAVE_RT_ImportVoidPtr +static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); + if (!d) + goto bad; + cobj = PyDict_GetItemString(d, name); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C variable %.200s", + PyModule_GetName(module), name); + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), name, sig, PyCapsule_GetName(cobj)); + goto bad; + } + *p = PyCapsule_GetPointer(cobj, sig); +#else + {const char *desc, *s1, *s2; + desc = (const char *)PyCObject_GetDesc(cobj); + if (!desc) + goto bad; + s1 = desc; s2 = sig; + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + if (*s1 != *s2) { + PyErr_Format(PyExc_TypeError, + "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), name, sig, desc); + goto bad; + } + *p = PyCObject_AsVoidPtr(cobj);} +#endif + if (!(*p)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} +#endif + +/* FunctionImport */ + #ifndef __PYX_HAVE_RT_ImportFunction +#define __PYX_HAVE_RT_ImportFunction +static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); + if (!d) + goto bad; + cobj = PyDict_GetItemString(d, funcname); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C function %.200s", + PyModule_GetName(module), funcname); + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); + goto bad; + } + tmp.p = 
PyCapsule_GetPointer(cobj, sig); +#else + {const char *desc, *s1, *s2; + desc = (const char *)PyCObject_GetDesc(cobj); + if (!desc) + goto bad; + s1 = desc; s2 = sig; + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + if (*s1 != *s2) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, desc); + goto bad; + } + tmp.p = PyCObject_AsVoidPtr(cobj);} +#endif + *f = tmp.fp; + if (!(*f)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} +#endif + +/* InitStrings */ + static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + 
return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(x); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/gensim/models/fasttext_corpusfile.pyx b/gensim/models/fasttext_corpusfile.pyx new file mode 100644 index 0000000000..4a0a080350 --- /dev/null +++ b/gensim/models/fasttext_corpusfile.pyx @@ -0,0 +1,283 @@ +#!/usr/bin/env cython +# distutils: language = c++ +# cython: boundscheck=False +# cython: wraparound=False +# cython: cdivision=True +# cython: embedsignature=True +# coding: utf-8 +# +# Copyright (C) 2018 Dmitry Persiyanov +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +"""Optimized cython functions for file-based training :class:`~gensim.models.fasttext.FastText` model.""" + +import numpy as np +cimport numpy as np + +from libcpp.string cimport string +from libcpp.vector cimport vector + +from gensim.models.fasttext_inner cimport ( + fasttext_fast_sentence_sg_hs, + fasttext_fast_sentence_sg_neg, + fasttext_fast_sentence_cbow_hs, + fasttext_fast_sentence_cbow_neg, + init_ft_config, + FastTextConfig +) + +from gensim.models.word2vec_inner cimport random_int32 + +from gensim.models.word2vec_corpusfile cimport ( + VocabItem, + CythonVocab, + CythonLineSentence, + get_alpha, + get_next_alpha, + cvocab_t +) + +ctypedef np.float32_t REAL_t +DEF MAX_SENTENCE_LEN = 10000 +DEF MAX_SUBWORDS = 1000 + + +cdef void prepare_c_structures_for_batch( + vector[vector[string]] &sentences, int sample, int hs, int window, int *total_words, + int *effective_words, int *effective_sentences, unsigned long long *next_random, cvocab_t *vocab, + int *sentence_idx, np.uint32_t *indexes, int *codelens, np.uint8_t **codes, np.uint32_t **points, + np.uint32_t *reduced_windows, int *subwords_idx_len, np.uint32_t **subwords_idx) nogil: + cdef VocabItem word + cdef string token + cdef vector[string] sent + + sentence_idx[0] = 0 # indices of the first sentence always start at 0 + for sent in sentences: + if sent.empty(): + continue # ignore empty sentences; leave effective_sentences unchanged + total_words[0] += 
sent.size()
+
+        for token in sent:
+            # leaving `effective_words` unchanged = shortening the sentence = expanding the window
+            if vocab[0].find(token) == vocab[0].end():
+                continue
+
+            word = vocab[0][token]
+            if sample and word.sample_int < random_int32(next_random):
+                continue
+            indexes[effective_words[0]] = word.index
+            subwords_idx_len[effective_words[0]] = word.subword_idx_len
+            subwords_idx[effective_words[0]] = word.subword_idx
+
+            if hs:
+                codelens[effective_words[0]] = word.code_len
+                codes[effective_words[0]] = word.code
+                points[effective_words[0]] = word.point
+
+            effective_words[0] += 1
+            if effective_words[0] == MAX_SENTENCE_LEN:
+                break
+
+        # keep track of which words go into which sentence, so we don't train
+        # across sentence boundaries.
+        # indices of sentence number X are between <sentence_idx[X], sentence_idx[X + 1])
+        effective_sentences[0] += 1
+        sentence_idx[effective_sentences[0]] = effective_words[0]
+
+        if effective_words[0] == MAX_SENTENCE_LEN:
+            break
+
+    # precompute "reduced window" offsets in a single randint() call
+    for i in range(effective_words[0]):
+        reduced_windows[i] = random_int32(next_random) % window
+
+
+def train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work,
+                   _neu1):
+    """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode.
+
+    Called internally from :meth:`~gensim.models.fasttext.FastText.train`.
+
+    Parameters
+    ----------
+    model : :class:`~gensim.models.fasttext.FastText`
+        The FastText model instance to train.
+    corpus_file : str
+        Path to a corpus file.
+    _cur_epoch : int
+        Current epoch number. Used for calculating and decaying learning rate.
+    _work : np.ndarray
+        Private working memory for each worker.
+    _neu1 : np.ndarray
+        Private working memory for each worker.
+
+    Returns
+    -------
+    int
+        Number of words in the vocabulary actually used for training (They already existed in the vocabulary
+        and were not discarded by negative sampling).
+    """
+    cdef FastTextConfig c
+
+    # For learning rate updates
+    cdef int cur_epoch = _cur_epoch
+    cdef int num_epochs = model.epochs
+    cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples)
+    cdef int expected_words = (-1 if _expected_words is None else _expected_words)
+    cdef REAL_t start_alpha = model.alpha
+    cdef REAL_t end_alpha = model.min_alpha
+    cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs)
+
+    cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset)
+    cdef CythonVocab vocab = _cython_vocab
+
+    cdef int i, j, k
+    cdef int effective_words = 0, effective_sentences = 0
+    cdef int total_effective_words = 0, total_sentences = 0, total_words = 0
+    cdef int sent_idx, idx_start, idx_end
+
+    init_ft_config(&c, model, _alpha, _work, _neu1)
+
+    # for preparing batches & training
+    cdef vector[vector[string]] sentences
+
+    with nogil:
+        input_stream.reset()
+        while not (input_stream.is_eof() or total_words > expected_words / c.workers):
+            effective_sentences = 0
+            effective_words = 0
+
+            sentences = input_stream.next_batch()
+
+            prepare_c_structures_for_batch(
+                sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences,
+                &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, c.codelens,
+                c.codes, c.points, c.reduced_windows, c.subwords_idx_len, c.subwords_idx)
+
+            for sent_idx in range(effective_sentences):
+                idx_start = c.sentence_idx[sent_idx]
+                idx_end = c.sentence_idx[sent_idx + 1]
+                for i in range(idx_start, idx_end):
+                    j = i - c.window + c.reduced_windows[i]
+                    if j < idx_start:
+                        j = idx_start
+                    k = i + c.window + 1 - c.reduced_windows[i]
+                    if k > idx_end:
+                        k = idx_end
+                    for j in range(j, k):
+                        if j == i:
+                            continue
+                        if c.hs:
+                            fasttext_fast_sentence_sg_hs(
+                                c.points[j], c.codes[j], c.codelens[j], c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size,
+                                c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, c.neu1,
+                                c.word_locks_vocab, c.word_locks_ngrams)
+                        if c.negative:
+                            c.next_random = fasttext_fast_sentence_sg_neg(
+                                c.negative, c.cum_table, c.cum_table_len, c.syn0_vocab, c.syn0_ngrams, c.syn1neg, c.size,
+                                c.indexes[j], c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work,
+                                c.neu1, c.next_random, c.word_locks_vocab, c.word_locks_ngrams)
+
+            total_sentences += sentences.size()
+            total_effective_words += effective_words
+
+            c.alpha = get_next_alpha(start_alpha, end_alpha, total_sentences, total_words,
+                                     expected_examples, expected_words, cur_epoch, num_epochs)
+
+    return total_sentences, total_effective_words, total_words
+
+
+def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work,
+                     _neu1):
+    """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode.
+
+    Called internally from :meth:`~gensim.models.fasttext.FastText.train`.
+
+    Parameters
+    ----------
+    model : :class:`~gensim.models.fasttext.FastText`
+        The FastText model instance to train.
+    corpus_file : str
+        Path to a corpus file.
+    _cur_epoch : int
+        Current epoch number. Used for calculating and decaying learning rate.
+    _work : np.ndarray
+        Private working memory for each worker.
+    _neu1 : np.ndarray
+        Private working memory for each worker.
+
+    Returns
+    -------
+    int
+        Number of words in the vocabulary actually used for training (They already existed in the vocabulary
+        and were not discarded by negative sampling).
+ """ + cdef FastTextConfig c + + # For learning rate updates + cdef int cur_epoch = _cur_epoch + cdef int num_epochs = model.epochs + cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + cdef int expected_words = (-1 if _expected_words is None else _expected_words) + cdef REAL_t start_alpha = model.alpha + cdef REAL_t end_alpha = model.min_alpha + cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + + cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + cdef CythonVocab vocab = _cython_vocab + + cdef int i, j, k + cdef int effective_words = 0, effective_sentences = 0 + cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 + cdef int sent_idx, idx_start, idx_end + + init_ft_config(&c, model, _alpha, _work, _neu1) + + # for preparing batches & training + cdef vector[vector[string]] sentences + + with nogil: + input_stream.reset() + while not (input_stream.is_eof() or total_words > expected_words / c.workers): + effective_sentences = 0 + effective_words = 0 + + sentences = input_stream.next_batch() + + prepare_c_structures_for_batch( + sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences, + &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, c.codelens, + c.codes, c.points, c.reduced_windows, c.subwords_idx_len, c.subwords_idx) + + for sent_idx in range(effective_sentences): + idx_start = c.sentence_idx[sent_idx] + idx_end = c.sentence_idx[sent_idx + 1] + for i in range(idx_start, idx_end): + j = i - c.window + c.reduced_windows[i] + if j < idx_start: + j = idx_start + k = i + c.window + 1 - c.reduced_windows[i] + if k > idx_end: + k = idx_end + + if c.hs: + fasttext_fast_sentence_cbow_hs( + c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, c.cbow_mean, + c.word_locks_vocab, c.word_locks_ngrams) + if c.negative: + c.next_random = fasttext_fast_sentence_cbow_neg( + c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, + c.syn1neg, c.size, c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, + c.cbow_mean, c.next_random, c.word_locks_vocab, c.word_locks_ngrams) + + total_sentences += sentences.size() + total_effective_words += effective_words + + c.alpha = get_next_alpha(start_alpha, end_alpha, total_sentences, total_words, + expected_examples, expected_words, cur_epoch, num_epochs) + + return total_sentences, total_effective_words, total_words + + +CORPUSFILE_VERSION = 1 diff --git a/gensim/models/fasttext_inner.c b/gensim/models/fasttext_inner.c index 5a13de4924..670c4a94a4 100644 --- a/gensim/models/fasttext_inner.c +++ b/gensim/models/fasttext_inner.c @@ -1,4 +1,4 @@ -/* Generated by Cython 0.28.3 */ +/* Generated by Cython 0.28.2 */ #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -7,7 +7,7 @@ #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. 
#else -#define CYTHON_ABI "0_28_3" +#define CYTHON_ABI "0_28_2" #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof @@ -453,7 +453,6 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact - #define PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -566,8 +565,8 @@ static CYTHON_INLINE float __PYX_NAN() { #include #include "numpy/arrayobject.h" #include "numpy/ufuncobject.h" -#include #include "voidptr.h" +#include #ifdef _OPENMP #include #endif /* _OPENMP */ @@ -652,7 +651,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -760,7 +759,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_cython_runtime; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -810,7 +809,7 @@ static const char *__pyx_f[] = { #endif -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. 
* * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -819,7 +818,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -828,7 +827,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -837,7 +836,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -846,7 +845,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -855,7 +854,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -864,7 +863,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -873,7 +872,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -882,7 +881,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -891,7 +890,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -900,7 
+899,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -909,7 +908,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -918,7 +917,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -927,7 +926,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -936,7 +935,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -945,7 +944,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -954,7 +953,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -963,7 +962,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -972,7 +971,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * 
ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -981,7 +980,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -990,7 +989,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -999,9 +998,9 @@ typedef npy_double __pyx_t_5numpy_double_t; */ typedef npy_longdouble __pyx_t_5numpy_longdouble_t; -/* "word2vec_inner.pxd":12 +/* "word2vec_inner.pxd":19 + * void* PyCObject_AsVoidPtr(object obj) * - * cimport numpy as np * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< * * # BLAS routine signatures @@ -1034,7 +1033,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do /*--- Type declarations ---*/ -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1043,7 +1042,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1052,7 +1051,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1061,7 +1060,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -1069,8 +1068,10 @@ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; * cdef inline object PyArray_MultiIterNew1(a): */ typedef npy_cdouble __pyx_t_5numpy_complex_t; +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig; +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config; -/* "word2vec_inner.pxd":15 +/* "word2vec_inner.pxd":22 * * # BLAS routine signatures * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1079,7 +1080,7 @@ typedef npy_cdouble __pyx_t_5numpy_complex_t; */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const *, float const *, int const *, float *, int const *); -/* 
"word2vec_inner.pxd":16 +/* "word2vec_inner.pxd":23 * # BLAS routine signatures * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1088,7 +1089,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const *, */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); -/* "word2vec_inner.pxd":17 +/* "word2vec_inner.pxd":24 * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1097,7 +1098,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, */ typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "word2vec_inner.pxd":18 +/* "word2vec_inner.pxd":25 * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1106,7 +1107,7 @@ typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, */ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "word2vec_inner.pxd":19 +/* "word2vec_inner.pxd":26 * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< @@ -1115,7 +1116,7 @@ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const * */ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const *, float const *, int const *); -/* "word2vec_inner.pxd":20 +/* "word2vec_inner.pxd":27 * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< @@ -1124,7 +1125,7 @@ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const * */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, float const *, float const *, int const *); -/* "word2vec_inner.pxd":35 +/* "word2vec_inner.pxd":44 * * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1133,7 +1134,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, */ typedef 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "word2vec_inner.pxd":36 +/* "word2vec_inner.pxd":45 * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1142,6 +1143,89 @@ typedef __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6model */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); +/* "word2vec_inner.pxd":51 + * + * + * cdef struct Word2VecConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, compute_loss, size, window, cbow_mean, workers + * REAL_t running_training_loss, alpha + */ +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig { + int hs; + int negative; + int sample; + int compute_loss; + int size; + int window; + int cbow_mean; + int workers; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t running_training_loss; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + int sentence_idx[(0x2710 + 1)]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; +}; + +/* "word2vec_inner.pxd":125 + * + * + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=*) # <<<<<<<<<<<<<< + */ +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config { + int __pyx_n; + PyObject *_neu1; +}; +struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig; + +/* "gensim/models/fasttext_inner.pxd":22 + * + * + * cdef struct FastTextConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, size, window, cbow_mean, workers + * REAL_t alpha + */ +struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig { + int hs; + int negative; + int sample; + int size; + int window; + int cbow_mean; + int workers; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0_vocab; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks_vocab; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0_ngrams; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks_ngrams; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + int sentence_idx[(0x2710 + 1)]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + 
__pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; + int subwords_idx_len[0x2710]; + __pyx_t_5numpy_uint32_t *subwords_idx[0x2710]; +}; + /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY @@ -1216,6 +1300,16 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + /* RaiseArgTupleInvalid.proto */ static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); @@ -1228,16 +1322,6 @@ static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ const char* function_name); -/* ExtTypeTest.proto */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - /* PySequenceContains.proto */ static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { int result = PySequence_Contains(seq, item); @@ -1273,9 +1357,6 @@ static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* k #define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key) #endif -/* GetModuleGlobalName.proto */ -static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); - /* PyFunctionFastCall.proto */ #if CYTHON_FAST_PYCALL #define __Pyx_PyFunction_FastCall(func, args, nargs)\ @@ -1394,6 +1475,9 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /* ImportFrom.proto */ static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); +/* GetModuleGlobalName.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); + /* PyObjectCallNoArg.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); @@ -1544,7 +1628,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES v static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); /* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); +static CYTHON_INLINE npy_uint32 __Pyx_PyInt_As_npy_uint32(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_As_PY_LONG_LONG(PyObject *); @@ -1553,7 +1637,7 @@ static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_As_PY_LONG_LONG(PyObject *); static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_As_unsigned_PY_LONG_LONG(PyObject *); /* CIntFromPy.proto */ -static CYTHON_INLINE npy_uint32 __Pyx_PyInt_As_npy_uint32(PyObject *); +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); /* FastTypeChecks.proto */ #if CYTHON_COMPILING_IN_CPYTHON @@ -1571,6 +1655,9 @@ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObj /* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); +/* 
FunctionExport.proto */ +static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); + /* PyIdentifierFromString.proto */ #if !defined(__Pyx_PyIdentifier_FromString) #if PY_MAJOR_VERSION < 3 @@ -1596,8 +1683,6 @@ static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (** static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); -/* Module declarations from 'cython' */ - /* Module declarations from 'cpython.buffer' */ /* Module declarations from 'libc.string' */ @@ -1627,8 +1712,6 @@ static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ -/* Module declarations from 'libc.math' */ - /* Module declarations from 'gensim.models.word2vec_inner' */ static __pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_scopy = 0; #define __pyx_v_6gensim_6models_14word2vec_inner_scopy (*__pyx_vp_6gensim_6models_14word2vec_inner_scopy) @@ -1655,14 +1738,19 @@ static void (*__pyx_f_6gensim_6models_14word2vec_inner_our_saxpy_noblas)(int con static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_bisect_left)(__pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG); /*proto*/ static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_random_int32)(unsigned PY_LONG_LONG *); /*proto*/ +/* Module declarations from 'cython' */ + +/* Module declarations from 'libc.math' */ + /* Module declarations from 'gensim.models.fasttext_inner' */ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_6gensim_6models_14fasttext_inner_LOG_TABLE[0x3E8]; static int __pyx_v_6gensim_6models_14fasttext_inner_ONE; static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_6gensim_6models_14fasttext_inner_ONEF; -static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ -static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ -static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ -static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static void __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static void __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static PyObject *__pyx_f_6gensim_6models_14fasttext_inner_init_ft_config(struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig *, PyObject *, PyObject *, PyObject *, PyObject *); /*proto*/ #define __Pyx_MODULE_NAME "gensim.models.fasttext_inner" extern int __pyx_module_is_main_gensim__models__fasttext_inner; int __pyx_module_is_main_gensim__models__fasttext_inner = 0; @@ -1673,6 +1761,7 @@ static PyObject *__pyx_builtin_range; static PyObject *__pyx_builtin_enumerate; static PyObject *__pyx_builtin_ValueError; static PyObject *__pyx_builtin_RuntimeError; +static const char __pyx_k_c[] = "c"; static const char __pyx_k_i[] = "i"; static const char __pyx_k_j[] = "j"; static const char __pyx_k_k[] = "k"; @@ -1687,7 +1776,6 @@ static const char __pyx_k_REAL[] = "REAL"; static const char __pyx_k_code[] = "code"; static const char __pyx_k_init[] = "init"; static const char __pyx_k_item[] = "item"; -static const char __pyx_k_l1_2[] = "l1"; static const char __pyx_k_main[] = "__main__"; static const char __pyx_k_neu1[] = "_neu1"; static const char __pyx_k_sent[] = "sent"; @@ -1697,10 +1785,7 @@ static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_word[] = "word"; static const char __pyx_k_work[] = "_work"; static const char __pyx_k_alpha[] = "alpha"; -static const char __pyx_k_array[] = "array"; -static const char __pyx_k_codes[] = "codes"; static const char __pyx_k_d_res[] = "d_res"; -static const char __pyx_k_dtype[] = "dtype"; static const char __pyx_k_fblas[] = "fblas"; static const char __pyx_k_index[] = "index"; static const char __pyx_k_model[] = "model"; @@ -1711,25 +1796,18 @@ static const char __pyx_k_range[] = "range"; static const char __pyx_k_token[] = "token"; static const char __pyx_k_vocab[] = "vocab"; static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_neu1_2[] = "neu1"; -static const char __pyx_k_points[] = "points"; static const char __pyx_k_random[] = "random"; static const char __pyx_k_sample[] = "sample"; -static const char __pyx_k_uint32[] = "uint32"; static const char __pyx_k_window[] = "window"; -static const char __pyx_k_work_2[] = "work"; -static const char __pyx_k_alpha_2[] = "_alpha"; static const char __pyx_k_float32[] = "float32"; static const char __pyx_k_idx_end[] = "idx_end"; -static const char __pyx_k_indexes[] = "indexes"; static const char __pyx_k_randint[] = "randint"; static const char __pyx_k_syn1neg[] = "syn1neg"; static const char __pyx_k_vlookup[] = "vlookup"; -static const char __pyx_k_codelens[] = "codelens"; +static const char __pyx_k_workers[] = "workers"; static const char __pyx_k_expected[] = "expected"; static const char __pyx_k_negative[] = "negative"; static const char __pyx_k_sent_idx[] = "sent_idx"; -static const char __pyx_k_subwords[] = "subwords"; static const char __pyx_k_cbow_mean[] = "cbow_mean"; static const char __pyx_k_cum_table[] = "cum_table"; static const char __pyx_k_enumerate[] = "enumerate"; @@ -1737,31 +1815,19 @@ static const char __pyx_k_idx_start[] = "idx_start"; static const char __pyx_k_sentences[] = 
"sentences"; static const char __pyx_k_ValueError[] = "ValueError"; static const char __pyx_k_sample_int[] = "sample_int"; -static const char __pyx_k_syn0_vocab[] = "syn0_vocab"; static const char __pyx_k_trainables[] = "trainables"; static const char __pyx_k_vocabulary[] = "vocabulary"; static const char __pyx_k_ImportError[] = "ImportError"; -static const char __pyx_k_next_random[] = "next_random"; -static const char __pyx_k_syn0_ngrams[] = "syn0_ngrams"; static const char __pyx_k_vector_size[] = "vector_size"; static const char __pyx_k_FAST_VERSION[] = "FAST_VERSION"; static const char __pyx_k_RuntimeError[] = "RuntimeError"; static const char __pyx_k_buckets_word[] = "buckets_word"; -static const char __pyx_k_sentence_idx[] = "sentence_idx"; -static const char __pyx_k_subwords_idx[] = "subwords_idx"; -static const char __pyx_k_cum_table_len[] = "cum_table_len"; static const char __pyx_k_vectors_vocab[] = "vectors_vocab"; -static const char __pyx_k_word_subwords[] = "word_subwords"; -static const char __pyx_k_subword_arrays[] = "subword_arrays"; static const char __pyx_k_train_batch_sg[] = "train_batch_sg"; static const char __pyx_k_vectors_ngrams[] = "vectors_ngrams"; static const char __pyx_k_effective_words[] = "effective_words"; -static const char __pyx_k_reduced_windows[] = "reduced_windows"; -static const char __pyx_k_subwords_idx_len[] = "subwords_idx_len"; static const char __pyx_k_train_batch_cbow[] = "train_batch_cbow"; -static const char __pyx_k_word_locks_vocab[] = "word_locks_vocab"; static const char __pyx_k_scipy_linalg_blas[] = "scipy.linalg.blas"; -static const char __pyx_k_word_locks_ngrams[] = "word_locks_ngrams"; static const char __pyx_k_MAX_WORDS_IN_BATCH[] = "MAX_WORDS_IN_BATCH"; static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static const char __pyx_k_effective_sentences[] = "effective_sentences"; @@ -1789,18 +1855,13 @@ static PyObject *__pyx_n_s_RuntimeError; static PyObject *__pyx_n_s_ValueError; static PyObject *__pyx_n_s__14; static PyObject *__pyx_n_s_alpha; -static PyObject *__pyx_n_s_alpha_2; -static PyObject *__pyx_n_s_array; static PyObject *__pyx_n_s_buckets_word; +static PyObject *__pyx_n_s_c; static PyObject *__pyx_n_s_cbow_mean; static PyObject *__pyx_n_s_cline_in_traceback; static PyObject *__pyx_n_s_code; -static PyObject *__pyx_n_s_codelens; -static PyObject *__pyx_n_s_codes; static PyObject *__pyx_n_s_cum_table; -static PyObject *__pyx_n_s_cum_table_len; static PyObject *__pyx_n_s_d_res; -static PyObject *__pyx_n_s_dtype; static PyObject *__pyx_n_s_effective_sentences; static PyObject *__pyx_n_s_effective_words; static PyObject *__pyx_n_s_enumerate; @@ -1815,46 +1876,33 @@ static PyObject *__pyx_n_s_idx_end; static PyObject *__pyx_n_s_idx_start; static PyObject *__pyx_n_s_import; static PyObject *__pyx_n_s_index; -static PyObject *__pyx_n_s_indexes; static PyObject *__pyx_n_s_init; static PyObject *__pyx_n_s_item; static PyObject *__pyx_n_s_j; static PyObject *__pyx_n_s_k; static PyObject *__pyx_n_s_l1; -static PyObject *__pyx_n_s_l1_2; static PyObject *__pyx_n_s_main; static PyObject *__pyx_n_s_model; static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous; static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou; static PyObject *__pyx_n_s_negative; static PyObject *__pyx_n_s_neu1; -static PyObject *__pyx_n_s_neu1_2; -static PyObject *__pyx_n_s_next_random; static PyObject *__pyx_n_s_np; static PyObject *__pyx_n_s_numpy; static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to; static PyObject 
*__pyx_kp_s_numpy_core_umath_failed_to_impor; static PyObject *__pyx_n_s_p_res; static PyObject *__pyx_n_s_point; -static PyObject *__pyx_n_s_points; static PyObject *__pyx_n_s_randint; static PyObject *__pyx_n_s_random; static PyObject *__pyx_n_s_range; -static PyObject *__pyx_n_s_reduced_windows; static PyObject *__pyx_n_s_sample; static PyObject *__pyx_n_s_sample_int; static PyObject *__pyx_n_s_scipy_linalg_blas; static PyObject *__pyx_n_s_sent; static PyObject *__pyx_n_s_sent_idx; -static PyObject *__pyx_n_s_sentence_idx; static PyObject *__pyx_n_s_sentences; static PyObject *__pyx_n_s_size; -static PyObject *__pyx_n_s_subword_arrays; -static PyObject *__pyx_n_s_subwords; -static PyObject *__pyx_n_s_subwords_idx; -static PyObject *__pyx_n_s_subwords_idx_len; -static PyObject *__pyx_n_s_syn0_ngrams; -static PyObject *__pyx_n_s_syn0_vocab; static PyObject *__pyx_n_s_syn1; static PyObject *__pyx_n_s_syn1neg; static PyObject *__pyx_n_s_test; @@ -1862,7 +1910,6 @@ static PyObject *__pyx_n_s_token; static PyObject *__pyx_n_s_train_batch_cbow; static PyObject *__pyx_n_s_train_batch_sg; static PyObject *__pyx_n_s_trainables; -static PyObject *__pyx_n_s_uint32; static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd; static PyObject *__pyx_n_s_vector_size; static PyObject *__pyx_n_s_vectors_ngrams; @@ -1874,11 +1921,8 @@ static PyObject *__pyx_n_s_vocab; static PyObject *__pyx_n_s_vocabulary; static PyObject *__pyx_n_s_window; static PyObject *__pyx_n_s_word; -static PyObject *__pyx_n_s_word_locks_ngrams; -static PyObject *__pyx_n_s_word_locks_vocab; -static PyObject *__pyx_n_s_word_subwords; static PyObject *__pyx_n_s_work; -static PyObject *__pyx_n_s_work_2; +static PyObject *__pyx_n_s_workers; static PyObject *__pyx_n_s_wv; static PyObject *__pyx_n_s_x; static PyObject *__pyx_n_s_y; @@ -1916,13 +1960,12 @@ static PyObject *__pyx_codeobj__20; /* "gensim/models/fasttext_inner.pyx":42 * cdef REAL_t ONEF = 1.0 * - * cdef unsigned long long fast_sentence_sg_neg( # <<<<<<<<<<<<<< + * cdef unsigned long long fasttext_fast_sentence_sg_neg( # <<<<<<<<<<<<<< * const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, * REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1neg, const int size, */ -static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_neg(int const __pyx_v_negative, __pyx_t_5numpy_uint32_t *__pyx_v_cum_table, unsigned PY_LONG_LONG __pyx_v_cum_table_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const __pyx_v_word_index, __pyx_t_5numpy_uint32_t const *__pyx_v_subwords_index, __pyx_t_5numpy_uint32_t const __pyx_v_subwords_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_l1, unsigned PY_LONG_LONG __pyx_v_next_random, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams) { - __pyx_t_5numpy_uint32_t __pyx_v_word2_index; +static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_neg(int const __pyx_v_negative, __pyx_t_5numpy_uint32_t *__pyx_v_cum_table, unsigned PY_LONG_LONG __pyx_v_cum_table_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t 
*__pyx_v_syn0_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const __pyx_v_word_index, __pyx_t_5numpy_uint32_t const __pyx_v_word2_index, __pyx_t_5numpy_uint32_t const *__pyx_v_subwords_index, __pyx_t_5numpy_uint32_t const __pyx_v_subwords_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_l1, unsigned PY_LONG_LONG __pyx_v_next_random, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams) { PY_LONG_LONG __pyx_v_row1; PY_LONG_LONG __pyx_v_row2; unsigned PY_LONG_LONG __pyx_v_modulo; @@ -1945,23 +1988,14 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente /* "gensim/models/fasttext_inner.pyx":50 * * cdef long long a - * cdef np.uint32_t word2_index = subwords_index[0] # <<<<<<<<<<<<<< - * cdef long long row1 = word2_index * size, row2 - * cdef unsigned long long modulo = 281474976710655ULL - */ - __pyx_v_word2_index = (__pyx_v_subwords_index[0]); - - /* "gensim/models/fasttext_inner.pyx":51 - * cdef long long a - * cdef np.uint32_t word2_index = subwords_index[0] * cdef long long row1 = word2_index * size, row2 # <<<<<<<<<<<<<< * cdef unsigned long long modulo = 281474976710655ULL * cdef REAL_t f, g, label, f_dot, log_e_f_dot */ __pyx_v_row1 = (__pyx_v_word2_index * __pyx_v_size); - /* "gensim/models/fasttext_inner.pyx":52 - * cdef np.uint32_t word2_index = subwords_index[0] + /* "gensim/models/fasttext_inner.pyx":51 + * cdef long long a * cdef long long row1 = word2_index * size, row2 * cdef unsigned long long modulo = 281474976710655ULL # <<<<<<<<<<<<<< * cdef REAL_t f, g, label, f_dot, log_e_f_dot @@ -1969,7 +2003,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_modulo = 281474976710655ULL; - /* "gensim/models/fasttext_inner.pyx":57 + /* "gensim/models/fasttext_inner.pyx":56 * cdef int d * * memset(work, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -1978,7 +2012,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ (void)(memset(__pyx_v_work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/fasttext_inner.pyx":58 + /* "gensim/models/fasttext_inner.pyx":57 * * memset(work, 0, size * cython.sizeof(REAL_t)) * memset(l1, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -1987,30 +2021,30 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ (void)(memset(__pyx_v_l1, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/fasttext_inner.pyx":60 + /* "gensim/models/fasttext_inner.pyx":59 * memset(l1, 0, size * cython.sizeof(REAL_t)) * * scopy(&size, &syn0_vocab[row1], &ONE, l1, &ONE) # <<<<<<<<<<<<<< - * for d in range(1, subwords_len): + * for d in range(subwords_len): * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) */ __pyx_v_6gensim_6models_14word2vec_inner_scopy((&__pyx_v_size), (&(__pyx_v_syn0_vocab[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":61 + /* "gensim/models/fasttext_inner.pyx":60 * * scopy(&size, 
&syn0_vocab[row1], &ONE, l1, &ONE) - * for d in range(1, subwords_len): # <<<<<<<<<<<<<< + * for d in range(subwords_len): # <<<<<<<<<<<<<< * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) * cdef REAL_t norm_factor = ONEF / subwords_len */ __pyx_t_1 = __pyx_v_subwords_len; __pyx_t_2 = __pyx_t_1; - for (__pyx_t_3 = 1; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { + for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_d = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":62 + /* "gensim/models/fasttext_inner.pyx":61 * scopy(&size, &syn0_vocab[row1], &ONE, l1, &ONE) - * for d in range(1, subwords_len): + * for d in range(subwords_len): * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) # <<<<<<<<<<<<<< * cdef REAL_t norm_factor = ONEF / subwords_len * sscal(&size, &norm_factor, l1 , &ONE) @@ -2018,8 +2052,8 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_6gensim_6models_14fasttext_inner_ONEF), (&(__pyx_v_syn0_ngrams[((__pyx_v_subwords_index[__pyx_v_d]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); } - /* "gensim/models/fasttext_inner.pyx":63 - * for d in range(1, subwords_len): + /* "gensim/models/fasttext_inner.pyx":62 + * for d in range(subwords_len): * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) * cdef REAL_t norm_factor = ONEF / subwords_len # <<<<<<<<<<<<<< * sscal(&size, &norm_factor, l1 , &ONE) @@ -2027,7 +2061,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_norm_factor = (__pyx_v_6gensim_6models_14fasttext_inner_ONEF / __pyx_v_subwords_len); - /* "gensim/models/fasttext_inner.pyx":64 + /* "gensim/models/fasttext_inner.pyx":63 * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) * cdef REAL_t norm_factor = ONEF / subwords_len * sscal(&size, &norm_factor, l1 , &ONE) # <<<<<<<<<<<<<< @@ -2036,7 +2070,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_norm_factor), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":66 + /* "gensim/models/fasttext_inner.pyx":65 * sscal(&size, &norm_factor, l1 , &ONE) * * for d in range(negative+1): # <<<<<<<<<<<<<< @@ -2048,7 +2082,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_5; __pyx_t_3+=1) { __pyx_v_d = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":67 + /* "gensim/models/fasttext_inner.pyx":66 * * for d in range(negative+1): * if d == 0: # <<<<<<<<<<<<<< @@ -2058,7 +2092,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_6 = ((__pyx_v_d == 0) != 0); if (__pyx_t_6) { - /* "gensim/models/fasttext_inner.pyx":68 + /* "gensim/models/fasttext_inner.pyx":67 * for d in range(negative+1): * if d == 0: * target_index = word_index # <<<<<<<<<<<<<< @@ -2067,7 +2101,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_target_index = __pyx_v_word_index; - /* "gensim/models/fasttext_inner.pyx":69 + /* "gensim/models/fasttext_inner.pyx":68 * if d == 0: * target_index = word_index * label = ONEF # <<<<<<<<<<<<<< @@ -2076,7 +2110,7 @@ static unsigned PY_LONG_LONG 
__pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_label = __pyx_v_6gensim_6models_14fasttext_inner_ONEF; - /* "gensim/models/fasttext_inner.pyx":67 + /* "gensim/models/fasttext_inner.pyx":66 * * for d in range(negative+1): * if d == 0: # <<<<<<<<<<<<<< @@ -2086,7 +2120,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente goto __pyx_L7; } - /* "gensim/models/fasttext_inner.pyx":71 + /* "gensim/models/fasttext_inner.pyx":70 * label = ONEF * else: * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) # <<<<<<<<<<<<<< @@ -2096,7 +2130,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente /*else*/ { __pyx_v_target_index = __pyx_f_6gensim_6models_14word2vec_inner_bisect_left(__pyx_v_cum_table, ((__pyx_v_next_random >> 16) % (__pyx_v_cum_table[(__pyx_v_cum_table_len - 1)])), 0, __pyx_v_cum_table_len); - /* "gensim/models/fasttext_inner.pyx":72 + /* "gensim/models/fasttext_inner.pyx":71 * else: * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo # <<<<<<<<<<<<<< @@ -2105,7 +2139,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_next_random = (((__pyx_v_next_random * ((unsigned PY_LONG_LONG)25214903917ULL)) + 11) & __pyx_v_modulo); - /* "gensim/models/fasttext_inner.pyx":73 + /* "gensim/models/fasttext_inner.pyx":72 * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: # <<<<<<<<<<<<<< @@ -2115,7 +2149,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_6 = ((__pyx_v_target_index == __pyx_v_word_index) != 0); if (__pyx_t_6) { - /* "gensim/models/fasttext_inner.pyx":74 + /* "gensim/models/fasttext_inner.pyx":73 * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: * continue # <<<<<<<<<<<<<< @@ -2124,7 +2158,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ goto __pyx_L5_continue; - /* "gensim/models/fasttext_inner.pyx":73 + /* "gensim/models/fasttext_inner.pyx":72 * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: # <<<<<<<<<<<<<< @@ -2133,7 +2167,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":75 + /* "gensim/models/fasttext_inner.pyx":74 * if target_index == word_index: * continue * label = 0.0 # <<<<<<<<<<<<<< @@ -2144,7 +2178,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente } __pyx_L7:; - /* "gensim/models/fasttext_inner.pyx":77 + /* "gensim/models/fasttext_inner.pyx":76 * label = 0.0 * * row2 = target_index * size # <<<<<<<<<<<<<< @@ -2153,7 +2187,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_row2 = (__pyx_v_target_index * __pyx_v_size); - /* "gensim/models/fasttext_inner.pyx":78 + /* "gensim/models/fasttext_inner.pyx":77 * * row2 = target_index * size * f_dot = our_dot(&size, l1, &ONE, &syn1neg[row2], &ONE) # <<<<<<<<<<<<<< @@ -2162,7 +2196,7 @@ static unsigned PY_LONG_LONG 
__pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_f_dot = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn1neg[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":79 + /* "gensim/models/fasttext_inner.pyx":78 * row2 = target_index * size * f_dot = our_dot(&size, l1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2180,7 +2214,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_L10_bool_binop_done:; if (__pyx_t_6) { - /* "gensim/models/fasttext_inner.pyx":80 + /* "gensim/models/fasttext_inner.pyx":79 * f_dot = our_dot(&size, l1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -2189,7 +2223,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ goto __pyx_L5_continue; - /* "gensim/models/fasttext_inner.pyx":79 + /* "gensim/models/fasttext_inner.pyx":78 * row2 = target_index * size * f_dot = our_dot(&size, l1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2198,7 +2232,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":81 + /* "gensim/models/fasttext_inner.pyx":80 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -2207,7 +2241,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/fasttext_inner.pyx":82 + /* "gensim/models/fasttext_inner.pyx":81 * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * g = (label - f) * alpha # <<<<<<<<<<<<<< @@ -2216,7 +2250,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_g = ((__pyx_v_label - __pyx_v_f) * __pyx_v_alpha); - /* "gensim/models/fasttext_inner.pyx":83 + /* "gensim/models/fasttext_inner.pyx":82 * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * g = (label - f) * alpha * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) # <<<<<<<<<<<<<< @@ -2225,41 +2259,41 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), (&(__pyx_v_syn1neg[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":84 + /* "gensim/models/fasttext_inner.pyx":83 * g = (label - f) * alpha * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) * our_saxpy(&size, &g, l1, &ONE, &syn1neg[row2], &ONE) # <<<<<<<<<<<<<< * our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) - * for d in range(1, subwords_len): + * for d in range(subwords_len): */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn1neg[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); __pyx_L5_continue:; } - /* "gensim/models/fasttext_inner.pyx":85 + /* "gensim/models/fasttext_inner.pyx":84 * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, 
&ONE) * our_saxpy(&size, &g, l1, &ONE, &syn1neg[row2], &ONE) * our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) # <<<<<<<<<<<<<< - * for d in range(1, subwords_len): + * for d in range(subwords_len): * our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v_word_locks_vocab[__pyx_v_word2_index])), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn0_vocab[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":86 + /* "gensim/models/fasttext_inner.pyx":85 * our_saxpy(&size, &g, l1, &ONE, &syn1neg[row2], &ONE) * our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) - * for d in range(1, subwords_len): # <<<<<<<<<<<<<< + * for d in range(subwords_len): # <<<<<<<<<<<<<< * our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) * return next_random */ __pyx_t_1 = __pyx_v_subwords_len; __pyx_t_2 = __pyx_t_1; - for (__pyx_t_3 = 1; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { + for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_d = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":87 + /* "gensim/models/fasttext_inner.pyx":86 * our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) - * for d in range(1, subwords_len): + * for d in range(subwords_len): * our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) # <<<<<<<<<<<<<< * return next_random * @@ -2267,8 +2301,8 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v_word_locks_ngrams[(__pyx_v_subwords_index[__pyx_v_d])])), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn0_ngrams[((__pyx_v_subwords_index[__pyx_v_d]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); } - /* "gensim/models/fasttext_inner.pyx":88 - * for d in range(1, subwords_len): + /* "gensim/models/fasttext_inner.pyx":87 + * for d in range(subwords_len): * our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) * return next_random # <<<<<<<<<<<<<< * @@ -2280,7 +2314,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente /* "gensim/models/fasttext_inner.pyx":42 * cdef REAL_t ONEF = 1.0 * - * cdef unsigned long long fast_sentence_sg_neg( # <<<<<<<<<<<<<< + * cdef unsigned long long fasttext_fast_sentence_sg_neg( # <<<<<<<<<<<<<< * const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, * REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1neg, const int size, */ @@ -2290,52 +2324,42 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente return __pyx_r; } -/* "gensim/models/fasttext_inner.pyx":91 +/* "gensim/models/fasttext_inner.pyx":90 * * - * cdef void fast_sentence_sg_hs( # <<<<<<<<<<<<<< + * cdef void fasttext_fast_sentence_sg_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, * REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1, const int size, */ -static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t_5numpy_uint32_t const *__pyx_v_word_point, __pyx_t_5numpy_uint8_t const 
*__pyx_v_word_code, int const __pyx_v_codelen, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_subwords_index, __pyx_t_5numpy_uint32_t const __pyx_v_subwords_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_l1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams) { +static void __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_hs(__pyx_t_5numpy_uint32_t const *__pyx_v_word_point, __pyx_t_5numpy_uint8_t const *__pyx_v_word_code, int const __pyx_v_codelen, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const __pyx_v_word2_index, __pyx_t_5numpy_uint32_t const *__pyx_v_subwords_index, __pyx_t_5numpy_uint32_t const __pyx_v_subwords_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_l1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams) { PY_LONG_LONG __pyx_v_b; - __pyx_t_5numpy_uint32_t __pyx_v_word2_index; PY_LONG_LONG __pyx_v_row1; PY_LONG_LONG __pyx_v_row2; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_f; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_g; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_f_dot; - long __pyx_v_d; + __pyx_t_5numpy_uint32_t __pyx_v_d; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_norm_factor; __pyx_t_5numpy_uint32_t __pyx_t_1; __pyx_t_5numpy_uint32_t __pyx_t_2; - long __pyx_t_3; + __pyx_t_5numpy_uint32_t __pyx_t_3; int __pyx_t_4; int __pyx_t_5; PY_LONG_LONG __pyx_t_6; int __pyx_t_7; int __pyx_t_8; - /* "gensim/models/fasttext_inner.pyx":99 + /* "gensim/models/fasttext_inner.pyx":98 * * cdef long long a, b - * cdef np.uint32_t word2_index = subwords_index[0] # <<<<<<<<<<<<<< - * cdef long long row1 = word2_index * size, row2, sgn - * cdef REAL_t f, g, f_dot, lprob - */ - __pyx_v_word2_index = (__pyx_v_subwords_index[0]); - - /* "gensim/models/fasttext_inner.pyx":100 - * cdef long long a, b - * cdef np.uint32_t word2_index = subwords_index[0] * cdef long long row1 = word2_index * size, row2, sgn # <<<<<<<<<<<<<< * cdef REAL_t f, g, f_dot, lprob * */ __pyx_v_row1 = (__pyx_v_word2_index * __pyx_v_size); - /* "gensim/models/fasttext_inner.pyx":103 + /* "gensim/models/fasttext_inner.pyx":101 * cdef REAL_t f, g, f_dot, lprob * * memset(work, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -2344,7 +2368,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ (void)(memset(__pyx_v_work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/fasttext_inner.pyx":104 + /* "gensim/models/fasttext_inner.pyx":102 * * memset(work, 0, size * cython.sizeof(REAL_t)) * memset(l1, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -2353,30 +2377,30 @@ static 
void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ (void)(memset(__pyx_v_l1, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/fasttext_inner.pyx":106 + /* "gensim/models/fasttext_inner.pyx":104 * memset(l1, 0, size * cython.sizeof(REAL_t)) * * scopy(&size, &syn0_vocab[row1], &ONE, l1, &ONE) # <<<<<<<<<<<<<< - * for d in range(1, subwords_len): + * for d in range(subwords_len): * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) */ __pyx_v_6gensim_6models_14word2vec_inner_scopy((&__pyx_v_size), (&(__pyx_v_syn0_vocab[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":107 + /* "gensim/models/fasttext_inner.pyx":105 * * scopy(&size, &syn0_vocab[row1], &ONE, l1, &ONE) - * for d in range(1, subwords_len): # <<<<<<<<<<<<<< + * for d in range(subwords_len): # <<<<<<<<<<<<<< * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) * cdef REAL_t norm_factor = ONEF / subwords_len */ __pyx_t_1 = __pyx_v_subwords_len; __pyx_t_2 = __pyx_t_1; - for (__pyx_t_3 = 1; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { + for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_d = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":108 + /* "gensim/models/fasttext_inner.pyx":106 * scopy(&size, &syn0_vocab[row1], &ONE, l1, &ONE) - * for d in range(1, subwords_len): + * for d in range(subwords_len): * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) # <<<<<<<<<<<<<< * cdef REAL_t norm_factor = ONEF / subwords_len * sscal(&size, &norm_factor, l1 , &ONE) @@ -2384,8 +2408,8 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_6gensim_6models_14fasttext_inner_ONEF), (&(__pyx_v_syn0_ngrams[((__pyx_v_subwords_index[__pyx_v_d]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); } - /* "gensim/models/fasttext_inner.pyx":109 - * for d in range(1, subwords_len): + /* "gensim/models/fasttext_inner.pyx":107 + * for d in range(subwords_len): * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) * cdef REAL_t norm_factor = ONEF / subwords_len # <<<<<<<<<<<<<< * sscal(&size, &norm_factor, l1 , &ONE) @@ -2393,7 +2417,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_norm_factor = (__pyx_v_6gensim_6models_14fasttext_inner_ONEF / __pyx_v_subwords_len); - /* "gensim/models/fasttext_inner.pyx":110 + /* "gensim/models/fasttext_inner.pyx":108 * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) * cdef REAL_t norm_factor = ONEF / subwords_len * sscal(&size, &norm_factor, l1 , &ONE) # <<<<<<<<<<<<<< @@ -2402,7 +2426,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_norm_factor), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":112 + /* "gensim/models/fasttext_inner.pyx":110 * sscal(&size, &norm_factor, l1 , &ONE) * * for b in range(codelen): # <<<<<<<<<<<<<< @@ -2414,7 +2438,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; 
__pyx_t_6+=1) { __pyx_v_b = __pyx_t_6; - /* "gensim/models/fasttext_inner.pyx":113 + /* "gensim/models/fasttext_inner.pyx":111 * * for b in range(codelen): * row2 = word_point[b] * size # <<<<<<<<<<<<<< @@ -2423,7 +2447,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_row2 = ((__pyx_v_word_point[__pyx_v_b]) * __pyx_v_size); - /* "gensim/models/fasttext_inner.pyx":114 + /* "gensim/models/fasttext_inner.pyx":112 * for b in range(codelen): * row2 = word_point[b] * size * f_dot = our_dot(&size, l1, &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -2432,7 +2456,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_f_dot = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), __pyx_v_l1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":115 + /* "gensim/models/fasttext_inner.pyx":113 * row2 = word_point[b] * size * f_dot = our_dot(&size, l1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2450,7 +2474,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t __pyx_L8_bool_binop_done:; if (__pyx_t_7) { - /* "gensim/models/fasttext_inner.pyx":116 + /* "gensim/models/fasttext_inner.pyx":114 * f_dot = our_dot(&size, l1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -2459,7 +2483,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ goto __pyx_L5_continue; - /* "gensim/models/fasttext_inner.pyx":115 + /* "gensim/models/fasttext_inner.pyx":113 * row2 = word_point[b] * size * f_dot = our_dot(&size, l1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2468,7 +2492,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ } - /* "gensim/models/fasttext_inner.pyx":117 + /* "gensim/models/fasttext_inner.pyx":115 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -2477,7 +2501,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/fasttext_inner.pyx":118 + /* "gensim/models/fasttext_inner.pyx":116 * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * g = (1 - word_code[b] - f) * alpha # <<<<<<<<<<<<<< @@ -2486,7 +2510,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_g = (((1 - (__pyx_v_word_code[__pyx_v_b])) - __pyx_v_f) * __pyx_v_alpha); - /* "gensim/models/fasttext_inner.pyx":120 + /* "gensim/models/fasttext_inner.pyx":118 * g = (1 - word_code[b] - f) * alpha * * our_saxpy(&size, &g, &syn1[row2], &ONE, work, &ONE) # <<<<<<<<<<<<<< @@ -2495,7 +2519,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":121 + /* "gensim/models/fasttext_inner.pyx":119 * * our_saxpy(&size, &g, &syn1[row2], &ONE, work, &ONE) * our_saxpy(&size, &g, l1, &ONE, 
&syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -2506,30 +2530,30 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t __pyx_L5_continue:; } - /* "gensim/models/fasttext_inner.pyx":123 + /* "gensim/models/fasttext_inner.pyx":121 * our_saxpy(&size, &g, l1, &ONE, &syn1[row2], &ONE) * * our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) # <<<<<<<<<<<<<< - * for d in range(1, subwords_len): + * for d in range(subwords_len): * our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v_word_locks_vocab[__pyx_v_word2_index])), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn0_vocab[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":124 + /* "gensim/models/fasttext_inner.pyx":122 * * our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) - * for d in range(1, subwords_len): # <<<<<<<<<<<<<< + * for d in range(subwords_len): # <<<<<<<<<<<<<< * our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) * */ __pyx_t_1 = __pyx_v_subwords_len; __pyx_t_2 = __pyx_t_1; - for (__pyx_t_3 = 1; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { + for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_d = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":125 + /* "gensim/models/fasttext_inner.pyx":123 * our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) - * for d in range(1, subwords_len): + * for d in range(subwords_len): * our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) # <<<<<<<<<<<<<< * * @@ -2537,10 +2561,10 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v_word_locks_ngrams[(__pyx_v_subwords_index[__pyx_v_d])])), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn0_ngrams[((__pyx_v_subwords_index[__pyx_v_d]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); } - /* "gensim/models/fasttext_inner.pyx":91 + /* "gensim/models/fasttext_inner.pyx":90 * * - * cdef void fast_sentence_sg_hs( # <<<<<<<<<<<<<< + * cdef void fasttext_fast_sentence_sg_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, * REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1, const int size, */ @@ -2548,15 +2572,15 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs(__pyx_t /* function exit code */ } -/* "gensim/models/fasttext_inner.pyx":128 +/* "gensim/models/fasttext_inner.pyx":126 * * - * cdef unsigned long long fast_sentence_cbow_neg( # <<<<<<<<<<<<<< + * cdef unsigned long long fasttext_fast_sentence_cbow_neg( # <<<<<<<<<<<<<< * const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, int codelens[MAX_SENTENCE_LEN], * REAL_t *neu1, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1neg, const int size, */ -static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_neg(int const __pyx_v_negative, __pyx_t_5numpy_uint32_t *__pyx_v_cum_table, unsigned PY_LONG_LONG __pyx_v_cum_table_len, CYTHON_UNUSED int *__pyx_v_codelens, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1, 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_indexes, __pyx_t_5numpy_uint32_t const **__pyx_v_subwords_idx, int const *__pyx_v_subwords_idx_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, int __pyx_v_i, int __pyx_v_j, int __pyx_v_k, int __pyx_v_cbow_mean, unsigned PY_LONG_LONG __pyx_v_next_random, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams) { +static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_neg(int const __pyx_v_negative, __pyx_t_5numpy_uint32_t *__pyx_v_cum_table, unsigned PY_LONG_LONG __pyx_v_cum_table_len, CYTHON_UNUSED int *__pyx_v_codelens, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_indexes, __pyx_t_5numpy_uint32_t **__pyx_v_subwords_idx, int const *__pyx_v_subwords_idx_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, int __pyx_v_i, int __pyx_v_j, int __pyx_v_k, int __pyx_v_cbow_mean, unsigned PY_LONG_LONG __pyx_v_next_random, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams) { PY_LONG_LONG __pyx_v_row2; unsigned PY_LONG_LONG __pyx_v_modulo; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_f; @@ -2581,7 +2605,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente long __pyx_t_9; int __pyx_t_10; - /* "gensim/models/fasttext_inner.pyx":137 + /* "gensim/models/fasttext_inner.pyx":135 * cdef long long a * cdef long long row2 * cdef unsigned long long modulo = 281474976710655ULL # <<<<<<<<<<<<<< @@ -2590,7 +2614,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_modulo = 281474976710655ULL; - /* "gensim/models/fasttext_inner.pyx":138 + /* "gensim/models/fasttext_inner.pyx":136 * cdef long long row2 * cdef unsigned long long modulo = 281474976710655ULL * cdef REAL_t f, g, count, inv_count = 1.0, label, log_e_f_dot, f_dot # <<<<<<<<<<<<<< @@ -2599,7 +2623,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_inv_count = 1.0; - /* "gensim/models/fasttext_inner.pyx":142 + /* "gensim/models/fasttext_inner.pyx":140 * cdef int d, m * * word_index = indexes[i] # <<<<<<<<<<<<<< @@ -2608,7 +2632,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_word_index = (__pyx_v_indexes[__pyx_v_i]); - /* "gensim/models/fasttext_inner.pyx":144 + /* "gensim/models/fasttext_inner.pyx":142 * word_index = indexes[i] * * memset(neu1, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -2617,7 +2641,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ (void)(memset(__pyx_v_neu1, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* 
"gensim/models/fasttext_inner.pyx":145 + /* "gensim/models/fasttext_inner.pyx":143 * * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 # <<<<<<<<<<<<<< @@ -2626,7 +2650,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_count = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.0); - /* "gensim/models/fasttext_inner.pyx":146 + /* "gensim/models/fasttext_inner.pyx":144 * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 * for m in range(j, k): # <<<<<<<<<<<<<< @@ -2638,7 +2662,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":147 + /* "gensim/models/fasttext_inner.pyx":145 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -2648,7 +2672,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_4 = ((__pyx_v_m == __pyx_v_i) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":148 + /* "gensim/models/fasttext_inner.pyx":146 * for m in range(j, k): * if m == i: * continue # <<<<<<<<<<<<<< @@ -2657,7 +2681,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ goto __pyx_L3_continue; - /* "gensim/models/fasttext_inner.pyx":147 + /* "gensim/models/fasttext_inner.pyx":145 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -2666,7 +2690,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":149 + /* "gensim/models/fasttext_inner.pyx":147 * if m == i: * continue * count += ONEF # <<<<<<<<<<<<<< @@ -2675,7 +2699,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_14fasttext_inner_ONEF); - /* "gensim/models/fasttext_inner.pyx":150 + /* "gensim/models/fasttext_inner.pyx":148 * continue * count += ONEF * our_saxpy(&size, &ONEF, &syn0_vocab[indexes[m] * size], &ONE, neu1, &ONE) # <<<<<<<<<<<<<< @@ -2684,7 +2708,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_6gensim_6models_14fasttext_inner_ONEF), (&(__pyx_v_syn0_vocab[((__pyx_v_indexes[__pyx_v_m]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":151 + /* "gensim/models/fasttext_inner.pyx":149 * count += ONEF * our_saxpy(&size, &ONEF, &syn0_vocab[indexes[m] * size], &ONE, neu1, &ONE) * for d in range(subwords_idx_len[m]): # <<<<<<<<<<<<<< @@ -2696,7 +2720,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_d = __pyx_t_7; - /* "gensim/models/fasttext_inner.pyx":152 + /* "gensim/models/fasttext_inner.pyx":150 * our_saxpy(&size, &ONEF, &syn0_vocab[indexes[m] * size], &ONE, neu1, &ONE) * for d in range(subwords_idx_len[m]): * count += ONEF # <<<<<<<<<<<<<< @@ -2705,7 +2729,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_14fasttext_inner_ONEF); - /* "gensim/models/fasttext_inner.pyx":153 + /* "gensim/models/fasttext_inner.pyx":151 * for d in range(subwords_idx_len[m]): * count += ONEF * our_saxpy(&size, 
&ONEF, &syn0_ngrams[subwords_idx[m][d] * size], &ONE, neu1, &ONE) # <<<<<<<<<<<<<< @@ -2717,7 +2741,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_L3_continue:; } - /* "gensim/models/fasttext_inner.pyx":155 + /* "gensim/models/fasttext_inner.pyx":153 * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_idx[m][d] * size], &ONE, neu1, &ONE) * * if count > (0.5): # <<<<<<<<<<<<<< @@ -2727,7 +2751,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_4 = ((__pyx_v_count > ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.5)) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":156 + /* "gensim/models/fasttext_inner.pyx":154 * * if count > (0.5): * inv_count = ONEF / count # <<<<<<<<<<<<<< @@ -2736,7 +2760,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_inv_count = (__pyx_v_6gensim_6models_14fasttext_inner_ONEF / __pyx_v_count); - /* "gensim/models/fasttext_inner.pyx":155 + /* "gensim/models/fasttext_inner.pyx":153 * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_idx[m][d] * size], &ONE, neu1, &ONE) * * if count > (0.5): # <<<<<<<<<<<<<< @@ -2745,7 +2769,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":157 + /* "gensim/models/fasttext_inner.pyx":155 * if count > (0.5): * inv_count = ONEF / count * if cbow_mean: # <<<<<<<<<<<<<< @@ -2755,7 +2779,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_4 = (__pyx_v_cbow_mean != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":158 + /* "gensim/models/fasttext_inner.pyx":156 * inv_count = ONEF / count * if cbow_mean: * sscal(&size, &inv_count, neu1, &ONE) # <<<<<<<<<<<<<< @@ -2764,7 +2788,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":157 + /* "gensim/models/fasttext_inner.pyx":155 * if count > (0.5): * inv_count = ONEF / count * if cbow_mean: # <<<<<<<<<<<<<< @@ -2773,7 +2797,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":160 + /* "gensim/models/fasttext_inner.pyx":158 * sscal(&size, &inv_count, neu1, &ONE) * * memset(work, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -2782,7 +2806,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ (void)(memset(__pyx_v_work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/fasttext_inner.pyx":162 + /* "gensim/models/fasttext_inner.pyx":160 * memset(work, 0, size * cython.sizeof(REAL_t)) * * for d in range(negative+1): # <<<<<<<<<<<<<< @@ -2794,7 +2818,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente for (__pyx_t_1 = 0; __pyx_t_1 < __pyx_t_9; __pyx_t_1+=1) { __pyx_v_d = __pyx_t_1; - /* "gensim/models/fasttext_inner.pyx":163 + /* "gensim/models/fasttext_inner.pyx":161 * * for d in range(negative+1): * if d == 0: # <<<<<<<<<<<<<< @@ -2804,7 +2828,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_4 = ((__pyx_v_d == 0) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":164 + /* "gensim/models/fasttext_inner.pyx":162 * for d in range(negative+1): * if d 
== 0: * target_index = word_index # <<<<<<<<<<<<<< @@ -2813,7 +2837,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_target_index = __pyx_v_word_index; - /* "gensim/models/fasttext_inner.pyx":165 + /* "gensim/models/fasttext_inner.pyx":163 * if d == 0: * target_index = word_index * label = ONEF # <<<<<<<<<<<<<< @@ -2822,7 +2846,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_label = __pyx_v_6gensim_6models_14fasttext_inner_ONEF; - /* "gensim/models/fasttext_inner.pyx":163 + /* "gensim/models/fasttext_inner.pyx":161 * * for d in range(negative+1): * if d == 0: # <<<<<<<<<<<<<< @@ -2832,7 +2856,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente goto __pyx_L12; } - /* "gensim/models/fasttext_inner.pyx":167 + /* "gensim/models/fasttext_inner.pyx":165 * label = ONEF * else: * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) # <<<<<<<<<<<<<< @@ -2842,7 +2866,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente /*else*/ { __pyx_v_target_index = __pyx_f_6gensim_6models_14word2vec_inner_bisect_left(__pyx_v_cum_table, ((__pyx_v_next_random >> 16) % (__pyx_v_cum_table[(__pyx_v_cum_table_len - 1)])), 0, __pyx_v_cum_table_len); - /* "gensim/models/fasttext_inner.pyx":168 + /* "gensim/models/fasttext_inner.pyx":166 * else: * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo # <<<<<<<<<<<<<< @@ -2851,7 +2875,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_next_random = (((__pyx_v_next_random * ((unsigned PY_LONG_LONG)25214903917ULL)) + 11) & __pyx_v_modulo); - /* "gensim/models/fasttext_inner.pyx":169 + /* "gensim/models/fasttext_inner.pyx":167 * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: # <<<<<<<<<<<<<< @@ -2861,7 +2885,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_4 = ((__pyx_v_target_index == __pyx_v_word_index) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":170 + /* "gensim/models/fasttext_inner.pyx":168 * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: * continue # <<<<<<<<<<<<<< @@ -2870,7 +2894,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ goto __pyx_L10_continue; - /* "gensim/models/fasttext_inner.pyx":169 + /* "gensim/models/fasttext_inner.pyx":167 * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: # <<<<<<<<<<<<<< @@ -2879,7 +2903,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":171 + /* "gensim/models/fasttext_inner.pyx":169 * if target_index == word_index: * continue * label = 0.0 # <<<<<<<<<<<<<< @@ -2890,7 +2914,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente } __pyx_L12:; - /* "gensim/models/fasttext_inner.pyx":173 + /* "gensim/models/fasttext_inner.pyx":171 * label = 0.0 * * row2 = target_index * size # <<<<<<<<<<<<<< @@ -2899,7 +2923,7 
@@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_row2 = (__pyx_v_target_index * __pyx_v_size); - /* "gensim/models/fasttext_inner.pyx":174 + /* "gensim/models/fasttext_inner.pyx":172 * * row2 = target_index * size * f_dot = our_dot(&size, neu1, &ONE, &syn1neg[row2], &ONE) # <<<<<<<<<<<<<< @@ -2908,7 +2932,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_f_dot = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn1neg[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":175 + /* "gensim/models/fasttext_inner.pyx":173 * row2 = target_index * size * f_dot = our_dot(&size, neu1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2926,7 +2950,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_L15_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":176 + /* "gensim/models/fasttext_inner.pyx":174 * f_dot = our_dot(&size, neu1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -2935,7 +2959,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ goto __pyx_L10_continue; - /* "gensim/models/fasttext_inner.pyx":175 + /* "gensim/models/fasttext_inner.pyx":173 * row2 = target_index * size * f_dot = our_dot(&size, neu1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2944,7 +2968,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":177 + /* "gensim/models/fasttext_inner.pyx":175 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -2953,7 +2977,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/fasttext_inner.pyx":178 + /* "gensim/models/fasttext_inner.pyx":176 * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * g = (label - f) * alpha # <<<<<<<<<<<<<< @@ -2962,7 +2986,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_g = ((__pyx_v_label - __pyx_v_f) * __pyx_v_alpha); - /* "gensim/models/fasttext_inner.pyx":180 + /* "gensim/models/fasttext_inner.pyx":178 * g = (label - f) * alpha * * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) # <<<<<<<<<<<<<< @@ -2971,7 +2995,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), (&(__pyx_v_syn1neg[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":181 + /* "gensim/models/fasttext_inner.pyx":179 * * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) * our_saxpy(&size, &g, neu1, &ONE, &syn1neg[row2], &ONE) # <<<<<<<<<<<<<< @@ -2982,7 +3006,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_L10_continue:; } - /* "gensim/models/fasttext_inner.pyx":183 + /* "gensim/models/fasttext_inner.pyx":181 * our_saxpy(&size, 
&g, neu1, &ONE, &syn1neg[row2], &ONE) * * if not cbow_mean: # divide error over summed window vectors # <<<<<<<<<<<<<< @@ -2992,7 +3016,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_4 = ((!(__pyx_v_cbow_mean != 0)) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":184 + /* "gensim/models/fasttext_inner.pyx":182 * * if not cbow_mean: # divide error over summed window vectors * sscal(&size, &inv_count, work, &ONE) # <<<<<<<<<<<<<< @@ -3001,7 +3025,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":183 + /* "gensim/models/fasttext_inner.pyx":181 * our_saxpy(&size, &g, neu1, &ONE, &syn1neg[row2], &ONE) * * if not cbow_mean: # divide error over summed window vectors # <<<<<<<<<<<<<< @@ -3010,7 +3034,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":186 + /* "gensim/models/fasttext_inner.pyx":184 * sscal(&size, &inv_count, work, &ONE) * * for m in range(j,k): # <<<<<<<<<<<<<< @@ -3022,7 +3046,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":187 + /* "gensim/models/fasttext_inner.pyx":185 * * for m in range(j,k): * if m == i: # <<<<<<<<<<<<<< @@ -3032,7 +3056,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_t_4 = ((__pyx_v_m == __pyx_v_i) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":188 + /* "gensim/models/fasttext_inner.pyx":186 * for m in range(j,k): * if m == i: * continue # <<<<<<<<<<<<<< @@ -3041,7 +3065,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ goto __pyx_L18_continue; - /* "gensim/models/fasttext_inner.pyx":187 + /* "gensim/models/fasttext_inner.pyx":185 * * for m in range(j,k): * if m == i: # <<<<<<<<<<<<<< @@ -3050,7 +3074,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ } - /* "gensim/models/fasttext_inner.pyx":189 + /* "gensim/models/fasttext_inner.pyx":187 * if m == i: * continue * our_saxpy(&size, &word_locks_vocab[indexes[m]], work, &ONE, &syn0_vocab[indexes[m]*size], &ONE) # <<<<<<<<<<<<<< @@ -3059,7 +3083,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v_word_locks_vocab[(__pyx_v_indexes[__pyx_v_m])])), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn0_vocab[((__pyx_v_indexes[__pyx_v_m]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":190 + /* "gensim/models/fasttext_inner.pyx":188 * continue * our_saxpy(&size, &word_locks_vocab[indexes[m]], work, &ONE, &syn0_vocab[indexes[m]*size], &ONE) * for d in range(subwords_idx_len[m]): # <<<<<<<<<<<<<< @@ -3071,7 +3095,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_d = __pyx_t_7; - /* "gensim/models/fasttext_inner.pyx":191 + /* "gensim/models/fasttext_inner.pyx":189 * our_saxpy(&size, &word_locks_vocab[indexes[m]], work, &ONE, &syn0_vocab[indexes[m]*size], 
&ONE) * for d in range(subwords_idx_len[m]): * our_saxpy(&size, &word_locks_ngrams[subwords_idx[m][d]], work, &ONE, &syn0_ngrams[subwords_idx[m][d]*size], &ONE) # <<<<<<<<<<<<<< @@ -3083,7 +3107,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_L18_continue:; } - /* "gensim/models/fasttext_inner.pyx":193 + /* "gensim/models/fasttext_inner.pyx":191 * our_saxpy(&size, &word_locks_ngrams[subwords_idx[m][d]], work, &ONE, &syn0_ngrams[subwords_idx[m][d]*size], &ONE) * * return next_random # <<<<<<<<<<<<<< @@ -3093,10 +3117,10 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente __pyx_r = __pyx_v_next_random; goto __pyx_L0; - /* "gensim/models/fasttext_inner.pyx":128 + /* "gensim/models/fasttext_inner.pyx":126 * * - * cdef unsigned long long fast_sentence_cbow_neg( # <<<<<<<<<<<<<< + * cdef unsigned long long fasttext_fast_sentence_cbow_neg( # <<<<<<<<<<<<<< * const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, int codelens[MAX_SENTENCE_LEN], * REAL_t *neu1, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1neg, const int size, */ @@ -3106,15 +3130,15 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14fasttext_inner_fast_sente return __pyx_r; } -/* "gensim/models/fasttext_inner.pyx":196 +/* "gensim/models/fasttext_inner.pyx":194 * * - * cdef void fast_sentence_cbow_hs( # <<<<<<<<<<<<<< + * cdef void fasttext_fast_sentence_cbow_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], * REAL_t *neu1, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1, const int size, */ -static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx_t_5numpy_uint32_t const *__pyx_v_word_point, __pyx_t_5numpy_uint8_t const *__pyx_v_word_code, int *__pyx_v_codelens, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_indexes, __pyx_t_5numpy_uint32_t const **__pyx_v_subwords_idx, int const *__pyx_v_subwords_idx_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, int __pyx_v_i, int __pyx_v_j, int __pyx_v_k, int __pyx_v_cbow_mean, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams) { +static void __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_hs(__pyx_t_5numpy_uint32_t const *__pyx_v_word_point, __pyx_t_5numpy_uint8_t const *__pyx_v_word_code, int *__pyx_v_codelens, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_indexes, __pyx_t_5numpy_uint32_t **__pyx_v_subwords_idx, int const *__pyx_v_subwords_idx_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, int __pyx_v_i, int __pyx_v_j, int __pyx_v_k, int __pyx_v_cbow_mean, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab, 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams) { PY_LONG_LONG __pyx_v_b; PY_LONG_LONG __pyx_v_row2; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_f; @@ -3134,7 +3158,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx PY_LONG_LONG __pyx_t_8; int __pyx_t_9; - /* "gensim/models/fasttext_inner.pyx":205 + /* "gensim/models/fasttext_inner.pyx":203 * cdef long long a, b * cdef long long row2, sgn * cdef REAL_t f, g, count, inv_count = 1.0, f_dot, lprob # <<<<<<<<<<<<<< @@ -3143,7 +3167,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_inv_count = 1.0; - /* "gensim/models/fasttext_inner.pyx":208 + /* "gensim/models/fasttext_inner.pyx":206 * cdef int m * * memset(neu1, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -3152,7 +3176,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ (void)(memset(__pyx_v_neu1, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/fasttext_inner.pyx":209 + /* "gensim/models/fasttext_inner.pyx":207 * * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 # <<<<<<<<<<<<<< @@ -3161,7 +3185,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_count = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.0); - /* "gensim/models/fasttext_inner.pyx":210 + /* "gensim/models/fasttext_inner.pyx":208 * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 * for m in range(j, k): # <<<<<<<<<<<<<< @@ -3173,7 +3197,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":211 + /* "gensim/models/fasttext_inner.pyx":209 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -3183,7 +3207,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((__pyx_v_m == __pyx_v_i) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":212 + /* "gensim/models/fasttext_inner.pyx":210 * for m in range(j, k): * if m == i: * continue # <<<<<<<<<<<<<< @@ -3192,7 +3216,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ goto __pyx_L3_continue; - /* "gensim/models/fasttext_inner.pyx":211 + /* "gensim/models/fasttext_inner.pyx":209 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -3201,7 +3225,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/fasttext_inner.pyx":213 + /* "gensim/models/fasttext_inner.pyx":211 * if m == i: * continue * count += ONEF # <<<<<<<<<<<<<< @@ -3210,7 +3234,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_14fasttext_inner_ONEF); - /* "gensim/models/fasttext_inner.pyx":214 + /* "gensim/models/fasttext_inner.pyx":212 * continue * count += ONEF * our_saxpy(&size, &ONEF, &syn0_vocab[indexes[m] * size], &ONE, neu1, &ONE) # <<<<<<<<<<<<<< @@ -3219,7 +3243,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_6gensim_6models_14fasttext_inner_ONEF), (&(__pyx_v_syn0_vocab[((__pyx_v_indexes[__pyx_v_m]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_neu1, 
(&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":215 + /* "gensim/models/fasttext_inner.pyx":213 * count += ONEF * our_saxpy(&size, &ONEF, &syn0_vocab[indexes[m] * size], &ONE, neu1, &ONE) * for d in range(subwords_idx_len[m]): # <<<<<<<<<<<<<< @@ -3231,7 +3255,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_d = __pyx_t_7; - /* "gensim/models/fasttext_inner.pyx":216 + /* "gensim/models/fasttext_inner.pyx":214 * our_saxpy(&size, &ONEF, &syn0_vocab[indexes[m] * size], &ONE, neu1, &ONE) * for d in range(subwords_idx_len[m]): * count += ONEF # <<<<<<<<<<<<<< @@ -3240,7 +3264,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_14fasttext_inner_ONEF); - /* "gensim/models/fasttext_inner.pyx":217 + /* "gensim/models/fasttext_inner.pyx":215 * for d in range(subwords_idx_len[m]): * count += ONEF * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_idx[m][d] * size], &ONE, neu1, &ONE) # <<<<<<<<<<<<<< @@ -3252,7 +3276,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_L3_continue:; } - /* "gensim/models/fasttext_inner.pyx":218 + /* "gensim/models/fasttext_inner.pyx":216 * count += ONEF * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_idx[m][d] * size], &ONE, neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< @@ -3262,7 +3286,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((__pyx_v_count > ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.5)) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":219 + /* "gensim/models/fasttext_inner.pyx":217 * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_idx[m][d] * size], &ONE, neu1, &ONE) * if count > (0.5): * inv_count = ONEF / count # <<<<<<<<<<<<<< @@ -3271,7 +3295,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_inv_count = (__pyx_v_6gensim_6models_14fasttext_inner_ONEF / __pyx_v_count); - /* "gensim/models/fasttext_inner.pyx":218 + /* "gensim/models/fasttext_inner.pyx":216 * count += ONEF * our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_idx[m][d] * size], &ONE, neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< @@ -3280,7 +3304,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/fasttext_inner.pyx":220 + /* "gensim/models/fasttext_inner.pyx":218 * if count > (0.5): * inv_count = ONEF / count * if cbow_mean: # <<<<<<<<<<<<<< @@ -3290,7 +3314,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = (__pyx_v_cbow_mean != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":221 + /* "gensim/models/fasttext_inner.pyx":219 * inv_count = ONEF / count * if cbow_mean: * sscal(&size, &inv_count, neu1, &ONE) # <<<<<<<<<<<<<< @@ -3299,7 +3323,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":220 + /* "gensim/models/fasttext_inner.pyx":218 * if count > (0.5): * inv_count = ONEF / count * if cbow_mean: # <<<<<<<<<<<<<< @@ -3308,7 +3332,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/fasttext_inner.pyx":223 
+ /* "gensim/models/fasttext_inner.pyx":221 * sscal(&size, &inv_count, neu1, &ONE) * * memset(work, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -3317,7 +3341,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ (void)(memset(__pyx_v_work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/fasttext_inner.pyx":224 + /* "gensim/models/fasttext_inner.pyx":222 * * memset(work, 0, size * cython.sizeof(REAL_t)) * for b in range(codelens[i]): # <<<<<<<<<<<<<< @@ -3329,7 +3353,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx for (__pyx_t_8 = 0; __pyx_t_8 < __pyx_t_2; __pyx_t_8+=1) { __pyx_v_b = __pyx_t_8; - /* "gensim/models/fasttext_inner.pyx":225 + /* "gensim/models/fasttext_inner.pyx":223 * memset(work, 0, size * cython.sizeof(REAL_t)) * for b in range(codelens[i]): * row2 = word_point[b] * size # <<<<<<<<<<<<<< @@ -3338,7 +3362,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_row2 = ((__pyx_v_word_point[__pyx_v_b]) * __pyx_v_size); - /* "gensim/models/fasttext_inner.pyx":226 + /* "gensim/models/fasttext_inner.pyx":224 * for b in range(codelens[i]): * row2 = word_point[b] * size * f_dot = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -3347,7 +3371,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_f_dot = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":227 + /* "gensim/models/fasttext_inner.pyx":225 * row2 = word_point[b] * size * f_dot = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3365,7 +3389,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_L13_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":228 + /* "gensim/models/fasttext_inner.pyx":226 * f_dot = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -3374,7 +3398,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ goto __pyx_L10_continue; - /* "gensim/models/fasttext_inner.pyx":227 + /* "gensim/models/fasttext_inner.pyx":225 * row2 = word_point[b] * size * f_dot = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3383,7 +3407,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/fasttext_inner.pyx":229 + /* "gensim/models/fasttext_inner.pyx":227 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -3392,7 +3416,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/fasttext_inner.pyx":230 + /* "gensim/models/fasttext_inner.pyx":228 * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * g = (1 - word_code[b] - f) * alpha # <<<<<<<<<<<<<< @@ -3401,7 +3425,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_g = (((1 - 
(__pyx_v_word_code[__pyx_v_b])) - __pyx_v_f) * __pyx_v_alpha); - /* "gensim/models/fasttext_inner.pyx":232 + /* "gensim/models/fasttext_inner.pyx":230 * g = (1 - word_code[b] - f) * alpha * * our_saxpy(&size, &g, &syn1[row2], &ONE, work, &ONE) # <<<<<<<<<<<<<< @@ -3410,7 +3434,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":233 + /* "gensim/models/fasttext_inner.pyx":231 * * our_saxpy(&size, &g, &syn1[row2], &ONE, work, &ONE) * our_saxpy(&size, &g, neu1, &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -3421,7 +3445,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_L10_continue:; } - /* "gensim/models/fasttext_inner.pyx":235 + /* "gensim/models/fasttext_inner.pyx":233 * our_saxpy(&size, &g, neu1, &ONE, &syn1[row2], &ONE) * * if not cbow_mean: # divide error over summed window vectors # <<<<<<<<<<<<<< @@ -3431,7 +3455,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((!(__pyx_v_cbow_mean != 0)) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":236 + /* "gensim/models/fasttext_inner.pyx":234 * * if not cbow_mean: # divide error over summed window vectors * sscal(&size, &inv_count, work, &ONE) # <<<<<<<<<<<<<< @@ -3440,7 +3464,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":235 + /* "gensim/models/fasttext_inner.pyx":233 * our_saxpy(&size, &g, neu1, &ONE, &syn1[row2], &ONE) * * if not cbow_mean: # divide error over summed window vectors # <<<<<<<<<<<<<< @@ -3449,7 +3473,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/fasttext_inner.pyx":238 + /* "gensim/models/fasttext_inner.pyx":236 * sscal(&size, &inv_count, work, &ONE) * * for m in range(j,k): # <<<<<<<<<<<<<< @@ -3461,7 +3485,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":239 + /* "gensim/models/fasttext_inner.pyx":237 * * for m in range(j,k): * if m == i: # <<<<<<<<<<<<<< @@ -3471,7 +3495,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((__pyx_v_m == __pyx_v_i) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":240 + /* "gensim/models/fasttext_inner.pyx":238 * for m in range(j,k): * if m == i: * continue # <<<<<<<<<<<<<< @@ -3480,7 +3504,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ goto __pyx_L16_continue; - /* "gensim/models/fasttext_inner.pyx":239 + /* "gensim/models/fasttext_inner.pyx":237 * * for m in range(j,k): * if m == i: # <<<<<<<<<<<<<< @@ -3489,7 +3513,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/fasttext_inner.pyx":241 + /* "gensim/models/fasttext_inner.pyx":239 * if m == i: * continue * our_saxpy(&size, &word_locks_vocab[indexes[m]], work, &ONE, &syn0_vocab[indexes[m]*size], &ONE) # <<<<<<<<<<<<<< 
@@ -3498,7 +3522,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v_word_locks_vocab[(__pyx_v_indexes[__pyx_v_m])])), __pyx_v_work, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), (&(__pyx_v_syn0_vocab[((__pyx_v_indexes[__pyx_v_m]) * __pyx_v_size)])), (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":242 + /* "gensim/models/fasttext_inner.pyx":240 * continue * our_saxpy(&size, &word_locks_vocab[indexes[m]], work, &ONE, &syn0_vocab[indexes[m]*size], &ONE) * for d in range(subwords_idx_len[m]): # <<<<<<<<<<<<<< @@ -3510,7 +3534,7 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_d = __pyx_t_7; - /* "gensim/models/fasttext_inner.pyx":243 + /* "gensim/models/fasttext_inner.pyx":241 * our_saxpy(&size, &word_locks_vocab[indexes[m]], work, &ONE, &syn0_vocab[indexes[m]*size], &ONE) * for d in range(subwords_idx_len[m]): * our_saxpy(&size, &word_locks_ngrams[subwords_idx[m][d]], work, &ONE, &syn0_ngrams[subwords_idx[m][d]*size], &ONE) # <<<<<<<<<<<<<< @@ -3522,10 +3546,10 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx __pyx_L16_continue:; } - /* "gensim/models/fasttext_inner.pyx":196 + /* "gensim/models/fasttext_inner.pyx":194 * * - * cdef void fast_sentence_cbow_hs( # <<<<<<<<<<<<<< + * cdef void fasttext_fast_sentence_cbow_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], * REAL_t *neu1, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1, const int size, */ @@ -3533,629 +3557,669 @@ static void __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs(__pyx /* function exit code */ } -/* "gensim/models/fasttext_inner.pyx":246 +/* "gensim/models/fasttext_inner.pyx":244 * * - * def train_batch_sg(model, sentences, alpha, _work, _l1): # <<<<<<<<<<<<<< - * """Update skip-gram model by training on a sequence of sentences. - * + * cdef init_ft_config(FastTextConfig *c, model, alpha, _work, _neu1): # <<<<<<<<<<<<<< + * c[0].hs = model.hs + * c[0].negative = model.negative */ -/* Python wrapper */ -static PyObject *__pyx_pw_6gensim_6models_14fasttext_inner_1train_batch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_6gensim_6models_14fasttext_inner_train_batch_sg[] = "train_batch_sg(model, sentences, alpha, _work, _l1)\nUpdate skip-gram model by training on a sequence of sentences.\n\n Each sentence is a list of string tokens, which are looked up in the model's\n vocab dictionary. 
Called internally from :meth:`gensim.models.fasttext.FastText.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.fasttext.FastText`\n Model to be trained.\n sentences : iterable of list of str\n Corpus streamed directly from disk/network.\n alpha : float\n Learning rate.\n _work : np.ndarray\n Private working memory for each worker.\n _l1 : np.ndarray\n Private working memory for each worker.\n\n Returns\n -------\n int\n Effective number of words trained.\n\n "; -static PyMethodDef __pyx_mdef_6gensim_6models_14fasttext_inner_1train_batch_sg = {"train_batch_sg", (PyCFunction)__pyx_pw_6gensim_6models_14fasttext_inner_1train_batch_sg, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_14fasttext_inner_train_batch_sg}; -static PyObject *__pyx_pw_6gensim_6models_14fasttext_inner_1train_batch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_model = 0; - PyObject *__pyx_v_sentences = 0; - PyObject *__pyx_v_alpha = 0; - PyObject *__pyx_v__work = 0; - PyObject *__pyx_v__l1 = 0; - PyObject *__pyx_r = 0; +static PyObject *__pyx_f_6gensim_6models_14fasttext_inner_init_ft_config(struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig *__pyx_v_c, PyObject *__pyx_v_model, PyObject *__pyx_v_alpha, PyObject *__pyx_v__work, PyObject *__pyx_v__neu1) { + PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("train_batch_sg (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_sentences,&__pyx_n_s_alpha,&__pyx_n_s_work,&__pyx_n_s_l1,0}; - PyObject* values[5] = {0,0,0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_sentences)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 1); __PYX_ERR(0, 246, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 2); __PYX_ERR(0, 246, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 3: - if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 3); __PYX_ERR(0, 246, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 4: - if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_l1)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 4); __PYX_ERR(0, 246, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_batch_sg") < 0)) __PYX_ERR(0, 246, __pyx_L3_error) - } - } else if 
(PyTuple_GET_SIZE(__pyx_args) != 5) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - } - __pyx_v_model = values[0]; - __pyx_v_sentences = values[1]; - __pyx_v_alpha = values[2]; - __pyx_v__work = values[3]; - __pyx_v__l1 = values[4]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 246, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("gensim.models.fasttext_inner.train_batch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(__pyx_self, __pyx_v_model, __pyx_v_sentences, __pyx_v_alpha, __pyx_v__work, __pyx_v__l1); + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_4; + int __pyx_t_5; + Py_ssize_t __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + unsigned PY_LONG_LONG __pyx_t_9; + __Pyx_RefNannySetupContext("init_ft_config", 0); - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_sentences, PyObject *__pyx_v_alpha, PyObject *__pyx_v__work, PyObject *__pyx_v__l1) { - int __pyx_v_hs; - int __pyx_v_negative; - int __pyx_v_sample; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_l1; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; - int __pyx_v_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_reduced_windows[0x2710]; - int __pyx_v_sentence_idx[(0x2710 + 1)]; - int __pyx_v_window; - int __pyx_v_i; - int __pyx_v_j; - int __pyx_v_k; - int __pyx_v_effective_words; - int __pyx_v_effective_sentences; - int __pyx_v_sent_idx; - int __pyx_v_idx_start; - int __pyx_v_idx_end; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg; - __pyx_t_5numpy_uint32_t *__pyx_v_cum_table; - unsigned PY_LONG_LONG __pyx_v_cum_table_len; - unsigned PY_LONG_LONG __pyx_v_next_random; - int __pyx_v_subwords_idx_len[0x2710]; - __pyx_t_5numpy_uint32_t *__pyx_v_subwords_idx[0x2710]; - PyObject *__pyx_v_subword_arrays = NULL; - PyObject *__pyx_v_vlookup = NULL; - PyObject *__pyx_v_sent = NULL; - PyObject *__pyx_v_token = NULL; - PyObject *__pyx_v_word = NULL; - PyObject *__pyx_v_subwords = NULL; - PyObject *__pyx_v_word_subwords = NULL; - PyObject *__pyx_v_item = NULL; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_4; - int __pyx_t_5; - Py_ssize_t __pyx_t_6; - int __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - unsigned PY_LONG_LONG __pyx_t_9; - PyObject *(*__pyx_t_10)(PyObject *); - Py_ssize_t __pyx_t_11; - PyObject *(*__pyx_t_12)(PyObject *); - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - __pyx_t_5numpy_uint32_t __pyx_t_15; - PyObject *__pyx_t_16 = NULL; - PyObject *__pyx_t_17 = NULL; - Py_ssize_t __pyx_t_18; - int __pyx_t_19; - int __pyx_t_20; - int __pyx_t_21; - int __pyx_t_22; - int __pyx_t_23; - int __pyx_t_24; - int __pyx_t_25; - int __pyx_t_26; - __Pyx_RefNannySetupContext("train_batch_sg", 0); - - /* "gensim/models/fasttext_inner.pyx":271 + /* "gensim/models/fasttext_inner.pyx":245 * - * """ - * cdef int hs = model.hs # <<<<<<<<<<<<<< - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) + * cdef init_ft_config(FastTextConfig *c, model, alpha, _work, _neu1): + * c[0].hs = model.hs # <<<<<<<<<<<<<< + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 271, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 271, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 245, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_hs = __pyx_t_2; + (__pyx_v_c[0]).hs = __pyx_t_2; - /* "gensim/models/fasttext_inner.pyx":272 - * """ - * cdef int hs = model.hs - * cdef int negative = model.negative # <<<<<<<<<<<<<< - * cdef int sample = (model.vocabulary.sample != 0) - * + /* "gensim/models/fasttext_inner.pyx":246 + * cdef init_ft_config(FastTextConfig *c, model, alpha, _work, _neu1): + * c[0].hs = model.hs + * c[0].negative = model.negative # <<<<<<<<<<<<<< + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 246, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 246, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_negative = __pyx_t_2; + (__pyx_v_c[0]).negative = __pyx_t_2; - /* "gensim/models/fasttext_inner.pyx":273 - * cdef int hs = model.hs - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< - * - * cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) + /* "gensim/models/fasttext_inner.pyx":247 + * c[0].hs = model.hs + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< + * c[0].cbow_mean = model.cbow_mean + * c[0].window = model.window */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 273, __pyx_L1_error) + 
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 247, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 273, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 247, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 273, __pyx_L1_error) + __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 247, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 273, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 247, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_sample = __pyx_t_2; + (__pyx_v_c[0]).sample = __pyx_t_2; - /* "gensim/models/fasttext_inner.pyx":275 - * cdef int sample = (model.vocabulary.sample != 0) + /* "gensim/models/fasttext_inner.pyx":248 + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean # <<<<<<<<<<<<<< + * c[0].window = model.window + * c[0].workers = model.workers + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_cbow_mean); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 248, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 248, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).cbow_mean = __pyx_t_2; + + /* "gensim/models/fasttext_inner.pyx":249 + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean + * c[0].window = model.window # <<<<<<<<<<<<<< + * c[0].workers = model.workers * - * cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) # <<<<<<<<<<<<<< - * cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) - * cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 275, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 249, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 275, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 249, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 275, __pyx_L1_error) - __pyx_v_syn0_vocab = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + (__pyx_v_c[0]).window = __pyx_t_2; - /* "gensim/models/fasttext_inner.pyx":276 + /* "gensim/models/fasttext_inner.pyx":250 + * c[0].cbow_mean = model.cbow_mean + * c[0].window = model.window + * 
c[0].workers = model.workers # <<<<<<<<<<<<<< * - * cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) - * cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) # <<<<<<<<<<<<<< - * cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) - * cdef REAL_t *word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) + * c[0].syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 276, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_vocab_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 276, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_workers); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 250, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 276, __pyx_L1_error) - __pyx_v_word_locks_vocab = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 250, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).workers = __pyx_t_2; - /* "gensim/models/fasttext_inner.pyx":277 - * cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) - * cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) - * cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) # <<<<<<<<<<<<<< - * cdef REAL_t *word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) + /* "gensim/models/fasttext_inner.pyx":252 + * c[0].workers = model.workers * + * c[0].syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) # <<<<<<<<<<<<<< + * c[0].word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) + * c[0].syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 277, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 252, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_ngrams); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 277, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 252, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 277, __pyx_L1_error) - __pyx_v_syn0_ngrams = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 252, __pyx_L1_error) + (__pyx_v_c[0]).syn0_vocab = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/fasttext_inner.pyx":278 - * cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) - * cdef REAL_t *syn0_ngrams = 
(np.PyArray_DATA(model.wv.vectors_ngrams)) - * cdef REAL_t *word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":253 * - * cdef REAL_t *work + * c[0].syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) + * c[0].word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) # <<<<<<<<<<<<<< + * c[0].syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) + * c[0].word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 278, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 253, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_ngrams_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 278, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_vocab_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 253, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 278, __pyx_L1_error) - __pyx_v_word_locks_ngrams = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 253, __pyx_L1_error) + (__pyx_v_c[0]).word_locks_vocab = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":283 - * cdef REAL_t *l1 - * - * cdef REAL_t _alpha = alpha # <<<<<<<<<<<<<< - * cdef int size = model.wv.vector_size + /* "gensim/models/fasttext_inner.pyx":254 + * c[0].syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) + * c[0].word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) + * c[0].syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) # <<<<<<<<<<<<<< + * c[0].word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) * */ - __pyx_t_4 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_4 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 283, __pyx_L1_error) - __pyx_v__alpha = __pyx_t_4; - - /* "gensim/models/fasttext_inner.pyx":284 - * - * cdef REAL_t _alpha = alpha - * cdef int size = model.wv.vector_size # <<<<<<<<<<<<<< - * - * cdef int codelens[MAX_SENTENCE_LEN] - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 284, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 254, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 284, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_ngrams); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 254, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 284, __pyx_L1_error) + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 
254, __pyx_L1_error) + (__pyx_v_c[0]).syn0_ngrams = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_size = __pyx_t_2; - /* "gensim/models/fasttext_inner.pyx":290 - * cdef np.uint32_t reduced_windows[MAX_SENTENCE_LEN] - * cdef int sentence_idx[MAX_SENTENCE_LEN + 1] - * cdef int window = model.window # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":255 + * c[0].word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) + * c[0].syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) + * c[0].word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) # <<<<<<<<<<<<<< * - * cdef int i, j, k + * c[0].alpha = alpha */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 290, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 255, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 290, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_ngrams_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 255, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_window = __pyx_t_2; + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 255, __pyx_L1_error) + (__pyx_v_c[0]).word_locks_ngrams = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":293 + /* "gensim/models/fasttext_inner.pyx":257 + * c[0].word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) * - * cdef int i, j, k - * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< - * cdef int sent_idx, idx_start, idx_end + * c[0].alpha = alpha # <<<<<<<<<<<<<< + * c[0].size = model.wv.vector_size * */ - __pyx_v_effective_words = 0; - __pyx_v_effective_sentences = 0; + __pyx_t_4 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_4 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 257, __pyx_L1_error) + (__pyx_v_c[0]).alpha = __pyx_t_4; - /* "gensim/models/fasttext_inner.pyx":313 - * # dummy dictionary to ensure that the memory locations that subwords_idx point to - * # are referenced throughout so that it isn't put back to free memory pool by Python's memory manager - * subword_arrays = {} # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":258 * - * if hs: + * c[0].alpha = alpha + * c[0].size = model.wv.vector_size # <<<<<<<<<<<<<< + * + * if c[0].hs: */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 313, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 258, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 258, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_v_subword_arrays = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 258, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + 
(__pyx_v_c[0]).size = __pyx_t_2; - /* "gensim/models/fasttext_inner.pyx":315 - * subword_arrays = {} + /* "gensim/models/fasttext_inner.pyx":260 + * c[0].size = model.wv.vector_size * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + * if c[0].hs: # <<<<<<<<<<<<<< + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * */ - __pyx_t_5 = (__pyx_v_hs != 0); + __pyx_t_5 = ((__pyx_v_c[0]).hs != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":316 + /* "gensim/models/fasttext_inner.pyx":261 * - * if hs: - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< + * if c[0].hs: + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< * - * if negative: + * if c[0].negative: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 316, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 261, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 316, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 261, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 316, __pyx_L1_error) - __pyx_v_syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 261, __pyx_L1_error) + (__pyx_v_c[0]).syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":315 - * subword_arrays = {} + /* "gensim/models/fasttext_inner.pyx":260 + * c[0].size = model.wv.vector_size * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + * if c[0].hs: # <<<<<<<<<<<<<< + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * */ } - /* "gensim/models/fasttext_inner.pyx":318 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/fasttext_inner.pyx":263 + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * if c[0].negative: # <<<<<<<<<<<<<< + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) */ - __pyx_t_5 = (__pyx_v_negative != 0); + __pyx_t_5 = ((__pyx_v_c[0]).negative != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":319 + /* "gensim/models/fasttext_inner.pyx":264 * - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative: + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 319, __pyx_L1_error) + 
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 264, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 319, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 264, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 319, __pyx_L1_error) - __pyx_v_syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 264, __pyx_L1_error) + (__pyx_v_c[0]).syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/fasttext_inner.pyx":320 - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: + /* "gensim/models/fasttext_inner.pyx":265 + * if c[0].negative: + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 320, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 265, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 320, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 265, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 320, __pyx_L1_error) - __pyx_v_cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 265, __pyx_L1_error) + (__pyx_v_c[0]).cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":321 - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/fasttext_inner.pyx":266 + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) */ - __pyx_t_1 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 321, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 266, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 321, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 266, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_6 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 321, __pyx_L1_error) + __pyx_t_6 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 266, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_cum_table_len = __pyx_t_6; + (__pyx_v_c[0]).cum_table_len = __pyx_t_6; - /* "gensim/models/fasttext_inner.pyx":318 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/fasttext_inner.pyx":263 + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * if c[0].negative: # <<<<<<<<<<<<<< + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) */ } - /* "gensim/models/fasttext_inner.pyx":322 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/fasttext_inner.pyx":267 + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: # <<<<<<<<<<<<<< + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) * */ - __pyx_t_7 = (__pyx_v_negative != 0); + __pyx_t_7 = ((__pyx_v_c[0]).negative != 0); if (!__pyx_t_7) { } else { __pyx_t_5 = __pyx_t_7; goto __pyx_L6_bool_binop_done; } - __pyx_t_7 = (__pyx_v_sample != 0); + __pyx_t_7 = ((__pyx_v_c[0]).sample != 0); __pyx_t_5 = __pyx_t_7; __pyx_L6_bool_binop_done:; if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":323 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":268 + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< * * # convert Python structures to primitive types, so we can release the GIL */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 268, 
__pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_8 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_9 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_t_9 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_next_random = __pyx_t_9; + (__pyx_v_c[0]).next_random = __pyx_t_9; - /* "gensim/models/fasttext_inner.pyx":322 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/fasttext_inner.pyx":267 + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: # <<<<<<<<<<<<<< + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) * */ } - /* "gensim/models/fasttext_inner.pyx":326 + /* "gensim/models/fasttext_inner.pyx":271 * * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< - * l1 = np.PyArray_DATA(_l1) + * c[0].work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< + * c[0].neu1 = np.PyArray_DATA(_neu1) * */ - if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 326, 
__pyx_L1_error) - __pyx_v_work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); + if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 271, __pyx_L1_error) + (__pyx_v_c[0]).work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); - /* "gensim/models/fasttext_inner.pyx":327 + /* "gensim/models/fasttext_inner.pyx":272 * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) - * l1 = np.PyArray_DATA(_l1) # <<<<<<<<<<<<<< + * c[0].work = np.PyArray_DATA(_work) + * c[0].neu1 = np.PyArray_DATA(_neu1) # <<<<<<<<<<<<<< + * + * + */ + if (!(likely(((__pyx_v__neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 272, __pyx_L1_error) + (__pyx_v_c[0]).neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__neu1))); + + /* "gensim/models/fasttext_inner.pyx":244 + * + * + * cdef init_ft_config(FastTextConfig *c, model, alpha, _work, _neu1): # <<<<<<<<<<<<<< + * c[0].hs = model.hs + * c[0].negative = model.negative + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("gensim.models.fasttext_inner.init_ft_config", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/fasttext_inner.pyx":275 + * + * + * def train_batch_sg(model, sentences, alpha, _work, _l1): # <<<<<<<<<<<<<< + * """Update skip-gram model by training on a sequence of sentences. + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_14fasttext_inner_1train_batch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_14fasttext_inner_train_batch_sg[] = "train_batch_sg(model, sentences, alpha, _work, _l1)\nUpdate skip-gram model by training on a sequence of sentences.\n\n Each sentence is a list of string tokens, which are looked up in the model's\n vocab dictionary. 
Called internally from :meth:`gensim.models.fasttext.FastText.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.fasttext.FastText`\n Model to be trained.\n sentences : iterable of list of str\n Corpus streamed directly from disk/network.\n alpha : float\n Learning rate.\n _work : np.ndarray\n Private working memory for each worker.\n _l1 : np.ndarray\n Private working memory for each worker.\n\n Returns\n -------\n int\n Effective number of words trained.\n\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_14fasttext_inner_1train_batch_sg = {"train_batch_sg", (PyCFunction)__pyx_pw_6gensim_6models_14fasttext_inner_1train_batch_sg, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_14fasttext_inner_train_batch_sg}; +static PyObject *__pyx_pw_6gensim_6models_14fasttext_inner_1train_batch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_sentences = 0; + PyObject *__pyx_v_alpha = 0; + PyObject *__pyx_v__work = 0; + PyObject *__pyx_v__l1 = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("train_batch_sg (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_sentences,&__pyx_n_s_alpha,&__pyx_n_s_work,&__pyx_n_s_l1,0}; + PyObject* values[5] = {0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_sentences)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 1); __PYX_ERR(0, 275, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 2); __PYX_ERR(0, 275, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 3); __PYX_ERR(0, 275, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_l1)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 4); __PYX_ERR(0, 275, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_batch_sg") < 0)) __PYX_ERR(0, 275, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 5) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = 
PyTuple_GET_ITEM(__pyx_args, 4); + } + __pyx_v_model = values[0]; + __pyx_v_sentences = values[1]; + __pyx_v_alpha = values[2]; + __pyx_v__work = values[3]; + __pyx_v__l1 = values[4]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 275, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.fasttext_inner.train_batch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(__pyx_self, __pyx_v_model, __pyx_v_sentences, __pyx_v_alpha, __pyx_v__work, __pyx_v__l1); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_sentences, PyObject *__pyx_v_alpha, PyObject *__pyx_v__work, PyObject *__pyx_v__l1) { + struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig __pyx_v_c; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_effective_words; + int __pyx_v_effective_sentences; + int __pyx_v_sent_idx; + int __pyx_v_idx_start; + int __pyx_v_idx_end; + PyObject *__pyx_v_vlookup = NULL; + PyObject *__pyx_v_sent = NULL; + PyObject *__pyx_v_token = NULL; + PyObject *__pyx_v_word = NULL; + PyObject *__pyx_v_item = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + PyObject *(*__pyx_t_4)(PyObject *); + int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + __pyx_t_5numpy_uint32_t __pyx_t_12; + Py_ssize_t __pyx_t_13; + int __pyx_t_14; + int __pyx_t_15; + PyObject *__pyx_t_16 = NULL; + int __pyx_t_17; + int __pyx_t_18; + int __pyx_t_19; + int __pyx_t_20; + int __pyx_t_21; + int __pyx_t_22; + int __pyx_t_23; + __Pyx_RefNannySetupContext("train_batch_sg", 0); + + /* "gensim/models/fasttext_inner.pyx":303 + * + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * + */ + __pyx_v_effective_words = 0; + __pyx_v_effective_sentences = 0; + + /* "gensim/models/fasttext_inner.pyx":306 + * cdef int sent_idx, idx_start, idx_end + * + * init_ft_config(&c, model, alpha, _work, _l1) # <<<<<<<<<<<<<< * * # prepare C structures so we can go "full C" and release the Python GIL */ - if (!(likely(((__pyx_v__l1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__l1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 327, __pyx_L1_error) - __pyx_v_l1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__l1))); + __pyx_t_1 = __pyx_f_6gensim_6models_14fasttext_inner_init_ft_config((&__pyx_v_c), __pyx_v_model, __pyx_v_alpha, __pyx_v__work, __pyx_v__l1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 306, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":330 + /* "gensim/models/fasttext_inner.pyx":309 * * # prepare C structures so we can go "full C" and release the Python GIL * vlookup = model.wv.vocab # <<<<<<<<<<<<<< - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * c.sentence_idx[0] = 0 # indices of the first 
sentence always start at 0 * for sent in sentences: */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 330, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 330, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_vlookup = __pyx_t_3; - __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_vlookup = __pyx_t_2; + __pyx_t_2 = 0; - /* "gensim/models/fasttext_inner.pyx":331 + /* "gensim/models/fasttext_inner.pyx":310 * # prepare C structures so we can go "full C" and release the Python GIL * vlookup = model.wv.vocab - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< * for sent in sentences: * if not sent: */ - (__pyx_v_sentence_idx[0]) = 0; + (__pyx_v_c.sentence_idx[0]) = 0; - /* "gensim/models/fasttext_inner.pyx":332 + /* "gensim/models/fasttext_inner.pyx":311 * vlookup = model.wv.vocab - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: # <<<<<<<<<<<<<< * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged */ if (likely(PyList_CheckExact(__pyx_v_sentences)) || PyTuple_CheckExact(__pyx_v_sentences)) { - __pyx_t_3 = __pyx_v_sentences; __Pyx_INCREF(__pyx_t_3); __pyx_t_6 = 0; - __pyx_t_10 = NULL; + __pyx_t_2 = __pyx_v_sentences; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0; + __pyx_t_4 = NULL; } else { - __pyx_t_6 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_sentences); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 332, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_10 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 332, __pyx_L1_error) + __pyx_t_3 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_sentences); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 311, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = Py_TYPE(__pyx_t_2)->tp_iternext; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 311, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_10)) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_3)) break; + if (likely(!__pyx_t_4)) { + if (likely(PyList_CheckExact(__pyx_t_2))) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_2)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_6); __Pyx_INCREF(__pyx_t_8); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 332, __pyx_L1_error) + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_1); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 311, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 332, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 311, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } else { - if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_3)) 
break; + if (__pyx_t_3 >= PyTuple_GET_SIZE(__pyx_t_2)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_6); __Pyx_INCREF(__pyx_t_8); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 332, __pyx_L1_error) + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_1); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 311, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 332, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 311, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } } else { - __pyx_t_8 = __pyx_t_10(__pyx_t_3); - if (unlikely(!__pyx_t_8)) { + __pyx_t_1 = __pyx_t_4(__pyx_t_2); + if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 332, __pyx_L1_error) + else __PYX_ERR(0, 311, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_1); } - __Pyx_XDECREF_SET(__pyx_v_sent, __pyx_t_8); - __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_sent, __pyx_t_1); + __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":333 - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + /* "gensim/models/fasttext_inner.pyx":312 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: * if not sent: # <<<<<<<<<<<<<< * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: */ - __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_v_sent); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 333, __pyx_L1_error) - __pyx_t_7 = ((!__pyx_t_5) != 0); - if (__pyx_t_7) { + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_v_sent); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 312, __pyx_L1_error) + __pyx_t_6 = ((!__pyx_t_5) != 0); + if (__pyx_t_6) { - /* "gensim/models/fasttext_inner.pyx":334 + /* "gensim/models/fasttext_inner.pyx":313 * for sent in sentences: * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged # <<<<<<<<<<<<<< * for token in sent: * word = vlookup[token] if token in vlookup else None */ - goto __pyx_L8_continue; + goto __pyx_L3_continue; - /* "gensim/models/fasttext_inner.pyx":333 - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + /* "gensim/models/fasttext_inner.pyx":312 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: * if not sent: # <<<<<<<<<<<<<< * continue # ignore empty sentences; leave effective_sentences unchanged @@ -4163,7 +4227,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON */ } - /* "gensim/models/fasttext_inner.pyx":335 + /* "gensim/models/fasttext_inner.pyx":314 * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: # <<<<<<<<<<<<<< @@ -4171,310 +4235,257 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON * if word is None: */ if (likely(PyList_CheckExact(__pyx_v_sent)) || PyTuple_CheckExact(__pyx_v_sent)) { - __pyx_t_8 = __pyx_v_sent; __Pyx_INCREF(__pyx_t_8); __pyx_t_11 = 0; - __pyx_t_12 = NULL; + __pyx_t_1 = __pyx_v_sent; __Pyx_INCREF(__pyx_t_1); __pyx_t_7 = 0; + __pyx_t_8 = NULL; } else { - __pyx_t_11 = -1; __pyx_t_8 = PyObject_GetIter(__pyx_v_sent); if 
(unlikely(!__pyx_t_8)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_12 = Py_TYPE(__pyx_t_8)->tp_iternext; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 335, __pyx_L1_error) + __pyx_t_7 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_sent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 314, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 314, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_12)) { - if (likely(PyList_CheckExact(__pyx_t_8))) { - if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_8)) break; + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_8, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 335, __pyx_L1_error) + __pyx_t_9 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 314, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_8, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 314, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); #endif } else { - if (__pyx_t_11 >= PyTuple_GET_SIZE(__pyx_t_8)) break; + if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_8, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 335, __pyx_L1_error) + __pyx_t_9 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 314, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_8, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 335, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 314, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); #endif } } else { - __pyx_t_1 = __pyx_t_12(__pyx_t_8); - if (unlikely(!__pyx_t_1)) { + __pyx_t_9 = __pyx_t_8(__pyx_t_1); + if (unlikely(!__pyx_t_9)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 335, __pyx_L1_error) + else __PYX_ERR(0, 314, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_9); } - __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_1); - __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_9); + __pyx_t_9 = 0; - /* "gensim/models/fasttext_inner.pyx":336 + /* "gensim/models/fasttext_inner.pyx":315 * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: * word = vlookup[token] if token in vlookup else None # <<<<<<<<<<<<<< * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window */ - __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 336, __pyx_L1_error) - if ((__pyx_t_7 != 0)) { - __pyx_t_13 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 336, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_1 = __pyx_t_13; - __pyx_t_13 = 0; + __pyx_t_6 = 
(__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 315, __pyx_L1_error) + if ((__pyx_t_6 != 0)) { + __pyx_t_10 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 315, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_9 = __pyx_t_10; + __pyx_t_10 = 0; } else { __Pyx_INCREF(Py_None); - __pyx_t_1 = Py_None; + __pyx_t_9 = Py_None; } - __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_1); - __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_9); + __pyx_t_9 = 0; - /* "gensim/models/fasttext_inner.pyx":337 + /* "gensim/models/fasttext_inner.pyx":316 * for token in sent: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): */ - __pyx_t_7 = (__pyx_v_word == Py_None); - __pyx_t_5 = (__pyx_t_7 != 0); + __pyx_t_6 = (__pyx_v_word == Py_None); + __pyx_t_5 = (__pyx_t_6 != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":338 + /* "gensim/models/fasttext_inner.pyx":317 * word = vlookup[token] if token in vlookup else None * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window # <<<<<<<<<<<<<< - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue */ - goto __pyx_L11_continue; + goto __pyx_L6_continue; - /* "gensim/models/fasttext_inner.pyx":337 + /* "gensim/models/fasttext_inner.pyx":316 * for token in sent: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): */ } - /* "gensim/models/fasttext_inner.pyx":339 + /* "gensim/models/fasttext_inner.pyx":318 * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index */ - __pyx_t_7 = (__pyx_v_sample != 0); - if (__pyx_t_7) { + __pyx_t_6 = (__pyx_v_c.sample != 0); + if (__pyx_t_6) { } else { - __pyx_t_5 = __pyx_t_7; - goto __pyx_L15_bool_binop_done; + __pyx_t_5 = __pyx_t_6; + goto __pyx_L10_bool_binop_done; } - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 339, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_13 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_next_random))); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 339, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_14 = PyObject_RichCompare(__pyx_t_1, __pyx_t_13, Py_LT); __Pyx_XGOTREF(__pyx_t_14); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 339, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_14); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 339, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); 
__pyx_t_14 = 0; - __pyx_t_5 = __pyx_t_7; - __pyx_L15_bool_binop_done:; + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 318, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_c.next_random))); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 318, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = PyObject_RichCompare(__pyx_t_9, __pyx_t_10, Py_LT); __Pyx_XGOTREF(__pyx_t_11); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 318, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_11); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 318, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_5 = __pyx_t_6; + __pyx_L10_bool_binop_done:; if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":340 + /* "gensim/models/fasttext_inner.pyx":319 * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue # <<<<<<<<<<<<<< - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index * */ - goto __pyx_L11_continue; + goto __pyx_L6_continue; - /* "gensim/models/fasttext_inner.pyx":339 + /* "gensim/models/fasttext_inner.pyx":318 * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index */ } - /* "gensim/models/fasttext_inner.pyx":341 - * if sample and word.sample_int < random_int32(&next_random): + /* "gensim/models/fasttext_inner.pyx":320 + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue - * indexes[effective_words] = word.index # <<<<<<<<<<<<<< - * - * subwords = model.wv.buckets_word[word.index] - */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 341, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_15 = __Pyx_PyInt_As_npy_uint32(__pyx_t_14); if (unlikely((__pyx_t_15 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 341, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - (__pyx_v_indexes[__pyx_v_effective_words]) = __pyx_t_15; - - /* "gensim/models/fasttext_inner.pyx":343 - * indexes[effective_words] = word.index - * - * subwords = model.wv.buckets_word[word.index] # <<<<<<<<<<<<<< - * word_subwords = np.array((word.index,) + subwords, dtype=np.uint32) - * subwords_idx_len[effective_words] = (len(subwords) + 1) - */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 343, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_13 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_buckets_word); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 343, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 343, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_t_13, __pyx_t_14); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 343, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_XDECREF_SET(__pyx_v_subwords, __pyx_t_1); - __pyx_t_1 = 0; - - /* "gensim/models/fasttext_inner.pyx":344 + * c.indexes[effective_words] = word.index # <<<<<<<<<<<<<< * - * subwords = model.wv.buckets_word[word.index] - * word_subwords = np.array((word.index,) + subwords, dtype=np.uint32) # <<<<<<<<<<<<<< - * subwords_idx_len[effective_words] = (len(subwords) + 1) - * subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) - */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_np); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_array); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_13 = PyTuple_New(1); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_13, 0, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Add(__pyx_t_13, __pyx_v_subwords); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __pyx_t_13 = PyTuple_New(1); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_13, 0, __pyx_t_1); - __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_16 = __Pyx_GetModuleGlobalName(__pyx_n_s_np); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_16); - __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_t_16, __pyx_n_s_uint32); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; - if (PyDict_SetItem(__pyx_t_1, __pyx_n_s_dtype, __pyx_t_17) < 0) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; - __pyx_t_17 = __Pyx_PyObject_Call(__pyx_t_14, __pyx_t_13, __pyx_t_1); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 344, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_XDECREF_SET(__pyx_v_word_subwords, __pyx_t_17); - __pyx_t_17 = 0; - - /* "gensim/models/fasttext_inner.pyx":345 - * subwords = model.wv.buckets_word[word.index] - * word_subwords = np.array((word.index,) + subwords, dtype=np.uint32) - * subwords_idx_len[effective_words] = (len(subwords) + 1) # <<<<<<<<<<<<<< - * subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) - * # ensures reference count of word_subwords doesn't reach 0 + * c.subwords_idx_len[effective_words] = (len(model.wv.buckets_word[word.index])) */ - __pyx_t_18 = PyObject_Length(__pyx_v_subwords); if (unlikely(__pyx_t_18 == ((Py_ssize_t)-1))) __PYX_ERR(0, 345, __pyx_L1_error) - (__pyx_v_subwords_idx_len[__pyx_v_effective_words]) = ((int)(__pyx_t_18 + 1)); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 320, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + 
__pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_11); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 320, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.indexes[__pyx_v_effective_words]) = __pyx_t_12; - /* "gensim/models/fasttext_inner.pyx":346 - * word_subwords = np.array((word.index,) + subwords, dtype=np.uint32) - * subwords_idx_len[effective_words] = (len(subwords) + 1) - * subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) # <<<<<<<<<<<<<< - * # ensures reference count of word_subwords doesn't reach 0 - * subword_arrays[effective_words] = word_subwords + /* "gensim/models/fasttext_inner.pyx":322 + * c.indexes[effective_words] = word.index + * + * c.subwords_idx_len[effective_words] = (len(model.wv.buckets_word[word.index])) # <<<<<<<<<<<<<< + * c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) + * */ - if (!(likely(((__pyx_v_word_subwords) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_subwords, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 346, __pyx_L1_error) - (__pyx_v_subwords_idx[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_subwords))); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 322, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_11, __pyx_n_s_buckets_word); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 322, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 322, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_9 = __Pyx_PyObject_GetItem(__pyx_t_10, __pyx_t_11); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 322, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_13 = PyObject_Length(__pyx_t_9); if (unlikely(__pyx_t_13 == ((Py_ssize_t)-1))) __PYX_ERR(0, 322, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + (__pyx_v_c.subwords_idx_len[__pyx_v_effective_words]) = ((int)__pyx_t_13); - /* "gensim/models/fasttext_inner.pyx":348 - * subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) - * # ensures reference count of word_subwords doesn't reach 0 - * subword_arrays[effective_words] = word_subwords # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":323 + * + * c.subwords_idx_len[effective_words] = (len(model.wv.buckets_word[word.index])) + * c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) # <<<<<<<<<<<<<< * - * if hs: + * if c.hs: */ - __pyx_t_17 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 348, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - if (unlikely(PyDict_SetItem(__pyx_v_subword_arrays, __pyx_t_17, __pyx_v_word_subwords) < 0)) __PYX_ERR(0, 348, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 323, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_buckets_word); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 323, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 323, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = __Pyx_PyObject_GetItem(__pyx_t_11, __pyx_t_9); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 323, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (!(likely(((__pyx_t_10) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_10, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 323, __pyx_L1_error) + (__pyx_v_c.subwords_idx[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_10))); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - /* "gensim/models/fasttext_inner.pyx":350 - * subword_arrays[effective_words] = word_subwords + /* "gensim/models/fasttext_inner.pyx":325 + * c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) * - * if hs: # <<<<<<<<<<<<<< - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) */ - __pyx_t_5 = (__pyx_v_hs != 0); + __pyx_t_5 = (__pyx_v_c.hs != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":351 + /* "gensim/models/fasttext_inner.pyx":326 * - * if hs: - * codelens[effective_words] = len(word.code) # <<<<<<<<<<<<<< - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) + * if c.hs: + * c.codelens[effective_words] = len(word.code) # <<<<<<<<<<<<<< + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) */ - __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 351, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - __pyx_t_18 = PyObject_Length(__pyx_t_17); if (unlikely(__pyx_t_18 == ((Py_ssize_t)-1))) __PYX_ERR(0, 351, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; - (__pyx_v_codelens[__pyx_v_effective_words]) = ((int)__pyx_t_18); + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 326, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_13 = PyObject_Length(__pyx_t_10); if (unlikely(__pyx_t_13 == ((Py_ssize_t)-1))) __PYX_ERR(0, 326, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + (__pyx_v_c.codelens[__pyx_v_effective_words]) = ((int)__pyx_t_13); - /* "gensim/models/fasttext_inner.pyx":352 - * if hs: - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/fasttext_inner.pyx":327 + * if c.hs: + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< + * c.points[effective_words] = np.PyArray_DATA(word.point) * */ - __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 352, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - if (!(likely(((__pyx_t_17) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_17, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 352, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_17))); - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 327, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_10); + if (!(likely(((__pyx_t_10) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_10, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 327, __pyx_L1_error) + (__pyx_v_c.codes[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_10))); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - /* "gensim/models/fasttext_inner.pyx":353 - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":328 + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< * * effective_words += 1 */ - __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 353, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - if (!(likely(((__pyx_t_17) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_17, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 353, __pyx_L1_error) - (__pyx_v_points[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_17))); - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 328, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + if (!(likely(((__pyx_t_10) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_10, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 328, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_10))); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - /* "gensim/models/fasttext_inner.pyx":350 - * subword_arrays[effective_words] = word_subwords + /* "gensim/models/fasttext_inner.pyx":325 + * c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) * - * if hs: # <<<<<<<<<<<<<< - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) */ } - /* "gensim/models/fasttext_inner.pyx":355 - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/fasttext_inner.pyx":330 + * c.points[effective_words] = np.PyArray_DATA(word.point) * * effective_words += 1 # <<<<<<<<<<<<<< * if effective_words == MAX_SENTENCE_LEN: @@ -4482,7 +4493,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON */ __pyx_v_effective_words = (__pyx_v_effective_words + 1); - /* "gensim/models/fasttext_inner.pyx":356 + /* "gensim/models/fasttext_inner.pyx":331 * * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< @@ -4492,16 +4503,16 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON __pyx_t_5 = ((__pyx_v_effective_words == 0x2710) != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":357 + /* "gensim/models/fasttext_inner.pyx":332 * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: * break # <<<<<<<<<<<<<< * * # keep track of which words go into which sentence, so we don't train */ - goto __pyx_L12_break; + goto __pyx_L7_break; - /* "gensim/models/fasttext_inner.pyx":356 + /* "gensim/models/fasttext_inner.pyx":331 * * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< @@ -4510,38 
+4521,38 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON */ } - /* "gensim/models/fasttext_inner.pyx":335 + /* "gensim/models/fasttext_inner.pyx":314 * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: # <<<<<<<<<<<<<< * word = vlookup[token] if token in vlookup else None * if word is None: */ - __pyx_L11_continue:; + __pyx_L6_continue:; } - __pyx_L12_break:; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_L7_break:; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":362 + /* "gensim/models/fasttext_inner.pyx":337 * # across sentence boundaries. * # indices of sentence number X are between tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 369, __pyx_L1_error) + __pyx_t_3 = -1; __pyx_t_10 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 344, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_4 = Py_TYPE(__pyx_t_10)->tp_iternext; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 344, __pyx_L1_error) } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; for (;;) { - if (likely(!__pyx_t_10)) { - if (likely(PyList_CheckExact(__pyx_t_17))) { - if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_17)) break; + if (likely(!__pyx_t_4)) { + if (likely(PyList_CheckExact(__pyx_t_10))) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_10)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_17, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 369, __pyx_L1_error) + __pyx_t_2 = PyList_GET_ITEM(__pyx_t_10, __pyx_t_3); __Pyx_INCREF(__pyx_t_2); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 344, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_17, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 369, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = PySequence_ITEM(__pyx_t_10, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 344, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); #endif } else { - if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_17)) break; + if (__pyx_t_3 >= PyTuple_GET_SIZE(__pyx_t_10)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_17, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 369, __pyx_L1_error) + __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_10, __pyx_t_3); __Pyx_INCREF(__pyx_t_2); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 344, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_17, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 369, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = PySequence_ITEM(__pyx_t_10, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 344, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); #endif } } else { - __pyx_t_3 = __pyx_t_10(__pyx_t_17); - if (unlikely(!__pyx_t_3)) { + __pyx_t_2 = __pyx_t_4(__pyx_t_10); + if (unlikely(!__pyx_t_2)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 369, __pyx_L1_error) + else __PYX_ERR(0, 344, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_2); } - __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_v_i = __pyx_t_2; - __pyx_t_2 = (__pyx_t_2 + 1); + __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_2); + __pyx_t_2 = 0; + __pyx_v_i = __pyx_t_14; + __pyx_t_14 = 
(__pyx_t_14 + 1); - /* "gensim/models/fasttext_inner.pyx":370 + /* "gensim/models/fasttext_inner.pyx":345 * # precompute "reduced window" offsets in a single randint() call - * for i, item in enumerate(model.random.randint(0, window, effective_words)): - * reduced_windows[i] = item # <<<<<<<<<<<<<< + * for i, item in enumerate(model.random.randint(0, c.window, effective_words)): + * c.reduced_windows[i] = item # <<<<<<<<<<<<<< * * with nogil: */ - __pyx_t_15 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_15 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 370, __pyx_L1_error) - (__pyx_v_reduced_windows[__pyx_v_i]) = __pyx_t_15; + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 345, __pyx_L1_error) + (__pyx_v_c.reduced_windows[__pyx_v_i]) = __pyx_t_12; - /* "gensim/models/fasttext_inner.pyx":369 + /* "gensim/models/fasttext_inner.pyx":344 * * # precompute "reduced window" offsets in a single randint() call - * for i, item in enumerate(model.random.randint(0, window, effective_words)): # <<<<<<<<<<<<<< - * reduced_windows[i] = item + * for i, item in enumerate(model.random.randint(0, c.window, effective_words)): # <<<<<<<<<<<<<< + * c.reduced_windows[i] = item * */ } - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; - /* "gensim/models/fasttext_inner.pyx":372 - * reduced_windows[i] = item + /* "gensim/models/fasttext_inner.pyx":347 + * c.reduced_windows[i] = item * * with nogil: # <<<<<<<<<<<<<< * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] + * idx_start = c.sentence_idx[sent_idx] */ { #ifdef WITH_THREAD @@ -4729,97 +4740,97 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON #endif /*try:*/ { - /* "gensim/models/fasttext_inner.pyx":373 + /* "gensim/models/fasttext_inner.pyx":348 * * with nogil: * for sent_idx in range(effective_sentences): # <<<<<<<<<<<<<< - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] */ - __pyx_t_2 = __pyx_v_effective_sentences; - __pyx_t_19 = __pyx_t_2; - for (__pyx_t_20 = 0; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { - __pyx_v_sent_idx = __pyx_t_20; + __pyx_t_14 = __pyx_v_effective_sentences; + __pyx_t_15 = __pyx_t_14; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_15; __pyx_t_17+=1) { + __pyx_v_sent_idx = __pyx_t_17; - /* "gensim/models/fasttext_inner.pyx":374 + /* "gensim/models/fasttext_inner.pyx":349 * with nogil: * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] # <<<<<<<<<<<<<< - * idx_end = sentence_idx[sent_idx + 1] + * idx_start = c.sentence_idx[sent_idx] # <<<<<<<<<<<<<< + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): */ - __pyx_v_idx_start = (__pyx_v_sentence_idx[__pyx_v_sent_idx]); + __pyx_v_idx_start = (__pyx_v_c.sentence_idx[__pyx_v_sent_idx]); - /* "gensim/models/fasttext_inner.pyx":375 + /* "gensim/models/fasttext_inner.pyx":350 * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] */ - __pyx_v_idx_end = (__pyx_v_sentence_idx[(__pyx_v_sent_idx + 1)]); + 
__pyx_v_idx_end = (__pyx_v_c.sentence_idx[(__pyx_v_sent_idx + 1)]); - /* "gensim/models/fasttext_inner.pyx":376 - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] + /* "gensim/models/fasttext_inner.pyx":351 + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): # <<<<<<<<<<<<<< - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: */ - __pyx_t_21 = __pyx_v_idx_end; - __pyx_t_22 = __pyx_t_21; - for (__pyx_t_23 = __pyx_v_idx_start; __pyx_t_23 < __pyx_t_22; __pyx_t_23+=1) { - __pyx_v_i = __pyx_t_23; + __pyx_t_18 = __pyx_v_idx_end; + __pyx_t_19 = __pyx_t_18; + for (__pyx_t_20 = __pyx_v_idx_start; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { + __pyx_v_i = __pyx_t_20; - /* "gensim/models/fasttext_inner.pyx":377 - * idx_end = sentence_idx[sent_idx + 1] + /* "gensim/models/fasttext_inner.pyx":352 + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< * if j < idx_start: * j = idx_start */ - __pyx_v_j = ((__pyx_v_i - __pyx_v_window) + (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/fasttext_inner.pyx":378 + /* "gensim/models/fasttext_inner.pyx":353 * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: # <<<<<<<<<<<<<< * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ __pyx_t_5 = ((__pyx_v_j < __pyx_v_idx_start) != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":379 - * j = i - window + reduced_windows[i] + /* "gensim/models/fasttext_inner.pyx":354 + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: * j = idx_start # <<<<<<<<<<<<<< - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: */ __pyx_v_j = __pyx_v_idx_start; - /* "gensim/models/fasttext_inner.pyx":378 + /* "gensim/models/fasttext_inner.pyx":353 * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: # <<<<<<<<<<<<<< * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ } - /* "gensim/models/fasttext_inner.pyx":380 + /* "gensim/models/fasttext_inner.pyx":355 * if j < idx_start: * j = idx_start - * k = i + window + 1 - reduced_windows[i] # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< * if k > idx_end: * k = idx_end */ - __pyx_v_k = (((__pyx_v_i + __pyx_v_window) + 1) - (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/fasttext_inner.pyx":381 + /* "gensim/models/fasttext_inner.pyx":356 * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: # <<<<<<<<<<<<<< * k = idx_end * for j in range(j, k): @@ -4827,8 +4838,8 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON __pyx_t_5 = ((__pyx_v_k > __pyx_v_idx_end) != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":382 - * k = i + window + 1 - reduced_windows[i] + /* "gensim/models/fasttext_inner.pyx":357 + 
* k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: * k = idx_end # <<<<<<<<<<<<<< * for j in range(j, k): @@ -4836,122 +4847,122 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON */ __pyx_v_k = __pyx_v_idx_end; - /* "gensim/models/fasttext_inner.pyx":381 + /* "gensim/models/fasttext_inner.pyx":356 * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: # <<<<<<<<<<<<<< * k = idx_end * for j in range(j, k): */ } - /* "gensim/models/fasttext_inner.pyx":383 + /* "gensim/models/fasttext_inner.pyx":358 * if k > idx_end: * k = idx_end * for j in range(j, k): # <<<<<<<<<<<<<< * if j == i: * continue */ - __pyx_t_24 = __pyx_v_k; - __pyx_t_25 = __pyx_t_24; - for (__pyx_t_26 = __pyx_v_j; __pyx_t_26 < __pyx_t_25; __pyx_t_26+=1) { - __pyx_v_j = __pyx_t_26; + __pyx_t_21 = __pyx_v_k; + __pyx_t_22 = __pyx_t_21; + for (__pyx_t_23 = __pyx_v_j; __pyx_t_23 < __pyx_t_22; __pyx_t_23+=1) { + __pyx_v_j = __pyx_t_23; - /* "gensim/models/fasttext_inner.pyx":384 + /* "gensim/models/fasttext_inner.pyx":359 * k = idx_end * for j in range(j, k): * if j == i: # <<<<<<<<<<<<<< * continue - * if hs: + * if c.hs: */ __pyx_t_5 = ((__pyx_v_j == __pyx_v_i) != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":385 + /* "gensim/models/fasttext_inner.pyx":360 * for j in range(j, k): * if j == i: * continue # <<<<<<<<<<<<<< - * if hs: - * fast_sentence_sg_hs( + * if c.hs: + * fasttext_fast_sentence_sg_hs( */ - goto __pyx_L31_continue; + goto __pyx_L26_continue; - /* "gensim/models/fasttext_inner.pyx":384 + /* "gensim/models/fasttext_inner.pyx":359 * k = idx_end * for j in range(j, k): * if j == i: # <<<<<<<<<<<<<< * continue - * if hs: + * if c.hs: */ } - /* "gensim/models/fasttext_inner.pyx":386 + /* "gensim/models/fasttext_inner.pyx":361 * if j == i: * continue - * if hs: # <<<<<<<<<<<<<< - * fast_sentence_sg_hs( - * points[j], codes[j], codelens[j], syn0_vocab, syn0_ngrams, syn1, size, + * if c.hs: # <<<<<<<<<<<<<< + * fasttext_fast_sentence_sg_hs( + * c.points[j], c.codes[j], c.codelens[j], c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, */ - __pyx_t_5 = (__pyx_v_hs != 0); + __pyx_t_5 = (__pyx_v_c.hs != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":387 + /* "gensim/models/fasttext_inner.pyx":362 * continue - * if hs: - * fast_sentence_sg_hs( # <<<<<<<<<<<<<< - * points[j], codes[j], codelens[j], syn0_vocab, syn0_ngrams, syn1, size, - * subwords_idx[i], subwords_idx_len[i], _alpha, work, l1, word_locks_vocab, + * if c.hs: + * fasttext_fast_sentence_sg_hs( # <<<<<<<<<<<<<< + * c.points[j], c.codes[j], c.codelens[j], c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + * c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, c.neu1, */ - __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_hs((__pyx_v_points[__pyx_v_j]), (__pyx_v_codes[__pyx_v_j]), (__pyx_v_codelens[__pyx_v_j]), __pyx_v_syn0_vocab, __pyx_v_syn0_ngrams, __pyx_v_syn1, __pyx_v_size, (__pyx_v_subwords_idx[__pyx_v_i]), (__pyx_v_subwords_idx_len[__pyx_v_i]), __pyx_v__alpha, __pyx_v_work, __pyx_v_l1, __pyx_v_word_locks_vocab, __pyx_v_word_locks_ngrams); + __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_hs((__pyx_v_c.points[__pyx_v_j]), (__pyx_v_c.codes[__pyx_v_j]), (__pyx_v_c.codelens[__pyx_v_j]), __pyx_v_c.syn0_vocab, __pyx_v_c.syn0_ngrams, __pyx_v_c.syn1, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.subwords_idx[__pyx_v_i]), (__pyx_v_c.subwords_idx_len[__pyx_v_i]), 
__pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.neu1, __pyx_v_c.word_locks_vocab, __pyx_v_c.word_locks_ngrams); - /* "gensim/models/fasttext_inner.pyx":386 + /* "gensim/models/fasttext_inner.pyx":361 * if j == i: * continue - * if hs: # <<<<<<<<<<<<<< - * fast_sentence_sg_hs( - * points[j], codes[j], codelens[j], syn0_vocab, syn0_ngrams, syn1, size, + * if c.hs: # <<<<<<<<<<<<<< + * fasttext_fast_sentence_sg_hs( + * c.points[j], c.codes[j], c.codelens[j], c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, */ } - /* "gensim/models/fasttext_inner.pyx":391 - * subwords_idx[i], subwords_idx_len[i], _alpha, work, l1, word_locks_vocab, - * word_locks_ngrams) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_sentence_sg_neg( - * negative, cum_table, cum_table_len, syn0_vocab, syn0_ngrams, syn1neg, size, + /* "gensim/models/fasttext_inner.pyx":366 + * c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, c.neu1, + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fasttext_fast_sentence_sg_neg( + * c.negative, c.cum_table, c.cum_table_len, c.syn0_vocab, c.syn0_ngrams, c.syn1neg, c.size, */ - __pyx_t_5 = (__pyx_v_negative != 0); + __pyx_t_5 = (__pyx_v_c.negative != 0); if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":392 - * word_locks_ngrams) - * if negative: - * next_random = fast_sentence_sg_neg( # <<<<<<<<<<<<<< - * negative, cum_table, cum_table_len, syn0_vocab, syn0_ngrams, syn1neg, size, - * indexes[j], subwords_idx[i], subwords_idx_len[i], _alpha, work, l1, + /* "gensim/models/fasttext_inner.pyx":367 + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: + * c.next_random = fasttext_fast_sentence_sg_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.syn0_vocab, c.syn0_ngrams, c.syn1neg, c.size, + * c.indexes[j], c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, */ - __pyx_v_next_random = __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_sg_neg(__pyx_v_negative, __pyx_v_cum_table, __pyx_v_cum_table_len, __pyx_v_syn0_vocab, __pyx_v_syn0_ngrams, __pyx_v_syn1neg, __pyx_v_size, (__pyx_v_indexes[__pyx_v_j]), (__pyx_v_subwords_idx[__pyx_v_i]), (__pyx_v_subwords_idx_len[__pyx_v_i]), __pyx_v__alpha, __pyx_v_work, __pyx_v_l1, __pyx_v_next_random, __pyx_v_word_locks_vocab, __pyx_v_word_locks_ngrams); + __pyx_v_c.next_random = __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.syn0_vocab, __pyx_v_c.syn0_ngrams, __pyx_v_c.syn1neg, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_j]), (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.subwords_idx[__pyx_v_i]), (__pyx_v_c.subwords_idx_len[__pyx_v_i]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.neu1, __pyx_v_c.next_random, __pyx_v_c.word_locks_vocab, __pyx_v_c.word_locks_ngrams); - /* "gensim/models/fasttext_inner.pyx":391 - * subwords_idx[i], subwords_idx_len[i], _alpha, work, l1, word_locks_vocab, - * word_locks_ngrams) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_sentence_sg_neg( - * negative, cum_table, cum_table_len, syn0_vocab, syn0_ngrams, syn1neg, size, + /* "gensim/models/fasttext_inner.pyx":366 + * c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, c.neu1, + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fasttext_fast_sentence_sg_neg( + * c.negative, c.cum_table, c.cum_table_len, c.syn0_vocab, c.syn0_ngrams, c.syn1neg, c.size, */ } - __pyx_L31_continue:; + 
__pyx_L26_continue:; } } } } - /* "gensim/models/fasttext_inner.pyx":372 - * reduced_windows[i] = item + /* "gensim/models/fasttext_inner.pyx":347 + * c.reduced_windows[i] = item * * with nogil: # <<<<<<<<<<<<<< * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] + * idx_start = c.sentence_idx[sent_idx] */ /*finally:*/ { /*normal exit:*/{ @@ -4959,27 +4970,27 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON __Pyx_FastGIL_Forget(); Py_BLOCK_THREADS #endif - goto __pyx_L24; + goto __pyx_L19; } - __pyx_L24:; + __pyx_L19:; } } - /* "gensim/models/fasttext_inner.pyx":397 - * next_random, word_locks_vocab, word_locks_ngrams) + /* "gensim/models/fasttext_inner.pyx":372 + * c.neu1, c.next_random, c.word_locks_vocab, c.word_locks_ngrams) * * return effective_words # <<<<<<<<<<<<<< * * */ __Pyx_XDECREF(__pyx_r); - __pyx_t_17 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 397, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - __pyx_r = __pyx_t_17; - __pyx_t_17 = 0; + __pyx_t_10 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 372, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_r = __pyx_t_10; + __pyx_t_10 = 0; goto __pyx_L0; - /* "gensim/models/fasttext_inner.pyx":246 + /* "gensim/models/fasttext_inner.pyx":275 * * * def train_batch_sg(model, sentences, alpha, _work, _l1): # <<<<<<<<<<<<<< @@ -4990,29 +5001,25 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_train_batch_sg(CYTHON /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_14); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_16); - __Pyx_XDECREF(__pyx_t_17); __Pyx_AddTraceback("gensim.models.fasttext_inner.train_batch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; - __Pyx_XDECREF(__pyx_v_subword_arrays); __Pyx_XDECREF(__pyx_v_vlookup); __Pyx_XDECREF(__pyx_v_sent); __Pyx_XDECREF(__pyx_v_token); __Pyx_XDECREF(__pyx_v_word); - __Pyx_XDECREF(__pyx_v_subwords); - __Pyx_XDECREF(__pyx_v_word_subwords); __Pyx_XDECREF(__pyx_v_item); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } -/* "gensim/models/fasttext_inner.pyx":400 +/* "gensim/models/fasttext_inner.pyx":375 * * * def train_batch_cbow(model, sentences, alpha, _work, _neu1): # <<<<<<<<<<<<<< @@ -5062,29 +5069,29 @@ static PyObject *__pyx_pw_6gensim_6models_14fasttext_inner_3train_batch_cbow(PyO case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_sentences)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, 1); __PYX_ERR(0, 400, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, 1); __PYX_ERR(0, 375, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, 2); __PYX_ERR(0, 400, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, 2); __PYX_ERR(0, 375, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, 3); __PYX_ERR(0, 400, __pyx_L3_error) + 
__Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, 3); __PYX_ERR(0, 375, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, 4); __PYX_ERR(0, 400, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, 4); __PYX_ERR(0, 375, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_batch_cbow") < 0)) __PYX_ERR(0, 400, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_batch_cbow") < 0)) __PYX_ERR(0, 375, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 5) { goto __pyx_L5_argtuple_error; @@ -5103,7 +5110,7 @@ static PyObject *__pyx_pw_6gensim_6models_14fasttext_inner_3train_batch_cbow(PyO } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 400, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 5, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 375, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("gensim.models.fasttext_inner.train_batch_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); @@ -5117,22 +5124,7 @@ static PyObject *__pyx_pw_6gensim_6models_14fasttext_inner_3train_batch_cbow(PyO } static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_sentences, PyObject *__pyx_v_alpha, PyObject *__pyx_v__work, PyObject *__pyx_v__neu1) { - int __pyx_v_hs; - int __pyx_v_negative; - int __pyx_v_sample; - int __pyx_v_cbow_mean; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_vocab; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_vocab; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0_ngrams; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks_ngrams; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; - int __pyx_v_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_reduced_windows[0x2710]; - int __pyx_v_sentence_idx[(0x2710 + 1)]; - int __pyx_v_window; + struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig __pyx_v_c; int __pyx_v_i; int __pyx_v_j; int __pyx_v_k; @@ -5141,501 +5133,305 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT int __pyx_v_sent_idx; int __pyx_v_idx_start; int __pyx_v_idx_end; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg; - __pyx_t_5numpy_uint32_t *__pyx_v_cum_table; - unsigned PY_LONG_LONG __pyx_v_cum_table_len; - unsigned PY_LONG_LONG __pyx_v_next_random; - int __pyx_v_subwords_idx_len[0x2710]; - __pyx_t_5numpy_uint32_t *__pyx_v_subwords_idx[0x2710]; - PyObject *__pyx_v_subword_arrays = NULL; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1; PyObject *__pyx_v_vlookup = NULL; PyObject *__pyx_v_sent = NULL; PyObject *__pyx_v_token = NULL; PyObject *__pyx_v_word = NULL; - PyObject *__pyx_v_subwords = NULL; - 
PyObject *__pyx_v_word_subwords = NULL; PyObject *__pyx_v_item = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_4; + Py_ssize_t __pyx_t_4; int __pyx_t_5; - Py_ssize_t __pyx_t_6; - int __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - unsigned PY_LONG_LONG __pyx_t_9; + PyObject *__pyx_t_6 = NULL; + unsigned PY_LONG_LONG __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + Py_ssize_t __pyx_t_9; PyObject *(*__pyx_t_10)(PyObject *); - Py_ssize_t __pyx_t_11; - PyObject *(*__pyx_t_12)(PyObject *); - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - __pyx_t_5numpy_uint32_t __pyx_t_15; - PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + __pyx_t_5numpy_uint32_t __pyx_t_13; + Py_ssize_t __pyx_t_14; + int __pyx_t_15; + int __pyx_t_16; PyObject *__pyx_t_17 = NULL; - Py_ssize_t __pyx_t_18; + int __pyx_t_18; int __pyx_t_19; int __pyx_t_20; int __pyx_t_21; - int __pyx_t_22; - int __pyx_t_23; __Pyx_RefNannySetupContext("train_batch_cbow", 0); - /* "gensim/models/fasttext_inner.pyx":424 + /* "gensim/models/fasttext_inner.pyx":402 * - * """ - * cdef int hs = model.hs # <<<<<<<<<<<<<< - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 424, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 424, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_hs = __pyx_t_2; - - /* "gensim/models/fasttext_inner.pyx":425 - * """ - * cdef int hs = model.hs - * cdef int negative = model.negative # <<<<<<<<<<<<<< - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int cbow_mean = model.cbow_mean - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 425, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 425, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_negative = __pyx_t_2; - - /* "gensim/models/fasttext_inner.pyx":426 - * cdef int hs = model.hs - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< - * cdef int cbow_mean = model.cbow_mean - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 426, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 426, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 426, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 426, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_sample = __pyx_t_2; - - /* "gensim/models/fasttext_inner.pyx":427 - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int cbow_mean = model.cbow_mean # <<<<<<<<<<<<<< - * - * cdef REAL_t 
*syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_cbow_mean); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 427, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 427, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_cbow_mean = __pyx_t_2; - - /* "gensim/models/fasttext_inner.pyx":429 - * cdef int cbow_mean = model.cbow_mean - * - * cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) # <<<<<<<<<<<<<< - * cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) - * cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 429, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 429, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 429, __pyx_L1_error) - __pyx_v_syn0_vocab = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "gensim/models/fasttext_inner.pyx":430 + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end * - * cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) - * cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) # <<<<<<<<<<<<<< - * cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) - * cdef REAL_t *word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 430, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_vocab_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 430, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 430, __pyx_L1_error) - __pyx_v_word_locks_vocab = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_effective_words = 0; + __pyx_v_effective_sentences = 0; - /* "gensim/models/fasttext_inner.pyx":431 - * cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) - * cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) - * cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) # <<<<<<<<<<<<<< - * cdef REAL_t *word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) + /* "gensim/models/fasttext_inner.pyx":405 + * cdef int sent_idx, idx_start, idx_end * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 431, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors_ngrams); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 431, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); 
- __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 431, __pyx_L1_error) - __pyx_v_syn0_ngrams = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "gensim/models/fasttext_inner.pyx":432 - * cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) - * cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) - * cdef REAL_t *word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) # <<<<<<<<<<<<<< + * init_ft_config(&c, model, alpha, _work, _neu1) # <<<<<<<<<<<<<< * - * cdef REAL_t *work + * if c.hs: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 432, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_ngrams_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 432, __pyx_L1_error) + __pyx_t_1 = __pyx_f_6gensim_6models_14fasttext_inner_init_ft_config((&__pyx_v_c), __pyx_v_model, __pyx_v_alpha, __pyx_v__work, __pyx_v__neu1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 405, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 432, __pyx_L1_error) - __pyx_v_word_locks_ngrams = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":435 + /* "gensim/models/fasttext_inner.pyx":407 + * init_ft_config(&c, model, alpha, _work, _neu1) * - * cdef REAL_t *work - * cdef REAL_t _alpha = alpha # <<<<<<<<<<<<<< - * cdef int size = model.wv.vector_size + * if c.hs: # <<<<<<<<<<<<<< + * c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) * */ - __pyx_t_4 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_4 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 435, __pyx_L1_error) - __pyx_v__alpha = __pyx_t_4; + __pyx_t_2 = (__pyx_v_c.hs != 0); + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":436 - * cdef REAL_t *work - * cdef REAL_t _alpha = alpha - * cdef int size = model.wv.vector_size # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":408 * - * cdef int codelens[MAX_SENTENCE_LEN] - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 436, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 436, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 436, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_size = __pyx_t_2; - - /* "gensim/models/fasttext_inner.pyx":442 - * cdef np.uint32_t reduced_windows[MAX_SENTENCE_LEN] - * cdef int sentence_idx[MAX_SENTENCE_LEN + 1] - * cdef int window = model.window # <<<<<<<<<<<<<< + * if c.hs: + * c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< * - * cdef int i, j, k + * if c.negative: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 442, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - 
__pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 442, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_window = __pyx_t_2; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 408, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 408, __pyx_L1_error) + __pyx_v_c.syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/fasttext_inner.pyx":445 - * - * cdef int i, j, k - * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< - * cdef int sent_idx, idx_start, idx_end + /* "gensim/models/fasttext_inner.pyx":407 + * init_ft_config(&c, model, alpha, _work, _neu1) * - */ - __pyx_v_effective_words = 0; - __pyx_v_effective_sentences = 0; - - /* "gensim/models/fasttext_inner.pyx":465 - * # dummy dictionary to ensure that the memory locations that subwords_idx point to - * # are referenced throughout so that it isn't put back to free memory pool by Python's memory manager - * subword_arrays = {} # <<<<<<<<<<<<<< + * if c.hs: # <<<<<<<<<<<<<< + * c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if hs: */ - __pyx_t_3 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 465, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_v_subword_arrays = ((PyObject*)__pyx_t_3); - __pyx_t_3 = 0; + } - /* "gensim/models/fasttext_inner.pyx":467 - * subword_arrays = {} - * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/fasttext_inner.pyx":410 + * c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) * + * if c.negative: # <<<<<<<<<<<<<< + * c.syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c.cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) */ - __pyx_t_5 = (__pyx_v_hs != 0); - if (__pyx_t_5) { + __pyx_t_2 = (__pyx_v_c.negative != 0); + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":468 + /* "gensim/models/fasttext_inner.pyx":411 * - * if hs: - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< - * - * if negative: + * if c.negative: + * c.syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< + * c.cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c.cum_table_len = len(model.vocabulary.cum_table) */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 468, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 411, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 468, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 411, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 468, __pyx_L1_error) - __pyx_v_syn1 = 
((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 411, __pyx_L1_error) + __pyx_v_c.syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":467 - * subword_arrays = {} - * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - */ - } - - /* "gensim/models/fasttext_inner.pyx":470 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - */ - __pyx_t_5 = (__pyx_v_negative != 0); - if (__pyx_t_5) { - - /* "gensim/models/fasttext_inner.pyx":471 - * - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) + /* "gensim/models/fasttext_inner.pyx":412 + * if c.negative: + * c.syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c.cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< + * c.cum_table_len = len(model.vocabulary.cum_table) + * if c.negative or c.sample: */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 471, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 412, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 471, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 412, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 471, __pyx_L1_error) - __pyx_v_syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 412, __pyx_L1_error) + __pyx_v_c.cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/fasttext_inner.pyx":472 - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: + /* "gensim/models/fasttext_inner.pyx":413 + * c.syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c.cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c.cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< + * if c.negative or c.sample: + * c.next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 472, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 413, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 
= __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 472, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 413, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 472, __pyx_L1_error) - __pyx_v_cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + __pyx_t_4 = PyObject_Length(__pyx_t_1); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 413, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_c.cum_table_len = __pyx_t_4; - /* "gensim/models/fasttext_inner.pyx":473 - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 473, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 473, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_6 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 473, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_cum_table_len = __pyx_t_6; - - /* "gensim/models/fasttext_inner.pyx":470 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/fasttext_inner.pyx":410 + * c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * if c.negative: # <<<<<<<<<<<<<< + * c.syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c.cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) */ } - /* "gensim/models/fasttext_inner.pyx":474 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/fasttext_inner.pyx":414 + * c.cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c.cum_table_len = len(model.vocabulary.cum_table) + * if c.negative or c.sample: # <<<<<<<<<<<<<< + * c.next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) * */ - __pyx_t_7 = (__pyx_v_negative != 0); - if (!__pyx_t_7) { + __pyx_t_5 = (__pyx_v_c.negative != 0); + if (!__pyx_t_5) { } else { - __pyx_t_5 = __pyx_t_7; + __pyx_t_2 = __pyx_t_5; goto __pyx_L6_bool_binop_done; } - __pyx_t_7 = (__pyx_v_sample != 0); - __pyx_t_5 = __pyx_t_7; + __pyx_t_5 = (__pyx_v_c.sample != 0); + __pyx_t_2 = __pyx_t_5; __pyx_L6_bool_binop_done:; - if (__pyx_t_5) { + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":475 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":415 + * c.cum_table_len = len(model.vocabulary.cum_table) + * if c.negative or 
c.sample: + * c.next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< * - * # convert Python structures to primitive types, so we can release the GIL + * # prepare C structures so we can go "full C" and release the Python GIL */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 475, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 415, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 475, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_randint); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 415, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 475, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 415, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 475, __pyx_L1_error) + __pyx_t_3 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 415, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 415, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_randint); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 415, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 415, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = PyNumber_Add(__pyx_t_3, __pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 415, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_9 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 475, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_next_random = __pyx_t_9; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_7 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_6); if (unlikely((__pyx_t_7 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 415, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_c.next_random = __pyx_t_7; - /* 
"gensim/models/fasttext_inner.pyx":474 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/fasttext_inner.pyx":414 + * c.cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c.cum_table_len = len(model.vocabulary.cum_table) + * if c.negative or c.sample: # <<<<<<<<<<<<<< + * c.next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) * */ } - /* "gensim/models/fasttext_inner.pyx":478 - * - * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< - * neu1 = np.PyArray_DATA(_neu1) - * - */ - if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 478, __pyx_L1_error) - __pyx_v_work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); - - /* "gensim/models/fasttext_inner.pyx":479 - * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) - * neu1 = np.PyArray_DATA(_neu1) # <<<<<<<<<<<<<< - * - * # prepare C structures so we can go "full C" and release the Python GIL - */ - if (!(likely(((__pyx_v__neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 479, __pyx_L1_error) - __pyx_v_neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__neu1))); - - /* "gensim/models/fasttext_inner.pyx":482 + /* "gensim/models/fasttext_inner.pyx":418 * * # prepare C structures so we can go "full C" and release the Python GIL * vlookup = model.wv.vocab # <<<<<<<<<<<<<< - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 482, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_vlookup = __pyx_t_3; - __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_vocab); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 418, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_vlookup = __pyx_t_1; + __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":483 + /* "gensim/models/fasttext_inner.pyx":419 * # prepare C structures so we can go "full C" and release the Python GIL * vlookup = model.wv.vocab - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< * for sent in sentences: * if not sent: */ - (__pyx_v_sentence_idx[0]) = 0; + (__pyx_v_c.sentence_idx[0]) = 0; - /* "gensim/models/fasttext_inner.pyx":484 + /* "gensim/models/fasttext_inner.pyx":420 * vlookup = model.wv.vocab - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * c.sentence_idx[0] = 0 # indices 
of the first sentence always start at 0 * for sent in sentences: # <<<<<<<<<<<<<< * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged */ if (likely(PyList_CheckExact(__pyx_v_sentences)) || PyTuple_CheckExact(__pyx_v_sentences)) { - __pyx_t_3 = __pyx_v_sentences; __Pyx_INCREF(__pyx_t_3); __pyx_t_6 = 0; - __pyx_t_10 = NULL; + __pyx_t_1 = __pyx_v_sentences; __Pyx_INCREF(__pyx_t_1); __pyx_t_4 = 0; + __pyx_t_8 = NULL; } else { - __pyx_t_6 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_sentences); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 484, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_10 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 484, __pyx_L1_error) + __pyx_t_4 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_sentences); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 420, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_10)) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_3)) break; + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_6); __Pyx_INCREF(__pyx_t_8); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 484, __pyx_L1_error) + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 420, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 484, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); #endif } else { - if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_6); __Pyx_INCREF(__pyx_t_8); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 484, __pyx_L1_error) + __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 420, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 484, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 420, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); #endif } } else { - __pyx_t_8 = __pyx_t_10(__pyx_t_3); - if (unlikely(!__pyx_t_8)) { + __pyx_t_6 = __pyx_t_8(__pyx_t_1); + if (unlikely(!__pyx_t_6)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 484, __pyx_L1_error) + else __PYX_ERR(0, 420, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_6); } - __Pyx_XDECREF_SET(__pyx_v_sent, __pyx_t_8); - __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_sent, __pyx_t_6); + __pyx_t_6 = 0; - /* "gensim/models/fasttext_inner.pyx":485 - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + /* "gensim/models/fasttext_inner.pyx":421 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: * 
if not sent: # <<<<<<<<<<<<<< * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: */ - __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_v_sent); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 485, __pyx_L1_error) - __pyx_t_7 = ((!__pyx_t_5) != 0); - if (__pyx_t_7) { + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_sent); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 421, __pyx_L1_error) + __pyx_t_5 = ((!__pyx_t_2) != 0); + if (__pyx_t_5) { - /* "gensim/models/fasttext_inner.pyx":486 + /* "gensim/models/fasttext_inner.pyx":422 * for sent in sentences: * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged # <<<<<<<<<<<<<< @@ -5644,8 +5440,8 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT */ goto __pyx_L8_continue; - /* "gensim/models/fasttext_inner.pyx":485 - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + /* "gensim/models/fasttext_inner.pyx":421 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: * if not sent: # <<<<<<<<<<<<<< * continue # ignore empty sentences; leave effective_sentences unchanged @@ -5653,7 +5449,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT */ } - /* "gensim/models/fasttext_inner.pyx":487 + /* "gensim/models/fasttext_inner.pyx":423 * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: # <<<<<<<<<<<<<< @@ -5661,318 +5457,275 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT * if word is None: */ if (likely(PyList_CheckExact(__pyx_v_sent)) || PyTuple_CheckExact(__pyx_v_sent)) { - __pyx_t_8 = __pyx_v_sent; __Pyx_INCREF(__pyx_t_8); __pyx_t_11 = 0; - __pyx_t_12 = NULL; + __pyx_t_6 = __pyx_v_sent; __Pyx_INCREF(__pyx_t_6); __pyx_t_9 = 0; + __pyx_t_10 = NULL; } else { - __pyx_t_11 = -1; __pyx_t_8 = PyObject_GetIter(__pyx_v_sent); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 487, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_12 = Py_TYPE(__pyx_t_8)->tp_iternext; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 487, __pyx_L1_error) + __pyx_t_9 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_v_sent); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 423, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_10 = Py_TYPE(__pyx_t_6)->tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 423, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_12)) { - if (likely(PyList_CheckExact(__pyx_t_8))) { - if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_8)) break; + if (likely(!__pyx_t_10)) { + if (likely(PyList_CheckExact(__pyx_t_6))) { + if (__pyx_t_9 >= PyList_GET_SIZE(__pyx_t_6)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_8, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 487, __pyx_L1_error) + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_6, __pyx_t_9); __Pyx_INCREF(__pyx_t_3); __pyx_t_9++; if (unlikely(0 < 0)) __PYX_ERR(0, 423, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_8, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 487, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PySequence_ITEM(__pyx_t_6, __pyx_t_9); __pyx_t_9++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 423, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); #endif } else { - if (__pyx_t_11 >= PyTuple_GET_SIZE(__pyx_t_8)) break; + if (__pyx_t_9 >= PyTuple_GET_SIZE(__pyx_t_6)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - 
__pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_8, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 487, __pyx_L1_error) + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_9); __Pyx_INCREF(__pyx_t_3); __pyx_t_9++; if (unlikely(0 < 0)) __PYX_ERR(0, 423, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_8, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 487, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PySequence_ITEM(__pyx_t_6, __pyx_t_9); __pyx_t_9++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 423, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); #endif } } else { - __pyx_t_1 = __pyx_t_12(__pyx_t_8); - if (unlikely(!__pyx_t_1)) { + __pyx_t_3 = __pyx_t_10(__pyx_t_6); + if (unlikely(!__pyx_t_3)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 487, __pyx_L1_error) + else __PYX_ERR(0, 423, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_3); } - __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_1); - __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_3); + __pyx_t_3 = 0; - /* "gensim/models/fasttext_inner.pyx":488 + /* "gensim/models/fasttext_inner.pyx":424 * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: * word = vlookup[token] if token in vlookup else None # <<<<<<<<<<<<<< * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window */ - __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 488, __pyx_L1_error) - if ((__pyx_t_7 != 0)) { - __pyx_t_13 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 488, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_1 = __pyx_t_13; - __pyx_t_13 = 0; + __pyx_t_5 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 424, __pyx_L1_error) + if ((__pyx_t_5 != 0)) { + __pyx_t_11 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 424, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_3 = __pyx_t_11; + __pyx_t_11 = 0; } else { __Pyx_INCREF(Py_None); - __pyx_t_1 = Py_None; + __pyx_t_3 = Py_None; } - __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_1); - __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_3); + __pyx_t_3 = 0; - /* "gensim/models/fasttext_inner.pyx":489 + /* "gensim/models/fasttext_inner.pyx":425 * for token in sent: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): */ - __pyx_t_7 = (__pyx_v_word == Py_None); - __pyx_t_5 = (__pyx_t_7 != 0); - if (__pyx_t_5) { + __pyx_t_5 = (__pyx_v_word == Py_None); + __pyx_t_2 = (__pyx_t_5 != 0); + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":490 + /* "gensim/models/fasttext_inner.pyx":426 * word = vlookup[token] if token in vlookup else None * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window # <<<<<<<<<<<<<< - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue 
*/ goto __pyx_L11_continue; - /* "gensim/models/fasttext_inner.pyx":489 + /* "gensim/models/fasttext_inner.pyx":425 * for token in sent: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): */ } - /* "gensim/models/fasttext_inner.pyx":491 + /* "gensim/models/fasttext_inner.pyx":427 * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index */ - __pyx_t_7 = (__pyx_v_sample != 0); - if (__pyx_t_7) { + __pyx_t_5 = (__pyx_v_c.sample != 0); + if (__pyx_t_5) { } else { - __pyx_t_5 = __pyx_t_7; + __pyx_t_2 = __pyx_t_5; goto __pyx_L15_bool_binop_done; } - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 491, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_13 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_next_random))); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 491, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_14 = PyObject_RichCompare(__pyx_t_1, __pyx_t_13, Py_LT); __Pyx_XGOTREF(__pyx_t_14); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 491, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_14); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 491, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __pyx_t_5 = __pyx_t_7; + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 427, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_11 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_c.next_random))); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 427, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = PyObject_RichCompare(__pyx_t_3, __pyx_t_11, Py_LT); __Pyx_XGOTREF(__pyx_t_12); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 427, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_12); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 427, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_2 = __pyx_t_5; __pyx_L15_bool_binop_done:; - if (__pyx_t_5) { + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":492 + /* "gensim/models/fasttext_inner.pyx":428 * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue # <<<<<<<<<<<<<< - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index * */ goto __pyx_L11_continue; - /* "gensim/models/fasttext_inner.pyx":491 + /* "gensim/models/fasttext_inner.pyx":427 * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < 
random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index */ } - /* "gensim/models/fasttext_inner.pyx":493 - * if sample and word.sample_int < random_int32(&next_random): + /* "gensim/models/fasttext_inner.pyx":429 + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue - * indexes[effective_words] = word.index # <<<<<<<<<<<<<< - * - * subwords = model.wv.buckets_word[word.index] - */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 493, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_15 = __Pyx_PyInt_As_npy_uint32(__pyx_t_14); if (unlikely((__pyx_t_15 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 493, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - (__pyx_v_indexes[__pyx_v_effective_words]) = __pyx_t_15; - - /* "gensim/models/fasttext_inner.pyx":495 - * indexes[effective_words] = word.index - * - * subwords = model.wv.buckets_word[word.index] # <<<<<<<<<<<<<< - * word_subwords = np.array(subwords, dtype=np.uint32) - * subwords_idx_len[effective_words] = len(subwords) - */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 495, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_13 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_buckets_word); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 495, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 495, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_1 = __Pyx_PyObject_GetItem(__pyx_t_13, __pyx_t_14); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 495, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_XDECREF_SET(__pyx_v_subwords, __pyx_t_1); - __pyx_t_1 = 0; - - /* "gensim/models/fasttext_inner.pyx":496 + * c.indexes[effective_words] = word.index # <<<<<<<<<<<<<< * - * subwords = model.wv.buckets_word[word.index] - * word_subwords = np.array(subwords, dtype=np.uint32) # <<<<<<<<<<<<<< - * subwords_idx_len[effective_words] = len(subwords) - * subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) + * c.subwords_idx_len[effective_words] = len(model.wv.buckets_word[word.index]) */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_np); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_array); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v_subwords); - __Pyx_GIVEREF(__pyx_v_subwords); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_subwords); - __pyx_t_13 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_16 = __Pyx_GetModuleGlobalName(__pyx_n_s_np); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_16); - __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_t_16, __pyx_n_s_uint32); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 496, __pyx_L1_error) - 
__Pyx_GOTREF(__pyx_t_17); - __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; - if (PyDict_SetItem(__pyx_t_13, __pyx_n_s_dtype, __pyx_t_17) < 0) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; - __pyx_t_17 = __Pyx_PyObject_Call(__pyx_t_14, __pyx_t_1, __pyx_t_13); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 496, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __Pyx_XDECREF_SET(__pyx_v_word_subwords, __pyx_t_17); - __pyx_t_17 = 0; - - /* "gensim/models/fasttext_inner.pyx":497 - * subwords = model.wv.buckets_word[word.index] - * word_subwords = np.array(subwords, dtype=np.uint32) - * subwords_idx_len[effective_words] = len(subwords) # <<<<<<<<<<<<<< - * subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) - * # ensures reference count of word_subwords doesn't reach 0 - */ - __pyx_t_18 = PyObject_Length(__pyx_v_subwords); if (unlikely(__pyx_t_18 == ((Py_ssize_t)-1))) __PYX_ERR(0, 497, __pyx_L1_error) - (__pyx_v_subwords_idx_len[__pyx_v_effective_words]) = ((int)__pyx_t_18); - - /* "gensim/models/fasttext_inner.pyx":498 - * word_subwords = np.array(subwords, dtype=np.uint32) - * subwords_idx_len[effective_words] = len(subwords) - * subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) # <<<<<<<<<<<<<< - * # ensures reference count of word_subwords doesn't reach 0 - * subword_arrays[effective_words] = word_subwords - */ - if (!(likely(((__pyx_v_word_subwords) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_word_subwords, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 498, __pyx_L1_error) - (__pyx_v_subwords_idx[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_v_word_subwords))); - - /* "gensim/models/fasttext_inner.pyx":500 - * subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) - * # ensures reference count of word_subwords doesn't reach 0 - * subword_arrays[effective_words] = word_subwords # <<<<<<<<<<<<<< - * - * if hs: - */ - __pyx_t_17 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 500, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - if (unlikely(PyDict_SetItem(__pyx_v_subword_arrays, __pyx_t_17, __pyx_v_word_subwords) < 0)) __PYX_ERR(0, 500, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; - - /* "gensim/models/fasttext_inner.pyx":502 - * subword_arrays[effective_words] = word_subwords - * - * if hs: # <<<<<<<<<<<<<< - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) - */ - __pyx_t_5 = (__pyx_v_hs != 0); - if (__pyx_t_5) { + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_13 = __Pyx_PyInt_As_npy_uint32(__pyx_t_12); if (unlikely((__pyx_t_13 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 429, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + (__pyx_v_c.indexes[__pyx_v_effective_words]) = __pyx_t_13; - /* "gensim/models/fasttext_inner.pyx":503 + /* "gensim/models/fasttext_inner.pyx":431 + * c.indexes[effective_words] = word.index + * + * c.subwords_idx_len[effective_words] = len(model.wv.buckets_word[word.index]) # <<<<<<<<<<<<<< + * c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) * - * if hs: - * codelens[effective_words] = len(word.code) # <<<<<<<<<<<<<< - * codes[effective_words] = 
np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) */ - __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 503, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - __pyx_t_18 = PyObject_Length(__pyx_t_17); if (unlikely(__pyx_t_18 == ((Py_ssize_t)-1))) __PYX_ERR(0, 503, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; - (__pyx_v_codelens[__pyx_v_effective_words]) = ((int)__pyx_t_18); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_12, __pyx_n_s_buckets_word); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_3 = __Pyx_PyObject_GetItem(__pyx_t_11, __pyx_t_12); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_14 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_14 == ((Py_ssize_t)-1))) __PYX_ERR(0, 431, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + (__pyx_v_c.subwords_idx_len[__pyx_v_effective_words]) = ((int)__pyx_t_14); - /* "gensim/models/fasttext_inner.pyx":504 - * if hs: - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/fasttext_inner.pyx":432 + * + * c.subwords_idx_len[effective_words] = len(model.wv.buckets_word[word.index]) + * c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) # <<<<<<<<<<<<<< + * + * if c.hs: + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_buckets_word); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_11 = __Pyx_PyObject_GetItem(__pyx_t_12, __pyx_t_3); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 432, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 432, __pyx_L1_error) + (__pyx_v_c.subwords_idx[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "gensim/models/fasttext_inner.pyx":434 + * c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) + * + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) + */ + __pyx_t_2 = (__pyx_v_c.hs != 0); + if (__pyx_t_2) { + + /* "gensim/models/fasttext_inner.pyx":435 + * + * if c.hs: + * c.codelens[effective_words] = len(word.code) # <<<<<<<<<<<<<< + * 
c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) + */ + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 435, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_14 = PyObject_Length(__pyx_t_11); if (unlikely(__pyx_t_14 == ((Py_ssize_t)-1))) __PYX_ERR(0, 435, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.codelens[__pyx_v_effective_words]) = ((int)__pyx_t_14); + + /* "gensim/models/fasttext_inner.pyx":436 + * if c.hs: + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 */ - __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 504, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - if (!(likely(((__pyx_t_17) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_17, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 504, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_17))); - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 436, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 436, __pyx_L1_error) + (__pyx_v_c.codes[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/fasttext_inner.pyx":505 - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":437 + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: */ - __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 505, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - if (!(likely(((__pyx_t_17) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_17, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 505, __pyx_L1_error) - (__pyx_v_points[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_17))); - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 437, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 437, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/fasttext_inner.pyx":502 - * subword_arrays[effective_words] = word_subwords + /* "gensim/models/fasttext_inner.pyx":434 + * c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) * - * if hs: # <<<<<<<<<<<<<< - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) + * if 
c.hs: # <<<<<<<<<<<<<< + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) */ } - /* "gensim/models/fasttext_inner.pyx":506 - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/fasttext_inner.pyx":438 + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 # <<<<<<<<<<<<<< * if effective_words == MAX_SENTENCE_LEN: * break */ __pyx_v_effective_words = (__pyx_v_effective_words + 1); - /* "gensim/models/fasttext_inner.pyx":507 - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/fasttext_inner.pyx":439 + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< * break * */ - __pyx_t_5 = ((__pyx_v_effective_words == 0x2710) != 0); - if (__pyx_t_5) { + __pyx_t_2 = ((__pyx_v_effective_words == 0x2710) != 0); + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":508 + /* "gensim/models/fasttext_inner.pyx":440 * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: * break # <<<<<<<<<<<<<< @@ -5981,8 +5734,8 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT */ goto __pyx_L12_break; - /* "gensim/models/fasttext_inner.pyx":507 - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/fasttext_inner.pyx":439 + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< * break @@ -5990,7 +5743,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT */ } - /* "gensim/models/fasttext_inner.pyx":487 + /* "gensim/models/fasttext_inner.pyx":423 * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: # <<<<<<<<<<<<<< @@ -6000,37 +5753,37 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT __pyx_L11_continue:; } __pyx_L12_break:; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - /* "gensim/models/fasttext_inner.pyx":513 + /* "gensim/models/fasttext_inner.pyx":445 * # across sentence boundaries. 
* # indices of sentence number X are between tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 520, __pyx_L1_error) + __pyx_t_4 = -1; __pyx_t_11 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 452, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_8 = Py_TYPE(__pyx_t_11)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 452, __pyx_L1_error) } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; for (;;) { - if (likely(!__pyx_t_10)) { - if (likely(PyList_CheckExact(__pyx_t_17))) { - if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_17)) break; + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_11))) { + if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_11)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_17, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 520, __pyx_L1_error) + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_11, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 452, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_17, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 520, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PySequence_ITEM(__pyx_t_11, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 452, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } else { - if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_17)) break; + if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_11)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_17, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 520, __pyx_L1_error) + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_11, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 452, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_17, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 520, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PySequence_ITEM(__pyx_t_11, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 452, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } } else { - __pyx_t_3 = __pyx_t_10(__pyx_t_17); - if (unlikely(!__pyx_t_3)) { + __pyx_t_1 = __pyx_t_8(__pyx_t_11); + if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 520, __pyx_L1_error) + else __PYX_ERR(0, 452, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_1); } - __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_v_i = __pyx_t_2; - __pyx_t_2 = (__pyx_t_2 + 1); + __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_v_i = __pyx_t_15; + __pyx_t_15 = (__pyx_t_15 + 1); - /* "gensim/models/fasttext_inner.pyx":521 + /* "gensim/models/fasttext_inner.pyx":453 * # precompute "reduced window" offsets in a single randint() call - * for i, item in enumerate(model.random.randint(0, window, effective_words)): - * reduced_windows[i] = item # <<<<<<<<<<<<<< + * for i, item in enumerate(model.random.randint(0, c.window, effective_words)): + * c.reduced_windows[i] = item # <<<<<<<<<<<<<< * * # release GIL & train on all sentences */ - __pyx_t_15 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_15 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 521, __pyx_L1_error) - (__pyx_v_reduced_windows[__pyx_v_i]) = 
__pyx_t_15; + __pyx_t_13 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_13 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 453, __pyx_L1_error) + (__pyx_v_c.reduced_windows[__pyx_v_i]) = __pyx_t_13; - /* "gensim/models/fasttext_inner.pyx":520 + /* "gensim/models/fasttext_inner.pyx":452 * * # precompute "reduced window" offsets in a single randint() call - * for i, item in enumerate(model.random.randint(0, window, effective_words)): # <<<<<<<<<<<<<< - * reduced_windows[i] = item + * for i, item in enumerate(model.random.randint(0, c.window, effective_words)): # <<<<<<<<<<<<<< + * c.reduced_windows[i] = item * */ } - __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/fasttext_inner.pyx":524 + /* "gensim/models/fasttext_inner.pyx":456 * * # release GIL & train on all sentences * with nogil: # <<<<<<<<<<<<<< * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] + * idx_start = c.sentence_idx[sent_idx] */ { #ifdef WITH_THREAD @@ -6209,187 +5962,187 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT #endif /*try:*/ { - /* "gensim/models/fasttext_inner.pyx":525 + /* "gensim/models/fasttext_inner.pyx":457 * # release GIL & train on all sentences * with nogil: * for sent_idx in range(effective_sentences): # <<<<<<<<<<<<<< - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] */ - __pyx_t_2 = __pyx_v_effective_sentences; - __pyx_t_19 = __pyx_t_2; - for (__pyx_t_20 = 0; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { - __pyx_v_sent_idx = __pyx_t_20; + __pyx_t_15 = __pyx_v_effective_sentences; + __pyx_t_16 = __pyx_t_15; + for (__pyx_t_18 = 0; __pyx_t_18 < __pyx_t_16; __pyx_t_18+=1) { + __pyx_v_sent_idx = __pyx_t_18; - /* "gensim/models/fasttext_inner.pyx":526 + /* "gensim/models/fasttext_inner.pyx":458 * with nogil: * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] # <<<<<<<<<<<<<< - * idx_end = sentence_idx[sent_idx + 1] + * idx_start = c.sentence_idx[sent_idx] # <<<<<<<<<<<<<< + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): */ - __pyx_v_idx_start = (__pyx_v_sentence_idx[__pyx_v_sent_idx]); + __pyx_v_idx_start = (__pyx_v_c.sentence_idx[__pyx_v_sent_idx]); - /* "gensim/models/fasttext_inner.pyx":527 + /* "gensim/models/fasttext_inner.pyx":459 * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] */ - __pyx_v_idx_end = (__pyx_v_sentence_idx[(__pyx_v_sent_idx + 1)]); + __pyx_v_idx_end = (__pyx_v_c.sentence_idx[(__pyx_v_sent_idx + 1)]); - /* "gensim/models/fasttext_inner.pyx":528 - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] + /* "gensim/models/fasttext_inner.pyx":460 + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): # <<<<<<<<<<<<<< - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: */ - __pyx_t_21 = __pyx_v_idx_end; - __pyx_t_22 = __pyx_t_21; - for (__pyx_t_23 = __pyx_v_idx_start; __pyx_t_23 < __pyx_t_22; __pyx_t_23+=1) { - 
__pyx_v_i = __pyx_t_23; + __pyx_t_19 = __pyx_v_idx_end; + __pyx_t_20 = __pyx_t_19; + for (__pyx_t_21 = __pyx_v_idx_start; __pyx_t_21 < __pyx_t_20; __pyx_t_21+=1) { + __pyx_v_i = __pyx_t_21; - /* "gensim/models/fasttext_inner.pyx":529 - * idx_end = sentence_idx[sent_idx + 1] + /* "gensim/models/fasttext_inner.pyx":461 + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< * if j < idx_start: * j = idx_start */ - __pyx_v_j = ((__pyx_v_i - __pyx_v_window) + (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/fasttext_inner.pyx":530 + /* "gensim/models/fasttext_inner.pyx":462 * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: # <<<<<<<<<<<<<< * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ - __pyx_t_5 = ((__pyx_v_j < __pyx_v_idx_start) != 0); - if (__pyx_t_5) { + __pyx_t_2 = ((__pyx_v_j < __pyx_v_idx_start) != 0); + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":531 - * j = i - window + reduced_windows[i] + /* "gensim/models/fasttext_inner.pyx":463 + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: * j = idx_start # <<<<<<<<<<<<<< - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: */ __pyx_v_j = __pyx_v_idx_start; - /* "gensim/models/fasttext_inner.pyx":530 + /* "gensim/models/fasttext_inner.pyx":462 * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: # <<<<<<<<<<<<<< * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ } - /* "gensim/models/fasttext_inner.pyx":532 + /* "gensim/models/fasttext_inner.pyx":464 * if j < idx_start: * j = idx_start - * k = i + window + 1 - reduced_windows[i] # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< * if k > idx_end: * k = idx_end */ - __pyx_v_k = (((__pyx_v_i + __pyx_v_window) + 1) - (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/fasttext_inner.pyx":533 + /* "gensim/models/fasttext_inner.pyx":465 * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: # <<<<<<<<<<<<<< * k = idx_end * */ - __pyx_t_5 = ((__pyx_v_k > __pyx_v_idx_end) != 0); - if (__pyx_t_5) { + __pyx_t_2 = ((__pyx_v_k > __pyx_v_idx_end) != 0); + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":534 - * k = i + window + 1 - reduced_windows[i] + /* "gensim/models/fasttext_inner.pyx":466 + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: * k = idx_end # <<<<<<<<<<<<<< * - * if hs: + * if c.hs: */ __pyx_v_k = __pyx_v_idx_end; - /* "gensim/models/fasttext_inner.pyx":533 + /* "gensim/models/fasttext_inner.pyx":465 * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: # <<<<<<<<<<<<<< * k = idx_end * */ } - /* "gensim/models/fasttext_inner.pyx":536 + /* "gensim/models/fasttext_inner.pyx":468 * k = idx_end * - * if hs: # <<<<<<<<<<<<<< - * fast_sentence_cbow_hs( - * points[i], codes[i], 
codelens, neu1, syn0_vocab, syn0_ngrams, syn1, size,indexes, + * if c.hs: # <<<<<<<<<<<<<< + * fasttext_fast_sentence_cbow_hs( + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, */ - __pyx_t_5 = (__pyx_v_hs != 0); - if (__pyx_t_5) { + __pyx_t_2 = (__pyx_v_c.hs != 0); + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":537 + /* "gensim/models/fasttext_inner.pyx":469 * - * if hs: - * fast_sentence_cbow_hs( # <<<<<<<<<<<<<< - * points[i], codes[i], codelens, neu1, syn0_vocab, syn0_ngrams, syn1, size,indexes, - * subwords_idx,subwords_idx_len,_alpha, work, i, j, k, cbow_mean, word_locks_vocab, + * if c.hs: + * fasttext_fast_sentence_cbow_hs( # <<<<<<<<<<<<<< + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + * c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, c.cbow_mean, */ - __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), __pyx_v_codelens, __pyx_v_neu1, __pyx_v_syn0_vocab, __pyx_v_syn0_ngrams, __pyx_v_syn1, __pyx_v_size, __pyx_v_indexes, __pyx_v_subwords_idx, __pyx_v_subwords_idx_len, __pyx_v__alpha, __pyx_v_work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_cbow_mean, __pyx_v_word_locks_vocab, __pyx_v_word_locks_ngrams); + __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0_vocab, __pyx_v_c.syn0_ngrams, __pyx_v_c.syn1, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.subwords_idx, __pyx_v_c.subwords_idx_len, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean, __pyx_v_c.word_locks_vocab, __pyx_v_c.word_locks_ngrams); - /* "gensim/models/fasttext_inner.pyx":536 + /* "gensim/models/fasttext_inner.pyx":468 * k = idx_end * - * if hs: # <<<<<<<<<<<<<< - * fast_sentence_cbow_hs( - * points[i], codes[i], codelens, neu1, syn0_vocab, syn0_ngrams, syn1, size,indexes, + * if c.hs: # <<<<<<<<<<<<<< + * fasttext_fast_sentence_cbow_hs( + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, */ } - /* "gensim/models/fasttext_inner.pyx":541 - * subwords_idx,subwords_idx_len,_alpha, work, i, j, k, cbow_mean, word_locks_vocab, - * word_locks_ngrams) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_sentence_cbow_neg( - * negative, cum_table, cum_table_len, codelens, neu1, syn0_vocab, syn0_ngrams, + /* "gensim/models/fasttext_inner.pyx":473 + * c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, c.cbow_mean, + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fasttext_fast_sentence_cbow_neg( + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, */ - __pyx_t_5 = (__pyx_v_negative != 0); - if (__pyx_t_5) { + __pyx_t_2 = (__pyx_v_c.negative != 0); + if (__pyx_t_2) { - /* "gensim/models/fasttext_inner.pyx":542 - * word_locks_ngrams) - * if negative: - * next_random = fast_sentence_cbow_neg( # <<<<<<<<<<<<<< - * negative, cum_table, cum_table_len, codelens, neu1, syn0_vocab, syn0_ngrams, - * syn1neg, size, indexes, subwords_idx, subwords_idx_len, _alpha, work, i, j, k, + /* "gensim/models/fasttext_inner.pyx":474 + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: + * c.next_random = fasttext_fast_sentence_cbow_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, 
c.syn0_vocab, c.syn0_ngrams, + * c.syn1neg, c.size, c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, */ - __pyx_v_next_random = __pyx_f_6gensim_6models_14fasttext_inner_fast_sentence_cbow_neg(__pyx_v_negative, __pyx_v_cum_table, __pyx_v_cum_table_len, __pyx_v_codelens, __pyx_v_neu1, __pyx_v_syn0_vocab, __pyx_v_syn0_ngrams, __pyx_v_syn1neg, __pyx_v_size, __pyx_v_indexes, __pyx_v_subwords_idx, __pyx_v_subwords_idx_len, __pyx_v__alpha, __pyx_v_work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_cbow_mean, __pyx_v_next_random, __pyx_v_word_locks_vocab, __pyx_v_word_locks_ngrams); + __pyx_v_c.next_random = __pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0_vocab, __pyx_v_c.syn0_ngrams, __pyx_v_c.syn1neg, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.subwords_idx, __pyx_v_c.subwords_idx_len, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean, __pyx_v_c.next_random, __pyx_v_c.word_locks_vocab, __pyx_v_c.word_locks_ngrams); - /* "gensim/models/fasttext_inner.pyx":541 - * subwords_idx,subwords_idx_len,_alpha, work, i, j, k, cbow_mean, word_locks_vocab, - * word_locks_ngrams) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_sentence_cbow_neg( - * negative, cum_table, cum_table_len, codelens, neu1, syn0_vocab, syn0_ngrams, + /* "gensim/models/fasttext_inner.pyx":473 + * c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, c.cbow_mean, + * c.word_locks_vocab, c.word_locks_ngrams) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = fasttext_fast_sentence_cbow_neg( + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, */ } } } } - /* "gensim/models/fasttext_inner.pyx":524 + /* "gensim/models/fasttext_inner.pyx":456 * * # release GIL & train on all sentences * with nogil: # <<<<<<<<<<<<<< * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] + * idx_start = c.sentence_idx[sent_idx] */ /*finally:*/ { /*normal exit:*/{ @@ -6403,21 +6156,21 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT } } - /* "gensim/models/fasttext_inner.pyx":547 - * cbow_mean, next_random, word_locks_vocab, word_locks_ngrams) + /* "gensim/models/fasttext_inner.pyx":479 + * c.cbow_mean, c.next_random, c.word_locks_vocab, c.word_locks_ngrams) * * return effective_words # <<<<<<<<<<<<<< * * */ __Pyx_XDECREF(__pyx_r); - __pyx_t_17 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 547, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_17); - __pyx_r = __pyx_t_17; - __pyx_t_17 = 0; + __pyx_t_11 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 479, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_r = __pyx_t_11; + __pyx_t_11 = 0; goto __pyx_L0; - /* "gensim/models/fasttext_inner.pyx":400 + /* "gensim/models/fasttext_inner.pyx":375 * * * def train_batch_cbow(model, sentences, alpha, _work, _neu1): # <<<<<<<<<<<<<< @@ -6429,28 +6182,24 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_2train_batch_cbow(CYT __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_14); - __Pyx_XDECREF(__pyx_t_16); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); __Pyx_XDECREF(__pyx_t_17); 
__Pyx_AddTraceback("gensim.models.fasttext_inner.train_batch_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; - __Pyx_XDECREF(__pyx_v_subword_arrays); __Pyx_XDECREF(__pyx_v_vlookup); __Pyx_XDECREF(__pyx_v_sent); __Pyx_XDECREF(__pyx_v_token); __Pyx_XDECREF(__pyx_v_word); - __Pyx_XDECREF(__pyx_v_subwords); - __Pyx_XDECREF(__pyx_v_word_subwords); __Pyx_XDECREF(__pyx_v_item); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } -/* "gensim/models/fasttext_inner.pyx":550 +/* "gensim/models/fasttext_inner.pyx":482 * * * def init(): # <<<<<<<<<<<<<< @@ -6489,7 +6238,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P int __pyx_t_4; __Pyx_RefNannySetupContext("init", 0); - /* "gensim/models/fasttext_inner.pyx":566 + /* "gensim/models/fasttext_inner.pyx":498 * * cdef int i * cdef float *x = [10.0] # <<<<<<<<<<<<<< @@ -6499,7 +6248,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P __pyx_t_1[0] = ((float)10.0); __pyx_v_x = __pyx_t_1; - /* "gensim/models/fasttext_inner.pyx":567 + /* "gensim/models/fasttext_inner.pyx":499 * cdef int i * cdef float *x = [10.0] * cdef float *y = [0.01] # <<<<<<<<<<<<<< @@ -6509,7 +6258,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P __pyx_t_2[0] = ((float)0.01); __pyx_v_y = __pyx_t_2; - /* "gensim/models/fasttext_inner.pyx":568 + /* "gensim/models/fasttext_inner.pyx":500 * cdef float *x = [10.0] * cdef float *y = [0.01] * cdef float expected = 0.1 # <<<<<<<<<<<<<< @@ -6518,7 +6267,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_expected = ((float)0.1); - /* "gensim/models/fasttext_inner.pyx":569 + /* "gensim/models/fasttext_inner.pyx":501 * cdef float *y = [0.01] * cdef float expected = 0.1 * cdef int size = 1 # <<<<<<<<<<<<<< @@ -6527,7 +6276,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_size = 1; - /* "gensim/models/fasttext_inner.pyx":574 + /* "gensim/models/fasttext_inner.pyx":506 * * # build the sigmoid table * for i in range(EXP_TABLE_SIZE): # <<<<<<<<<<<<<< @@ -6537,7 +6286,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P for (__pyx_t_3 = 0; __pyx_t_3 < 0x3E8; __pyx_t_3+=1) { __pyx_v_i = __pyx_t_3; - /* "gensim/models/fasttext_inner.pyx":575 + /* "gensim/models/fasttext_inner.pyx":507 * # build the sigmoid table * for i in range(EXP_TABLE_SIZE): * EXP_TABLE[i] = exp((i / EXP_TABLE_SIZE * 2 - 1) * MAX_EXP) # <<<<<<<<<<<<<< @@ -6546,7 +6295,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]) = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)exp(((((__pyx_v_i / ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0x3E8)) * 2.0) - 1.0) * 6.0))); - /* "gensim/models/fasttext_inner.pyx":576 + /* "gensim/models/fasttext_inner.pyx":508 * for i in range(EXP_TABLE_SIZE): * EXP_TABLE[i] = exp((i / EXP_TABLE_SIZE * 2 - 1) * MAX_EXP) * EXP_TABLE[i] = (EXP_TABLE[i] / (EXP_TABLE[i] + 1)) # <<<<<<<<<<<<<< @@ -6555,7 +6304,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]) = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)((__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]) / ((__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]) + 1.0))); - /* 
"gensim/models/fasttext_inner.pyx":577 + /* "gensim/models/fasttext_inner.pyx":509 * EXP_TABLE[i] = exp((i / EXP_TABLE_SIZE * 2 - 1) * MAX_EXP) * EXP_TABLE[i] = (EXP_TABLE[i] / (EXP_TABLE[i] + 1)) * LOG_TABLE[i] = log( EXP_TABLE[i] ) # <<<<<<<<<<<<<< @@ -6565,7 +6314,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P (__pyx_v_6gensim_6models_14fasttext_inner_LOG_TABLE[__pyx_v_i]) = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)log((__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]))); } - /* "gensim/models/fasttext_inner.pyx":580 + /* "gensim/models/fasttext_inner.pyx":512 * * # check whether sdot returns double or float * d_res = dsdot(&size, x, &ONE, y, &ONE) # <<<<<<<<<<<<<< @@ -6574,7 +6323,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_d_res = __pyx_v_6gensim_6models_14word2vec_inner_dsdot((&__pyx_v_size), __pyx_v_x, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE), __pyx_v_y, (&__pyx_v_6gensim_6models_14fasttext_inner_ONE)); - /* "gensim/models/fasttext_inner.pyx":581 + /* "gensim/models/fasttext_inner.pyx":513 * # check whether sdot returns double or float * d_res = dsdot(&size, x, &ONE, y, &ONE) * p_res = &d_res # <<<<<<<<<<<<<< @@ -6583,7 +6332,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_p_res = ((float *)(&__pyx_v_d_res)); - /* "gensim/models/fasttext_inner.pyx":582 + /* "gensim/models/fasttext_inner.pyx":514 * d_res = dsdot(&size, x, &ONE, y, &ONE) * p_res = &d_res * if abs(d_res - expected) < 0.0001: # <<<<<<<<<<<<<< @@ -6593,7 +6342,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P __pyx_t_4 = ((fabs((__pyx_v_d_res - __pyx_v_expected)) < 0.0001) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":583 + /* "gensim/models/fasttext_inner.pyx":515 * p_res = &d_res * if abs(d_res - expected) < 0.0001: * our_dot = our_dot_double # <<<<<<<<<<<<<< @@ -6602,7 +6351,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_dot = __pyx_f_6gensim_6models_14word2vec_inner_our_dot_double; - /* "gensim/models/fasttext_inner.pyx":584 + /* "gensim/models/fasttext_inner.pyx":516 * if abs(d_res - expected) < 0.0001: * our_dot = our_dot_double * our_saxpy = saxpy # <<<<<<<<<<<<<< @@ -6611,7 +6360,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy = __pyx_v_6gensim_6models_14word2vec_inner_saxpy; - /* "gensim/models/fasttext_inner.pyx":585 + /* "gensim/models/fasttext_inner.pyx":517 * our_dot = our_dot_double * our_saxpy = saxpy * return 0 # double # <<<<<<<<<<<<<< @@ -6623,7 +6372,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P __pyx_r = __pyx_int_0; goto __pyx_L0; - /* "gensim/models/fasttext_inner.pyx":582 + /* "gensim/models/fasttext_inner.pyx":514 * d_res = dsdot(&size, x, &ONE, y, &ONE) * p_res = &d_res * if abs(d_res - expected) < 0.0001: # <<<<<<<<<<<<<< @@ -6632,7 +6381,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ } - /* "gensim/models/fasttext_inner.pyx":586 + /* "gensim/models/fasttext_inner.pyx":518 * our_saxpy = saxpy * return 0 # double * elif abs(p_res[0] - expected) < 0.0001: # <<<<<<<<<<<<<< @@ -6642,7 +6391,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P __pyx_t_4 = ((fabsf(((__pyx_v_p_res[0]) - 
__pyx_v_expected)) < 0.0001) != 0); if (__pyx_t_4) { - /* "gensim/models/fasttext_inner.pyx":587 + /* "gensim/models/fasttext_inner.pyx":519 * return 0 # double * elif abs(p_res[0] - expected) < 0.0001: * our_dot = our_dot_float # <<<<<<<<<<<<<< @@ -6651,7 +6400,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_dot = __pyx_f_6gensim_6models_14word2vec_inner_our_dot_float; - /* "gensim/models/fasttext_inner.pyx":588 + /* "gensim/models/fasttext_inner.pyx":520 * elif abs(p_res[0] - expected) < 0.0001: * our_dot = our_dot_float * our_saxpy = saxpy # <<<<<<<<<<<<<< @@ -6660,7 +6409,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy = __pyx_v_6gensim_6models_14word2vec_inner_saxpy; - /* "gensim/models/fasttext_inner.pyx":589 + /* "gensim/models/fasttext_inner.pyx":521 * our_dot = our_dot_float * our_saxpy = saxpy * return 1 # float # <<<<<<<<<<<<<< @@ -6672,7 +6421,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P __pyx_r = __pyx_int_1; goto __pyx_L0; - /* "gensim/models/fasttext_inner.pyx":586 + /* "gensim/models/fasttext_inner.pyx":518 * our_saxpy = saxpy * return 0 # double * elif abs(p_res[0] - expected) < 0.0001: # <<<<<<<<<<<<<< @@ -6681,7 +6430,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ } - /* "gensim/models/fasttext_inner.pyx":593 + /* "gensim/models/fasttext_inner.pyx":525 * # neither => use cython loops, no BLAS * # actually, the BLAS is so messed up we'll probably have segfaulted above and never even reach here * our_dot = our_dot_noblas # <<<<<<<<<<<<<< @@ -6691,7 +6440,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P /*else*/ { __pyx_v_6gensim_6models_14word2vec_inner_our_dot = __pyx_f_6gensim_6models_14word2vec_inner_our_dot_noblas; - /* "gensim/models/fasttext_inner.pyx":594 + /* "gensim/models/fasttext_inner.pyx":526 * # actually, the BLAS is so messed up we'll probably have segfaulted above and never even reach here * our_dot = our_dot_noblas * our_saxpy = our_saxpy_noblas # <<<<<<<<<<<<<< @@ -6700,7 +6449,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy = __pyx_f_6gensim_6models_14word2vec_inner_our_saxpy_noblas; - /* "gensim/models/fasttext_inner.pyx":595 + /* "gensim/models/fasttext_inner.pyx":527 * our_dot = our_dot_noblas * our_saxpy = our_saxpy_noblas * return 2 # <<<<<<<<<<<<<< @@ -6713,7 +6462,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P goto __pyx_L0; } - /* "gensim/models/fasttext_inner.pyx":550 + /* "gensim/models/fasttext_inner.pyx":482 * * * def init(): # <<<<<<<<<<<<<< @@ -6728,7 +6477,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. 
* def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -6776,7 +6525,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -6785,7 +6534,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -6794,7 +6543,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -6803,7 +6552,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6817,7 +6566,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -6828,7 +6577,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6837,7 +6586,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -6850,7 +6599,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 229, __pyx_L1_error) - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6859,7 +6608,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6873,7 +6622,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -6884,7 +6633,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6893,7 +6642,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -6906,7 +6655,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 233, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6915,7 +6664,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -6924,7 +6673,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim = ndim # <<<<<<<<<<<<<< @@ -6933,7 
+6682,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -6943,7 +6692,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -6952,7 +6701,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -6961,7 +6710,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -6973,7 +6722,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -6982,7 +6731,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -6992,7 +6741,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< 
@@ -7002,7 +6751,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -7012,7 +6761,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -7023,7 +6772,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -7032,7 +6781,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -7041,7 +6790,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -7050,7 +6799,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -7059,7 +6808,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -7071,7 +6820,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -7084,7 +6833,7 @@ static int 
__pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -7094,7 +6843,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -7104,7 +6853,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7124,7 +6873,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -7141,7 +6890,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7150,7 +6899,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -7163,7 +6912,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 263, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7172,7 +6921,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -7184,7 +6933,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< @@ -7195,7 +6944,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -7206,7 +6955,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -7217,7 +6966,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -7228,7 +6977,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -7239,7 +6988,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -7250,7 +6999,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -7261,7 +7010,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -7272,7 +7021,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -7283,7 +7032,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -7294,7 +7043,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"f"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< @@ -7305,7 +7054,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -7316,7 +7065,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -7327,7 +7076,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -7338,7 +7087,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -7349,7 +7098,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); 
break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -7361,7 +7110,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -7382,7 +7131,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -7391,7 +7140,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -7401,7 +7150,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -7410,7 +7159,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -7420,7 +7169,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -7429,7 +7178,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -7438,7 +7187,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -7448,7 +7197,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -7458,7 +7207,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -7490,7 +7239,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -7514,7 +7263,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -7524,7 +7273,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -7533,7 +7282,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -7542,7 +7291,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -7552,7 +7301,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -7561,7 +7310,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->strides); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -7570,7 +7319,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -7582,7 +7331,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -7596,7 +7345,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -7610,7 +7359,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -7629,7 +7378,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -7643,7 +7392,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -7657,7 +7406,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -7676,7 +7425,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -7690,7 +7439,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -7704,7 +7453,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -7723,7 +7472,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -7737,7 +7486,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -7751,7 +7500,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -7770,7 +7519,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return 
__pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -7784,7 +7533,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -7798,7 +7547,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -7817,7 +7566,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -7831,7 +7580,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -7841,7 +7590,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -7853,7 +7602,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -7862,7 +7611,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -7876,7 +7625,7 @@ static CYTHON_INLINE PyObject 
*__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -7891,7 +7640,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -7920,7 +7669,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -7929,7 +7678,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_endian_detector = 1; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -7938,7 +7687,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -7961,7 +7710,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -7978,7 +7727,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -8013,7 +7762,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -8030,7 +7779,7 @@ static 
CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -8043,7 +7792,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 810, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -8052,7 +7801,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8072,7 +7821,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -8089,7 +7838,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8098,7 +7847,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -8111,7 +7860,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 814, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8120,7 +7869,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -8136,7 +7885,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -8145,7 +7894,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -8154,7 +7903,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -8165,7 +7914,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -8175,7 +7924,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -8185,7 +7934,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -8197,7 +7946,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -8207,7 +7956,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -8220,7 +7969,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(1, 834, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -8229,7 +7978,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -8247,7 +7996,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -8265,7 +8014,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -8283,7 +8032,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -8301,7 +8050,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -8319,7 +8068,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -8337,7 +8086,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -8355,7 +8104,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -8373,7 +8122,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -8391,7 +8140,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -8409,7 +8158,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -8427,7 +8176,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -8445,7 +8194,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -8463,7 +8212,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< @@ -8483,7 +8232,7 @@ static CYTHON_INLINE char 
*__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -8503,7 +8252,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -8523,7 +8272,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -8541,7 +8290,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -8560,7 +8309,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -8569,7 +8318,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -8579,7 +8328,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -8592,7 +8341,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -8602,7 
+8351,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = _util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -8612,7 +8361,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -8637,7 +8386,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -8652,7 +8401,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -8663,7 +8412,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -8672,7 +8421,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -8682,7 +8431,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -8692,7 +8441,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base # <<<<<<<<<<<<<< @@ -8703,7 +8452,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! * baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -8712,7 +8461,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -8721,7 +8470,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -8733,7 +8482,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -8747,7 +8496,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -8757,7 +8506,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -8768,7 +8517,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -8777,7 +8526,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -8791,7 +8540,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -8806,7 +8555,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -8827,7 +8576,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -8843,7 +8592,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -8852,7 +8601,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -8866,7 +8615,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -8881,7 +8630,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -8897,7 +8646,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. 
* cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -8912,7 +8661,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -8935,7 +8684,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -8956,7 +8705,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -8972,7 +8721,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -8981,7 +8730,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -8995,7 +8744,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -9010,7 +8759,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -9026,7 +8775,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -9041,7 +8790,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -9064,7 +8813,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -9085,7 +8834,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -9101,7 +8850,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -9110,7 +8859,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -9124,7 +8873,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -9138,7 +8887,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -9152,7 +8901,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -9167,7 +8916,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to 
import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -9238,18 +8987,13 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1}, {&__pyx_n_s__14, __pyx_k__14, sizeof(__pyx_k__14), 0, 0, 1, 1}, {&__pyx_n_s_alpha, __pyx_k_alpha, sizeof(__pyx_k_alpha), 0, 0, 1, 1}, - {&__pyx_n_s_alpha_2, __pyx_k_alpha_2, sizeof(__pyx_k_alpha_2), 0, 0, 1, 1}, - {&__pyx_n_s_array, __pyx_k_array, sizeof(__pyx_k_array), 0, 0, 1, 1}, {&__pyx_n_s_buckets_word, __pyx_k_buckets_word, sizeof(__pyx_k_buckets_word), 0, 0, 1, 1}, + {&__pyx_n_s_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 0, 1, 1}, {&__pyx_n_s_cbow_mean, __pyx_k_cbow_mean, sizeof(__pyx_k_cbow_mean), 0, 0, 1, 1}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 0, 1, 1}, - {&__pyx_n_s_codelens, __pyx_k_codelens, sizeof(__pyx_k_codelens), 0, 0, 1, 1}, - {&__pyx_n_s_codes, __pyx_k_codes, sizeof(__pyx_k_codes), 0, 0, 1, 1}, {&__pyx_n_s_cum_table, __pyx_k_cum_table, sizeof(__pyx_k_cum_table), 0, 0, 1, 1}, - {&__pyx_n_s_cum_table_len, __pyx_k_cum_table_len, sizeof(__pyx_k_cum_table_len), 0, 0, 1, 1}, {&__pyx_n_s_d_res, __pyx_k_d_res, sizeof(__pyx_k_d_res), 0, 0, 1, 1}, - {&__pyx_n_s_dtype, __pyx_k_dtype, sizeof(__pyx_k_dtype), 0, 0, 1, 1}, {&__pyx_n_s_effective_sentences, __pyx_k_effective_sentences, sizeof(__pyx_k_effective_sentences), 0, 0, 1, 1}, {&__pyx_n_s_effective_words, __pyx_k_effective_words, sizeof(__pyx_k_effective_words), 0, 0, 1, 1}, {&__pyx_n_s_enumerate, __pyx_k_enumerate, sizeof(__pyx_k_enumerate), 0, 0, 1, 1}, @@ -9264,46 +9008,33 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_idx_start, __pyx_k_idx_start, sizeof(__pyx_k_idx_start), 0, 0, 1, 1}, {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1}, - {&__pyx_n_s_indexes, __pyx_k_indexes, sizeof(__pyx_k_indexes), 0, 0, 1, 1}, {&__pyx_n_s_init, __pyx_k_init, sizeof(__pyx_k_init), 0, 0, 1, 1}, {&__pyx_n_s_item, __pyx_k_item, sizeof(__pyx_k_item), 0, 0, 1, 1}, {&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1}, {&__pyx_n_s_k, __pyx_k_k, sizeof(__pyx_k_k), 0, 0, 1, 1}, {&__pyx_n_s_l1, __pyx_k_l1, sizeof(__pyx_k_l1), 0, 0, 1, 1}, - {&__pyx_n_s_l1_2, __pyx_k_l1_2, sizeof(__pyx_k_l1_2), 0, 0, 1, 1}, {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, {&__pyx_n_s_model, __pyx_k_model, sizeof(__pyx_k_model), 0, 0, 1, 1}, {&__pyx_kp_u_ndarray_is_not_C_contiguous, __pyx_k_ndarray_is_not_C_contiguous, sizeof(__pyx_k_ndarray_is_not_C_contiguous), 0, 1, 0, 0}, {&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0}, {&__pyx_n_s_negative, __pyx_k_negative, sizeof(__pyx_k_negative), 0, 0, 1, 1}, {&__pyx_n_s_neu1, __pyx_k_neu1, sizeof(__pyx_k_neu1), 0, 0, 1, 1}, - {&__pyx_n_s_neu1_2, __pyx_k_neu1_2, sizeof(__pyx_k_neu1_2), 0, 0, 1, 1}, - {&__pyx_n_s_next_random, __pyx_k_next_random, sizeof(__pyx_k_next_random), 0, 0, 1, 1}, {&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1}, {&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1}, {&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0}, {&__pyx_kp_s_numpy_core_umath_failed_to_impor, 
__pyx_k_numpy_core_umath_failed_to_impor, sizeof(__pyx_k_numpy_core_umath_failed_to_impor), 0, 0, 1, 0}, {&__pyx_n_s_p_res, __pyx_k_p_res, sizeof(__pyx_k_p_res), 0, 0, 1, 1}, {&__pyx_n_s_point, __pyx_k_point, sizeof(__pyx_k_point), 0, 0, 1, 1}, - {&__pyx_n_s_points, __pyx_k_points, sizeof(__pyx_k_points), 0, 0, 1, 1}, {&__pyx_n_s_randint, __pyx_k_randint, sizeof(__pyx_k_randint), 0, 0, 1, 1}, {&__pyx_n_s_random, __pyx_k_random, sizeof(__pyx_k_random), 0, 0, 1, 1}, {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, - {&__pyx_n_s_reduced_windows, __pyx_k_reduced_windows, sizeof(__pyx_k_reduced_windows), 0, 0, 1, 1}, {&__pyx_n_s_sample, __pyx_k_sample, sizeof(__pyx_k_sample), 0, 0, 1, 1}, {&__pyx_n_s_sample_int, __pyx_k_sample_int, sizeof(__pyx_k_sample_int), 0, 0, 1, 1}, {&__pyx_n_s_scipy_linalg_blas, __pyx_k_scipy_linalg_blas, sizeof(__pyx_k_scipy_linalg_blas), 0, 0, 1, 1}, {&__pyx_n_s_sent, __pyx_k_sent, sizeof(__pyx_k_sent), 0, 0, 1, 1}, {&__pyx_n_s_sent_idx, __pyx_k_sent_idx, sizeof(__pyx_k_sent_idx), 0, 0, 1, 1}, - {&__pyx_n_s_sentence_idx, __pyx_k_sentence_idx, sizeof(__pyx_k_sentence_idx), 0, 0, 1, 1}, {&__pyx_n_s_sentences, __pyx_k_sentences, sizeof(__pyx_k_sentences), 0, 0, 1, 1}, {&__pyx_n_s_size, __pyx_k_size, sizeof(__pyx_k_size), 0, 0, 1, 1}, - {&__pyx_n_s_subword_arrays, __pyx_k_subword_arrays, sizeof(__pyx_k_subword_arrays), 0, 0, 1, 1}, - {&__pyx_n_s_subwords, __pyx_k_subwords, sizeof(__pyx_k_subwords), 0, 0, 1, 1}, - {&__pyx_n_s_subwords_idx, __pyx_k_subwords_idx, sizeof(__pyx_k_subwords_idx), 0, 0, 1, 1}, - {&__pyx_n_s_subwords_idx_len, __pyx_k_subwords_idx_len, sizeof(__pyx_k_subwords_idx_len), 0, 0, 1, 1}, - {&__pyx_n_s_syn0_ngrams, __pyx_k_syn0_ngrams, sizeof(__pyx_k_syn0_ngrams), 0, 0, 1, 1}, - {&__pyx_n_s_syn0_vocab, __pyx_k_syn0_vocab, sizeof(__pyx_k_syn0_vocab), 0, 0, 1, 1}, {&__pyx_n_s_syn1, __pyx_k_syn1, sizeof(__pyx_k_syn1), 0, 0, 1, 1}, {&__pyx_n_s_syn1neg, __pyx_k_syn1neg, sizeof(__pyx_k_syn1neg), 0, 0, 1, 1}, {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, @@ -9311,7 +9042,6 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_train_batch_cbow, __pyx_k_train_batch_cbow, sizeof(__pyx_k_train_batch_cbow), 0, 0, 1, 1}, {&__pyx_n_s_train_batch_sg, __pyx_k_train_batch_sg, sizeof(__pyx_k_train_batch_sg), 0, 0, 1, 1}, {&__pyx_n_s_trainables, __pyx_k_trainables, sizeof(__pyx_k_trainables), 0, 0, 1, 1}, - {&__pyx_n_s_uint32, __pyx_k_uint32, sizeof(__pyx_k_uint32), 0, 0, 1, 1}, {&__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_k_unknown_dtype_code_in_numpy_pxd, sizeof(__pyx_k_unknown_dtype_code_in_numpy_pxd), 0, 1, 0, 0}, {&__pyx_n_s_vector_size, __pyx_k_vector_size, sizeof(__pyx_k_vector_size), 0, 0, 1, 1}, {&__pyx_n_s_vectors_ngrams, __pyx_k_vectors_ngrams, sizeof(__pyx_k_vectors_ngrams), 0, 0, 1, 1}, @@ -9323,11 +9053,8 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_vocabulary, __pyx_k_vocabulary, sizeof(__pyx_k_vocabulary), 0, 0, 1, 1}, {&__pyx_n_s_window, __pyx_k_window, sizeof(__pyx_k_window), 0, 0, 1, 1}, {&__pyx_n_s_word, __pyx_k_word, sizeof(__pyx_k_word), 0, 0, 1, 1}, - {&__pyx_n_s_word_locks_ngrams, __pyx_k_word_locks_ngrams, sizeof(__pyx_k_word_locks_ngrams), 0, 0, 1, 1}, - {&__pyx_n_s_word_locks_vocab, __pyx_k_word_locks_vocab, sizeof(__pyx_k_word_locks_vocab), 0, 0, 1, 1}, - {&__pyx_n_s_word_subwords, __pyx_k_word_subwords, sizeof(__pyx_k_word_subwords), 0, 0, 1, 1}, {&__pyx_n_s_work, __pyx_k_work, sizeof(__pyx_k_work), 0, 0, 1, 1}, - {&__pyx_n_s_work_2, __pyx_k_work_2, 
sizeof(__pyx_k_work_2), 0, 0, 1, 1}, + {&__pyx_n_s_workers, __pyx_k_workers, sizeof(__pyx_k_workers), 0, 0, 1, 1}, {&__pyx_n_s_wv, __pyx_k_wv, sizeof(__pyx_k_wv), 0, 0, 1, 1}, {&__pyx_n_s_x, __pyx_k_x, sizeof(__pyx_k_x), 0, 0, 1, 1}, {&__pyx_n_s_y, __pyx_k_y, sizeof(__pyx_k_y), 0, 0, 1, 1}, @@ -9335,8 +9062,8 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { }; static int __Pyx_InitCachedBuiltins(void) { __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(0, 21, __pyx_L1_error) - __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 61, __pyx_L1_error) - __pyx_builtin_enumerate = __Pyx_GetBuiltinName(__pyx_n_s_enumerate); if (!__pyx_builtin_enumerate) __PYX_ERR(0, 369, __pyx_L1_error) + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 60, __pyx_L1_error) + __pyx_builtin_enumerate = __Pyx_GetBuiltinName(__pyx_n_s_enumerate); if (!__pyx_builtin_enumerate) __PYX_ERR(0, 344, __pyx_L1_error) __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(1, 229, __pyx_L1_error) __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(1, 810, __pyx_L1_error) return 0; @@ -9348,35 +9075,35 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - /* "gensim/models/fasttext_inner.pyx":323 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":268 + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< * * # convert Python structures to primitive types, so we can release the GIL */ - __pyx_tuple_ = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_tuple_ = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_); - __pyx_tuple__2 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 323, __pyx_L1_error) + __pyx_tuple__2 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 268, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__2); __Pyx_GIVEREF(__pyx_tuple__2); - /* "gensim/models/fasttext_inner.pyx":475 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/fasttext_inner.pyx":415 + * c.cum_table_len = len(model.vocabulary.cum_table) + * if c.negative or c.sample: + * c.next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< * - * # convert Python structures to primitive types, so we can release the GIL + * # prepare C structures so we can go "full C" and release the Python GIL */ - __pyx_tuple__3 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 475, __pyx_L1_error) + __pyx_tuple__3 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); 
if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 415, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3); - __pyx_tuple__4 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 475, __pyx_L1_error) + __pyx_tuple__4 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 415, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -9387,7 +9114,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -9398,7 +9125,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__6); __Pyx_GIVEREF(__pyx_tuple__6); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -9409,7 +9136,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -9420,7 +9147,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -9431,7 +9158,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__9); __Pyx_GIVEREF(__pyx_tuple__9); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -9442,7 +9169,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__10); __Pyx_GIVEREF(__pyx_tuple__10); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -9453,7 +9180,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__11); __Pyx_GIVEREF(__pyx_tuple__11); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -9464,7 +9191,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__12); __Pyx_GIVEREF(__pyx_tuple__12); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -9473,41 +9200,41 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__13); __Pyx_GIVEREF(__pyx_tuple__13); - /* "gensim/models/fasttext_inner.pyx":246 + /* "gensim/models/fasttext_inner.pyx":275 * * * def train_batch_sg(model, sentences, alpha, _work, _l1): # <<<<<<<<<<<<<< * """Update skip-gram model by training on a sequence of sentences. * */ - __pyx_tuple__15 = PyTuple_Pack(46, __pyx_n_s_model, __pyx_n_s_sentences, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_l1, __pyx_n_s_hs, __pyx_n_s_negative, __pyx_n_s_sample, __pyx_n_s_syn0_vocab, __pyx_n_s_word_locks_vocab, __pyx_n_s_syn0_ngrams, __pyx_n_s_word_locks_ngrams, __pyx_n_s_work_2, __pyx_n_s_l1_2, __pyx_n_s_alpha_2, __pyx_n_s_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_reduced_windows, __pyx_n_s_sentence_idx, __pyx_n_s_window, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_syn1neg, __pyx_n_s_cum_table, __pyx_n_s_cum_table_len, __pyx_n_s_next_random, __pyx_n_s_subwords_idx_len, __pyx_n_s_subwords_idx, __pyx_n_s_subword_arrays, __pyx_n_s_vlookup, __pyx_n_s_sent, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_subwords, __pyx_n_s_word_subwords, __pyx_n_s_item); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 246, __pyx_L1_error) + __pyx_tuple__15 = PyTuple_Pack(19, __pyx_n_s_model, __pyx_n_s_sentences, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_l1, __pyx_n_s_c, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_vlookup, __pyx_n_s_sent, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_item); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 275, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__15); __Pyx_GIVEREF(__pyx_tuple__15); - __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(5, 0, 46, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_fasttext_inner_pyx, __pyx_n_s_train_batch_sg, 246, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(0, 246, __pyx_L1_error) + __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(5, 0, 19, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, 
__pyx_kp_s_gensim_models_fasttext_inner_pyx, __pyx_n_s_train_batch_sg, 275, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(0, 275, __pyx_L1_error) - /* "gensim/models/fasttext_inner.pyx":400 + /* "gensim/models/fasttext_inner.pyx":375 * * * def train_batch_cbow(model, sentences, alpha, _work, _neu1): # <<<<<<<<<<<<<< * """Update the CBOW model by training on a sequence of sentences. * */ - __pyx_tuple__17 = PyTuple_Pack(47, __pyx_n_s_model, __pyx_n_s_sentences, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_hs, __pyx_n_s_negative, __pyx_n_s_sample, __pyx_n_s_cbow_mean, __pyx_n_s_syn0_vocab, __pyx_n_s_word_locks_vocab, __pyx_n_s_syn0_ngrams, __pyx_n_s_word_locks_ngrams, __pyx_n_s_work_2, __pyx_n_s_alpha_2, __pyx_n_s_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_reduced_windows, __pyx_n_s_sentence_idx, __pyx_n_s_window, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_syn1neg, __pyx_n_s_cum_table, __pyx_n_s_cum_table_len, __pyx_n_s_next_random, __pyx_n_s_subwords_idx_len, __pyx_n_s_subwords_idx, __pyx_n_s_subword_arrays, __pyx_n_s_neu1_2, __pyx_n_s_vlookup, __pyx_n_s_sent, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_subwords, __pyx_n_s_word_subwords, __pyx_n_s_item); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 400, __pyx_L1_error) + __pyx_tuple__17 = PyTuple_Pack(19, __pyx_n_s_model, __pyx_n_s_sentences, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_c, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_vlookup, __pyx_n_s_sent, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_item); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 375, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__17); __Pyx_GIVEREF(__pyx_tuple__17); - __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(5, 0, 47, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_fasttext_inner_pyx, __pyx_n_s_train_batch_cbow, 400, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 400, __pyx_L1_error) + __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(5, 0, 19, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_fasttext_inner_pyx, __pyx_n_s_train_batch_cbow, 375, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 375, __pyx_L1_error) - /* "gensim/models/fasttext_inner.pyx":550 + /* "gensim/models/fasttext_inner.pyx":482 * * * def init(): # <<<<<<<<<<<<<< * """Precompute function `sigmoid(x) = 1 / (1 + exp(-x))`, for x values discretized into table EXP_TABLE. * Also calculate log(sigmoid(x)) into LOG_TABLE. 
*/ - __pyx_tuple__19 = PyTuple_Pack(7, __pyx_n_s_i, __pyx_n_s_x, __pyx_n_s_y, __pyx_n_s_expected, __pyx_n_s_size, __pyx_n_s_d_res, __pyx_n_s_p_res); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 550, __pyx_L1_error) + __pyx_tuple__19 = PyTuple_Pack(7, __pyx_n_s_i, __pyx_n_s_x, __pyx_n_s_y, __pyx_n_s_expected, __pyx_n_s_size, __pyx_n_s_d_res, __pyx_n_s_p_res); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 482, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__19); __Pyx_GIVEREF(__pyx_tuple__19); - __pyx_codeobj__20 = (PyObject*)__Pyx_PyCode_New(0, 0, 7, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_fasttext_inner_pyx, __pyx_n_s_init, 550, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__20)) __PYX_ERR(0, 550, __pyx_L1_error) + __pyx_codeobj__20 = (PyObject*)__Pyx_PyCode_New(0, 0, 7, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_fasttext_inner_pyx, __pyx_n_s_init, 482, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__20)) __PYX_ERR(0, 482, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; @@ -9555,8 +9282,16 @@ static int __Pyx_modinit_function_export_code(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); /*--- Function export code ---*/ + if (__Pyx_ExportFunction("fasttext_fast_sentence_sg_neg", (void (*)(void))__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("fasttext_fast_sentence_sg_hs", (void (*)(void))__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_sg_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("fasttext_fast_sentence_cbow_neg", (void (*)(void))__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, 
int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("fasttext_fast_sentence_cbow_hs", (void (*)(void))__pyx_f_6gensim_6models_14fasttext_inner_fasttext_fast_sentence_cbow_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint32_t **, int const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("init_ft_config", (void (*)(void))__pyx_f_6gensim_6models_14fasttext_inner_init_ft_config, "PyObject *(struct __pyx_t_6gensim_6models_14fasttext_inner_FastTextConfig *, PyObject *, PyObject *, PyObject *, PyObject *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; } static int __Pyx_modinit_type_init_code(void) { @@ -9652,7 +9387,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) +#elif defined(__GNUC__) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -9806,7 +9541,7 @@ if (!__Pyx_RefNanny) { /*--- Global type/function init code ---*/ (void)__Pyx_modinit_global_init_code(); (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); + if (unlikely(__Pyx_modinit_function_export_code() != 0)) goto __pyx_L1_error; (void)__Pyx_modinit_type_init_code(); if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error; if (unlikely(__Pyx_modinit_variable_import_code() != 0)) goto __pyx_L1_error; @@ -9970,66 +9705,66 @@ if (!__Pyx_RefNanny) { * cdef int ONE = 1 * cdef REAL_t ONEF = 1.0 # <<<<<<<<<<<<<< * - * cdef unsigned long long fast_sentence_sg_neg( + * cdef unsigned long long fasttext_fast_sentence_sg_neg( */ __pyx_v_6gensim_6models_14fasttext_inner_ONEF = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)1.0); - /* "gensim/models/fasttext_inner.pyx":246 + /* "gensim/models/fasttext_inner.pyx":275 * * * def train_batch_sg(model, sentences, alpha, _work, _l1): # <<<<<<<<<<<<<< * """Update skip-gram model by training on a sequence of sentences. 
* */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14fasttext_inner_1train_batch_sg, NULL, __pyx_n_s_gensim_models_fasttext_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 246, __pyx_L1_error) + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14fasttext_inner_1train_batch_sg, NULL, __pyx_n_s_gensim_models_fasttext_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 275, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_batch_sg, __pyx_t_1) < 0) __PYX_ERR(0, 246, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_batch_sg, __pyx_t_1) < 0) __PYX_ERR(0, 275, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":400 + /* "gensim/models/fasttext_inner.pyx":375 * * * def train_batch_cbow(model, sentences, alpha, _work, _neu1): # <<<<<<<<<<<<<< * """Update the CBOW model by training on a sequence of sentences. * */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14fasttext_inner_3train_batch_cbow, NULL, __pyx_n_s_gensim_models_fasttext_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 400, __pyx_L1_error) + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14fasttext_inner_3train_batch_cbow, NULL, __pyx_n_s_gensim_models_fasttext_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 375, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_batch_cbow, __pyx_t_1) < 0) __PYX_ERR(0, 400, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_batch_cbow, __pyx_t_1) < 0) __PYX_ERR(0, 375, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":550 + /* "gensim/models/fasttext_inner.pyx":482 * * * def init(): # <<<<<<<<<<<<<< * """Precompute function `sigmoid(x) = 1 / (1 + exp(-x))`, for x values discretized into table EXP_TABLE. * Also calculate log(sigmoid(x)) into LOG_TABLE. 
*/ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14fasttext_inner_5init, NULL, __pyx_n_s_gensim_models_fasttext_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 550, __pyx_L1_error) + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14fasttext_inner_5init, NULL, __pyx_n_s_gensim_models_fasttext_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 482, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_init, __pyx_t_1) < 0) __PYX_ERR(0, 550, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_init, __pyx_t_1) < 0) __PYX_ERR(0, 482, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/fasttext_inner.pyx":597 + /* "gensim/models/fasttext_inner.pyx":529 * return 2 * * FAST_VERSION = init() # initialize the module # <<<<<<<<<<<<<< * MAX_WORDS_IN_BATCH = MAX_SENTENCE_LEN */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_init); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 597, __pyx_L1_error) + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_init); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 529, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyObject_CallNoArg(__pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 597, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_CallNoArg(__pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 529, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_FAST_VERSION, __pyx_t_7) < 0) __PYX_ERR(0, 597, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_FAST_VERSION, __pyx_t_7) < 0) __PYX_ERR(0, 529, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "gensim/models/fasttext_inner.pyx":598 + /* "gensim/models/fasttext_inner.pyx":530 * * FAST_VERSION = init() # initialize the module * MAX_WORDS_IN_BATCH = MAX_SENTENCE_LEN # <<<<<<<<<<<<<< */ - if (PyDict_SetItem(__pyx_d, __pyx_n_s_MAX_WORDS_IN_BATCH, __pyx_int_10000) < 0) __PYX_ERR(0, 598, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_MAX_WORDS_IN_BATCH, __pyx_int_10000) < 0) __PYX_ERR(0, 530, __pyx_L1_error) /* "gensim/models/fasttext_inner.pyx":1 * #!/usr/bin/env cython # <<<<<<<<<<<<<< @@ -10041,7 +9776,7 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_7) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -10123,6 +9858,39 @@ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { return result; } +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + 
Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + /* RaiseArgTupleInvalid */ static void __Pyx_RaiseArgtupleInvalid( const char* func_name, @@ -10265,39 +10033,6 @@ static int __Pyx_ParseOptionalKeywords( return -1; } -/* ExtTypeTest */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - if (likely(__Pyx_TypeCheck(obj, type))) - return 1; - PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", - Py_TYPE(obj)->tp_name, type->tp_name); - return 0; -} - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - /* GetItemInt */ static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { PyObject *r; @@ -10414,35 +10149,8 @@ static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) { } #endif -/* GetModuleGlobalName */ -static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - if (likely(result)) { - Py_INCREF(result); - } else if (unlikely(PyErr_Occurred())) { - result = NULL; - } else { -#else - result = PyDict_GetItem(__pyx_d, name); - if (likely(result)) { - Py_INCREF(result); - } else { -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - if (!result) { - PyErr_Clear(); -#endif - result = __Pyx_GetBuiltinName(name); - } - return result; -} - /* PyFunctionFastCall */ - #if CYTHON_FAST_PYCALL +#if CYTHON_FAST_PYCALL #include "frameobject.h" static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, PyObject *globals) { @@ -10562,7 +10270,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, #endif /* PyCFunctionFastCall */ - #if CYTHON_FAST_PYCCALL +#if CYTHON_FAST_PYCCALL static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { PyCFunctionObject *func = (PyCFunctionObject*)func_obj; PyCFunction meth = PyCFunction_GET_FUNCTION(func); @@ -10585,7 +10293,7 @@ static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, P #endif /* PyErrFetchRestore */ - #if CYTHON_FAST_THREAD_STATE +#if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->curexc_type; @@ -10609,7 +10317,7 @@ static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject #endif /* RaiseException */ - #if PY_MAJOR_VERSION < 3 +#if PY_MAJOR_VERSION < 3 static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, 
CYTHON_UNUSED PyObject *cause) { __Pyx_PyThreadState_declare @@ -10768,7 +10476,7 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject #endif /* PyObjectCallMethO */ - #if CYTHON_COMPILING_IN_CPYTHON +#if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { PyObject *self, *result; PyCFunction cfunc; @@ -10788,7 +10496,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject #endif /* PyObjectCallOneArg */ - #if CYTHON_COMPILING_IN_CPYTHON +#if CYTHON_COMPILING_IN_CPYTHON static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { PyObject *result; PyObject *args = PyTuple_New(1); @@ -10828,7 +10536,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObjec #endif /* DictGetItem */ - #if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { PyObject *value; value = PyDict_GetItemWithError(d, key); @@ -10847,25 +10555,25 @@ static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { #endif /* RaiseTooManyValuesToUnpack */ - static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { PyErr_Format(PyExc_ValueError, "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); } /* RaiseNeedMoreValuesToUnpack */ - static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { PyErr_Format(PyExc_ValueError, "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", index, (index == 1) ? 
"" : "s"); } /* RaiseNoneIterError */ - static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); } /* SaveResetException */ - #if CYTHON_FAST_THREAD_STATE +#if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { #if PY_VERSION_HEX >= 0x030700A2 *type = tstate->exc_state.exc_type; @@ -10904,7 +10612,7 @@ static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject #endif /* PyErrExceptionMatches */ - #if CYTHON_FAST_THREAD_STATE +#if CYTHON_FAST_THREAD_STATE static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(tuple); @@ -10929,7 +10637,7 @@ static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tsta #endif /* GetException */ - #if CYTHON_FAST_THREAD_STATE +#if CYTHON_FAST_THREAD_STATE static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { #else static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) { @@ -10999,7 +10707,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) } /* Import */ - static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { PyObject *empty_list = 0; PyObject *module = 0; PyObject *global_dict = 0; @@ -11064,7 +10772,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) } /* ImportFrom */ - static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Format(PyExc_ImportError, @@ -11077,6 +10785,33 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) return value; } +/* GetModuleGlobalName */ + static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + if (likely(result)) { + Py_INCREF(result); + } else if (unlikely(PyErr_Occurred())) { + result = NULL; + } else { +#else + result = PyDict_GetItem(__pyx_d, name); + if (likely(result)) { + Py_INCREF(result); + } else { +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + if (!result) { + PyErr_Clear(); +#endif + result = __Pyx_GetBuiltinName(name); + } + return result; +} + /* PyObjectCallNoArg */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { @@ -11106,9 +10841,6 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -11980,19 +11712,19 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, } /* CIntFromPy */ - static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { - const long 
neg_one = (long) -1, const_zero = (long) 0; + static CYTHON_INLINE npy_uint32 __Pyx_PyInt_As_npy_uint32(PyObject *x) { + const npy_uint32 neg_one = (npy_uint32) -1, const_zero = (npy_uint32) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { - if (sizeof(long) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + if (sizeof(npy_uint32) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(npy_uint32, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } - return (long) val; + return (npy_uint32) val; } } else #endif @@ -12001,32 +11733,32 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { - case 0: return (long) 0; - case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 0: return (npy_uint32) 0; + case 1: __PYX_VERIFY_RETURN_INT(npy_uint32, digit, digits[0]) case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) >= 2 * PyLong_SHIFT) { + return (npy_uint32) (((((npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); } } break; case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) >= 3 * PyLong_SHIFT) { + return (npy_uint32) (((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); } } break; case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) >= 4 * PyLong_SHIFT) { + return (npy_uint32) (((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | 
(npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); } } break; @@ -12040,86 +11772,86 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) - return (long) -1; + return (npy_uint32) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif - if (sizeof(long) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) + if (sizeof(npy_uint32) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) + } else if (sizeof(npy_uint32) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { - case 0: return (long) 0; - case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case 0: return (npy_uint32) 0; + case -1: __PYX_VERIFY_RETURN_INT(npy_uint32, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(npy_uint32, digit, +digits[0]) case -2: - if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { + return (npy_uint32) (((npy_uint32)-1)*(((((npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); } } break; case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { + return (npy_uint32) ((((((npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); } } break; case -3: - if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | 
(unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { + return (npy_uint32) (((npy_uint32)-1)*(((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); } } break; case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { + return (npy_uint32) ((((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); } } break; case -4: - if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 4 * PyLong_SHIFT) { + return (npy_uint32) (((npy_uint32)-1)*(((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | (npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); } } break; case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(npy_uint32) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 4 * PyLong_SHIFT) { + return (npy_uint32) ((((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | (npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); } } break; } #endif - if (sizeof(long) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) + if (sizeof(npy_uint32) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, long, 
PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) + } else if (sizeof(npy_uint32) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } @@ -12128,7 +11860,7 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else - long val; + npy_uint32 val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { @@ -12148,24 +11880,24 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, return val; } #endif - return (long) -1; + return (npy_uint32) -1; } } else { - long val; + npy_uint32 val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); + if (!tmp) return (npy_uint32) -1; + val = __Pyx_PyInt_As_npy_uint32(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; + "value too large to convert to npy_uint32"); + return (npy_uint32) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; + "can't convert negative value to npy_uint32"); + return (npy_uint32) -1; } /* CIntFromPy */ @@ -12547,19 +12279,19 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, } /* CIntFromPy */ - static CYTHON_INLINE npy_uint32 __Pyx_PyInt_As_npy_uint32(PyObject *x) { - const npy_uint32 neg_one = (npy_uint32) -1, const_zero = (npy_uint32) 0; + static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) -1, const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { - if (sizeof(npy_uint32) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(npy_uint32, long, PyInt_AS_LONG(x)) + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } - return (npy_uint32) val; + return (long) val; } } else #endif @@ -12568,32 +12300,32 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { - case 0: return (npy_uint32) 0; - case 1: __PYX_VERIFY_RETURN_INT(npy_uint32, digit, digits[0]) + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) case 2: - if (8 * sizeof(npy_uint32) > 1 * PyLong_SHIFT) { + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) >= 2 * PyLong_SHIFT) { - return (npy_uint32) (((((npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 3: - if (8 * sizeof(npy_uint32) > 2 * PyLong_SHIFT) { + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) 
{ - __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) >= 3 * PyLong_SHIFT) { - return (npy_uint32) (((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 4: - if (8 * sizeof(npy_uint32) > 3 * PyLong_SHIFT) { + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) >= 4 * PyLong_SHIFT) { - return (npy_uint32) (((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | (npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; @@ -12607,86 +12339,86 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) - return (npy_uint32) -1; + return (long) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif - if (sizeof(npy_uint32) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, unsigned long, PyLong_AsUnsignedLong(x)) + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG - } else if (sizeof(npy_uint32) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { - case 0: return (npy_uint32) 0; - case -1: __PYX_VERIFY_RETURN_INT(npy_uint32, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(npy_uint32, digit, +digits[0]) + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) case -2: - if (8 * sizeof(npy_uint32) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { - return (npy_uint32) (((npy_uint32)-1)*(((((npy_uint32)digits[1]) 
<< PyLong_SHIFT) | (npy_uint32)digits[0]))); + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 2: - if (8 * sizeof(npy_uint32) > 1 * PyLong_SHIFT) { + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { - return (npy_uint32) ((((((npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -3: - if (8 * sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { - return (npy_uint32) (((npy_uint32)-1)*(((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 3: - if (8 * sizeof(npy_uint32) > 2 * PyLong_SHIFT) { + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { - return (npy_uint32) ((((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -4: - if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) - 1 > 4 * PyLong_SHIFT) { - return (npy_uint32) (((npy_uint32)-1)*(((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | (npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + __PYX_VERIFY_RETURN_INT(long, long, -(long) 
(((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 4: - if (8 * sizeof(npy_uint32) > 3 * PyLong_SHIFT) { + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(npy_uint32) - 1 > 4 * PyLong_SHIFT) { - return (npy_uint32) ((((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | (npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; } #endif - if (sizeof(npy_uint32) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, long, PyLong_AsLong(x)) + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG - } else if (sizeof(npy_uint32) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, PY_LONG_LONG, PyLong_AsLongLong(x)) + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } @@ -12695,7 +12427,7 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else - npy_uint32 val; + long val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { @@ -12715,24 +12447,24 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line, return val; } #endif - return (npy_uint32) -1; + return (long) -1; } } else { - npy_uint32 val; + long val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (npy_uint32) -1; - val = __Pyx_PyInt_As_npy_uint32(tmp); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, - "value too large to convert to npy_uint32"); - return (npy_uint32) -1; + "value too large to convert to long"); + return (long) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to npy_uint32"); - return (npy_uint32) -1; + "can't convert negative value to long"); + return (long) -1; } /* FastTypeChecks */ @@ -12823,6 +12555,43 @@ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObj return 0; } +/* FunctionExport */ + static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + d = PyObject_GetAttrString(__pyx_m, (char *)"__pyx_capi__"); + if (!d) { + PyErr_Clear(); + d 
= PyDict_New(); + if (!d) + goto bad; + Py_INCREF(d); + if (PyModule_AddObject(__pyx_m, (char *)"__pyx_capi__", d) < 0) + goto bad; + } + tmp.fp = f; +#if PY_VERSION_HEX >= 0x02070000 + cobj = PyCapsule_New(tmp.p, sig, 0); +#else + cobj = PyCObject_FromVoidPtrAndDesc(tmp.p, (void *)sig, 0); +#endif + if (!cobj) + goto bad; + if (PyDict_SetItemString(d, name, cobj) < 0) + goto bad; + Py_DECREF(cobj); + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(cobj); + Py_XDECREF(d); + return -1; +} + /* ModuleImport */ #ifndef __PYX_HAVE_RT_ImportModule #define __PYX_HAVE_RT_ImportModule @@ -13250,9 +13019,6 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_DECREF(x); return ival; } -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } diff --git a/gensim/models/fasttext_inner.pxd b/gensim/models/fasttext_inner.pxd new file mode 100644 index 0000000000..d40010ec63 --- /dev/null +++ b/gensim/models/fasttext_inner.pxd @@ -0,0 +1,88 @@ +#!/usr/bin/env cython +# cython: boundscheck=False +# cython: wraparound=False +# cython: cdivision=True +# cython: embedsignature=True +# coding: utf-8 +# +# shared type definitions for fasttext_inner +# used from fasttext_corpusfile +# + + +import numpy as np +cimport numpy as np + +from word2vec_inner cimport REAL_t + + +DEF MAX_SENTENCE_LEN = 10000 + + +cdef struct FastTextConfig: + int hs, negative, sample, size, window, cbow_mean, workers + REAL_t alpha + + REAL_t *syn0_vocab + REAL_t *word_locks_vocab + REAL_t *syn0_ngrams + REAL_t *word_locks_ngrams + + REAL_t *work + REAL_t *neu1 + + int codelens[MAX_SENTENCE_LEN] + np.uint32_t indexes[MAX_SENTENCE_LEN] + np.uint32_t reduced_windows[MAX_SENTENCE_LEN] + int sentence_idx[MAX_SENTENCE_LEN + 1] + + # For hierarchical softmax + REAL_t *syn1 + np.uint32_t *points[MAX_SENTENCE_LEN] + np.uint8_t *codes[MAX_SENTENCE_LEN] + + # For negative sampling + REAL_t *syn1neg + np.uint32_t *cum_table + unsigned long long cum_table_len + # for sampling (negative and frequent-word downsampling) + unsigned long long next_random + + # For passing subwords information as C objects for nogil + int subwords_idx_len[MAX_SENTENCE_LEN] + np.uint32_t *subwords_idx[MAX_SENTENCE_LEN] + + +cdef unsigned long long fasttext_fast_sentence_sg_neg( + const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, + REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1neg, const int size, + const np.uint32_t word_index, const np.uint32_t word2_index, const np.uint32_t *subwords_index, + const np.uint32_t subwords_len, const REAL_t alpha, REAL_t *work, REAL_t *l1, unsigned long long next_random, + REAL_t *word_locks_vocab, REAL_t *word_locks_ngrams) nogil + + +cdef void fasttext_fast_sentence_sg_hs( + const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, + REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1, const int size, + const np.uint32_t word2_index, const np.uint32_t *subwords_index, const np.uint32_t subwords_len, + const REAL_t alpha, REAL_t *work, REAL_t *l1, REAL_t *word_locks_vocab, + REAL_t *word_locks_ngrams) nogil + + +cdef unsigned long long fasttext_fast_sentence_cbow_neg( + const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, int codelens[MAX_SENTENCE_LEN], + REAL_t *neu1, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1neg, const int size, + const np.uint32_t 
indexes[MAX_SENTENCE_LEN], np.uint32_t *subwords_idx[MAX_SENTENCE_LEN], + const int subwords_idx_len[MAX_SENTENCE_LEN], const REAL_t alpha, REAL_t *work, + int i, int j, int k, int cbow_mean, unsigned long long next_random, REAL_t *word_locks_vocab, REAL_t *word_locks_ngrams) nogil + + +cdef void fasttext_fast_sentence_cbow_hs( + const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], + REAL_t *neu1, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1, const int size, + const np.uint32_t indexes[MAX_SENTENCE_LEN], np.uint32_t *subwords_idx[MAX_SENTENCE_LEN], + const int subwords_idx_len[MAX_SENTENCE_LEN], const REAL_t alpha, REAL_t *work, + int i, int j, int k, int cbow_mean, REAL_t *word_locks_vocab, REAL_t *word_locks_ngrams) nogil + + +cdef init_ft_config(FastTextConfig *c, model, alpha, _work, _neu1) \ No newline at end of file diff --git a/gensim/models/fasttext_inner.pyx b/gensim/models/fasttext_inner.pyx index 0cdd20a4e4..f5ceb315c6 100644 --- a/gensim/models/fasttext_inner.pyx +++ b/gensim/models/fasttext_inner.pyx @@ -39,15 +39,14 @@ cdef REAL_t[EXP_TABLE_SIZE] LOG_TABLE cdef int ONE = 1 cdef REAL_t ONEF = 1.0 -cdef unsigned long long fast_sentence_sg_neg( +cdef unsigned long long fasttext_fast_sentence_sg_neg( const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1neg, const int size, - const np.uint32_t word_index, const np.uint32_t *subwords_index, const np.uint32_t subwords_len, - const REAL_t alpha, REAL_t *work, REAL_t *l1, unsigned long long next_random, REAL_t *word_locks_vocab, - REAL_t *word_locks_ngrams) nogil: + const np.uint32_t word_index, const np.uint32_t word2_index, const np.uint32_t *subwords_index, + const np.uint32_t subwords_len, const REAL_t alpha, REAL_t *work, REAL_t *l1, unsigned long long next_random, + REAL_t *word_locks_vocab, REAL_t *word_locks_ngrams) nogil: cdef long long a - cdef np.uint32_t word2_index = subwords_index[0] cdef long long row1 = word2_index * size, row2 cdef unsigned long long modulo = 281474976710655ULL cdef REAL_t f, g, label, f_dot, log_e_f_dot @@ -58,7 +57,7 @@ cdef unsigned long long fast_sentence_sg_neg( memset(l1, 0, size * cython.sizeof(REAL_t)) scopy(&size, &syn0_vocab[row1], &ONE, l1, &ONE) - for d in range(1, subwords_len): + for d in range(subwords_len): our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) cdef REAL_t norm_factor = ONEF / subwords_len sscal(&size, &norm_factor, l1 , &ONE) @@ -83,20 +82,19 @@ cdef unsigned long long fast_sentence_sg_neg( our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) our_saxpy(&size, &g, l1, &ONE, &syn1neg[row2], &ONE) our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) - for d in range(1, subwords_len): + for d in range(subwords_len): our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) return next_random -cdef void fast_sentence_sg_hs( +cdef void fasttext_fast_sentence_sg_hs( const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1, const int size, - const np.uint32_t *subwords_index, const np.uint32_t subwords_len, + const np.uint32_t word2_index, const np.uint32_t *subwords_index, const np.uint32_t subwords_len, const REAL_t alpha, REAL_t *work, REAL_t *l1, REAL_t *word_locks_vocab, REAL_t *word_locks_ngrams) nogil: cdef long long a, b - cdef np.uint32_t word2_index = 
subwords_index[0] cdef long long row1 = word2_index * size, row2, sgn cdef REAL_t f, g, f_dot, lprob @@ -104,7 +102,7 @@ cdef void fast_sentence_sg_hs( memset(l1, 0, size * cython.sizeof(REAL_t)) scopy(&size, &syn0_vocab[row1], &ONE, l1, &ONE) - for d in range(1, subwords_len): + for d in range(subwords_len): our_saxpy(&size, &ONEF, &syn0_ngrams[subwords_index[d] * size], &ONE, l1, &ONE) cdef REAL_t norm_factor = ONEF / subwords_len sscal(&size, &norm_factor, l1 , &ONE) @@ -121,14 +119,14 @@ cdef void fast_sentence_sg_hs( our_saxpy(&size, &g, l1, &ONE, &syn1[row2], &ONE) our_saxpy(&size, &word_locks_vocab[word2_index], work, &ONE, &syn0_vocab[row1], &ONE) - for d in range(1, subwords_len): + for d in range(subwords_len): our_saxpy(&size, &word_locks_ngrams[subwords_index[d]], work, &ONE, &syn0_ngrams[subwords_index[d]*size], &ONE) -cdef unsigned long long fast_sentence_cbow_neg( +cdef unsigned long long fasttext_fast_sentence_cbow_neg( const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, int codelens[MAX_SENTENCE_LEN], REAL_t *neu1, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1neg, const int size, - const np.uint32_t indexes[MAX_SENTENCE_LEN], const np.uint32_t *subwords_idx[MAX_SENTENCE_LEN], + const np.uint32_t indexes[MAX_SENTENCE_LEN], np.uint32_t *subwords_idx[MAX_SENTENCE_LEN], const int subwords_idx_len[MAX_SENTENCE_LEN], const REAL_t alpha, REAL_t *work, int i, int j, int k, int cbow_mean, unsigned long long next_random, REAL_t *word_locks_vocab, REAL_t *word_locks_ngrams) nogil: @@ -193,10 +191,10 @@ cdef unsigned long long fast_sentence_cbow_neg( return next_random -cdef void fast_sentence_cbow_hs( +cdef void fasttext_fast_sentence_cbow_hs( const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], REAL_t *neu1, REAL_t *syn0_vocab, REAL_t *syn0_ngrams, REAL_t *syn1, const int size, - const np.uint32_t indexes[MAX_SENTENCE_LEN], const np.uint32_t *subwords_idx[MAX_SENTENCE_LEN], + const np.uint32_t indexes[MAX_SENTENCE_LEN], np.uint32_t *subwords_idx[MAX_SENTENCE_LEN], const int subwords_idx_len[MAX_SENTENCE_LEN],const REAL_t alpha, REAL_t *work, int i, int j, int k, int cbow_mean, REAL_t *word_locks_vocab, REAL_t *word_locks_ngrams) nogil: @@ -243,6 +241,37 @@ cdef void fast_sentence_cbow_hs( our_saxpy(&size, &word_locks_ngrams[subwords_idx[m][d]], work, &ONE, &syn0_ngrams[subwords_idx[m][d]*size], &ONE) +cdef init_ft_config(FastTextConfig *c, model, alpha, _work, _neu1): + c[0].hs = model.hs + c[0].negative = model.negative + c[0].sample = (model.vocabulary.sample != 0) + c[0].cbow_mean = model.cbow_mean + c[0].window = model.window + c[0].workers = model.workers + + c[0].syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) + c[0].word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) + c[0].syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) + c[0].word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) + + c[0].alpha = alpha + c[0].size = model.wv.vector_size + + if c[0].hs: + c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) + + if c[0].negative: + c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + c[0].cum_table_len = len(model.vocabulary.cum_table) + if c[0].negative or c[0].sample: + c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + + # convert Python structures to primitive types, so we can release the GIL + c[0].work = 
np.PyArray_DATA(_work) + c[0].neu1 = np.PyArray_DATA(_neu1) + + def train_batch_sg(model, sentences, alpha, _work, _l1): """Update skip-gram model by training on a sequence of sentences. @@ -268,67 +297,17 @@ def train_batch_sg(model, sentences, alpha, _work, _l1): Effective number of words trained. """ - cdef int hs = model.hs - cdef int negative = model.negative - cdef int sample = (model.vocabulary.sample != 0) - - cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) - cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) - cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) - cdef REAL_t *word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) - - cdef REAL_t *work - cdef REAL_t *l1 - - cdef REAL_t _alpha = alpha - cdef int size = model.wv.vector_size - - cdef int codelens[MAX_SENTENCE_LEN] - cdef np.uint32_t indexes[MAX_SENTENCE_LEN] - cdef np.uint32_t reduced_windows[MAX_SENTENCE_LEN] - cdef int sentence_idx[MAX_SENTENCE_LEN + 1] - cdef int window = model.window + cdef FastTextConfig c cdef int i, j, k cdef int effective_words = 0, effective_sentences = 0 cdef int sent_idx, idx_start, idx_end - # For hierarchical softmax - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_SENTENCE_LEN] - cdef np.uint8_t *codes[MAX_SENTENCE_LEN] - - # For negative sampling - cdef REAL_t *syn1neg - cdef np.uint32_t *cum_table - cdef unsigned long long cum_table_len - # for sampling (negative and frequent-word downsampling) - cdef unsigned long long next_random - - # For passing subwords information as C objects for nogil - cdef int subwords_idx_len[MAX_SENTENCE_LEN] - cdef np.uint32_t *subwords_idx[MAX_SENTENCE_LEN] - # dummy dictionary to ensure that the memory locations that subwords_idx point to - # are referenced throughout so that it isn't put back to free memory pool by Python's memory manager - subword_arrays = {} - - if hs: - syn1 = (np.PyArray_DATA(model.trainables.syn1)) - - if negative: - syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - cum_table_len = len(model.vocabulary.cum_table) - if negative or sample: - next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - - # convert Python structures to primitive types, so we can release the GIL - work = np.PyArray_DATA(_work) - l1 = np.PyArray_DATA(_l1) + init_ft_config(&c, model, alpha, _work, _l1) # prepare C structures so we can go "full C" and release the Python GIL vlookup = model.wv.vocab - sentence_idx[0] = 0 # indices of the first sentence always start at 0 + c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 for sent in sentences: if not sent: continue # ignore empty sentences; leave effective_sentences unchanged @@ -336,21 +315,17 @@ def train_batch_sg(model, sentences, alpha, _work, _l1): word = vlookup[token] if token in vlookup else None if word is None: continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - if sample and word.sample_int < random_int32(&next_random): + if c.sample and word.sample_int < random_int32(&c.next_random): continue - indexes[effective_words] = word.index + c.indexes[effective_words] = word.index - subwords = model.wv.buckets_word[word.index] - word_subwords = np.array((word.index,) + subwords, dtype=np.uint32) - subwords_idx_len[effective_words] = (len(subwords) + 1) - subwords_idx[effective_words] = np.PyArray_DATA(word_subwords) - # ensures reference count of 
word_subwords doesn't reach 0 - subword_arrays[effective_words] = word_subwords + c.subwords_idx_len[effective_words] = (len(model.wv.buckets_word[word.index])) + c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) - if hs: - codelens[effective_words] = len(word.code) - codes[effective_words] = np.PyArray_DATA(word.code) - points[effective_words] = np.PyArray_DATA(word.point) + if c.hs: + c.codelens[effective_words] = len(word.code) + c.codes[effective_words] = np.PyArray_DATA(word.code) + c.points[effective_words] = np.PyArray_DATA(word.point) effective_words += 1 if effective_words == MAX_SENTENCE_LEN: @@ -360,39 +335,39 @@ def train_batch_sg(model, sentences, alpha, _work, _l1): # across sentence boundaries. # indices of sentence number X are between idx_end: k = idx_end for j in range(j, k): if j == i: continue - if hs: - fast_sentence_sg_hs( - points[j], codes[j], codelens[j], syn0_vocab, syn0_ngrams, syn1, size, - subwords_idx[i], subwords_idx_len[i], _alpha, work, l1, word_locks_vocab, - word_locks_ngrams) - if negative: - next_random = fast_sentence_sg_neg( - negative, cum_table, cum_table_len, syn0_vocab, syn0_ngrams, syn1neg, size, - indexes[j], subwords_idx[i], subwords_idx_len[i], _alpha, work, l1, - next_random, word_locks_vocab, word_locks_ngrams) + if c.hs: + fasttext_fast_sentence_sg_hs( + c.points[j], c.codes[j], c.codelens[j], c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, c.neu1, + c.word_locks_vocab, c.word_locks_ngrams) + if c.negative: + c.next_random = fasttext_fast_sentence_sg_neg( + c.negative, c.cum_table, c.cum_table_len, c.syn0_vocab, c.syn0_ngrams, c.syn1neg, c.size, + c.indexes[j], c.indexes[i], c.subwords_idx[i], c.subwords_idx_len[i], c.alpha, c.work, + c.neu1, c.next_random, c.word_locks_vocab, c.word_locks_ngrams) return effective_words @@ -421,66 +396,27 @@ def train_batch_cbow(model, sentences, alpha, _work, _neu1): Effective number of words trained. 
""" - cdef int hs = model.hs - cdef int negative = model.negative - cdef int sample = (model.vocabulary.sample != 0) - cdef int cbow_mean = model.cbow_mean - - cdef REAL_t *syn0_vocab = (np.PyArray_DATA(model.wv.vectors_vocab)) - cdef REAL_t *word_locks_vocab = (np.PyArray_DATA(model.trainables.vectors_vocab_lockf)) - cdef REAL_t *syn0_ngrams = (np.PyArray_DATA(model.wv.vectors_ngrams)) - cdef REAL_t *word_locks_ngrams = (np.PyArray_DATA(model.trainables.vectors_ngrams_lockf)) - - cdef REAL_t *work - cdef REAL_t _alpha = alpha - cdef int size = model.wv.vector_size - - cdef int codelens[MAX_SENTENCE_LEN] - cdef np.uint32_t indexes[MAX_SENTENCE_LEN] - cdef np.uint32_t reduced_windows[MAX_SENTENCE_LEN] - cdef int sentence_idx[MAX_SENTENCE_LEN + 1] - cdef int window = model.window + cdef FastTextConfig c cdef int i, j, k cdef int effective_words = 0, effective_sentences = 0 cdef int sent_idx, idx_start, idx_end - # For hierarchical softmax - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_SENTENCE_LEN] - cdef np.uint8_t *codes[MAX_SENTENCE_LEN] - - # For negative sampling - cdef REAL_t *syn1neg - cdef np.uint32_t *cum_table - cdef unsigned long long cum_table_len - # for sampling (negative and frequent-word downsampling) - cdef unsigned long long next_random - - # For passing subwords information as C objects for nogil - cdef int subwords_idx_len[MAX_SENTENCE_LEN] - cdef np.uint32_t *subwords_idx[MAX_SENTENCE_LEN] - # dummy dictionary to ensure that the memory locations that subwords_idx point to - # are referenced throughout so that it isn't put back to free memory pool by Python's memory manager - subword_arrays = {} - - if hs: - syn1 = (np.PyArray_DATA(model.trainables.syn1)) - - if negative: - syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - cum_table_len = len(model.vocabulary.cum_table) - if negative or sample: - next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + init_ft_config(&c, model, alpha, _work, _neu1) - # convert Python structures to primitive types, so we can release the GIL - work = np.PyArray_DATA(_work) - neu1 = np.PyArray_DATA(_neu1) + if c.hs: + c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) + + if c.negative: + c.syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + c.cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + c.cum_table_len = len(model.vocabulary.cum_table) + if c.negative or c.sample: + c.next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # prepare C structures so we can go "full C" and release the Python GIL vlookup = model.wv.vocab - sentence_idx[0] = 0 # indices of the first sentence always start at 0 + c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 for sent in sentences: if not sent: continue # ignore empty sentences; leave effective_sentences unchanged @@ -488,21 +424,17 @@ def train_batch_cbow(model, sentences, alpha, _work, _neu1): word = vlookup[token] if token in vlookup else None if word is None: continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - if sample and word.sample_int < random_int32(&next_random): + if c.sample and word.sample_int < random_int32(&c.next_random): continue - indexes[effective_words] = word.index - - subwords = model.wv.buckets_word[word.index] - word_subwords = np.array(subwords, dtype=np.uint32) - subwords_idx_len[effective_words] = len(subwords) - subwords_idx[effective_words] = 
np.PyArray_DATA(word_subwords) - # ensures reference count of word_subwords doesn't reach 0 - subword_arrays[effective_words] = word_subwords - - if hs: - codelens[effective_words] = len(word.code) - codes[effective_words] = np.PyArray_DATA(word.code) - points[effective_words] = np.PyArray_DATA(word.point) + c.indexes[effective_words] = word.index + + c.subwords_idx_len[effective_words] = len(model.wv.buckets_word[word.index]) + c.subwords_idx[effective_words] = np.PyArray_DATA(model.wv.buckets_word[word.index]) + + if c.hs: + c.codelens[effective_words] = len(word.code) + c.codes[effective_words] = np.PyArray_DATA(word.code) + c.points[effective_words] = np.PyArray_DATA(word.point) effective_words += 1 if effective_words == MAX_SENTENCE_LEN: break @@ -511,38 +443,38 @@ def train_batch_cbow(model, sentences, alpha, _work, _neu1): # across sentence boundaries. # indices of sentence number X are between idx_end: k = idx_end - if hs: - fast_sentence_cbow_hs( - points[i], codes[i], codelens, neu1, syn0_vocab, syn0_ngrams, syn1, size,indexes, - subwords_idx,subwords_idx_len,_alpha, work, i, j, k, cbow_mean, word_locks_vocab, - word_locks_ngrams) - if negative: - next_random = fast_sentence_cbow_neg( - negative, cum_table, cum_table_len, codelens, neu1, syn0_vocab, syn0_ngrams, - syn1neg, size, indexes, subwords_idx, subwords_idx_len, _alpha, work, i, j, k, - cbow_mean, next_random, word_locks_vocab, word_locks_ngrams) + if c.hs: + fasttext_fast_sentence_cbow_hs( + c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, c.syn1, c.size, + c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, c.cbow_mean, + c.word_locks_vocab, c.word_locks_ngrams) + if c.negative: + c.next_random = fasttext_fast_sentence_cbow_neg( + c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0_vocab, c.syn0_ngrams, + c.syn1neg, c.size, c.indexes, c.subwords_idx, c.subwords_idx_len, c.alpha, c.work, i, j, k, + c.cbow_mean, c.next_random, c.word_locks_vocab, c.word_locks_ngrams) return effective_words diff --git a/gensim/models/word2vec.py b/gensim/models/word2vec.py index ff8fe23199..2c736351a7 100755 --- a/gensim/models/word2vec.py +++ b/gensim/models/word2vec.py @@ -114,7 +114,6 @@ from copy import deepcopy from collections import defaultdict import threading -import multiprocessing import itertools import warnings @@ -137,7 +136,6 @@ from gensim.utils import deprecated from six import iteritems, itervalues, string_types from six.moves import xrange -from functools import reduce logger = logging.getLogger(__name__) @@ -335,6 +333,20 @@ def score_sentence_cbow(model, sentence, work=None, neu1=None): return log_prob_sentence +try: + from gensim.models.word2vec_corpusfile import train_epoch_sg, train_epoch_cbow, CORPUSFILE_VERSION +except ImportError: + # file-based word2vec is not supported + CORPUSFILE_VERSION = -1 + + def train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, + _work, _neu1, compute_loss): + raise RuntimeError("Training with corpus_file argument is not supported") + + def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, + _work, _neu1, compute_loss): + raise RuntimeError("Training with corpus_file argument is not supported") + def train_sg_pair(model, word, context_index, alpha, learn_vectors=True, learn_hidden=True, context_vectors=None, context_locks=None, compute_loss=False, is_ft=False): @@ -630,7 +642,7 @@ class 
Word2Vec(BaseWordEmbeddingsModel): """ - def __init__(self, sentences=None, input_streams=None, size=100, alpha=0.025, window=5, min_count=5, + def __init__(self, sentences=None, corpus_file=None, size=100, alpha=0.025, window=5, min_count=5, max_vocab_size=None, sample=1e-3, seed=1, workers=3, min_alpha=0.0001, sg=0, hs=0, negative=5, ns_exponent=0.75, cbow_mean=1, hashfxn=hash, iter=5, null_word=0, trim_rule=None, sorted_vocab=1, batch_words=MAX_WORDS_IN_BATCH, compute_loss=False, callbacks=(), @@ -648,9 +660,10 @@ def __init__(self, sentences=None, input_streams=None, size=100, alpha=0.025, wi `_. If you don't supply `sentences`, the model is left uninitialized -- use if you plan to initialize it in some other way. - input_streams : list or tuple of iterable of iterables - The tuple or list of `sentences`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (or none of them). size : int, optional Dimensionality of the word vectors. window : int, optional @@ -732,8 +745,8 @@ def __init__(self, sentences=None, input_streams=None, size=100, alpha=0.025, wi Initialize and train a :class:`~gensim.models.word2vec.Word2Vec` model >>> from gensim.models import Word2Vec - >>> input_streams = [[["cat", "say", "meow"], ["dog", "say", "woof"]]] - >>> model = Word2Vec(input_streams=input_streams, min_count=1) + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> model = Word2Vec(sentences, min_count=1) """ self.max_final_vocab = max_final_vocab @@ -748,11 +761,24 @@ def __init__(self, sentences=None, input_streams=None, size=100, alpha=0.025, wi self.trainables = Word2VecTrainables(seed=seed, vector_size=size, hashfxn=hashfxn) super(Word2Vec, self).__init__( - sentences=sentences, input_streams=input_streams, workers=workers, vector_size=size, epochs=iter, + sentences=sentences, corpus_file=corpus_file, workers=workers, vector_size=size, epochs=iter, callbacks=callbacks, batch_words=batch_words, trim_rule=trim_rule, sg=sg, alpha=alpha, window=window, seed=seed, hs=hs, negative=negative, cbow_mean=cbow_mean, min_alpha=min_alpha, compute_loss=compute_loss, fast_version=FAST_VERSION) + def _do_train_epoch(self, corpus_file, thread_id, offset, cython_vocab, thread_private_mem, cur_epoch, + total_examples=None, total_words=None, **kwargs): + work, neu1 = thread_private_mem + + if self.sg: + examples, tally, raw_tally = train_epoch_sg(self, corpus_file, offset, cython_vocab, cur_epoch, + total_examples, total_words, work, neu1, self.compute_loss) + else: + examples, tally, raw_tally = train_epoch_cbow(self, corpus_file, offset, cython_vocab, cur_epoch, + total_examples, total_words, work, neu1, self.compute_loss) + + return examples, tally, raw_tally + def _do_train_job(self, sentences, alpha, inits): """Train the model on a single batch of sentences. 
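
[editor's note] The hunks above and below replace the old `input_streams` API with a single `corpus_file` argument: a path to a plain-text file in `LineSentence` format (one sentence per line), which the new Cython `corpus_file` routines (`train_epoch_sg` / `train_epoch_cbow`) read directly for a performance boost. A minimal sketch of the two equivalent entry points, written in the doctest style these docstrings already use; the file name `corpus.txt` is hypothetical:

>>> from gensim.models import Word2Vec
>>> from gensim.models.word2vec import LineSentence
>>>
>>> # iterable-based path: stream sentences from a LineSentence-format file in Python
>>> model_a = Word2Vec(LineSentence('corpus.txt'), size=100, min_count=1)
>>>
>>> # new file-based path introduced by this patch: pass the path itself,
>>> # so the optimized corpus_file code can read it without the Python iterator
>>> model_b = Word2Vec(corpus_file='corpus.txt', size=100, min_count=1)
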
@@ -788,7 +814,7 @@ def _set_train_params(self, **kwargs): self.compute_loss = kwargs['compute_loss'] self.running_training_loss = 0 - def train(self, sentences=None, input_streams=None, total_examples=None, total_words=None, + def train(self, sentences=None, corpus_file=None, total_examples=None, total_words=None, epochs=None, start_alpha=None, end_alpha=None, word_count=0, queue_factor=2, report_delay=1.0, compute_loss=False, callbacks=()): """Update the model's neural weights from a sequence of sentences. @@ -816,9 +842,10 @@ def train(self, sentences=None, input_streams=None, total_examples=None, total_w or :class:`~gensim.models.word2vec.LineSentence` in :mod:`~gensim.models.word2vec` module for such examples. See also the `tutorial on data streaming in Python `_. - input_streams : list or tuple of iterable of iterables - The tuple or list of `sentences`-like arguments. Use it if you have multiple input streams. It is possible - to process streams in parallel, using `workers` parameter. + corpus_file : str, optional + Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. + You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or + `corpus_file` arguments need to be passed (not both of them). total_examples : int Count of sentences. total_words : int @@ -851,17 +878,16 @@ def train(self, sentences=None, input_streams=None, total_examples=None, total_w Examples -------- >>> from gensim.models import Word2Vec - >>> input_streams = [[["cat", "say", "meow"], ["dog", "say", "woof"]]] + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] >>> >>> model = Word2Vec(min_count=1) - >>> model.build_vocab(input_streams=input_streams) # prepare the model vocabulary - >>> model.train(input_streams=input_streams, - >>> total_examples=model.corpus_count, epochs=model.iter) # train word vectors + >>> model.build_vocab(sentences) # prepare the model vocabulary + >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) # train word vectors (1, 30) """ return super(Word2Vec, self).train( - sentences=sentences, input_streams=input_streams, total_examples=total_examples, total_words=total_words, + sentences=sentences, corpus_file=corpus_file, total_examples=total_examples, total_words=total_words, epochs=epochs, start_alpha=start_alpha, end_alpha=end_alpha, word_count=word_count, queue_factor=queue_factor, report_delay=report_delay, compute_loss=compute_loss, callbacks=callbacks) @@ -1505,7 +1531,7 @@ def __init__( self.max_final_vocab = max_final_vocab self.ns_exponent = ns_exponent - def _scan_vocab_singlestream(self, sentences, progress_per, trim_rule): + def _scan_vocab(self, sentences, progress_per, trim_rule): sentence_no = -1 total_words = 0 min_reduce = 1 @@ -1537,50 +1563,12 @@ def _scan_vocab_singlestream(self, sentences, progress_per, trim_rule): self.raw_vocab = vocab return total_words, corpus_count - def _scan_vocab_multistream(self, input_streams, workers, trim_rule): - manager = multiprocessing.Manager() - progress_queue = manager.Queue() - - logger.info("Scanning vocab in %i processes.", min(workers, len(input_streams))) - - workers = min(workers, len(input_streams)) - pool = multiprocessing.Pool(processes=workers) - - worker_max_vocab_size = self.max_vocab_size // workers if self.max_vocab_size else None - results = [ - pool.apply_async(_scan_vocab_worker, - (stream, progress_queue, worker_max_vocab_size, trim_rule) - ) for stream in input_streams - ] - pool.close() - - 
unfinished_tasks = len(results) - total_words = 0 - total_sentences = 0 - while unfinished_tasks > 0: - report = progress_queue.get() - if report is None: - unfinished_tasks -= 1 - logger.info("scan vocab task finished, processed %i sentences and %i words;" - " awaiting finish of %i more tasks", total_sentences, total_words, unfinished_tasks) - elif isinstance(report, string_types): - logger.warning(report) - else: - num_words, num_sentences = report - total_words += num_words - total_sentences += num_sentences - - self.raw_vocab = reduce(utils.merge_counts, [res.get() for res in results]) - if self.max_vocab_size: - utils.trim_vocab_by_freq(self.raw_vocab, self.max_vocab_size, trim_rule=trim_rule) - return total_words, total_sentences - - def scan_vocab(self, sentences=None, input_streams=None, progress_per=10000, workers=None, trim_rule=None): + def scan_vocab(self, sentences=None, corpus_file=None, progress_per=10000, workers=None, trim_rule=None): logger.info("collecting all words and their counts") - if sentences is not None: - total_words, corpus_count = self._scan_vocab_singlestream(sentences, progress_per, trim_rule) - else: - total_words, corpus_count = self._scan_vocab_multistream(input_streams, workers, trim_rule) + if corpus_file: + sentences = LineSentence(corpus_file) + + total_words, corpus_count = self._scan_vocab(sentences, progress_per, trim_rule) logger.info( "collected %i word types from a corpus of %i raw words and %i sentences", diff --git a/gensim/models/word2vec_corpusfile.cpp b/gensim/models/word2vec_corpusfile.cpp new file mode 100644 index 0000000000..78513c4aae --- /dev/null +++ b/gensim/models/word2vec_corpusfile.cpp @@ -0,0 +1,15734 @@ +/* Generated by Cython 0.28.2 */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. 
+#else +#define CYTHON_ABI "0_28_2" +#define CYTHON_FUTURE_DIVISION 0 +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define 
CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (0 && PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define 
CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef __cplusplus + #error "Cython files generated with the C++ option must be compiled with a C++ compiler." +#endif +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #else + #define CYTHON_INLINE inline + #endif +#endif +template +void __Pyx_call_destructor(T& x) { + x.~T(); +} +template +class __Pyx_FakeReference { + public: + __Pyx_FakeReference() : ptr(NULL) { } + __Pyx_FakeReference(const T& ref) : ptr(const_cast(&ref)) { } + T *operator->() { return ptr; } + T *operator&() { return ptr; } + operator T&() { return *ptr; } + template bool operator ==(U other) { return *ptr == other; } + template bool operator !=(U other) { return *ptr != other; } + private: + T *ptr; +}; + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) 
+#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; // PyThread_create_key reports success always +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif // TSS (Thread Specific Storage) API +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 
0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__gensim__models__word2vec_corpusfile +#define __PYX_HAVE_API__gensim__models__word2vec_corpusfile +/* Early includes */ +#include +#include "ios" +#include "new" +#include "stdexcept" +#include "typeinfo" +#include +#include +#include +#include +#include +#include "numpy/arrayobject.h" +#include "numpy/ufuncobject.h" +#include "fast_line_sentence.h" +#include "voidptr.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) 
((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ 
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + +/* Header.proto */ +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include + #else + #include + #endif +#endif +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +static const char *__pyx_f[] = { + "stringsource", + "gensim/models/word2vec_corpusfile.pyx", + "gensim/models/word2vec_corpusfile.pxd", + "__init__.pxd", + "type.pxd", +}; +/* ForceInitThreads.proto */ +#ifndef __PYX_FORCE_INIT_THREADS + #define __PYX_FORCE_INIT_THREADS 0 +#endif + +/* NoFastGil.proto */ +#define __Pyx_PyGILState_Ensure PyGILState_Ensure +#define __Pyx_PyGILState_Release PyGILState_Release +#define __Pyx_FastGIL_Remember() +#define __Pyx_FastGIL_Forget() +#define __Pyx_FastGilFuncInit() + + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 + * # in Cython to enable them only on the right systems. + * + * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + */ +typedef npy_int8 __pyx_t_5numpy_int8_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 + * + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t + */ +typedef npy_int16 __pyx_t_5numpy_int16_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< + * ctypedef npy_int64 int64_t + * #ctypedef npy_int96 int96_t + */ +typedef npy_int32 __pyx_t_5numpy_int32_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< + * #ctypedef npy_int96 int96_t + * #ctypedef npy_int128 int128_t + */ +typedef npy_int64 __pyx_t_5numpy_int64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 + * #ctypedef npy_int128 int128_t + * + * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + */ +typedef npy_uint8 __pyx_t_5numpy_uint8_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 + * + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t + */ +typedef npy_uint16 __pyx_t_5numpy_uint16_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< + * ctypedef npy_uint64 uint64_t + * #ctypedef npy_uint96 uint96_t + */ +typedef npy_uint32 __pyx_t_5numpy_uint32_t; + +/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< + * #ctypedef npy_uint96 uint96_t + * #ctypedef npy_uint128 uint128_t + */ +typedef npy_uint64 __pyx_t_5numpy_uint64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 + * #ctypedef npy_uint128 uint128_t + * + * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< + * ctypedef npy_float64 float64_t + * #ctypedef npy_float80 float80_t + */ +typedef npy_float32 __pyx_t_5numpy_float32_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 + * + * ctypedef npy_float32 float32_t + * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< + * #ctypedef npy_float80 float80_t + * #ctypedef npy_float128 float128_t + */ +typedef npy_float64 __pyx_t_5numpy_float64_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 + * # The int types are mapped a bit surprising -- + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t + */ +typedef npy_long __pyx_t_5numpy_int_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong longlong_t + * + */ +typedef npy_longlong __pyx_t_5numpy_long_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_ulong uint_t + */ +typedef npy_longlong __pyx_t_5numpy_longlong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 + * ctypedef npy_longlong longlong_t + * + * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t + */ +typedef npy_ulong __pyx_t_5numpy_uint_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 + * + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulonglong_t + * + */ +typedef npy_ulonglong __pyx_t_5numpy_ulong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_intp intp_t + */ +typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 + * ctypedef npy_ulonglong ulonglong_t + * + * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< + * ctypedef npy_uintp uintp_t + * + */ +typedef npy_intp __pyx_t_5numpy_intp_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 + * + * ctypedef npy_intp intp_t + * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< + * + * ctypedef npy_double float_t + */ +typedef npy_uintp __pyx_t_5numpy_uintp_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 + * ctypedef npy_uintp uintp_t + * + * ctypedef npy_double float_t # <<<<<<<<<<<<<< + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t + */ +typedef npy_double 
__pyx_t_5numpy_float_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 + * + * ctypedef npy_double float_t + * ctypedef npy_double double_t # <<<<<<<<<<<<<< + * ctypedef npy_longdouble longdouble_t + * + */ +typedef npy_double __pyx_t_5numpy_double_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 + * ctypedef npy_double float_t + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cfloat cfloat_t + */ +typedef npy_longdouble __pyx_t_5numpy_longdouble_t; + +/* "gensim/models/word2vec_inner.pxd":19 + * void* PyCObject_AsVoidPtr(object obj) + * + * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< + * + * # BLAS routine signatures + */ +typedef __pyx_t_5numpy_float32_t __pyx_t_6gensim_6models_14word2vec_inner_REAL_t; + +/* "gensim/models/word2vec_corpusfile.pxd":21 + * cimport numpy as np + * + * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< + * + * + */ +typedef __pyx_t_5numpy_float32_t __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t; +/* Declarations.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< float > __pyx_t_float_complex; + #else + typedef float _Complex __pyx_t_float_complex; + #endif +#else + typedef struct { float real, imag; } __pyx_t_float_complex; +#endif +static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float); + +/* Declarations.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< double > __pyx_t_double_complex; + #else + typedef double _Complex __pyx_t_double_complex; + #endif +#else + typedef struct { double real, imag; } __pyx_t_double_complex; +#endif +static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double); + + +/*--- Type declarations ---*/ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab; +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 + * ctypedef npy_longdouble longdouble_t + * + * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t + */ +typedef npy_cfloat __pyx_t_5numpy_cfloat_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 + * + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< + * ctypedef npy_clongdouble clongdouble_t + * + */ +typedef npy_cdouble __pyx_t_5numpy_cdouble_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cdouble complex_t + */ +typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 + * ctypedef npy_clongdouble clongdouble_t + * + * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew1(a): + */ +typedef npy_cdouble __pyx_t_5numpy_complex_t; +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig; +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config; + +/* "gensim/models/word2vec_inner.pxd":22 + * + * # BLAS routine signatures + * ctypedef void (*scopy_ptr) (const int *N, const float 
*X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const *, float const *, int const *, float *, int const *); + +/* "gensim/models/word2vec_inner.pxd":23 + * # BLAS routine signatures + * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); + +/* "gensim/models/word2vec_inner.pxd":24 + * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil + */ +typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "gensim/models/word2vec_inner.pxd":25 + * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil + */ +typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "gensim/models/word2vec_inner.pxd":26 + * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil + * + */ +typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const *, float const *, int const *); + +/* "gensim/models/word2vec_inner.pxd":27 + * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef double (*snrm2_ptr) (const int *N, const 
float *X, const int *incX) nogil + * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< + * + * cdef scopy_ptr scopy + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, float const *, float const *, int const *); + +/* "gensim/models/word2vec_inner.pxd":44 + * + * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() + * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil + * + */ +typedef __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr)(int const *, float const *, int const *, float const *, int const *); + +/* "gensim/models/word2vec_inner.pxd":45 + * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() + * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil + * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< + * + * cdef our_dot_ptr our_dot + */ +typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); + +/* "gensim/models/word2vec_inner.pxd":51 + * + * + * cdef struct Word2VecConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, compute_loss, size, window, cbow_mean, workers + * REAL_t running_training_loss, alpha + */ +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig { + int hs; + int negative; + int sample; + int compute_loss; + int size; + int window; + int cbow_mean; + int workers; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t running_training_loss; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + int sentence_idx[(0x2710 + 1)]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; +}; + +/* "gensim/models/word2vec_inner.pxd":125 + * + * + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=*) # <<<<<<<<<<<<<< + */ +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config { + int __pyx_n; + PyObject *_neu1; +}; +struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem; + +/* "gensim/models/word2vec_corpusfile.pxd":47 + * + * + * cdef struct VocabItem: # <<<<<<<<<<<<<< + * long long sample_int + * np.uint32_t index + */ +struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem { + PY_LONG_LONG sample_int; + __pyx_t_5numpy_uint32_t index; + __pyx_t_5numpy_uint8_t *code; + int code_len; + __pyx_t_5numpy_uint32_t *point; + int subword_idx_len; + __pyx_t_5numpy_uint32_t *subword_idx; +}; + +/* 
"gensim/models/word2vec_corpusfile.pxd":59 + * + * + * ctypedef unordered_map[string, VocabItem] cvocab_t # <<<<<<<<<<<<<< + * + * cdef class CythonVocab: + */ +typedef std::unordered_map __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t; + +/* "gensim/models/word2vec_corpusfile.pxd":33 + * + * + * cdef class CythonLineSentence: # <<<<<<<<<<<<<< + * cdef FastLineSentence* _thisptr + * cdef public bytes source + */ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence { + PyObject_HEAD + struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_vtab; + FastLineSentence *_thisptr; + PyObject *source; + size_t max_sentence_length; + size_t max_words_in_batch; + size_t offset; + std::vector > buf_data; +}; + + +/* "gensim/models/word2vec_corpusfile.pxd":61 + * ctypedef unordered_map[string, VocabItem] cvocab_t + * + * cdef class CythonVocab: # <<<<<<<<<<<<<< + * cdef cvocab_t vocab + * cdef subword_arrays + */ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab { + PyObject_HEAD + struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_vtab; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t vocab; + PyObject *subword_arrays; +}; + + +/* "gensim/models/word2vec_corpusfile.pyx":122 + * self._thisptr.Reset() + * + * def __iter__(self): # <<<<<<<<<<<<<< + * self.reset() + * while not self.is_eof(): + */ +struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ { + PyObject_HEAD + std::vector __pyx_v_chunk; + std::vector > __pyx_v_chunked_sentence; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self; + std::vector > ::iterator __pyx_t_0; +}; + + + +/* "gensim/models/word2vec_corpusfile.pyx":77 + * + * @cython.final + * cdef class CythonLineSentence: # <<<<<<<<<<<<<< + * def __cinit__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): + * self._thisptr = new FastLineSentence(to_bytes(source), offset) + */ + +struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence { + bool (*is_eof)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector (*read_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector > (*_read_chunked_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector > (*_chunk_sentence)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, std::vector , int __pyx_skip_dispatch); + void (*reset)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + std::vector > (*next_batch)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); +}; +static struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; +static bool __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); +static std::vector __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_read_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); +static std::vector > 
__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__read_chunked_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); +static std::vector > __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__chunk_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, std::vector , int __pyx_skip_dispatch); +static void __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_reset(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); +static std::vector > __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_next_batch(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch); + + +/* "gensim/models/word2vec_corpusfile.pyx":40 + * + * @cython.final + * cdef class CythonVocab: # <<<<<<<<<<<<<< + * def __init__(self, wv, hs=0, fasttext=0): + * cdef VocabItem word + */ + +struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab { + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *(*get_vocab_ptr)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *); +}; +static struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab; +static __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_f_6gensim_6models_19word2vec_corpusfile_11CythonVocab_get_vocab_ptr(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *); + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + 
#define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* GetModuleGlobalName.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int 
__Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, 
PyObject *value, PyObject *tb, PyObject *cause); + +/* WriteUnraisableException.proto */ +static void __Pyx_WriteUnraisable(const char *name, int clineno, + int lineno, const char *filename, + int full_traceback, int nogil); + +/* PyObjectSetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL) +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value); +#else +#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n) +#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v) +#endif + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* RaiseNoneIterError.proto */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* ListCompAppend.proto */ +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len)) { + Py_INCREF(x); + PyList_SET_ITEM(list, len, x); + Py_SIZE(list) = len+1; + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_ListComp_Append(L,x) PyList_Append(L,x) +#endif + +/* IncludeStringH.proto */ +#include + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable); + +/* SetupReduce.proto */ +static int __Pyx_setup_reduce(PyObject* 
type_obj); + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* None.proto */ +#include + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_npy_uint32(npy_uint32 value); + +/* CppExceptionConversion.proto */ +#ifndef __Pyx_CppExn2PyErr +#include <new> +#include <typeinfo> +#include <stdexcept> +#include <ios> +static void __Pyx_CppExn2PyErr() { + try { + if (PyErr_Occurred()) + ; // let the latest Python exn pass through and ignore the current one + else + throw; + } catch (const std::bad_alloc& exn) { + PyErr_SetString(PyExc_MemoryError, exn.what()); + } catch (const std::bad_cast& exn) { + PyErr_SetString(PyExc_TypeError, exn.what()); + } catch (const std::bad_typeid& exn) { + PyErr_SetString(PyExc_TypeError, exn.what()); + } catch (const std::domain_error& exn) { + PyErr_SetString(PyExc_ValueError, exn.what()); + } catch (const std::invalid_argument& exn) { + PyErr_SetString(PyExc_ValueError, exn.what()); + } catch (const std::ios_base::failure& exn) { + PyErr_SetString(PyExc_IOError, exn.what()); + } catch (const std::out_of_range& exn) { + PyErr_SetString(PyExc_IndexError, exn.what()); + } catch (const std::overflow_error& exn) { + PyErr_SetString(PyExc_OverflowError, exn.what()); + } catch (const std::range_error& exn) { + PyErr_SetString(PyExc_ArithmeticError, exn.what()); + } catch (const std::underflow_error& exn) { + PyErr_SetString(PyExc_ArithmeticError, exn.what()); + } catch (const std::exception& exn) { + PyErr_SetString(PyExc_RuntimeError, exn.what()); + } + catch (...) 
+ { + PyErr_SetString(PyExc_RuntimeError, "Unknown exception"); + } +} +#endif + +/* None.proto */ +static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void); /* proto */ + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* RealImag.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #define __Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif +#if defined(__cplusplus) && CYTHON_CCOMPLEX\ + && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103) + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + +/* Arithmetic.proto */ +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq_float(a, b) ((a)==(b)) + #define __Pyx_c_sum_float(a, b) ((a)+(b)) + #define __Pyx_c_diff_float(a, b) ((a)-(b)) + #define __Pyx_c_prod_float(a, b) ((a)*(b)) + #define __Pyx_c_quot_float(a, b) ((a)/(b)) + #define __Pyx_c_neg_float(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero_float(z) ((z)==(float)0) + #define __Pyx_c_conj_float(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs_float(z) (::std::abs(z)) + #define __Pyx_c_pow_float(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero_float(z) ((z)==0) + #define __Pyx_c_conj_float(z) (conjf(z)) + #if 1 + #define __Pyx_c_abs_float(z) (cabsf(z)) + #define __Pyx_c_pow_float(a, b) (cpowf(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex); + static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex); + #if 1 + static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex, __pyx_t_float_complex); + #endif +#endif + +/* Arithmetic.proto */ +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq_double(a, b) ((a)==(b)) + #define __Pyx_c_sum_double(a, b) ((a)+(b)) + #define __Pyx_c_diff_double(a, b) ((a)-(b)) + #define __Pyx_c_prod_double(a, b) ((a)*(b)) + #define __Pyx_c_quot_double(a, b) ((a)/(b)) + #define __Pyx_c_neg_double(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero_double(z) ((z)==(double)0) + #define __Pyx_c_conj_double(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs_double(z) (::std::abs(z)) + #define __Pyx_c_pow_double(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero_double(z) ((z)==0) + #define __Pyx_c_conj_double(z) (conj(z)) + #if 1 + #define __Pyx_c_abs_double(z) (cabs(z)) + #define __Pyx_c_pow_double(a, b) (cpow(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int 
__Pyx_c_eq_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex); + static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex); + #if 1 + static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex, __pyx_t_double_complex); + #endif +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE npy_uint32 __Pyx_PyInt_As_npy_uint32(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_As_PY_LONG_LONG(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* FetchCommonType.proto */ +static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type); + +/* SwapException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* PyObjectCallMethod1.proto */ +static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); +static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg); + +/* CoroutineBase.proto */ +typedef PyObject *(*__pyx_coroutine_body_t)(PyObject *, PyThreadState *, PyObject *); +typedef struct { + PyObject_HEAD + __pyx_coroutine_body_t body; + PyObject *closure; + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + PyObject *gi_weakreflist; + PyObject *classobj; + PyObject *yieldfrom; + 
PyObject *gi_name; + PyObject *gi_qualname; + PyObject *gi_modulename; + PyObject *gi_code; + int resume_label; + char is_running; +} __pyx_CoroutineObject; +static __pyx_CoroutineObject *__Pyx__Coroutine_New( + PyTypeObject *type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name); +static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( + __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name); +static int __Pyx_Coroutine_clear(PyObject *self); +static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value); +static PyObject *__Pyx_Coroutine_Close(PyObject *self); +static PyObject *__Pyx_Coroutine_Throw(PyObject *gen, PyObject *args); +#define __Pyx_Coroutine_SwapException(self) {\ + __Pyx_ExceptionSwap(&(self)->exc_type, &(self)->exc_value, &(self)->exc_traceback);\ + __Pyx_Coroutine_ResetFrameBackpointer(self);\ + } +#define __Pyx_Coroutine_ResetAndClearException(self) {\ + __Pyx_ExceptionReset((self)->exc_type, (self)->exc_value, (self)->exc_traceback);\ + (self)->exc_type = (self)->exc_value = (self)->exc_traceback = NULL;\ + } +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyGen_FetchStopIterationValue(pvalue)\ + __Pyx_PyGen__FetchStopIterationValue(__pyx_tstate, pvalue) +#else +#define __Pyx_PyGen_FetchStopIterationValue(pvalue)\ + __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, pvalue) +#endif +static int __Pyx_PyGen__FetchStopIterationValue(PyThreadState *tstate, PyObject **pvalue); +static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__pyx_CoroutineObject *self); + +/* PatchModuleWithCoroutine.proto */ +static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code); + +/* PatchGeneratorABC.proto */ +static int __Pyx_patch_abc(void); + +/* Generator.proto */ +#define __Pyx_Generator_USED +static PyTypeObject *__pyx_GeneratorType = 0; +#define __Pyx_Generator_CheckExact(obj) (Py_TYPE(obj) == __pyx_GeneratorType) +#define __Pyx_Generator_New(body, code, closure, name, qualname, module_name)\ + __Pyx__Coroutine_New(__pyx_GeneratorType, body, code, closure, name, qualname, module_name) +static PyObject *__Pyx_Generator_Next(PyObject *self); +static int __pyx_Generator_init(void); + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* FunctionExport.proto */ +static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); + +/* PyIdentifierFromString.proto */ +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +/* ModuleImport.proto */ +static PyObject *__Pyx_ImportModule(const char *name); + +/* TypeImport.proto */ +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); + +/* VoidPtrImport.proto */ +static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig); + +/* FunctionImport.proto */ +static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +static __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_f_6gensim_6models_19word2vec_corpusfile_11CythonVocab_get_vocab_ptr(struct 
__pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_self); /* proto*/ +static bool __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch); /* proto*/ +static std::vector<std::string> __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_read_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch); /* proto*/ +static std::vector<std::vector<std::string> > __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__read_chunked_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch); /* proto*/ +static std::vector<std::vector<std::string> > __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__chunk_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, std::vector<std::string> __pyx_v_sent, CYTHON_UNUSED int __pyx_skip_dispatch); /* proto*/ +static void __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_reset(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch); /* proto*/ +static std::vector<std::vector<std::string> > __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_next_batch(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch); /* proto*/ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libcpp.string' */ + +/* Module declarations from 'libcpp.vector' */ + +/* Module declarations from 'libcpp.utility' */ + +/* Module declarations from 'libcpp.unordered_map' */ + +/* Module declarations from 'libcpp' */ + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from 'numpy' */ + +/* Module declarations from 'numpy' */ +static PyTypeObject *__pyx_ptype_5numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0; +static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'gensim.models.word2vec_inner' */ +static __pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_scopy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_scopy (*__pyx_vp_6gensim_6models_14word2vec_inner_scopy) +static __pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_saxpy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_saxpy (*__pyx_vp_6gensim_6models_14word2vec_inner_saxpy) +static __pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_sdot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_sdot (*__pyx_vp_6gensim_6models_14word2vec_inner_sdot) +static
__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_dsdot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_dsdot (*__pyx_vp_6gensim_6models_14word2vec_inner_dsdot) +static __pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_snrm2 = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_snrm2 (*__pyx_vp_6gensim_6models_14word2vec_inner_snrm2) +static __pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_sscal = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_sscal (*__pyx_vp_6gensim_6models_14word2vec_inner_sscal) +static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE)[0x3E8] = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE (*__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE) +static __pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_our_dot = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_our_dot (*__pyx_vp_6gensim_6models_14word2vec_inner_our_dot) +static __pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr *__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy = 0; +#define __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy (*__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy) +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_random_int32)(unsigned PY_LONG_LONG *); /*proto*/ +static void (*__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_hs)(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_neg)(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static void (*__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_hs)(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static unsigned PY_LONG_LONG (*__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_neg)(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , 
__pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static PyObject *(*__pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config)(struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig *, PyObject *, PyObject *, PyObject *, PyObject *, struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config *__pyx_optional_args); /*proto*/ + +/* Module declarations from 'gensim.models.word2vec_corpusfile' */ +static PyTypeObject *__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = 0; +static PyTypeObject *__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab = 0; +static PyTypeObject *__pyx_ptype_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ = 0; +static __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int); /*proto*/ +static __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int, int, int, int, int); /*proto*/ +static PyObject *__pyx_f_6gensim_6models_19word2vec_corpusfile_to_bytes(PyObject *); /*proto*/ +static void __pyx_f_6gensim_6models_19word2vec_corpusfile_prepare_c_structures_for_batch(std::vector<std::vector<std::string> > &, int, int, int, int *, int *, int *, unsigned PY_LONG_LONG *, __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *, int *, __pyx_t_5numpy_uint32_t *, int *, __pyx_t_5numpy_uint8_t **, __pyx_t_5numpy_uint32_t **, __pyx_t_5numpy_uint32_t *); /*proto*/ +static std::string __pyx_convert_string_from_py_std__in_string(PyObject *); /*proto*/ +static std::vector<std::string> __pyx_convert_vector_from_py_std_3a__3a_string(PyObject *); /*proto*/ +static CYTHON_INLINE PyObject *__pyx_convert_PyObject_string_to_py_std__in_string(std::string const &); /*proto*/ +static CYTHON_INLINE PyObject *__pyx_convert_PyUnicode_string_to_py_std__in_string(std::string const &); /*proto*/ +static CYTHON_INLINE PyObject *__pyx_convert_PyStr_string_to_py_std__in_string(std::string const &); /*proto*/ +static CYTHON_INLINE PyObject *__pyx_convert_PyBytes_string_to_py_std__in_string(std::string const &); /*proto*/ +static CYTHON_INLINE PyObject *__pyx_convert_PyByteArray_string_to_py_std__in_string(std::string const &); /*proto*/ +static PyObject *__pyx_convert_vector_to_py_std_3a__3a_string(const std::vector<std::string> &); /*proto*/ +static PyObject *__pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___(const std::vector<std::vector<std::string> > &); /*proto*/ +#define __Pyx_MODULE_NAME "gensim.models.word2vec_corpusfile" +extern int __pyx_module_is_main_gensim__models__word2vec_corpusfile; +int __pyx_module_is_main_gensim__models__word2vec_corpusfile = 0; + +/* Implementation of 'gensim.models.word2vec_corpusfile' */ +static PyObject *__pyx_builtin_TypeError; +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_ValueError; +static PyObject *__pyx_builtin_RuntimeError; +static PyObject *__pyx_builtin_ImportError; +static const char __pyx_k_c[] = "c"; +static const char __pyx_k_i[] = "i"; +static const char __pyx_k_j[] = "j"; +static const char __pyx_k_k[] = "k"; +static const
char __pyx_k_hs[] = "hs"; +static const char __pyx_k_np[] = "np"; +static const char __pyx_k_wv[] = "wv"; +static const char __pyx_k_six[] = "six"; +static const char __pyx_k_args[] = "args"; +static const char __pyx_k_code[] = "code"; +static const char __pyx_k_iter[] = "__iter__"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_neu1[] = "_neu1"; +static const char __pyx_k_send[] = "send"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_utf8[] = "utf8"; +static const char __pyx_k_work[] = "_work"; +static const char __pyx_k_alpha[] = "alpha"; +static const char __pyx_k_close[] = "close"; +static const char __pyx_k_index[] = "index"; +static const char __pyx_k_model[] = "model"; +static const char __pyx_k_numpy[] = "numpy"; +static const char __pyx_k_point[] = "point"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_throw[] = "throw"; +static const char __pyx_k_vocab[] = "vocab"; +static const char __pyx_k_encode[] = "encode"; +static const char __pyx_k_epochs[] = "epochs"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_offset[] = "offset"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_source[] = "source"; +static const char __pyx_k_alpha_2[] = "_alpha"; +static const char __pyx_k_idx_end[] = "idx_end"; +static const char __pyx_k_any2utf8[] = "any2utf8"; +static const char __pyx_k_fasttext[] = "fasttext"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_sent_idx[] = "sent_idx"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_TypeError[] = "TypeError"; +static const char __pyx_k_cur_epoch[] = "_cur_epoch"; +static const char __pyx_k_end_alpha[] = "end_alpha"; +static const char __pyx_k_idx_start[] = "idx_start"; +static const char __pyx_k_iteritems[] = "iteritems"; +static const char __pyx_k_min_alpha[] = "min_alpha"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_sentences[] = "sentences"; +static const char __pyx_k_ValueError[] = "ValueError"; +static const char __pyx_k_num_epochs[] = "num_epochs"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_sample_int[] = "sample_int"; +static const char __pyx_k_ImportError[] = "ImportError"; +static const char __pyx_k_corpus_file[] = "corpus_file"; +static const char __pyx_k_cur_epoch_2[] = "cur_epoch"; +static const char __pyx_k_start_alpha[] = "start_alpha"; +static const char __pyx_k_total_words[] = "total_words"; +static const char __pyx_k_RuntimeError[] = "RuntimeError"; +static const char __pyx_k_buckets_word[] = "buckets_word"; +static const char __pyx_k_compute_loss[] = "compute_loss"; +static const char __pyx_k_cython_vocab[] = "_cython_vocab"; +static const char __pyx_k_gensim_utils[] = "gensim.utils"; +static const char __pyx_k_input_stream[] = "input_stream"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_expected_words[] = "_expected_words"; +static const char __pyx_k_train_epoch_sg[] = "train_epoch_sg"; +static const char __pyx_k_effective_words[] = "effective_words"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_total_sentences[] = "total_sentences"; +static const char __pyx_k_expected_words_2[] = "expected_words"; +static const char __pyx_k_train_epoch_cbow[] = "train_epoch_cbow"; +static const char 
__pyx_k_expected_examples[] = "_expected_examples"; +static const char __pyx_k_CORPUSFILE_VERSION[] = "CORPUSFILE_VERSION"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_effective_sentences[] = "effective_sentences"; +static const char __pyx_k_expected_examples_2[] = "expected_examples"; +static const char __pyx_k_max_sentence_length[] = "max_sentence_length"; +static const char __pyx_k_running_training_loss[] = "running_training_loss"; +static const char __pyx_k_total_effective_words[] = "total_effective_words"; +static const char __pyx_k_CythonLineSentence___iter[] = "CythonLineSentence.__iter__"; +static const char __pyx_k_ndarray_is_not_C_contiguous[] = "ndarray is not C contiguous"; +static const char __pyx_k_rebuild_cython_line_sentence[] = "rebuild_cython_line_sentence"; +static const char __pyx_k_numpy_core_multiarray_failed_to[] = "numpy.core.multiarray failed to import"; +static const char __pyx_k_unknown_dtype_code_in_numpy_pxd[] = "unknown dtype code in numpy.pxd (%d)"; +static const char __pyx_k_Format_string_allocated_too_shor[] = "Format string allocated too short, see comment in numpy.pxd"; +static const char __pyx_k_Non_native_byte_order_not_suppor[] = "Non-native byte order not supported"; +static const char __pyx_k_Optimized_cython_functions_for_f[] = "Optimized cython functions for file-based training :class:`~gensim.models.word2vec.Word2Vec` model."; +static const char __pyx_k_gensim_models_word2vec_corpusfil[] = "gensim.models.word2vec_corpusfile"; +static const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = "ndarray is not Fortran contiguous"; +static const char __pyx_k_numpy_core_umath_failed_to_impor[] = "numpy.core.umath failed to import"; +static const char __pyx_k_self_vocab_cannot_be_converted_t[] = "self.vocab cannot be converted to a Python object for pickling"; +static const char __pyx_k_Format_string_allocated_too_shor_2[] = "Format string allocated too short."; +static const char __pyx_k_gensim_models_word2vec_corpusfil_2[] = "gensim/models/word2vec_corpusfile.pyx"; +static PyObject *__pyx_n_s_CORPUSFILE_VERSION; +static PyObject *__pyx_n_s_CythonLineSentence___iter; +static PyObject *__pyx_kp_u_Format_string_allocated_too_shor; +static PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2; +static PyObject *__pyx_n_s_ImportError; +static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor; +static PyObject *__pyx_n_s_RuntimeError; +static PyObject *__pyx_n_s_TypeError; +static PyObject *__pyx_n_s_ValueError; +static PyObject *__pyx_n_s_alpha; +static PyObject *__pyx_n_s_alpha_2; +static PyObject *__pyx_n_s_any2utf8; +static PyObject *__pyx_n_s_args; +static PyObject *__pyx_n_s_buckets_word; +static PyObject *__pyx_n_s_c; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_close; +static PyObject *__pyx_n_s_code; +static PyObject *__pyx_n_s_compute_loss; +static PyObject *__pyx_n_s_corpus_file; +static PyObject *__pyx_n_s_cur_epoch; +static PyObject *__pyx_n_s_cur_epoch_2; +static PyObject *__pyx_n_s_cython_vocab; +static PyObject *__pyx_n_s_effective_sentences; +static PyObject *__pyx_n_s_effective_words; +static PyObject *__pyx_n_s_encode; +static PyObject *__pyx_n_s_end_alpha; +static PyObject *__pyx_n_s_epochs; +static PyObject *__pyx_n_s_expected_examples; +static PyObject *__pyx_n_s_expected_examples_2; +static PyObject *__pyx_n_s_expected_words; +static PyObject *__pyx_n_s_expected_words_2; +static PyObject *__pyx_n_s_fasttext; +static PyObject 
*__pyx_n_s_gensim_models_word2vec_corpusfil; +static PyObject *__pyx_kp_s_gensim_models_word2vec_corpusfil_2; +static PyObject *__pyx_n_s_gensim_utils; +static PyObject *__pyx_n_s_getstate; +static PyObject *__pyx_n_s_hs; +static PyObject *__pyx_n_s_i; +static PyObject *__pyx_n_s_idx_end; +static PyObject *__pyx_n_s_idx_start; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_index; +static PyObject *__pyx_n_s_input_stream; +static PyObject *__pyx_n_s_iter; +static PyObject *__pyx_n_s_iteritems; +static PyObject *__pyx_n_s_j; +static PyObject *__pyx_n_s_k; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_max_sentence_length; +static PyObject *__pyx_n_s_min_alpha; +static PyObject *__pyx_n_s_model; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous; +static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou; +static PyObject *__pyx_n_s_neu1; +static PyObject *__pyx_n_s_np; +static PyObject *__pyx_n_s_num_epochs; +static PyObject *__pyx_n_s_numpy; +static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to; +static PyObject *__pyx_kp_s_numpy_core_umath_failed_to_impor; +static PyObject *__pyx_n_s_offset; +static PyObject *__pyx_n_s_point; +static PyObject *__pyx_n_s_pyx_vtable; +static PyObject *__pyx_n_s_range; +static PyObject *__pyx_n_s_rebuild_cython_line_sentence; +static PyObject *__pyx_n_s_reduce; +static PyObject *__pyx_n_s_reduce_cython; +static PyObject *__pyx_n_s_reduce_ex; +static PyObject *__pyx_n_s_running_training_loss; +static PyObject *__pyx_n_s_sample_int; +static PyObject *__pyx_kp_s_self_vocab_cannot_be_converted_t; +static PyObject *__pyx_n_s_send; +static PyObject *__pyx_n_s_sent_idx; +static PyObject *__pyx_n_s_sentences; +static PyObject *__pyx_n_s_setstate; +static PyObject *__pyx_n_s_setstate_cython; +static PyObject *__pyx_n_s_six; +static PyObject *__pyx_n_s_source; +static PyObject *__pyx_n_s_start_alpha; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_throw; +static PyObject *__pyx_n_s_total_effective_words; +static PyObject *__pyx_n_s_total_sentences; +static PyObject *__pyx_n_s_total_words; +static PyObject *__pyx_n_s_train_epoch_cbow; +static PyObject *__pyx_n_s_train_epoch_sg; +static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd; +static PyObject *__pyx_n_s_utf8; +static PyObject *__pyx_n_s_vocab; +static PyObject *__pyx_n_s_work; +static PyObject *__pyx_n_s_wv; +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab___init__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_self, PyObject *__pyx_v_wv, PyObject *__pyx_v_hs, PyObject *__pyx_v_fasttext); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_rebuild_cython_line_sentence(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_source, PyObject *__pyx_v_max_sentence_length); /* proto */ +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence___cinit__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_source, 
PyObject *__pyx_v_offset, CYTHON_UNUSED PyObject *__pyx_v_max_sentence_length); /* proto */ +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_2__init__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_source, PyObject *__pyx_v_offset, PyObject *__pyx_v_max_sentence_length); /* proto */ +static void __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_4__dealloc__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6is_eof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_8read_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_10_read_chunked_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_12_chunk_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, std::vector<std::string> __pyx_v_sent); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_14reset(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_16__iter__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19__reduce__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_21next_batch(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source___get__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_2__set__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_4__del__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length___get__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_2__set__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch___get__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static int
__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_2__set__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset___get__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self); /* proto */ +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_2__set__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_2train_epoch_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v__work, CYTHON_UNUSED PyObject *__pyx_v__neu1, PyObject *__pyx_v_compute_loss); /* proto */ +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_4train_epoch_cbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v__work, PyObject *__pyx_v__neu1, PyObject *__pyx_v_compute_loss); /* proto */ +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */ +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ +static PyObject *__pyx_tp_new_6gensim_6models_19word2vec_corpusfile_CythonLineSentence(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_6gensim_6models_19word2vec_corpusfile_CythonVocab(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_int_0; +static PyObject *__pyx_int_1; +static PyObject *__pyx_int_10000; +static PyObject *__pyx_tuple_; +static PyObject *__pyx_tuple__2; +static PyObject *__pyx_tuple__3; +static PyObject *__pyx_tuple__4; +static PyObject *__pyx_tuple__5; +static PyObject *__pyx_tuple__6; +static PyObject *__pyx_tuple__7; +static PyObject *__pyx_tuple__8; +static PyObject *__pyx_tuple__9; +static PyObject *__pyx_tuple__10; +static PyObject *__pyx_tuple__11; +static PyObject *__pyx_tuple__12; +static PyObject *__pyx_tuple__13; +static PyObject *__pyx_tuple__15; +static PyObject *__pyx_tuple__17; +static PyObject *__pyx_codeobj__14; +static PyObject *__pyx_codeobj__16; +static PyObject *__pyx_codeobj__18; +/* Late includes */ + +/* "gensim/models/word2vec_corpusfile.pyx":41 + * @cython.final + * cdef class CythonVocab: + * def __init__(self, wv, hs=0, fasttext=0): # <<<<<<<<<<<<<< + * cdef VocabItem word + * + */ + +/* Python wrapper */ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_wv = 0; + PyObject *__pyx_v_hs = 0; + 
PyObject *__pyx_v_fasttext = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_wv,&__pyx_n_s_hs,&__pyx_n_s_fasttext,0}; + PyObject* values[3] = {0,0,0}; + values[1] = ((PyObject *)__pyx_int_0); + values[2] = ((PyObject *)__pyx_int_0); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_wv)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_hs); + if (value) { values[1] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 2: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_fasttext); + if (value) { values[2] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(1, 41, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_wv = values[0]; + __pyx_v_hs = values[1]; + __pyx_v_fasttext = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 41, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonVocab.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab___init__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_v_self), __pyx_v_wv, __pyx_v_hs, __pyx_v_fasttext); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab___init__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_self, PyObject *__pyx_v_wv, PyObject *__pyx_v_hs, PyObject *__pyx_v_fasttext) { + struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem __pyx_v_word; + PyObject *__pyx_v_py_token = NULL; + PyObject *__pyx_v_vocab_item = NULL; + PyObject *__pyx_v_token = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + Py_ssize_t __pyx_t_6; + PyObject *(*__pyx_t_7)(PyObject *); + PyObject *(*__pyx_t_8)(PyObject *); + __pyx_t_5numpy_uint32_t __pyx_t_9; + PY_LONG_LONG __pyx_t_10; + int __pyx_t_11; + Py_ssize_t __pyx_t_12; + std::string __pyx_t_13; + __Pyx_RefNannySetupContext("__init__", 0); + + /* 
"gensim/models/word2vec_corpusfile.pyx":44 + * cdef VocabItem word + * + * for py_token, vocab_item in iteritems(wv.vocab): # <<<<<<<<<<<<<< + * token = any2utf8(py_token) + * word.index = vocab_item.index + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_iteritems); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_wv, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_4) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (likely(PyList_CheckExact(__pyx_t_1)) || PyTuple_CheckExact(__pyx_t_1)) { + __pyx_t_2 = __pyx_t_1; __Pyx_INCREF(__pyx_t_2); __pyx_t_6 = 0; + __pyx_t_7 = NULL; + } else { + __pyx_t_6 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_7 = Py_TYPE(__pyx_t_2)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 44, __pyx_L1_error) + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + for (;;) { + if (likely(!__pyx_t_7)) { + if (likely(PyList_CheckExact(__pyx_t_2))) { + if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_2)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_6); __Pyx_INCREF(__pyx_t_1); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(1, 44, __pyx_L1_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_2, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + } else { + if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_2)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_6); 
__Pyx_INCREF(__pyx_t_1); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(1, 44, __pyx_L1_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_2, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + } + } else { + __pyx_t_1 = __pyx_t_7(__pyx_t_2); + if (unlikely(!__pyx_t_1)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(1, 44, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_1); + } + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(1, 44, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_5 = PyList_GET_ITEM(sequence, 0); + __pyx_t_3 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(__pyx_t_3); + #else + __pyx_t_5 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_4 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_8 = Py_TYPE(__pyx_t_4)->tp_iternext; + index = 0; __pyx_t_5 = __pyx_t_8(__pyx_t_4); if (unlikely(!__pyx_t_5)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_5); + index = 1; __pyx_t_3 = __pyx_t_8(__pyx_t_4); if (unlikely(!__pyx_t_3)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_4), 2) < 0) __PYX_ERR(1, 44, __pyx_L1_error) + __pyx_t_8 = NULL; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L6_unpacking_done; + __pyx_L5_unpacking_failed:; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_8 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(1, 44, __pyx_L1_error) + __pyx_L6_unpacking_done:; + } + __Pyx_XDECREF_SET(__pyx_v_py_token, __pyx_t_5); + __pyx_t_5 = 0; + __Pyx_XDECREF_SET(__pyx_v_vocab_item, __pyx_t_3); + __pyx_t_3 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":45 + * + * for py_token, vocab_item in iteritems(wv.vocab): + * token = any2utf8(py_token) # <<<<<<<<<<<<<< + * word.index = vocab_item.index + * word.sample_int = vocab_item.sample_int + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_any2utf8); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (!__pyx_t_5) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_py_token); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 45, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_py_token}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 45, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_py_token}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 45, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_INCREF(__pyx_v_py_token); + __Pyx_GIVEREF(__pyx_v_py_token); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v_py_token); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_1); + __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":46 + * for py_token, vocab_item in iteritems(wv.vocab): + * token = any2utf8(py_token) + * word.index = vocab_item.index # <<<<<<<<<<<<<< + * word.sample_int = vocab_item.sample_int + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_vocab_item, __pyx_n_s_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = __Pyx_PyInt_As_npy_uint32(__pyx_t_1); if (unlikely((__pyx_t_9 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(1, 46, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_word.index = __pyx_t_9; + + /* "gensim/models/word2vec_corpusfile.pyx":47 + * token = any2utf8(py_token) + * word.index = vocab_item.index + * word.sample_int = vocab_item.sample_int # <<<<<<<<<<<<<< + * + * if hs: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_vocab_item, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 47, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = __Pyx_PyInt_As_PY_LONG_LONG(__pyx_t_1); if (unlikely((__pyx_t_10 == (PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(1, 47, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_word.sample_int = __pyx_t_10; + + /* "gensim/models/word2vec_corpusfile.pyx":49 + * word.sample_int = vocab_item.sample_int + * + * if hs: # <<<<<<<<<<<<<< + * word.code = np.PyArray_DATA(vocab_item.code) + * word.code_len = len(vocab_item.code) + */ + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_v_hs); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(1, 49, __pyx_L1_error) + if (__pyx_t_11) { + + /* "gensim/models/word2vec_corpusfile.pyx":50 + * + * if hs: + * word.code = np.PyArray_DATA(vocab_item.code) # <<<<<<<<<<<<<< + * word.code_len = len(vocab_item.code) + * word.point = np.PyArray_DATA(vocab_item.point) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_vocab_item, __pyx_n_s_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(1, 50, __pyx_L1_error) + __pyx_v_word.code = ((__pyx_t_5numpy_uint8_t 
*)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":51 + * if hs: + * word.code = np.PyArray_DATA(vocab_item.code) + * word.code_len = len(vocab_item.code) # <<<<<<<<<<<<<< + * word.point = np.PyArray_DATA(vocab_item.point) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_vocab_item, __pyx_n_s_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 51, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_12 = PyObject_Length(__pyx_t_1); if (unlikely(__pyx_t_12 == ((Py_ssize_t)-1))) __PYX_ERR(1, 51, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_word.code_len = ((int)__pyx_t_12); + + /* "gensim/models/word2vec_corpusfile.pyx":52 + * word.code = np.PyArray_DATA(vocab_item.code) + * word.code_len = len(vocab_item.code) + * word.point = np.PyArray_DATA(vocab_item.point) # <<<<<<<<<<<<<< + * + * # subwords information, used only in FastText model + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_vocab_item, __pyx_n_s_point); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 52, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(1, 52, __pyx_L1_error) + __pyx_v_word.point = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":49 + * word.sample_int = vocab_item.sample_int + * + * if hs: # <<<<<<<<<<<<<< + * word.code = np.PyArray_DATA(vocab_item.code) + * word.code_len = len(vocab_item.code) + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":55 + * + * # subwords information, used only in FastText model + * if fasttext: # <<<<<<<<<<<<<< + * word.subword_idx_len = (len(wv.buckets_word[word.index])) + * word.subword_idx = np.PyArray_DATA(wv.buckets_word[word.index]) + */ + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_v_fasttext); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(1, 55, __pyx_L1_error) + if (__pyx_t_11) { + + /* "gensim/models/word2vec_corpusfile.pyx":56 + * # subwords information, used only in FastText model + * if fasttext: + * word.subword_idx_len = (len(wv.buckets_word[word.index])) # <<<<<<<<<<<<<< + * word.subword_idx = np.PyArray_DATA(wv.buckets_word[word.index]) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_wv, __pyx_n_s_buckets_word); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_1, __pyx_v_word.index, __pyx_t_5numpy_uint32_t, 0, __Pyx_PyInt_From_npy_uint32, 0, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_12 == ((Py_ssize_t)-1))) __PYX_ERR(1, 56, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_word.subword_idx_len = ((int)__pyx_t_12); + + /* "gensim/models/word2vec_corpusfile.pyx":57 + * if fasttext: + * word.subword_idx_len = (len(wv.buckets_word[word.index])) + * word.subword_idx = np.PyArray_DATA(wv.buckets_word[word.index]) # <<<<<<<<<<<<<< + * + * self.vocab[token] = word + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_wv, __pyx_n_s_buckets_word); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 57, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_GetItemInt(__pyx_t_3, __pyx_v_word.index, __pyx_t_5numpy_uint32_t, 0, __Pyx_PyInt_From_npy_uint32, 0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 57, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(1, 57, __pyx_L1_error) + __pyx_v_word.subword_idx = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":55 + * + * # subwords information, used only in FastText model + * if fasttext: # <<<<<<<<<<<<<< + * word.subword_idx_len = (len(wv.buckets_word[word.index])) + * word.subword_idx = np.PyArray_DATA(wv.buckets_word[word.index]) + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":59 + * word.subword_idx = np.PyArray_DATA(wv.buckets_word[word.index]) + * + * self.vocab[token] = word # <<<<<<<<<<<<<< + * + * cdef cvocab_t* get_vocab_ptr(self) nogil except *: + */ + __pyx_t_13 = __pyx_convert_string_from_py_std__in_string(__pyx_v_token); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 59, __pyx_L1_error) + (__pyx_v_self->vocab[__pyx_t_13]) = __pyx_v_word; + + /* "gensim/models/word2vec_corpusfile.pyx":44 + * cdef VocabItem word + * + * for py_token, vocab_item in iteritems(wv.vocab): # <<<<<<<<<<<<<< + * token = any2utf8(py_token) + * word.index = vocab_item.index + */ + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":41 + * @cython.final + * cdef class CythonVocab: + * def __init__(self, wv, hs=0, fasttext=0): # <<<<<<<<<<<<<< + * cdef VocabItem word + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonVocab.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_py_token); + __Pyx_XDECREF(__pyx_v_vocab_item); + __Pyx_XDECREF(__pyx_v_token); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":61 + * self.vocab[token] = word + * + * cdef cvocab_t* get_vocab_ptr(self) nogil except *: # <<<<<<<<<<<<<< + * return &self.vocab + * + */ + +static __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_f_6gensim_6models_19word2vec_corpusfile_11CythonVocab_get_vocab_ptr(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_self) { + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_r; + + /* "gensim/models/word2vec_corpusfile.pyx":62 + * + * cdef cvocab_t* get_vocab_ptr(self) nogil except *: + * return &self.vocab # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = (&__pyx_v_self->vocab); + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":61 + * self.vocab[token] = word + * + * cdef cvocab_t* get_vocab_ptr(self) nogil except *: # <<<<<<<<<<<<<< + * return &self.vocab + * + */ + + /* function exit code */ + __pyx_L0:; + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + * def __setstate_cython__(self, __pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_11CythonVocab_2__reduce_cython__[] = "CythonVocab.__reduce_cython__(self)"; 
+static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab_2__reduce_cython__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + * def __setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonVocab.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_11CythonVocab_4__setstate_cython__[] = "CythonVocab.__setstate_cython__(self, __pyx_state)"; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab_4__setstate_cython__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_11CythonVocab_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + 
__Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":4 + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 4, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonVocab.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":65 + * + * + * def rebuild_cython_line_sentence(source, max_sentence_length): # <<<<<<<<<<<<<< + * return CythonLineSentence(source, max_sentence_length=max_sentence_length) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_1rebuild_cython_line_sentence(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_rebuild_cython_line_sentence[] = "rebuild_cython_line_sentence(source, max_sentence_length)"; +static PyMethodDef __pyx_mdef_6gensim_6models_19word2vec_corpusfile_1rebuild_cython_line_sentence = {"rebuild_cython_line_sentence", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_1rebuild_cython_line_sentence, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_rebuild_cython_line_sentence}; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_1rebuild_cython_line_sentence(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_source = 0; + PyObject *__pyx_v_max_sentence_length = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("rebuild_cython_line_sentence (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_source,&__pyx_n_s_max_sentence_length,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_source)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_sentence_length)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("rebuild_cython_line_sentence", 1, 2, 2, 1); __PYX_ERR(1, 65, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, 
values, pos_args, "rebuild_cython_line_sentence") < 0)) __PYX_ERR(1, 65, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_source = values[0]; + __pyx_v_max_sentence_length = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("rebuild_cython_line_sentence", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 65, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.rebuild_cython_line_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_rebuild_cython_line_sentence(__pyx_self, __pyx_v_source, __pyx_v_max_sentence_length); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_rebuild_cython_line_sentence(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_source, PyObject *__pyx_v_max_sentence_length) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("rebuild_cython_line_sentence", 0); + + /* "gensim/models/word2vec_corpusfile.pyx":66 + * + * def rebuild_cython_line_sentence(source, max_sentence_length): + * return CythonLineSentence(source, max_sentence_length=max_sentence_length) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_source); + __Pyx_GIVEREF(__pyx_v_source); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_source); + __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_max_sentence_length, __pyx_v_max_sentence_length) < 0) __PYX_ERR(1, 66, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), __pyx_t_1, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":65 + * + * + * def rebuild_cython_line_sentence(source, max_sentence_length): # <<<<<<<<<<<<<< + * return CythonLineSentence(source, max_sentence_length=max_sentence_length) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.rebuild_cython_line_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":69 + * + * + * cdef bytes to_bytes(key): # <<<<<<<<<<<<<< + * if isinstance(key, bytes): + * return key + */ + +static PyObject *__pyx_f_6gensim_6models_19word2vec_corpusfile_to_bytes(PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject 
*__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("to_bytes", 0); + + /* "gensim/models/word2vec_corpusfile.pyx":70 + * + * cdef bytes to_bytes(key): + * if isinstance(key, bytes): # <<<<<<<<<<<<<< + * return key + * else: + */ + __pyx_t_1 = PyBytes_Check(__pyx_v_key); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "gensim/models/word2vec_corpusfile.pyx":71 + * cdef bytes to_bytes(key): + * if isinstance(key, bytes): + * return key # <<<<<<<<<<<<<< + * else: + * return key.encode('utf8') + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject*)__pyx_v_key)); + __pyx_r = ((PyObject*)__pyx_v_key); + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":70 + * + * cdef bytes to_bytes(key): + * if isinstance(key, bytes): # <<<<<<<<<<<<<< + * return key + * else: + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":73 + * return key + * else: + * return key.encode('utf8') # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_key, __pyx_n_s_encode); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 73, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 73, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PyBytes_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytes", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(1, 73, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "gensim/models/word2vec_corpusfile.pyx":69 + * + * + * cdef bytes to_bytes(key): # <<<<<<<<<<<<<< + * if isinstance(key, bytes): + * return key + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.to_bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":78 + * @cython.final + * cdef class CythonLineSentence: + * def __cinit__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): # <<<<<<<<<<<<<< + * self._thisptr = new FastLineSentence(to_bytes(source), offset) + * + */ + +/* Python wrapper */ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_source = 0; + PyObject *__pyx_v_offset = 0; + CYTHON_UNUSED PyObject *__pyx_v_max_sentence_length = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_source,&__pyx_n_s_offset,&__pyx_n_s_max_sentence_length,0}; + PyObject* values[3] = {0,0,0}; + values[1] = ((PyObject *)__pyx_int_0); + values[2] = ((PyObject *)__pyx_int_10000); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 
0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_source)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset); + if (value) { values[1] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 2: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_sentence_length); + if (value) { values[2] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(1, 78, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_source = values[0]; + __pyx_v_offset = values[1]; + __pyx_v_max_sentence_length = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__cinit__", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 78, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence___cinit__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self), __pyx_v_source, __pyx_v_offset, __pyx_v_max_sentence_length); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence___cinit__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_source, PyObject *__pyx_v_offset, CYTHON_UNUSED PyObject *__pyx_v_max_sentence_length) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + std::string __pyx_t_2; + size_t __pyx_t_3; + FastLineSentence *__pyx_t_4; + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "gensim/models/word2vec_corpusfile.pyx":79 + * cdef class CythonLineSentence: + * def __cinit__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): + * self._thisptr = new FastLineSentence(to_bytes(source), offset) # <<<<<<<<<<<<<< + * + * def __init__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): + */ + __pyx_t_1 = __pyx_f_6gensim_6models_19word2vec_corpusfile_to_bytes(__pyx_v_source); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 79, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __pyx_convert_string_from_py_std__in_string(__pyx_t_1); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 79, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = __Pyx_PyInt_As_size_t(__pyx_v_offset); if (unlikely((__pyx_t_3 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 79, __pyx_L1_error) + try { + __pyx_t_4 = new FastLineSentence(__pyx_t_2, __pyx_t_3); + } catch(...) 
{ + __Pyx_CppExn2PyErr(); + __PYX_ERR(1, 79, __pyx_L1_error) + } + __pyx_v_self->_thisptr = __pyx_t_4; + + /* "gensim/models/word2vec_corpusfile.pyx":78 + * @cython.final + * cdef class CythonLineSentence: + * def __cinit__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): # <<<<<<<<<<<<<< + * self._thisptr = new FastLineSentence(to_bytes(source), offset) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":81 + * self._thisptr = new FastLineSentence(to_bytes(source), offset) + * + * def __init__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): # <<<<<<<<<<<<<< + * self.source = to_bytes(source) + * self.offset = offset + */ + +/* Python wrapper */ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_3__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_3__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_source = 0; + PyObject *__pyx_v_offset = 0; + PyObject *__pyx_v_max_sentence_length = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_source,&__pyx_n_s_offset,&__pyx_n_s_max_sentence_length,0}; + PyObject* values[3] = {0,0,0}; + values[1] = ((PyObject *)__pyx_int_0); + values[2] = ((PyObject *)__pyx_int_10000); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_source)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset); + if (value) { values[1] = value; kw_args--; } + } + CYTHON_FALLTHROUGH; + case 2: + if (kw_args > 0) { + PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_max_sentence_length); + if (value) { values[2] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(1, 81, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_source = values[0]; + __pyx_v_offset = values[1]; + __pyx_v_max_sentence_length = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); 
__PYX_ERR(1, 81, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_2__init__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self), __pyx_v_source, __pyx_v_offset, __pyx_v_max_sentence_length); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_2__init__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_source, PyObject *__pyx_v_offset, PyObject *__pyx_v_max_sentence_length) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + size_t __pyx_t_2; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "gensim/models/word2vec_corpusfile.pyx":82 + * + * def __init__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): + * self.source = to_bytes(source) # <<<<<<<<<<<<<< + * self.offset = offset + * self.max_sentence_length = max_sentence_length + */ + __pyx_t_1 = __pyx_f_6gensim_6models_19word2vec_corpusfile_to_bytes(__pyx_v_source); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 82, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->source); + __Pyx_DECREF(__pyx_v_self->source); + __pyx_v_self->source = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":83 + * def __init__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): + * self.source = to_bytes(source) + * self.offset = offset # <<<<<<<<<<<<<< + * self.max_sentence_length = max_sentence_length + * self.max_words_in_batch = max_sentence_length + */ + __pyx_t_2 = __Pyx_PyInt_As_size_t(__pyx_v_offset); if (unlikely((__pyx_t_2 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 83, __pyx_L1_error) + __pyx_v_self->offset = __pyx_t_2; + + /* "gensim/models/word2vec_corpusfile.pyx":84 + * self.source = to_bytes(source) + * self.offset = offset + * self.max_sentence_length = max_sentence_length # <<<<<<<<<<<<<< + * self.max_words_in_batch = max_sentence_length + * + */ + __pyx_t_2 = __Pyx_PyInt_As_size_t(__pyx_v_max_sentence_length); if (unlikely((__pyx_t_2 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 84, __pyx_L1_error) + __pyx_v_self->max_sentence_length = __pyx_t_2; + + /* "gensim/models/word2vec_corpusfile.pyx":85 + * self.offset = offset + * self.max_sentence_length = max_sentence_length + * self.max_words_in_batch = max_sentence_length # <<<<<<<<<<<<<< + * + * def __dealloc__(self): + */ + __pyx_t_2 = __Pyx_PyInt_As_size_t(__pyx_v_max_sentence_length); if (unlikely((__pyx_t_2 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(1, 85, __pyx_L1_error) + __pyx_v_self->max_words_in_batch = __pyx_t_2; + + /* "gensim/models/word2vec_corpusfile.pyx":81 + * self._thisptr = new FastLineSentence(to_bytes(source), offset) + * + * def __init__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): # <<<<<<<<<<<<<< + * self.source = to_bytes(source) + * self.offset = offset + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":87 + * self.max_words_in_batch = max_sentence_length + * + * def __dealloc__(self): # <<<<<<<<<<<<<< + * if self._thisptr != NULL: + * del self._thisptr + */ + +/* Python wrapper */ +static void __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_5__dealloc__(PyObject *__pyx_v_self); /*proto*/ +static void __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_5__dealloc__(PyObject *__pyx_v_self) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); + __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_4__dealloc__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +static void __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_4__dealloc__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__dealloc__", 0); + + /* "gensim/models/word2vec_corpusfile.pyx":88 + * + * def __dealloc__(self): + * if self._thisptr != NULL: # <<<<<<<<<<<<<< + * del self._thisptr + * + */ + __pyx_t_1 = ((__pyx_v_self->_thisptr != NULL) != 0); + if (__pyx_t_1) { + + /* "gensim/models/word2vec_corpusfile.pyx":89 + * def __dealloc__(self): + * if self._thisptr != NULL: + * del self._thisptr # <<<<<<<<<<<<<< + * + * cpdef bool_t is_eof(self) nogil: + */ + delete __pyx_v_self->_thisptr; + + /* "gensim/models/word2vec_corpusfile.pyx":88 + * + * def __dealloc__(self): + * if self._thisptr != NULL: # <<<<<<<<<<<<<< + * del self._thisptr + * + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":87 + * self.max_words_in_batch = max_sentence_length + * + * def __dealloc__(self): # <<<<<<<<<<<<<< + * if self._thisptr != NULL: + * del self._thisptr + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "gensim/models/word2vec_corpusfile.pyx":91 + * del self._thisptr + * + * cpdef bool_t is_eof(self) nogil: # <<<<<<<<<<<<<< + * return self._thisptr.IsEof() + * + */ + +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_7is_eof(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static bool __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch) { + bool __pyx_r; + + /* "gensim/models/word2vec_corpusfile.pyx":92 + * + * cpdef bool_t is_eof(self) nogil: + * return self._thisptr.IsEof() # <<<<<<<<<<<<<< + * + * cpdef vector[string] read_sentence(self) nogil except *: + */ + __pyx_r = __pyx_v_self->_thisptr->IsEof(); + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":91 + * del self._thisptr + * + * cpdef bool_t is_eof(self) nogil: # <<<<<<<<<<<<<< + * return self._thisptr.IsEof() + * + */ + + /* function exit code */ + __pyx_L0:; + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_7is_eof(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6is_eof[] = "CythonLineSentence.is_eof(self) -> bool"; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_7is_eof(PyObject *__pyx_v_self, 
CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("is_eof (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6is_eof(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6is_eof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("is_eof", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof(__pyx_v_self, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 91, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.is_eof", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":94 + * return self._thisptr.IsEof() + * + * cpdef vector[string] read_sentence(self) nogil except *: # <<<<<<<<<<<<<< + * return self._thisptr.ReadSentence() + * + */ + +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_9read_sentence(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static std::vector __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_read_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch) { + std::vector __pyx_r; + std::vector __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":95 + * + * cpdef vector[string] read_sentence(self) nogil except *: + * return self._thisptr.ReadSentence() # <<<<<<<<<<<<<< + * + * cpdef vector[vector[string]] _read_chunked_sentence(self) nogil except *: + */ + try { + __pyx_t_1 = __pyx_v_self->_thisptr->ReadSentence(); + } catch(...) 
{ + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_CppExn2PyErr(); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + __PYX_ERR(1, 95, __pyx_L1_error) + } + __pyx_r = __pyx_t_1; + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":94 + * return self._thisptr.IsEof() + * + * cpdef vector[string] read_sentence(self) nogil except *: # <<<<<<<<<<<<<< + * return self._thisptr.ReadSentence() + * + */ + + /* function exit code */ + __pyx_L1_error:; + { + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.read_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + } + __Pyx_pretend_to_initialize(&__pyx_r); + __pyx_L0:; + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_9read_sentence(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_8read_sentence[] = "CythonLineSentence.read_sentence(self) -> vector[string]"; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_9read_sentence(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("read_sentence (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_8read_sentence(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_8read_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + std::vector __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("read_sentence", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_read_sentence(__pyx_v_self, 1); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 94, __pyx_L1_error) + __pyx_t_2 = __pyx_convert_vector_to_py_std_3a__3a_string(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 94, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.read_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":97 + * return self._thisptr.ReadSentence() + * + * cpdef vector[vector[string]] _read_chunked_sentence(self) nogil except *: # <<<<<<<<<<<<<< + * cdef vector[string] sent = self.read_sentence() + * return self._chunk_sentence(sent) + */ + +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_11_read_chunked_sentence(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static std::vector > __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__read_chunked_sentence(struct 
__pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch) { + std::vector __pyx_v_sent; + std::vector > __pyx_r; + std::vector __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":98 + * + * cpdef vector[vector[string]] _read_chunked_sentence(self) nogil except *: + * cdef vector[string] sent = self.read_sentence() # <<<<<<<<<<<<<< + * return self._chunk_sentence(sent) + * + */ + __pyx_t_1 = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_read_sentence(__pyx_v_self, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(1, 98, __pyx_L1_error) + __pyx_v_sent = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":99 + * cpdef vector[vector[string]] _read_chunked_sentence(self) nogil except *: + * cdef vector[string] sent = self.read_sentence() + * return self._chunk_sentence(sent) # <<<<<<<<<<<<<< + * + * cpdef vector[vector[string]] _chunk_sentence(self, vector[string] sent) nogil: + */ + __pyx_r = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__chunk_sentence(__pyx_v_self, __pyx_v_sent, 0); + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":97 + * return self._thisptr.ReadSentence() + * + * cpdef vector[vector[string]] _read_chunked_sentence(self) nogil except *: # <<<<<<<<<<<<<< + * cdef vector[string] sent = self.read_sentence() + * return self._chunk_sentence(sent) + */ + + /* function exit code */ + __pyx_L1_error:; + { + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence._read_chunked_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + } + __Pyx_pretend_to_initialize(&__pyx_r); + __pyx_L0:; + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_11_read_chunked_sentence(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_10_read_chunked_sentence[] = "CythonLineSentence._read_chunked_sentence(self) -> vector[vector[string]]"; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_11_read_chunked_sentence(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_read_chunked_sentence (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_10_read_chunked_sentence(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_10_read_chunked_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + std::vector > __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("_read_chunked_sentence", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__read_chunked_sentence(__pyx_v_self, 1); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 97, __pyx_L1_error) + __pyx_t_2 = __pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___(__pyx_t_1); if (unlikely(!__pyx_t_2)) 
__PYX_ERR(1, 97, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence._read_chunked_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":101 + * return self._chunk_sentence(sent) + * + * cpdef vector[vector[string]] _chunk_sentence(self, vector[string] sent) nogil: # <<<<<<<<<<<<<< + * cdef vector[vector[string]] res + * cdef vector[string] chunk + */ + +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_13_chunk_sentence(PyObject *__pyx_v_self, PyObject *__pyx_arg_sent); /*proto*/ +static std::vector > __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__chunk_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, std::vector __pyx_v_sent, CYTHON_UNUSED int __pyx_skip_dispatch) { + std::vector > __pyx_v_res; + std::vector __pyx_v_chunk; + size_t __pyx_v_cur_idx; + std::vector ::size_type __pyx_v_i; + std::vector > __pyx_r; + int __pyx_t_1; + std::vector ::size_type __pyx_t_2; + size_t __pyx_t_3; + std::vector ::size_type __pyx_t_4; + std::vector ::size_type __pyx_t_5; + + /* "gensim/models/word2vec_corpusfile.pyx":104 + * cdef vector[vector[string]] res + * cdef vector[string] chunk + * cdef size_t cur_idx = 0 # <<<<<<<<<<<<<< + * + * if sent.size() > self.max_sentence_length: + */ + __pyx_v_cur_idx = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":106 + * cdef size_t cur_idx = 0 + * + * if sent.size() > self.max_sentence_length: # <<<<<<<<<<<<<< + * while cur_idx < sent.size(): + * chunk.clear() + */ + __pyx_t_1 = ((__pyx_v_sent.size() > __pyx_v_self->max_sentence_length) != 0); + if (__pyx_t_1) { + + /* "gensim/models/word2vec_corpusfile.pyx":107 + * + * if sent.size() > self.max_sentence_length: + * while cur_idx < sent.size(): # <<<<<<<<<<<<<< + * chunk.clear() + * for i in range(cur_idx, min(cur_idx + self.max_sentence_length, sent.size())): + */ + while (1) { + __pyx_t_1 = ((__pyx_v_cur_idx < __pyx_v_sent.size()) != 0); + if (!__pyx_t_1) break; + + /* "gensim/models/word2vec_corpusfile.pyx":108 + * if sent.size() > self.max_sentence_length: + * while cur_idx < sent.size(): + * chunk.clear() # <<<<<<<<<<<<<< + * for i in range(cur_idx, min(cur_idx + self.max_sentence_length, sent.size())): + * chunk.push_back(sent[i]) + */ + __pyx_v_chunk.clear(); + + /* "gensim/models/word2vec_corpusfile.pyx":109 + * while cur_idx < sent.size(): + * chunk.clear() + * for i in range(cur_idx, min(cur_idx + self.max_sentence_length, sent.size())): # <<<<<<<<<<<<<< + * chunk.push_back(sent[i]) + * + */ + __pyx_t_2 = __pyx_v_sent.size(); + __pyx_t_3 = (__pyx_v_cur_idx + __pyx_v_self->max_sentence_length); + if (((__pyx_t_2 < __pyx_t_3) != 0)) { + __pyx_t_4 = __pyx_t_2; + } else { + __pyx_t_4 = __pyx_t_3; + } + __pyx_t_2 = __pyx_t_4; + __pyx_t_4 = __pyx_t_2; + for (__pyx_t_5 = __pyx_v_cur_idx; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { + __pyx_v_i = __pyx_t_5; + + /* "gensim/models/word2vec_corpusfile.pyx":110 + * chunk.clear() + * for i in range(cur_idx, min(cur_idx + self.max_sentence_length, sent.size())): + * chunk.push_back(sent[i]) # <<<<<<<<<<<<<< + * + * res.push_back(chunk) + */ + try { + __pyx_v_chunk.push_back((__pyx_v_sent[__pyx_v_i])); + } catch(...) 
{ + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_CppExn2PyErr(); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + __PYX_ERR(1, 110, __pyx_L1_error) + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":112 + * chunk.push_back(sent[i]) + * + * res.push_back(chunk) # <<<<<<<<<<<<<< + * cur_idx += chunk.size() + * else: + */ + try { + __pyx_v_res.push_back(__pyx_v_chunk); + } catch(...) { + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_CppExn2PyErr(); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + __PYX_ERR(1, 112, __pyx_L1_error) + } + + /* "gensim/models/word2vec_corpusfile.pyx":113 + * + * res.push_back(chunk) + * cur_idx += chunk.size() # <<<<<<<<<<<<<< + * else: + * res.push_back(sent) + */ + __pyx_v_cur_idx = (__pyx_v_cur_idx + __pyx_v_chunk.size()); + } + + /* "gensim/models/word2vec_corpusfile.pyx":106 + * cdef size_t cur_idx = 0 + * + * if sent.size() > self.max_sentence_length: # <<<<<<<<<<<<<< + * while cur_idx < sent.size(): + * chunk.clear() + */ + goto __pyx_L3; + } + + /* "gensim/models/word2vec_corpusfile.pyx":115 + * cur_idx += chunk.size() + * else: + * res.push_back(sent) # <<<<<<<<<<<<<< + * + * return res + */ + /*else*/ { + try { + __pyx_v_res.push_back(__pyx_v_sent); + } catch(...) { + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_CppExn2PyErr(); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + __PYX_ERR(1, 115, __pyx_L1_error) + } + } + __pyx_L3:; + + /* "gensim/models/word2vec_corpusfile.pyx":117 + * res.push_back(sent) + * + * return res # <<<<<<<<<<<<<< + * + * cpdef void reset(self) nogil: + */ + __pyx_r = __pyx_v_res; + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":101 + * return self._chunk_sentence(sent) + * + * cpdef vector[vector[string]] _chunk_sentence(self, vector[string] sent) nogil: # <<<<<<<<<<<<<< + * cdef vector[vector[string]] res + * cdef vector[string] chunk + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_WriteUnraisable("gensim.models.word2vec_corpusfile.CythonLineSentence._chunk_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 1); + __Pyx_pretend_to_initialize(&__pyx_r); + __pyx_L0:; + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_13_chunk_sentence(PyObject *__pyx_v_self, PyObject *__pyx_arg_sent); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_12_chunk_sentence[] = "CythonLineSentence._chunk_sentence(self, vector[string] sent) -> vector[vector[string]]"; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_13_chunk_sentence(PyObject *__pyx_v_self, PyObject *__pyx_arg_sent) { + std::vector __pyx_v_sent; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_chunk_sentence (wrapper)", 0); + assert(__pyx_arg_sent); { + __pyx_v_sent = __pyx_convert_vector_from_py_std_3a__3a_string(__pyx_arg_sent); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 101, __pyx_L3_error) + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence._chunk_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = 
__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_12_chunk_sentence(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self), ((std::vector )__pyx_v_sent)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_12_chunk_sentence(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, std::vector __pyx_v_sent) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("_chunk_sentence", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___(__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__chunk_sentence(__pyx_v_self, __pyx_v_sent, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence._chunk_sentence", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":119 + * return res + * + * cpdef void reset(self) nogil: # <<<<<<<<<<<<<< + * self._thisptr.Reset() + * + */ + +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_15reset(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static void __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_reset(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch) { + + /* "gensim/models/word2vec_corpusfile.pyx":120 + * + * cpdef void reset(self) nogil: + * self._thisptr.Reset() # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __pyx_v_self->_thisptr->Reset(); + + /* "gensim/models/word2vec_corpusfile.pyx":119 + * return res + * + * cpdef void reset(self) nogil: # <<<<<<<<<<<<<< + * self._thisptr.Reset() + * + */ + + /* function exit code */ +} + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_15reset(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_14reset[] = "CythonLineSentence.reset(self) -> void"; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_15reset(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("reset (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_14reset(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_14reset(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("reset", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = 
__Pyx_void_to_None(__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_reset(__pyx_v_self, 1)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 119, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.reset", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static PyObject *__pyx_gb_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ + +/* "gensim/models/word2vec_corpusfile.pyx":122 + * self._thisptr.Reset() + * + * def __iter__(self): # <<<<<<<<<<<<<< + * self.reset() + * while not self.is_eof(): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_17__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_17__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_16__iter__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_16__iter__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *__pyx_cur_scope; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__", 0); + __pyx_cur_scope = (struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *)__pyx_tp_new_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__(__pyx_ptype_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__, __pyx_empty_tuple, NULL); + if (unlikely(!__pyx_cur_scope)) { + __pyx_cur_scope = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *)Py_None); + __Pyx_INCREF(Py_None); + __PYX_ERR(1, 122, __pyx_L1_error) + } else { + __Pyx_GOTREF(__pyx_cur_scope); + } + __pyx_cur_scope->__pyx_v_self = __pyx_v_self; + __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); + __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); + { + __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18generator, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_iter, __pyx_n_s_CythonLineSentence___iter, __pyx_n_s_gensim_models_word2vec_corpusfil); if (unlikely(!gen)) __PYX_ERR(1, 122, __pyx_L1_error) + __Pyx_DECREF(__pyx_cur_scope); + __Pyx_RefNannyFinishContext(); + return (PyObject *) gen; + } + + /* function exit code */ + __pyx_L1_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_gb_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ +{ + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *__pyx_cur_scope = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *)__pyx_generator->closure); + PyObject *__pyx_r = NULL; + int __pyx_t_1; + std::vector > __pyx_t_2; + std::vector > ::iterator __pyx_t_3; + std::vector __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__", 0); + switch (__pyx_generator->resume_label) { + case 0: goto __pyx_L3_first_run; + case 1: goto __pyx_L9_resume_from_yield; + default: /* CPython raises the right error here */ + __Pyx_RefNannyFinishContext(); + return NULL; + } + __pyx_L3_first_run:; + if (unlikely(!__pyx_sent_value)) __PYX_ERR(1, 122, __pyx_L1_error) + + /* "gensim/models/word2vec_corpusfile.pyx":123 + * + * def __iter__(self): + * self.reset() # <<<<<<<<<<<<<< + * while not self.is_eof(): + * chunked_sentence = self._read_chunked_sentence() + */ + __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_reset(__pyx_cur_scope->__pyx_v_self, 0); + + /* "gensim/models/word2vec_corpusfile.pyx":124 + * def __iter__(self): + * self.reset() + * while not self.is_eof(): # <<<<<<<<<<<<<< + * chunked_sentence = self._read_chunked_sentence() + * for chunk in chunked_sentence: + */ + while (1) { + __pyx_t_1 = ((!(__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof(__pyx_cur_scope->__pyx_v_self, 0) != 0)) != 0); + if (!__pyx_t_1) break; + + /* "gensim/models/word2vec_corpusfile.pyx":125 + * self.reset() + * while not self.is_eof(): + * chunked_sentence = self._read_chunked_sentence() # <<<<<<<<<<<<<< + * for chunk in chunked_sentence: + * if not chunk.empty(): + */ + __pyx_t_2 = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__read_chunked_sentence(__pyx_cur_scope->__pyx_v_self, 0); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 125, __pyx_L1_error) + __pyx_cur_scope->__pyx_v_chunked_sentence = __pyx_t_2; + + /* "gensim/models/word2vec_corpusfile.pyx":126 + * while not self.is_eof(): + * chunked_sentence = self._read_chunked_sentence() + * for chunk in chunked_sentence: # <<<<<<<<<<<<<< + * if not chunk.empty(): + * yield chunk + */ + __pyx_t_3 = __pyx_cur_scope->__pyx_v_chunked_sentence.begin(); + for (;;) { + if (!(__pyx_t_3 != __pyx_cur_scope->__pyx_v_chunked_sentence.end())) break; + __pyx_t_4 = *__pyx_t_3; + ++__pyx_t_3; + __pyx_cur_scope->__pyx_v_chunk = __pyx_t_4; + + /* "gensim/models/word2vec_corpusfile.pyx":127 + * chunked_sentence = self._read_chunked_sentence() + * for chunk in chunked_sentence: + * if not chunk.empty(): # <<<<<<<<<<<<<< + * yield chunk + * + */ + __pyx_t_1 = ((!(__pyx_cur_scope->__pyx_v_chunk.empty() != 0)) != 0); + if (__pyx_t_1) { + + /* "gensim/models/word2vec_corpusfile.pyx":128 + * for chunk in chunked_sentence: + * if not chunk.empty(): + * yield chunk # <<<<<<<<<<<<<< + * + * def __reduce__(self): + */ + __pyx_t_5 = __pyx_convert_vector_to_py_std_3a__3a_string(__pyx_cur_scope->__pyx_v_chunk); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 128, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + __pyx_cur_scope->__pyx_t_0 = __pyx_t_3; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + __Pyx_Coroutine_ResetAndClearException(__pyx_generator); + /* return 
from generator, yielding value */ + __pyx_generator->resume_label = 1; + return __pyx_r; + __pyx_L9_resume_from_yield:; + __pyx_t_3 = __pyx_cur_scope->__pyx_t_0; + if (unlikely(!__pyx_sent_value)) __PYX_ERR(1, 128, __pyx_L1_error) + + /* "gensim/models/word2vec_corpusfile.pyx":127 + * chunked_sentence = self._read_chunked_sentence() + * for chunk in chunked_sentence: + * if not chunk.empty(): # <<<<<<<<<<<<<< + * yield chunk + * + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":126 + * while not self.is_eof(): + * chunked_sentence = self._read_chunked_sentence() + * for chunk in chunked_sentence: # <<<<<<<<<<<<<< + * if not chunk.empty(): + * yield chunk + */ + } + } + CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope); + + /* "gensim/models/word2vec_corpusfile.pyx":122 + * self._thisptr.Reset() + * + * def __iter__(self): # <<<<<<<<<<<<<< + * self.reset() + * while not self.is_eof(): + */ + + /* function exit code */ + PyErr_SetNone(PyExc_StopIteration); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_L0:; + __Pyx_XDECREF(__pyx_r); __pyx_r = 0; + __Pyx_Coroutine_ResetAndClearException(__pyx_generator); + __pyx_generator->resume_label = -1; + __Pyx_Coroutine_clear((PyObject*)__pyx_generator); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":130 + * yield chunk + * + * def __reduce__(self): # <<<<<<<<<<<<<< + * # This function helps pickle to correctly serialize objects of this class. + * return rebuild_cython_line_sentence, (self.source, self.max_sentence_length) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_20__reduce__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19__reduce__[] = "CythonLineSentence.__reduce__(self)"; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_20__reduce__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19__reduce__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19__reduce__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__reduce__", 0); + + /* "gensim/models/word2vec_corpusfile.pyx":132 + * def __reduce__(self): + * # This function helps pickle to correctly serialize objects of this class. 
+ * return rebuild_cython_line_sentence, (self.source, self.max_sentence_length) # <<<<<<<<<<<<<< + * + * cpdef vector[vector[string]] next_batch(self) nogil except *: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_rebuild_cython_line_sentence); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_self->max_sentence_length); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_self->source); + __Pyx_GIVEREF(__pyx_v_self->source); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->source); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); + __pyx_t_2 = 0; + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_3); + __pyx_t_1 = 0; + __pyx_t_3 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":130 + * yield chunk + * + * def __reduce__(self): # <<<<<<<<<<<<<< + * # This function helps pickle to correctly serialize objects of this class. + * return rebuild_cython_line_sentence, (self.source, self.max_sentence_length) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.__reduce__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":134 + * return rebuild_cython_line_sentence, (self.source, self.max_sentence_length) + * + * cpdef vector[vector[string]] next_batch(self) nogil except *: # <<<<<<<<<<<<<< + * cdef: + * vector[vector[string]] job_batch + */ + +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_22next_batch(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static std::vector > __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_next_batch(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, CYTHON_UNUSED int __pyx_skip_dispatch) { + std::vector > __pyx_v_job_batch; + std::vector > __pyx_v_chunked_sentence; + std::vector __pyx_v_data; + size_t __pyx_v_batch_size; + size_t __pyx_v_last_idx; + size_t __pyx_v_tmp; + int __pyx_v_idx; + std::vector __pyx_v_sent; + std::vector __pyx_v_chunk; + std::vector > ::size_type __pyx_v_i; + std::vector > __pyx_r; + int __pyx_t_1; + std::vector > __pyx_t_2; + std::vector > ::iterator __pyx_t_3; + std::vector __pyx_t_4; + int __pyx_t_5; + std::vector > ::size_type __pyx_t_6; + std::vector > ::size_type __pyx_t_7; + std::vector > ::size_type __pyx_t_8; + + /* "gensim/models/word2vec_corpusfile.pyx":139 + * vector[vector[string]] chunked_sentence + * vector[string] data + * size_t batch_size = 0 # <<<<<<<<<<<<<< + * size_t last_idx = 0 + * size_t tmp = 0 + */ + __pyx_v_batch_size = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":140 + * vector[string] data + * size_t batch_size = 0 + * size_t last_idx = 0 # <<<<<<<<<<<<<< + * size_t tmp = 0 + * int idx + */ + __pyx_v_last_idx = 0; + + /* 
"gensim/models/word2vec_corpusfile.pyx":141 + * size_t batch_size = 0 + * size_t last_idx = 0 + * size_t tmp = 0 # <<<<<<<<<<<<<< + * int idx + * + */ + __pyx_v_tmp = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":145 + * + * # Try to read data from previous calls which was not returned + * if not self.buf_data.empty(): # <<<<<<<<<<<<<< + * job_batch = self.buf_data + * self.buf_data.clear() + */ + __pyx_t_1 = ((!(__pyx_v_self->buf_data.empty() != 0)) != 0); + if (__pyx_t_1) { + + /* "gensim/models/word2vec_corpusfile.pyx":146 + * # Try to read data from previous calls which was not returned + * if not self.buf_data.empty(): + * job_batch = self.buf_data # <<<<<<<<<<<<<< + * self.buf_data.clear() + * + */ + __pyx_t_2 = __pyx_v_self->buf_data; + __pyx_v_job_batch = __pyx_t_2; + + /* "gensim/models/word2vec_corpusfile.pyx":147 + * if not self.buf_data.empty(): + * job_batch = self.buf_data + * self.buf_data.clear() # <<<<<<<<<<<<<< + * + * for sent in job_batch: + */ + __pyx_v_self->buf_data.clear(); + + /* "gensim/models/word2vec_corpusfile.pyx":149 + * self.buf_data.clear() + * + * for sent in job_batch: # <<<<<<<<<<<<<< + * batch_size += sent.size() + * + */ + __pyx_t_3 = __pyx_v_job_batch.begin(); + for (;;) { + if (!(__pyx_t_3 != __pyx_v_job_batch.end())) break; + __pyx_t_4 = *__pyx_t_3; + ++__pyx_t_3; + __pyx_v_sent = __pyx_t_4; + + /* "gensim/models/word2vec_corpusfile.pyx":150 + * + * for sent in job_batch: + * batch_size += sent.size() # <<<<<<<<<<<<<< + * + * while not self.is_eof() and batch_size <= self.max_words_in_batch: + */ + __pyx_v_batch_size = (__pyx_v_batch_size + __pyx_v_sent.size()); + + /* "gensim/models/word2vec_corpusfile.pyx":149 + * self.buf_data.clear() + * + * for sent in job_batch: # <<<<<<<<<<<<<< + * batch_size += sent.size() + * + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":145 + * + * # Try to read data from previous calls which was not returned + * if not self.buf_data.empty(): # <<<<<<<<<<<<<< + * job_batch = self.buf_data + * self.buf_data.clear() + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":152 + * batch_size += sent.size() + * + * while not self.is_eof() and batch_size <= self.max_words_in_batch: # <<<<<<<<<<<<<< + * data = self.read_sentence() + * + */ + while (1) { + __pyx_t_5 = ((!(__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof(__pyx_v_self, 0) != 0)) != 0); + if (__pyx_t_5) { + } else { + __pyx_t_1 = __pyx_t_5; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_5 = ((__pyx_v_batch_size <= __pyx_v_self->max_words_in_batch) != 0); + __pyx_t_1 = __pyx_t_5; + __pyx_L8_bool_binop_done:; + if (!__pyx_t_1) break; + + /* "gensim/models/word2vec_corpusfile.pyx":153 + * + * while not self.is_eof() and batch_size <= self.max_words_in_batch: + * data = self.read_sentence() # <<<<<<<<<<<<<< + * + * chunked_sentence = self._chunk_sentence(data) + */ + __pyx_t_4 = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_read_sentence(__pyx_v_self, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(1, 153, __pyx_L1_error) + __pyx_v_data = __pyx_t_4; + + /* "gensim/models/word2vec_corpusfile.pyx":155 + * data = self.read_sentence() + * + * chunked_sentence = self._chunk_sentence(data) # <<<<<<<<<<<<<< + * for chunk in chunked_sentence: + * job_batch.push_back(chunk) + */ + __pyx_v_chunked_sentence = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__chunk_sentence(__pyx_v_self, __pyx_v_data, 0); + + /* "gensim/models/word2vec_corpusfile.pyx":156 + * + * chunked_sentence = 
self._chunk_sentence(data) + * for chunk in chunked_sentence: # <<<<<<<<<<<<<< + * job_batch.push_back(chunk) + * batch_size += chunk.size() + */ + __pyx_t_3 = __pyx_v_chunked_sentence.begin(); + for (;;) { + if (!(__pyx_t_3 != __pyx_v_chunked_sentence.end())) break; + __pyx_t_4 = *__pyx_t_3; + ++__pyx_t_3; + __pyx_v_chunk = __pyx_t_4; + + /* "gensim/models/word2vec_corpusfile.pyx":157 + * chunked_sentence = self._chunk_sentence(data) + * for chunk in chunked_sentence: + * job_batch.push_back(chunk) # <<<<<<<<<<<<<< + * batch_size += chunk.size() + * + */ + try { + __pyx_v_job_batch.push_back(__pyx_v_chunk); + } catch(...) { + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_CppExn2PyErr(); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + __PYX_ERR(1, 157, __pyx_L1_error) + } + + /* "gensim/models/word2vec_corpusfile.pyx":158 + * for chunk in chunked_sentence: + * job_batch.push_back(chunk) + * batch_size += chunk.size() # <<<<<<<<<<<<<< + * + * if batch_size > self.max_words_in_batch: + */ + __pyx_v_batch_size = (__pyx_v_batch_size + __pyx_v_chunk.size()); + + /* "gensim/models/word2vec_corpusfile.pyx":156 + * + * chunked_sentence = self._chunk_sentence(data) + * for chunk in chunked_sentence: # <<<<<<<<<<<<<< + * job_batch.push_back(chunk) + * batch_size += chunk.size() + */ + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":160 + * batch_size += chunk.size() + * + * if batch_size > self.max_words_in_batch: # <<<<<<<<<<<<<< + * # Save data which doesn't fit in batch in order to return it later. + * self.buf_data.clear() + */ + __pyx_t_1 = ((__pyx_v_batch_size > __pyx_v_self->max_words_in_batch) != 0); + if (__pyx_t_1) { + + /* "gensim/models/word2vec_corpusfile.pyx":162 + * if batch_size > self.max_words_in_batch: + * # Save data which doesn't fit in batch in order to return it later. 
+ * self.buf_data.clear() # <<<<<<<<<<<<<< + * + * tmp = batch_size + */ + __pyx_v_self->buf_data.clear(); + + /* "gensim/models/word2vec_corpusfile.pyx":164 + * self.buf_data.clear() + * + * tmp = batch_size # <<<<<<<<<<<<<< + * idx = job_batch.size() - 1 + * while idx >= 0: + */ + __pyx_v_tmp = __pyx_v_batch_size; + + /* "gensim/models/word2vec_corpusfile.pyx":165 + * + * tmp = batch_size + * idx = job_batch.size() - 1 # <<<<<<<<<<<<<< + * while idx >= 0: + * if tmp - job_batch[idx].size() <= self.max_words_in_batch: + */ + __pyx_v_idx = (__pyx_v_job_batch.size() - 1); + + /* "gensim/models/word2vec_corpusfile.pyx":166 + * tmp = batch_size + * idx = job_batch.size() - 1 + * while idx >= 0: # <<<<<<<<<<<<<< + * if tmp - job_batch[idx].size() <= self.max_words_in_batch: + * last_idx = idx + 1 + */ + while (1) { + __pyx_t_1 = ((__pyx_v_idx >= 0) != 0); + if (!__pyx_t_1) break; + + /* "gensim/models/word2vec_corpusfile.pyx":167 + * idx = job_batch.size() - 1 + * while idx >= 0: + * if tmp - job_batch[idx].size() <= self.max_words_in_batch: # <<<<<<<<<<<<<< + * last_idx = idx + 1 + * break + */ + __pyx_t_1 = (((__pyx_v_tmp - (__pyx_v_job_batch[__pyx_v_idx]).size()) <= __pyx_v_self->max_words_in_batch) != 0); + if (__pyx_t_1) { + + /* "gensim/models/word2vec_corpusfile.pyx":168 + * while idx >= 0: + * if tmp - job_batch[idx].size() <= self.max_words_in_batch: + * last_idx = idx + 1 # <<<<<<<<<<<<<< + * break + * else: + */ + __pyx_v_last_idx = (__pyx_v_idx + 1); + + /* "gensim/models/word2vec_corpusfile.pyx":169 + * if tmp - job_batch[idx].size() <= self.max_words_in_batch: + * last_idx = idx + 1 + * break # <<<<<<<<<<<<<< + * else: + * tmp -= job_batch[idx].size() + */ + goto __pyx_L14_break; + + /* "gensim/models/word2vec_corpusfile.pyx":167 + * idx = job_batch.size() - 1 + * while idx >= 0: + * if tmp - job_batch[idx].size() <= self.max_words_in_batch: # <<<<<<<<<<<<<< + * last_idx = idx + 1 + * break + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":171 + * break + * else: + * tmp -= job_batch[idx].size() # <<<<<<<<<<<<<< + * + * idx -= 1 + */ + /*else*/ { + __pyx_v_tmp = (__pyx_v_tmp - (__pyx_v_job_batch[__pyx_v_idx]).size()); + } + + /* "gensim/models/word2vec_corpusfile.pyx":173 + * tmp -= job_batch[idx].size() + * + * idx -= 1 # <<<<<<<<<<<<<< + * + * for i in range(last_idx, job_batch.size()): + */ + __pyx_v_idx = (__pyx_v_idx - 1); + } + __pyx_L14_break:; + + /* "gensim/models/word2vec_corpusfile.pyx":175 + * idx -= 1 + * + * for i in range(last_idx, job_batch.size()): # <<<<<<<<<<<<<< + * self.buf_data.push_back(job_batch[i]) + * job_batch.resize(last_idx) + */ + __pyx_t_6 = __pyx_v_job_batch.size(); + __pyx_t_7 = __pyx_t_6; + for (__pyx_t_8 = __pyx_v_last_idx; __pyx_t_8 < __pyx_t_7; __pyx_t_8+=1) { + __pyx_v_i = __pyx_t_8; + + /* "gensim/models/word2vec_corpusfile.pyx":176 + * + * for i in range(last_idx, job_batch.size()): + * self.buf_data.push_back(job_batch[i]) # <<<<<<<<<<<<<< + * job_batch.resize(last_idx) + * + */ + try { + __pyx_v_self->buf_data.push_back((__pyx_v_job_batch[__pyx_v_i])); + } catch(...) 
{ + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_CppExn2PyErr(); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + __PYX_ERR(1, 176, __pyx_L1_error) + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":177 + * for i in range(last_idx, job_batch.size()): + * self.buf_data.push_back(job_batch[i]) + * job_batch.resize(last_idx) # <<<<<<<<<<<<<< + * + * return job_batch + */ + try { + __pyx_v_job_batch.resize(__pyx_v_last_idx); + } catch(...) { + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_CppExn2PyErr(); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + __PYX_ERR(1, 177, __pyx_L1_error) + } + + /* "gensim/models/word2vec_corpusfile.pyx":160 + * batch_size += chunk.size() + * + * if batch_size > self.max_words_in_batch: # <<<<<<<<<<<<<< + * # Save data which doesn't fit in batch in order to return it later. + * self.buf_data.clear() + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":179 + * job_batch.resize(last_idx) + * + * return job_batch # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_job_batch; + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":134 + * return rebuild_cython_line_sentence, (self.source, self.max_sentence_length) + * + * cpdef vector[vector[string]] next_batch(self) nogil except *: # <<<<<<<<<<<<<< + * cdef: + * vector[vector[string]] job_batch + */ + + /* function exit code */ + __pyx_L1_error:; + { + #ifdef WITH_THREAD + PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure(); + #endif + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.next_batch", __pyx_clineno, __pyx_lineno, __pyx_filename); + #ifdef WITH_THREAD + __Pyx_PyGILState_Release(__pyx_gilstate_save); + #endif + } + __Pyx_pretend_to_initialize(&__pyx_r); + __pyx_L0:; + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_22next_batch(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_21next_batch[] = "CythonLineSentence.next_batch(self) -> vector[vector[string]]"; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_22next_batch(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("next_batch (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_21next_batch(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_21next_batch(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + std::vector > __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("next_batch", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_next_batch(__pyx_v_self, 1); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 134, __pyx_L1_error) + __pyx_t_2 = __pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 134, __pyx_L1_error) + 
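+/* EDITOR'S NOTE -- illustrative sketch, not part of the generated sources.
+ * next_batch() above greedily accumulates sentence chunks until the running word
+ * count passes max_words_in_batch, then walks back from the end of the batch and
+ * parks the overflow chunks in buf_data, so the next call serves them first and no
+ * input is dropped between calls. A rough Python rendering of the logic quoted from
+ * word2vec_corpusfile.pyx above (lists instead of C++ vectors; helper names as in
+ * the quoted source):
+ *
+ *     def next_batch(self):
+ *         batch = list(self.buf_data)                 # leftovers from last call
+ *         n_words = sum(len(s) for s in batch)
+ *         self.buf_data = []
+ *         while not self.is_eof() and n_words <= self.max_words_in_batch:
+ *             for chunk in self._chunk_sentence(self.read_sentence()):
+ *                 batch.append(chunk)
+ *                 n_words += len(chunk)
+ *         if n_words > self.max_words_in_batch:
+ *             keep = len(batch)
+ *             while keep > 0 and n_words - len(batch[keep - 1]) > self.max_words_in_batch:
+ *                 n_words -= len(batch[keep - 1])
+ *                 keep -= 1
+ *             self.buf_data = batch[keep:]            # overflow, served next call
+ *             batch = batch[:keep]
+ *         return batch
+ */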
__Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.next_batch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pxd":35 + * cdef class CythonLineSentence: + * cdef FastLineSentence* _thisptr + * cdef public bytes source # <<<<<<<<<<<<<< + * cdef public size_t max_sentence_length, max_words_in_batch, offset + * cdef vector[vector[string]] buf_data + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source___get__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source___get__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->source); + __pyx_r = __pyx_v_self->source; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_2__set__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_2__set__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyBytes_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytes", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(2, 35, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->source); + __Pyx_DECREF(__pyx_v_self->source); + __pyx_v_self->source = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + 
__Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.source.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_4__del__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_4__del__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->source); + __Pyx_DECREF(__pyx_v_self->source); + __pyx_v_self->source = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pxd":36 + * cdef FastLineSentence* _thisptr + * cdef public bytes source + * cdef public size_t max_sentence_length, max_words_in_batch, offset # <<<<<<<<<<<<<< + * cdef vector[vector[string]] buf_data + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length___get__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length___get__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->max_sentence_length); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 36, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.max_sentence_length.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int 
__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_2__set__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_2__set__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + size_t __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_size_t(__pyx_v_value); if (unlikely((__pyx_t_1 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 36, __pyx_L1_error) + __pyx_v_self->max_sentence_length = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.max_sentence_length.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch___get__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch___get__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->max_words_in_batch); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 36, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.max_words_in_batch.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_2__set__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_2__set__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + size_t __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_size_t(__pyx_v_value); if (unlikely((__pyx_t_1 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 36, __pyx_L1_error) + __pyx_v_self->max_words_in_batch = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.max_words_in_batch.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset___get__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset___get__(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_FromSize_t(__pyx_v_self->offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 36, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.offset.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_2__set__(((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_2__set__(struct 
__pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + size_t __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_size_t(__pyx_v_value); if (unlikely((__pyx_t_1 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 36, __pyx_L1_error) + __pyx_v_self->offset = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.CythonLineSentence.offset.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":182 + * + * + * cdef void prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * vector[vector[string]] &sentences, int sample, int hs, int window, int *total_words, + * int *effective_words, int *effective_sentences, unsigned long long *next_random, + */ + +static void __pyx_f_6gensim_6models_19word2vec_corpusfile_prepare_c_structures_for_batch(std::vector > &__pyx_v_sentences, int __pyx_v_sample, int __pyx_v_hs, int __pyx_v_window, int *__pyx_v_total_words, int *__pyx_v_effective_words, int *__pyx_v_effective_sentences, unsigned PY_LONG_LONG *__pyx_v_next_random, __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_v_vocab, int *__pyx_v_sentence_idx, __pyx_t_5numpy_uint32_t *__pyx_v_indexes, int *__pyx_v_codelens, __pyx_t_5numpy_uint8_t **__pyx_v_codes, __pyx_t_5numpy_uint32_t **__pyx_v_points, __pyx_t_5numpy_uint32_t *__pyx_v_reduced_windows) { + struct __pyx_t_6gensim_6models_19word2vec_corpusfile_VocabItem __pyx_v_word; + std::string __pyx_v_token; + std::vector __pyx_v_sent; + int __pyx_v_i; + std::vector > ::iterator __pyx_t_1; + std::vector __pyx_t_2; + int __pyx_t_3; + long __pyx_t_4; + std::vector ::iterator __pyx_t_5; + std::string __pyx_t_6; + int __pyx_t_7; + __pyx_t_5numpy_uint32_t __pyx_t_8; + int __pyx_t_9; + __pyx_t_5numpy_uint8_t *__pyx_t_10; + __pyx_t_5numpy_uint32_t *__pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + + /* "gensim/models/word2vec_corpusfile.pyx":191 + * cdef vector[string] sent + * + * sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< + * for sent in sentences: + * if sent.empty(): + */ + (__pyx_v_sentence_idx[0]) = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":192 + * + * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * for sent in sentences: # <<<<<<<<<<<<<< + * if sent.empty(): + * continue # ignore empty sentences; leave effective_sentences unchanged + */ + __pyx_t_1 = __pyx_v_sentences.begin(); + for (;;) { + if (!(__pyx_t_1 != __pyx_v_sentences.end())) break; + __pyx_t_2 = *__pyx_t_1; + ++__pyx_t_1; + __pyx_v_sent = __pyx_t_2; + + /* "gensim/models/word2vec_corpusfile.pyx":193 + * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * for sent in sentences: + * if sent.empty(): # <<<<<<<<<<<<<< + * continue # ignore empty sentences; leave effective_sentences unchanged + * total_words[0] += sent.size() + */ + __pyx_t_3 = (__pyx_v_sent.empty() != 0); + if (__pyx_t_3) { + + /* "gensim/models/word2vec_corpusfile.pyx":194 + * for sent in sentences: + * if sent.empty(): + * continue # ignore empty sentences; leave effective_sentences unchanged # <<<<<<<<<<<<<< + * total_words[0] += sent.size() + * + */ + goto __pyx_L3_continue; + + /* "gensim/models/word2vec_corpusfile.pyx":193 + * sentence_idx[0] = 0 # indices of the first sentence 
always start at 0 + * for sent in sentences: + * if sent.empty(): # <<<<<<<<<<<<<< + * continue # ignore empty sentences; leave effective_sentences unchanged + * total_words[0] += sent.size() + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":195 + * if sent.empty(): + * continue # ignore empty sentences; leave effective_sentences unchanged + * total_words[0] += sent.size() # <<<<<<<<<<<<<< + * + * for token in sent: + */ + __pyx_t_4 = 0; + (__pyx_v_total_words[__pyx_t_4]) = ((__pyx_v_total_words[__pyx_t_4]) + __pyx_v_sent.size()); + + /* "gensim/models/word2vec_corpusfile.pyx":197 + * total_words[0] += sent.size() + * + * for token in sent: # <<<<<<<<<<<<<< + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): + */ + __pyx_t_5 = __pyx_v_sent.begin(); + for (;;) { + if (!(__pyx_t_5 != __pyx_v_sent.end())) break; + __pyx_t_6 = *__pyx_t_5; + ++__pyx_t_5; + __pyx_v_token = __pyx_t_6; + + /* "gensim/models/word2vec_corpusfile.pyx":199 + * for token in sent: + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): # <<<<<<<<<<<<<< + * continue + * + */ + __pyx_t_3 = (((__pyx_v_vocab[0]).find(__pyx_v_token) == (__pyx_v_vocab[0]).end()) != 0); + if (__pyx_t_3) { + + /* "gensim/models/word2vec_corpusfile.pyx":200 + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): + * continue # <<<<<<<<<<<<<< + * + * word = vocab[0][token] + */ + goto __pyx_L6_continue; + + /* "gensim/models/word2vec_corpusfile.pyx":199 + * for token in sent: + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): # <<<<<<<<<<<<<< + * continue + * + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":202 + * continue + * + * word = vocab[0][token] # <<<<<<<<<<<<<< + * if sample and word.sample_int < random_int32(next_random): + * continue + */ + __pyx_v_word = ((__pyx_v_vocab[0])[__pyx_v_token]); + + /* "gensim/models/word2vec_corpusfile.pyx":203 + * + * word = vocab[0][token] + * if sample and word.sample_int < random_int32(next_random): # <<<<<<<<<<<<<< + * continue + * indexes[effective_words[0]] = word.index + */ + __pyx_t_7 = (__pyx_v_sample != 0); + if (__pyx_t_7) { + } else { + __pyx_t_3 = __pyx_t_7; + goto __pyx_L10_bool_binop_done; + } + __pyx_t_7 = ((__pyx_v_word.sample_int < __pyx_f_6gensim_6models_14word2vec_inner_random_int32(__pyx_v_next_random)) != 0); + __pyx_t_3 = __pyx_t_7; + __pyx_L10_bool_binop_done:; + if (__pyx_t_3) { + + /* "gensim/models/word2vec_corpusfile.pyx":204 + * word = vocab[0][token] + * if sample and word.sample_int < random_int32(next_random): + * continue # <<<<<<<<<<<<<< + * indexes[effective_words[0]] = word.index + * if hs: + */ + goto __pyx_L6_continue; + + /* "gensim/models/word2vec_corpusfile.pyx":203 + * + * word = vocab[0][token] + * if sample and word.sample_int < random_int32(next_random): # <<<<<<<<<<<<<< + * continue + * indexes[effective_words[0]] = word.index + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":205 + * if sample and word.sample_int < random_int32(next_random): + * continue + * indexes[effective_words[0]] = word.index # <<<<<<<<<<<<<< + * if hs: + * codelens[effective_words[0]] = word.code_len + */ + __pyx_t_8 = __pyx_v_word.index; + (__pyx_v_indexes[(__pyx_v_effective_words[0])]) = __pyx_t_8; + + /* 
"gensim/models/word2vec_corpusfile.pyx":206 + * continue + * indexes[effective_words[0]] = word.index + * if hs: # <<<<<<<<<<<<<< + * codelens[effective_words[0]] = word.code_len + * codes[effective_words[0]] = word.code + */ + __pyx_t_3 = (__pyx_v_hs != 0); + if (__pyx_t_3) { + + /* "gensim/models/word2vec_corpusfile.pyx":207 + * indexes[effective_words[0]] = word.index + * if hs: + * codelens[effective_words[0]] = word.code_len # <<<<<<<<<<<<<< + * codes[effective_words[0]] = word.code + * points[effective_words[0]] = word.point + */ + __pyx_t_9 = __pyx_v_word.code_len; + (__pyx_v_codelens[(__pyx_v_effective_words[0])]) = __pyx_t_9; + + /* "gensim/models/word2vec_corpusfile.pyx":208 + * if hs: + * codelens[effective_words[0]] = word.code_len + * codes[effective_words[0]] = word.code # <<<<<<<<<<<<<< + * points[effective_words[0]] = word.point + * effective_words[0] += 1 + */ + __pyx_t_10 = __pyx_v_word.code; + (__pyx_v_codes[(__pyx_v_effective_words[0])]) = __pyx_t_10; + + /* "gensim/models/word2vec_corpusfile.pyx":209 + * codelens[effective_words[0]] = word.code_len + * codes[effective_words[0]] = word.code + * points[effective_words[0]] = word.point # <<<<<<<<<<<<<< + * effective_words[0] += 1 + * if effective_words[0] == MAX_SENTENCE_LEN: + */ + __pyx_t_11 = __pyx_v_word.point; + (__pyx_v_points[(__pyx_v_effective_words[0])]) = __pyx_t_11; + + /* "gensim/models/word2vec_corpusfile.pyx":206 + * continue + * indexes[effective_words[0]] = word.index + * if hs: # <<<<<<<<<<<<<< + * codelens[effective_words[0]] = word.code_len + * codes[effective_words[0]] = word.code + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":210 + * codes[effective_words[0]] = word.code + * points[effective_words[0]] = word.point + * effective_words[0] += 1 # <<<<<<<<<<<<<< + * if effective_words[0] == MAX_SENTENCE_LEN: + * break # TODO: log warning, tally overflow? + */ + __pyx_t_4 = 0; + (__pyx_v_effective_words[__pyx_t_4]) = ((__pyx_v_effective_words[__pyx_t_4]) + 1); + + /* "gensim/models/word2vec_corpusfile.pyx":211 + * points[effective_words[0]] = word.point + * effective_words[0] += 1 + * if effective_words[0] == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< + * break # TODO: log warning, tally overflow? + * + */ + __pyx_t_3 = (((__pyx_v_effective_words[0]) == 0x2710) != 0); + if (__pyx_t_3) { + + /* "gensim/models/word2vec_corpusfile.pyx":212 + * effective_words[0] += 1 + * if effective_words[0] == MAX_SENTENCE_LEN: + * break # TODO: log warning, tally overflow? # <<<<<<<<<<<<<< + * + * # keep track of which words go into which sentence, so we don't train + */ + goto __pyx_L7_break; + + /* "gensim/models/word2vec_corpusfile.pyx":211 + * points[effective_words[0]] = word.point + * effective_words[0] += 1 + * if effective_words[0] == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< + * break # TODO: log warning, tally overflow? + * + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":197 + * total_words[0] += sent.size() + * + * for token in sent: # <<<<<<<<<<<<<< + * # leaving `effective_words` unchanged = shortening the sentence = expanding the window + * if vocab[0].find(token) == vocab[0].end(): + */ + __pyx_L6_continue:; + } + __pyx_L7_break:; + + /* "gensim/models/word2vec_corpusfile.pyx":217 + * # across sentence boundaries. 
+ * # indices of sentence number X are between cur_epoch) / num_epochs) + * + */ + +static __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_alpha, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_end_alpha, int __pyx_v_cur_epoch, int __pyx_v_num_epochs) { + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_r; + + /* "gensim/models/word2vec_corpusfile.pyx":229 + * + * cdef REAL_t get_alpha(REAL_t alpha, REAL_t end_alpha, int cur_epoch, int num_epochs) nogil: + * return alpha - ((alpha - end_alpha) * ( cur_epoch) / num_epochs) # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = (__pyx_v_alpha - (((__pyx_v_alpha - __pyx_v_end_alpha) * ((__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t)__pyx_v_cur_epoch)) / __pyx_v_num_epochs)); + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":228 + * + * + * cdef REAL_t get_alpha(REAL_t alpha, REAL_t end_alpha, int cur_epoch, int num_epochs) nogil: # <<<<<<<<<<<<<< + * return alpha - ((alpha - end_alpha) * ( cur_epoch) / num_epochs) + * + */ + + /* function exit code */ + __pyx_L0:; + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":232 + * + * + * cdef REAL_t get_next_alpha( # <<<<<<<<<<<<<< + * REAL_t start_alpha, REAL_t end_alpha, int total_examples, int total_words, + * int expected_examples, int expected_words, int cur_epoch, int num_epochs) nogil: + */ + +static __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_start_alpha, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_end_alpha, int __pyx_v_total_examples, int __pyx_v_total_words, int __pyx_v_expected_examples, int __pyx_v_expected_words, int __pyx_v_cur_epoch, int __pyx_v_num_epochs) { + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_epoch_progress; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_progress; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_next_alpha; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_r; + int __pyx_t_1; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_2; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_3; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_4; + + /* "gensim/models/word2vec_corpusfile.pyx":237 + * cdef REAL_t epoch_progress + * + * if expected_examples != -1: # <<<<<<<<<<<<<< + * # examples-based decay + * epoch_progress = ( total_examples) / expected_examples + */ + __pyx_t_1 = ((__pyx_v_expected_examples != -1L) != 0); + if (__pyx_t_1) { + + /* "gensim/models/word2vec_corpusfile.pyx":239 + * if expected_examples != -1: + * # examples-based decay + * epoch_progress = ( total_examples) / expected_examples # <<<<<<<<<<<<<< + * else: + * # word-based decay + */ + __pyx_v_epoch_progress = (((__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t)__pyx_v_total_examples) / __pyx_v_expected_examples); + + /* "gensim/models/word2vec_corpusfile.pyx":237 + * cdef REAL_t epoch_progress + * + * if expected_examples != -1: # <<<<<<<<<<<<<< + * # examples-based decay + * epoch_progress = ( total_examples) / expected_examples + */ + goto __pyx_L3; + } + + /* "gensim/models/word2vec_corpusfile.pyx":242 + * else: + * # word-based decay + * epoch_progress = ( total_words) / expected_words # <<<<<<<<<<<<<< + * + * cdef REAL_t progress = (cur_epoch + epoch_progress) / num_epochs + 
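+ *
+ * [EDITOR'S NOTE -- illustrative sketch, not part of the generated sources or of
+ * word2vec_corpusfile.pyx. get_alpha() gives the learning rate at the start of an
+ * epoch (linear decay across epochs), while get_next_alpha() above interpolates
+ * within the current epoch -- using example counts when expected_examples is known,
+ * word counts otherwise -- and clamps the result so it never drops below end_alpha.
+ * A plain-Python rendering of the two formulas:
+ *
+ *     def get_alpha(alpha, end_alpha, cur_epoch, num_epochs):
+ *         return alpha - (alpha - end_alpha) * cur_epoch / num_epochs
+ *
+ *     def get_next_alpha(start_alpha, end_alpha, examples, words,
+ *                        expected_examples, expected_words, cur_epoch, num_epochs):
+ *         if expected_examples != -1:
+ *             epoch_progress = examples / expected_examples
+ *         else:
+ *             epoch_progress = words / expected_words
+ *         progress = (cur_epoch + epoch_progress) / num_epochs
+ *         return max(end_alpha, start_alpha - (start_alpha - end_alpha) * progress)
+ *
+ * For example, with alpha=0.025, min_alpha=0.0001 and 5 epochs, halfway through
+ * epoch 2 the progress is (2 + 0.5) / 5 = 0.5, giving a learning rate of about
+ * 0.025 - 0.0249 * 0.5 = 0.01255. END EDITOR'S NOTE]
+ *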
*/ + /*else*/ { + __pyx_v_epoch_progress = (((__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t)__pyx_v_total_words) / __pyx_v_expected_words); + } + __pyx_L3:; + + /* "gensim/models/word2vec_corpusfile.pyx":244 + * epoch_progress = ( total_words) / expected_words + * + * cdef REAL_t progress = (cur_epoch + epoch_progress) / num_epochs # <<<<<<<<<<<<<< + * cdef REAL_t next_alpha = start_alpha - (start_alpha - end_alpha) * progress + * return max(end_alpha, next_alpha) + */ + __pyx_v_progress = ((__pyx_v_cur_epoch + __pyx_v_epoch_progress) / __pyx_v_num_epochs); + + /* "gensim/models/word2vec_corpusfile.pyx":245 + * + * cdef REAL_t progress = (cur_epoch + epoch_progress) / num_epochs + * cdef REAL_t next_alpha = start_alpha - (start_alpha - end_alpha) * progress # <<<<<<<<<<<<<< + * return max(end_alpha, next_alpha) + * + */ + __pyx_v_next_alpha = (__pyx_v_start_alpha - ((__pyx_v_start_alpha - __pyx_v_end_alpha) * __pyx_v_progress)); + + /* "gensim/models/word2vec_corpusfile.pyx":246 + * cdef REAL_t progress = (cur_epoch + epoch_progress) / num_epochs + * cdef REAL_t next_alpha = start_alpha - (start_alpha - end_alpha) * progress + * return max(end_alpha, next_alpha) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_2 = __pyx_v_next_alpha; + __pyx_t_3 = __pyx_v_end_alpha; + if (((__pyx_t_2 > __pyx_t_3) != 0)) { + __pyx_t_4 = __pyx_t_2; + } else { + __pyx_t_4 = __pyx_t_3; + } + __pyx_r = __pyx_t_4; + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":232 + * + * + * cdef REAL_t get_next_alpha( # <<<<<<<<<<<<<< + * REAL_t start_alpha, REAL_t end_alpha, int total_examples, int total_words, + * int expected_examples, int expected_words, int cur_epoch, int num_epochs) nogil: + */ + + /* function exit code */ + __pyx_L0:; + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":249 + * + * + * def train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1, compute_loss): + * """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_3train_epoch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_2train_epoch_sg[] = "train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, _neu1, compute_loss)\nTrain Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode.\n\n Called internally from :meth:`~gensim.models.word2vec.Word2Vec.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.word2vec.Word2Vec`\n The Word2Vec model instance to train.\n input_stream : iterable of list of str\n The corpus used to train the model.\n _cur_epoch : int\n Current epoch number. 
Used for calculating and decaying learning rate.\n _work : np.ndarray\n Private working memory for each worker.\n _neu1 : np.ndarray\n Private working memory for each worker.\n compute_loss : bool\n Whether or not the training loss should be computed in this batch.\n\n Returns\n -------\n int\n Number of words in the vocabulary actually used for training (They already existed in the vocabulary\n and were not discarded by negative sampling).\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_19word2vec_corpusfile_3train_epoch_sg = {"train_epoch_sg", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_3train_epoch_sg, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_2train_epoch_sg}; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_3train_epoch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_corpus_file = 0; + PyObject *__pyx_v_offset = 0; + PyObject *__pyx_v__cython_vocab = 0; + PyObject *__pyx_v__cur_epoch = 0; + PyObject *__pyx_v__expected_examples = 0; + PyObject *__pyx_v__expected_words = 0; + PyObject *__pyx_v__work = 0; + CYTHON_UNUSED PyObject *__pyx_v__neu1 = 0; + PyObject *__pyx_v_compute_loss = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("train_epoch_sg (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_corpus_file,&__pyx_n_s_offset,&__pyx_n_s_cython_vocab,&__pyx_n_s_cur_epoch,&__pyx_n_s_expected_examples,&__pyx_n_s_expected_words,&__pyx_n_s_work,&__pyx_n_s_neu1,&__pyx_n_s_compute_loss,0}; + PyObject* values[10] = {0,0,0,0,0,0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_corpus_file)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 1); __PYX_ERR(1, 249, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 2); __PYX_ERR(1, 249, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cython_vocab)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 3); __PYX_ERR(1, 
249, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cur_epoch)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 4); __PYX_ERR(1, 249, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_examples)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 5); __PYX_ERR(1, 249, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_words)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 6); __PYX_ERR(1, 249, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 7); __PYX_ERR(1, 249, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 8); __PYX_ERR(1, 249, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 9: + if (likely((values[9] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compute_loss)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, 9); __PYX_ERR(1, 249, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_epoch_sg") < 0)) __PYX_ERR(1, 249, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 10) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + } + __pyx_v_model = values[0]; + __pyx_v_corpus_file = values[1]; + __pyx_v_offset = values[2]; + __pyx_v__cython_vocab = values[3]; + __pyx_v__cur_epoch = values[4]; + __pyx_v__expected_examples = values[5]; + __pyx_v__expected_words = values[6]; + __pyx_v__work = values[7]; + __pyx_v__neu1 = values[8]; + __pyx_v_compute_loss = values[9]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("train_epoch_sg", 1, 10, 10, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 249, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.train_epoch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_2train_epoch_sg(__pyx_self, __pyx_v_model, __pyx_v_corpus_file, __pyx_v_offset, __pyx_v__cython_vocab, __pyx_v__cur_epoch, __pyx_v__expected_examples, __pyx_v__expected_words, __pyx_v__work, __pyx_v__neu1, __pyx_v_compute_loss); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_2train_epoch_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, 
PyObject *__pyx_v_offset, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v__work, CYTHON_UNUSED PyObject *__pyx_v__neu1, PyObject *__pyx_v_compute_loss) { + struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig __pyx_v_c; + int __pyx_v_cur_epoch; + int __pyx_v_num_epochs; + int __pyx_v_expected_examples; + int __pyx_v_expected_words; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_start_alpha; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_end_alpha; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v__alpha; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_input_stream = 0; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_vocab = 0; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_effective_words; + int __pyx_v_effective_sentences; + int __pyx_v_total_effective_words; + int __pyx_v_total_sentences; + int __pyx_v_total_words; + int __pyx_v_sent_idx; + int __pyx_v_idx_start; + int __pyx_v_idx_end; + std::vector > __pyx_v_sentences; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + std::vector > __pyx_t_8; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_t_9; + int __pyx_t_10; + int __pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + int __pyx_t_15; + int __pyx_t_16; + PyObject *__pyx_t_17 = NULL; + PyObject *__pyx_t_18 = NULL; + __Pyx_RefNannySetupContext("train_epoch_sg", 0); + + /* "gensim/models/word2vec_corpusfile.pyx":279 + * + * # For learning rate updates + * cdef int cur_epoch = _cur_epoch # <<<<<<<<<<<<<< + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v__cur_epoch); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 279, __pyx_L1_error) + __pyx_v_cur_epoch = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":280 + * # For learning rate updates + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs # <<<<<<<<<<<<<< + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_epochs); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 280, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 280, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_num_epochs = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":281 + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) # <<<<<<<<<<<<<< + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + */ + __pyx_t_3 = (__pyx_v__expected_examples == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_examples); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) 
__PYX_ERR(1, 281, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_examples = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":282 + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) # <<<<<<<<<<<<<< + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + */ + __pyx_t_3 = (__pyx_v__expected_words == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_words); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 282, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_words = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":283 + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha # <<<<<<<<<<<<<< + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 283, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(1, 283, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_start_alpha = __pyx_t_5; + + /* "gensim/models/word2vec_corpusfile.pyx":284 + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha # <<<<<<<<<<<<<< + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_min_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 284, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(1, 284, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_end_alpha = __pyx_t_5; + + /* "gensim/models/word2vec_corpusfile.pyx":285 + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) # <<<<<<<<<<<<<< + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 285, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(1, 285, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v__alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_5, __pyx_v_end_alpha, __pyx_v_cur_epoch, __pyx_v_num_epochs); + + /* "gensim/models/word2vec_corpusfile.pyx":287 + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) # <<<<<<<<<<<<<< + * cdef CythonVocab vocab = _cython_vocab + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 287, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + 
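+  /* Reading aid for the learning-rate handling above (a sketch, not part of the generated output):
+   * get_alpha() yields the starting rate for this epoch and get_next_alpha() later interpolates it
+   * towards model.min_alpha as the epoch progresses.  Assuming the usual linear word2vec schedule,
+   * this amounts to roughly
+   *
+   *     _alpha = start_alpha - (start_alpha - end_alpha) * cur_epoch / num_epochs
+   *
+   * with the in-epoch refinement driven by the ratio of processed to expected sentences or words;
+   * the exact definitions live in gensim/models/word2vec_corpusfile.pyx. */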
__Pyx_INCREF(__pyx_v_corpus_file); + __Pyx_GIVEREF(__pyx_v_corpus_file); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_corpus_file); + __Pyx_INCREF(__pyx_v_offset); + __Pyx_GIVEREF(__pyx_v_offset); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_offset); + __pyx_t_6 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), __pyx_t_2, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 287, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_input_stream = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_t_6); + __pyx_t_6 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":288 + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + * cdef CythonVocab vocab = _cython_vocab # <<<<<<<<<<<<<< + * + * cdef int i, j, k + */ + if (!(likely(((__pyx_v__cython_vocab) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__cython_vocab, __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab))))) __PYX_ERR(1, 288, __pyx_L1_error) + __pyx_t_6 = __pyx_v__cython_vocab; + __Pyx_INCREF(__pyx_t_6); + __pyx_v_vocab = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_t_6); + __pyx_t_6 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":291 + * + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< + * cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 + * cdef int sent_idx, idx_start, idx_end + */ + __pyx_v_effective_words = 0; + __pyx_v_effective_sentences = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":292 + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 + * cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * + */ + __pyx_v_total_effective_words = 0; + __pyx_v_total_sentences = 0; + __pyx_v_total_words = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":295 + * cdef int sent_idx, idx_start, idx_end + * + * init_w2v_config(&c, model, _alpha, compute_loss, _work) # <<<<<<<<<<<<<< + * + * cdef vector[vector[string]] sentences + */ + __pyx_t_6 = PyFloat_FromDouble(__pyx_v__alpha); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 295, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_2 = __pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config((&__pyx_v_c), __pyx_v_model, __pyx_t_6, __pyx_v_compute_loss, __pyx_v__work, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 295, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":299 + * cdef vector[vector[string]] sentences + * + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + { + #ifdef WITH_THREAD + PyThreadState *_save; + Py_UNBLOCK_THREADS + __Pyx_FastGIL_Remember(); + #endif + /*try:*/ { + + /* "gensim/models/word2vec_corpusfile.pyx":300 + * + * with nogil: + * input_stream.reset() # <<<<<<<<<<<<<< + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 + */ + __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_reset(__pyx_v_input_stream, 0); + + /* "gensim/models/word2vec_corpusfile.pyx":301 + * with nogil: + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): # <<<<<<<<<<<<<< + * 
effective_sentences = 0 + * effective_words = 0 + */ + while (1) { + __pyx_t_7 = (__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof(__pyx_v_input_stream, 0) != 0); + if (!__pyx_t_7) { + } else { + __pyx_t_3 = __pyx_t_7; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_7 = ((__pyx_v_total_words > (__pyx_v_expected_words / __pyx_v_c.workers)) != 0); + __pyx_t_3 = __pyx_t_7; + __pyx_L8_bool_binop_done:; + __pyx_t_7 = ((!__pyx_t_3) != 0); + if (!__pyx_t_7) break; + + /* "gensim/models/word2vec_corpusfile.pyx":302 + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 # <<<<<<<<<<<<<< + * effective_words = 0 + * + */ + __pyx_v_effective_sentences = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":303 + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 + * effective_words = 0 # <<<<<<<<<<<<<< + * + * sentences = input_stream.next_batch() + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":305 + * effective_words = 0 + * + * sentences = input_stream.next_batch() # <<<<<<<<<<<<<< + * + * prepare_c_structures_for_batch( + */ + __pyx_t_8 = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_next_batch(__pyx_v_input_stream, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(1, 305, __pyx_L4_error) + __pyx_v_sentences = __pyx_t_8; + + /* "gensim/models/word2vec_corpusfile.pyx":309 + * prepare_c_structures_for_batch( + * sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences, + * &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, # <<<<<<<<<<<<<< + * c.codelens, c.codes, c.points, c.reduced_windows) + * + */ + __pyx_t_9 = __pyx_f_6gensim_6models_19word2vec_corpusfile_11CythonVocab_get_vocab_ptr(__pyx_v_vocab); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(1, 309, __pyx_L4_error) + + /* "gensim/models/word2vec_corpusfile.pyx":307 + * sentences = input_stream.next_batch() + * + * prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences, + * &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, + */ + __pyx_f_6gensim_6models_19word2vec_corpusfile_prepare_c_structures_for_batch(__pyx_v_sentences, __pyx_v_c.sample, __pyx_v_c.hs, __pyx_v_c.window, (&__pyx_v_total_words), (&__pyx_v_effective_words), (&__pyx_v_effective_sentences), (&__pyx_v_c.next_random), __pyx_t_9, __pyx_v_c.sentence_idx, __pyx_v_c.indexes, __pyx_v_c.codelens, __pyx_v_c.codes, __pyx_v_c.points, __pyx_v_c.reduced_windows); + + /* "gensim/models/word2vec_corpusfile.pyx":312 + * c.codelens, c.codes, c.points, c.reduced_windows) + * + * for sent_idx in range(effective_sentences): # <<<<<<<<<<<<<< + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] + */ + __pyx_t_1 = __pyx_v_effective_sentences; + __pyx_t_4 = __pyx_t_1; + for (__pyx_t_10 = 0; __pyx_t_10 < __pyx_t_4; __pyx_t_10+=1) { + __pyx_v_sent_idx = __pyx_t_10; + + /* "gensim/models/word2vec_corpusfile.pyx":313 + * + * for sent_idx in range(effective_sentences): + * idx_start = c.sentence_idx[sent_idx] # <<<<<<<<<<<<<< + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): + */ + __pyx_v_idx_start = (__pyx_v_c.sentence_idx[__pyx_v_sent_idx]); + + /* "gensim/models/word2vec_corpusfile.pyx":314 + * for sent_idx in range(effective_sentences): + * idx_start = 
c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + */ + __pyx_v_idx_end = (__pyx_v_c.sentence_idx[(__pyx_v_sent_idx + 1)]); + + /* "gensim/models/word2vec_corpusfile.pyx":315 + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: + */ + __pyx_t_11 = __pyx_v_idx_end; + __pyx_t_12 = __pyx_t_11; + for (__pyx_t_13 = __pyx_v_idx_start; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "gensim/models/word2vec_corpusfile.pyx":316 + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< + * if j < idx_start: + * j = idx_start + */ + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/word2vec_corpusfile.pyx":317 + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: # <<<<<<<<<<<<<< + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + */ + __pyx_t_7 = ((__pyx_v_j < __pyx_v_idx_start) != 0); + if (__pyx_t_7) { + + /* "gensim/models/word2vec_corpusfile.pyx":318 + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: + * j = idx_start # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: + */ + __pyx_v_j = __pyx_v_idx_start; + + /* "gensim/models/word2vec_corpusfile.pyx":317 + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: # <<<<<<<<<<<<<< + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":319 + * if j < idx_start: + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< + * if k > idx_end: + * k = idx_end + */ + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/word2vec_corpusfile.pyx":320 + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: # <<<<<<<<<<<<<< + * k = idx_end + * for j in range(j, k): + */ + __pyx_t_7 = ((__pyx_v_k > __pyx_v_idx_end) != 0); + if (__pyx_t_7) { + + /* "gensim/models/word2vec_corpusfile.pyx":321 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: + * k = idx_end # <<<<<<<<<<<<<< + * for j in range(j, k): + * if j == i: + */ + __pyx_v_k = __pyx_v_idx_end; + + /* "gensim/models/word2vec_corpusfile.pyx":320 + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: # <<<<<<<<<<<<<< + * k = idx_end + * for j in range(j, k): + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":322 + * if k > idx_end: + * k = idx_end + * for j in range(j, k): # <<<<<<<<<<<<<< + * if j == i: + * continue + */ + __pyx_t_14 = __pyx_v_k; + __pyx_t_15 = __pyx_t_14; + for (__pyx_t_16 = __pyx_v_j; __pyx_t_16 < __pyx_t_15; __pyx_t_16+=1) { + __pyx_v_j = __pyx_t_16; + + /* "gensim/models/word2vec_corpusfile.pyx":323 + * k = idx_end + * for j in range(j, k): + * if j == i: # <<<<<<<<<<<<<< + * continue + * if c.hs: + */ + __pyx_t_7 = ((__pyx_v_j == __pyx_v_i) != 0); + if (__pyx_t_7) { + + /* "gensim/models/word2vec_corpusfile.pyx":324 + * for j in range(j, k): + * if j == i: + * continue # <<<<<<<<<<<<<< + * if c.hs: + * w2v_fast_sentence_sg_hs( + */ + goto 
__pyx_L16_continue; + + /* "gensim/models/word2vec_corpusfile.pyx":323 + * k = idx_end + * for j in range(j, k): + * if j == i: # <<<<<<<<<<<<<< + * continue + * if c.hs: + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":325 + * if j == i: + * continue + * if c.hs: # <<<<<<<<<<<<<< + * w2v_fast_sentence_sg_hs( + * c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], + */ + __pyx_t_7 = (__pyx_v_c.hs != 0); + if (__pyx_t_7) { + + /* "gensim/models/word2vec_corpusfile.pyx":326 + * continue + * if c.hs: + * w2v_fast_sentence_sg_hs( # <<<<<<<<<<<<<< + * c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], + * c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + */ + __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.syn0, __pyx_v_c.syn1, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.word_locks, __pyx_v_c.compute_loss, (&__pyx_v_c.running_training_loss)); + + /* "gensim/models/word2vec_corpusfile.pyx":325 + * if j == i: + * continue + * if c.hs: # <<<<<<<<<<<<<< + * w2v_fast_sentence_sg_hs( + * c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":329 + * c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], + * c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = w2v_fast_sentence_sg_neg( + * c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, + */ + __pyx_t_7 = (__pyx_v_c.negative != 0); + if (__pyx_t_7) { + + /* "gensim/models/word2vec_corpusfile.pyx":330 + * c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: + * c.next_random = w2v_fast_sentence_sg_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, + * c.indexes[i], c.indexes[j], c.alpha, c.work, c.next_random, c.word_locks, + */ + __pyx_v_c.next_random = __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.syn0, __pyx_v_c.syn1neg, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.next_random, __pyx_v_c.word_locks, __pyx_v_c.compute_loss, (&__pyx_v_c.running_training_loss)); + + /* "gensim/models/word2vec_corpusfile.pyx":329 + * c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], + * c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = w2v_fast_sentence_sg_neg( + * c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, + */ + } + __pyx_L16_continue:; + } + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":335 + * c.compute_loss, &c.running_training_loss) + * + * total_sentences += sentences.size() # <<<<<<<<<<<<<< + * total_effective_words += effective_words + * + */ + __pyx_v_total_sentences = (__pyx_v_total_sentences + __pyx_v_sentences.size()); + + /* "gensim/models/word2vec_corpusfile.pyx":336 + * + * total_sentences += sentences.size() + * total_effective_words += effective_words # <<<<<<<<<<<<<< + * + * c.alpha = get_next_alpha( + */ + __pyx_v_total_effective_words = (__pyx_v_total_effective_words + __pyx_v_effective_words); + + /* 
"gensim/models/word2vec_corpusfile.pyx":338 + * total_effective_words += effective_words + * + * c.alpha = get_next_alpha( # <<<<<<<<<<<<<< + * start_alpha, end_alpha, total_sentences, total_words, + * expected_examples, expected_words, cur_epoch, num_epochs) + */ + __pyx_v_c.alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_v_start_alpha, __pyx_v_end_alpha, __pyx_v_total_sentences, __pyx_v_total_words, __pyx_v_expected_examples, __pyx_v_expected_words, __pyx_v_cur_epoch, __pyx_v_num_epochs); + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":299 + * cdef vector[vector[string]] sentences + * + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + /*finally:*/ { + /*normal exit:*/{ + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L5; + } + __pyx_L4_error: { + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L1_error; + } + __pyx_L5:; + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":342 + * expected_examples, expected_words, cur_epoch, num_epochs) + * + * model.running_training_loss = c.running_training_loss # <<<<<<<<<<<<<< + * return total_sentences, total_effective_words, total_words + * + */ + __pyx_t_2 = PyFloat_FromDouble(__pyx_v_c.running_training_loss); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 342, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss, __pyx_t_2) < 0) __PYX_ERR(1, 342, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":343 + * + * model.running_training_loss = c.running_training_loss + * return total_sentences, total_effective_words, total_words # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_total_sentences); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 343, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_total_effective_words); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 343, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_17 = __Pyx_PyInt_From_int(__pyx_v_total_words); if (unlikely(!__pyx_t_17)) __PYX_ERR(1, 343, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __pyx_t_18 = PyTuple_New(3); if (unlikely(!__pyx_t_18)) __PYX_ERR(1, 343, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_18); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_18, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_18, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_17); + PyTuple_SET_ITEM(__pyx_t_18, 2, __pyx_t_17); + __pyx_t_2 = 0; + __pyx_t_6 = 0; + __pyx_t_17 = 0; + __pyx_r = __pyx_t_18; + __pyx_t_18 = 0; + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":249 + * + * + * def train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1, compute_loss): + * """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode. 
+ */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_17); + __Pyx_XDECREF(__pyx_t_18); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.train_epoch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_input_stream); + __Pyx_XDECREF((PyObject *)__pyx_v_vocab); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "gensim/models/word2vec_corpusfile.pyx":346 + * + * + * def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1, compute_loss): + * """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_5train_epoch_cbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_19word2vec_corpusfile_4train_epoch_cbow[] = "train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, _neu1, compute_loss)\nTrain CBOW model for one epoch by training on an input stream. This function is used only in multistream mode.\n\n Called internally from :meth:`~gensim.models.word2vec.Word2Vec.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.word2vec.Word2Vec`\n The Word2Vec model instance to train.\n input_stream : iterable of list of str\n The corpus used to train the model.\n _cur_epoch : int\n Current epoch number. Used for calculating and decaying learning rate.\n _work : np.ndarray\n Private working memory for each worker.\n _neu1 : np.ndarray\n Private working memory for each worker.\n compute_loss : bool\n Whether or not the training loss should be computed in this batch.\n\n Returns\n -------\n int\n Number of words in the vocabulary actually used for training (They already existed in the vocabulary\n and were not discarded by negative sampling).\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_19word2vec_corpusfile_5train_epoch_cbow = {"train_epoch_cbow", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_5train_epoch_cbow, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_4train_epoch_cbow}; +static PyObject *__pyx_pw_6gensim_6models_19word2vec_corpusfile_5train_epoch_cbow(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_corpus_file = 0; + PyObject *__pyx_v_offset = 0; + PyObject *__pyx_v__cython_vocab = 0; + PyObject *__pyx_v__cur_epoch = 0; + PyObject *__pyx_v__expected_examples = 0; + PyObject *__pyx_v__expected_words = 0; + PyObject *__pyx_v__work = 0; + PyObject *__pyx_v__neu1 = 0; + PyObject *__pyx_v_compute_loss = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("train_epoch_cbow (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_corpus_file,&__pyx_n_s_offset,&__pyx_n_s_cython_vocab,&__pyx_n_s_cur_epoch,&__pyx_n_s_expected_examples,&__pyx_n_s_expected_words,&__pyx_n_s_work,&__pyx_n_s_neu1,&__pyx_n_s_compute_loss,0}; + PyObject* values[10] = {0,0,0,0,0,0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + 
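+  /* Reading aid for train_epoch_cbow (an annotation, with details inferred from the surrounding
+   * generated code): as with train_epoch_sg above, the corpus is read directly from `corpus_file`
+   * starting at byte `offset` (wrapped in a CythonLineSentence) rather than from a Python iterable,
+   * and the function returns a (total_sentences, total_effective_words, total_words) tuple.  Per its
+   * docstring it is called internally from :meth:`~gensim.models.word2vec.Word2Vec.train`,
+   * presumably only on the corpus_file training path. */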
CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_corpus_file)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 1); __PYX_ERR(1, 346, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 2); __PYX_ERR(1, 346, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cython_vocab)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 3); __PYX_ERR(1, 346, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_cur_epoch)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 4); __PYX_ERR(1, 346, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_examples)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 5); __PYX_ERR(1, 346, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_expected_words)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 6); __PYX_ERR(1, 346, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 7); __PYX_ERR(1, 346, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 8); __PYX_ERR(1, 346, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 9: + if (likely((values[9] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compute_loss)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, 9); __PYX_ERR(1, 346, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_epoch_cbow") < 0)) __PYX_ERR(1, 346, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 10) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = 
PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + } + __pyx_v_model = values[0]; + __pyx_v_corpus_file = values[1]; + __pyx_v_offset = values[2]; + __pyx_v__cython_vocab = values[3]; + __pyx_v__cur_epoch = values[4]; + __pyx_v__expected_examples = values[5]; + __pyx_v__expected_words = values[6]; + __pyx_v__work = values[7]; + __pyx_v__neu1 = values[8]; + __pyx_v_compute_loss = values[9]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("train_epoch_cbow", 1, 10, 10, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 346, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.train_epoch_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_19word2vec_corpusfile_4train_epoch_cbow(__pyx_self, __pyx_v_model, __pyx_v_corpus_file, __pyx_v_offset, __pyx_v__cython_vocab, __pyx_v__cur_epoch, __pyx_v__expected_examples, __pyx_v__expected_words, __pyx_v__work, __pyx_v__neu1, __pyx_v_compute_loss); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_4train_epoch_cbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_corpus_file, PyObject *__pyx_v_offset, PyObject *__pyx_v__cython_vocab, PyObject *__pyx_v__cur_epoch, PyObject *__pyx_v__expected_examples, PyObject *__pyx_v__expected_words, PyObject *__pyx_v__work, PyObject *__pyx_v__neu1, PyObject *__pyx_v_compute_loss) { + struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig __pyx_v_c; + int __pyx_v_cur_epoch; + int __pyx_v_num_epochs; + int __pyx_v_expected_examples; + int __pyx_v_expected_words; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_start_alpha; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v_end_alpha; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_v__alpha; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *__pyx_v_input_stream = 0; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *__pyx_v_vocab = 0; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_effective_words; + int __pyx_v_effective_sentences; + int __pyx_v_total_effective_words; + int __pyx_v_total_sentences; + int __pyx_v_total_words; + int __pyx_v_sent_idx; + int __pyx_v_idx_start; + int __pyx_v_idx_end; + std::vector > __pyx_v_sentences; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config __pyx_t_7; + int __pyx_t_8; + std::vector > __pyx_t_9; + __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *__pyx_t_10; + int __pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + __Pyx_RefNannySetupContext("train_epoch_cbow", 0); + + /* 
"gensim/models/word2vec_corpusfile.pyx":376 + * + * # For learning rate updates + * cdef int cur_epoch = _cur_epoch # <<<<<<<<<<<<<< + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + */ + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v__cur_epoch); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 376, __pyx_L1_error) + __pyx_v_cur_epoch = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":377 + * # For learning rate updates + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs # <<<<<<<<<<<<<< + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_epochs); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 377, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 377, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_num_epochs = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":378 + * cdef int cur_epoch = _cur_epoch + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) # <<<<<<<<<<<<<< + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + */ + __pyx_t_3 = (__pyx_v__expected_examples == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_examples); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 378, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_examples = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":379 + * cdef int num_epochs = model.epochs + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) # <<<<<<<<<<<<<< + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + */ + __pyx_t_3 = (__pyx_v__expected_words == Py_None); + if ((__pyx_t_3 != 0)) { + __pyx_t_1 = -1; + } else { + __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v__expected_words); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 379, __pyx_L1_error) + __pyx_t_1 = __pyx_t_4; + } + __pyx_v_expected_words = __pyx_t_1; + + /* "gensim/models/word2vec_corpusfile.pyx":380 + * cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha # <<<<<<<<<<<<<< + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 380, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(1, 380, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_start_alpha = __pyx_t_5; + + /* "gensim/models/word2vec_corpusfile.pyx":381 + * cdef int expected_words = (-1 if _expected_words is None else _expected_words) + * cdef REAL_t start_alpha = model.alpha + 
* cdef REAL_t end_alpha = model.min_alpha # <<<<<<<<<<<<<< + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_min_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 381, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(1, 381, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_end_alpha = __pyx_t_5; + + /* "gensim/models/word2vec_corpusfile.pyx":382 + * cdef REAL_t start_alpha = model.alpha + * cdef REAL_t end_alpha = model.min_alpha + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) # <<<<<<<<<<<<<< + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_alpha); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 382, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(1, 382, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v__alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha(__pyx_t_5, __pyx_v_end_alpha, __pyx_v_cur_epoch, __pyx_v_num_epochs); + + /* "gensim/models/word2vec_corpusfile.pyx":384 + * cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) # <<<<<<<<<<<<<< + * cdef CythonVocab vocab = _cython_vocab + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 384, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_corpus_file); + __Pyx_GIVEREF(__pyx_v_corpus_file); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_corpus_file); + __Pyx_INCREF(__pyx_v_offset); + __Pyx_GIVEREF(__pyx_v_offset); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_offset); + __pyx_t_6 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), __pyx_t_2, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 384, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_input_stream = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)__pyx_t_6); + __pyx_t_6 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":385 + * + * cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + * cdef CythonVocab vocab = _cython_vocab # <<<<<<<<<<<<<< + * + * cdef int i, j, k + */ + if (!(likely(((__pyx_v__cython_vocab) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__cython_vocab, __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab))))) __PYX_ERR(1, 385, __pyx_L1_error) + __pyx_t_6 = __pyx_v__cython_vocab; + __Pyx_INCREF(__pyx_t_6); + __pyx_v_vocab = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)__pyx_t_6); + __pyx_t_6 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":388 + * + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< + * cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 + * cdef int sent_idx, idx_start, idx_end + */ + __pyx_v_effective_words = 0; + __pyx_v_effective_sentences = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":389 + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 + * cdef int total_effective_words = 0, total_sentences = 0, 
total_words = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * + */ + __pyx_v_total_effective_words = 0; + __pyx_v_total_sentences = 0; + __pyx_v_total_words = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":392 + * cdef int sent_idx, idx_start, idx_end + * + * init_w2v_config(&c, model, _alpha, compute_loss, _work, _neu1) # <<<<<<<<<<<<<< + * + * cdef vector[vector[string]] sentences + */ + __pyx_t_6 = PyFloat_FromDouble(__pyx_v__alpha); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7.__pyx_n = 1; + __pyx_t_7._neu1 = __pyx_v__neu1; + __pyx_t_2 = __pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config((&__pyx_v_c), __pyx_v_model, __pyx_t_6, __pyx_v_compute_loss, __pyx_v__work, &__pyx_t_7); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 392, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":396 + * cdef vector[vector[string]] sentences + * + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + { + #ifdef WITH_THREAD + PyThreadState *_save; + Py_UNBLOCK_THREADS + __Pyx_FastGIL_Remember(); + #endif + /*try:*/ { + + /* "gensim/models/word2vec_corpusfile.pyx":397 + * + * with nogil: + * input_stream.reset() # <<<<<<<<<<<<<< + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 + */ + __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_reset(__pyx_v_input_stream, 0); + + /* "gensim/models/word2vec_corpusfile.pyx":398 + * with nogil: + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): # <<<<<<<<<<<<<< + * effective_sentences = 0 + * effective_words = 0 + */ + while (1) { + __pyx_t_8 = (__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof(__pyx_v_input_stream, 0) != 0); + if (!__pyx_t_8) { + } else { + __pyx_t_3 = __pyx_t_8; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_8 = ((__pyx_v_total_words > (__pyx_v_expected_words / __pyx_v_c.workers)) != 0); + __pyx_t_3 = __pyx_t_8; + __pyx_L8_bool_binop_done:; + __pyx_t_8 = ((!__pyx_t_3) != 0); + if (!__pyx_t_8) break; + + /* "gensim/models/word2vec_corpusfile.pyx":399 + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 # <<<<<<<<<<<<<< + * effective_words = 0 + * + */ + __pyx_v_effective_sentences = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":400 + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + * effective_sentences = 0 + * effective_words = 0 # <<<<<<<<<<<<<< + * + * sentences = input_stream.next_batch() + */ + __pyx_v_effective_words = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":402 + * effective_words = 0 + * + * sentences = input_stream.next_batch() # <<<<<<<<<<<<<< + * + * prepare_c_structures_for_batch( + */ + __pyx_t_9 = __pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_next_batch(__pyx_v_input_stream, 0); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(1, 402, __pyx_L4_error) + __pyx_v_sentences = __pyx_t_9; + + /* "gensim/models/word2vec_corpusfile.pyx":406 + * prepare_c_structures_for_batch( + * sentences, c.sample, c.hs, c.window, &total_words, &effective_words, + * &effective_sentences, &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, # <<<<<<<<<<<<<< + * c.indexes, 
c.codelens, c.codes, c.points, c.reduced_windows) + * + */ + __pyx_t_10 = __pyx_f_6gensim_6models_19word2vec_corpusfile_11CythonVocab_get_vocab_ptr(__pyx_v_vocab); if (unlikely(__Pyx_ErrOccurredWithGIL())) __PYX_ERR(1, 406, __pyx_L4_error) + + /* "gensim/models/word2vec_corpusfile.pyx":404 + * sentences = input_stream.next_batch() + * + * prepare_c_structures_for_batch( # <<<<<<<<<<<<<< + * sentences, c.sample, c.hs, c.window, &total_words, &effective_words, + * &effective_sentences, &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, + */ + __pyx_f_6gensim_6models_19word2vec_corpusfile_prepare_c_structures_for_batch(__pyx_v_sentences, __pyx_v_c.sample, __pyx_v_c.hs, __pyx_v_c.window, (&__pyx_v_total_words), (&__pyx_v_effective_words), (&__pyx_v_effective_sentences), (&__pyx_v_c.next_random), __pyx_t_10, __pyx_v_c.sentence_idx, __pyx_v_c.indexes, __pyx_v_c.codelens, __pyx_v_c.codes, __pyx_v_c.points, __pyx_v_c.reduced_windows); + + /* "gensim/models/word2vec_corpusfile.pyx":409 + * c.indexes, c.codelens, c.codes, c.points, c.reduced_windows) + * + * for sent_idx in range(effective_sentences): # <<<<<<<<<<<<<< + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] + */ + __pyx_t_1 = __pyx_v_effective_sentences; + __pyx_t_4 = __pyx_t_1; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_4; __pyx_t_11+=1) { + __pyx_v_sent_idx = __pyx_t_11; + + /* "gensim/models/word2vec_corpusfile.pyx":410 + * + * for sent_idx in range(effective_sentences): + * idx_start = c.sentence_idx[sent_idx] # <<<<<<<<<<<<<< + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): + */ + __pyx_v_idx_start = (__pyx_v_c.sentence_idx[__pyx_v_sent_idx]); + + /* "gensim/models/word2vec_corpusfile.pyx":411 + * for sent_idx in range(effective_sentences): + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + */ + __pyx_v_idx_end = (__pyx_v_c.sentence_idx[(__pyx_v_sent_idx + 1)]); + + /* "gensim/models/word2vec_corpusfile.pyx":412 + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: + */ + __pyx_t_12 = __pyx_v_idx_end; + __pyx_t_13 = __pyx_t_12; + for (__pyx_t_14 = __pyx_v_idx_start; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_i = __pyx_t_14; + + /* "gensim/models/word2vec_corpusfile.pyx":413 + * idx_end = c.sentence_idx[sent_idx + 1] + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< + * if j < idx_start: + * j = idx_start + */ + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/word2vec_corpusfile.pyx":414 + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: # <<<<<<<<<<<<<< + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + */ + __pyx_t_8 = ((__pyx_v_j < __pyx_v_idx_start) != 0); + if (__pyx_t_8) { + + /* "gensim/models/word2vec_corpusfile.pyx":415 + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: + * j = idx_start # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: + */ + __pyx_v_j = __pyx_v_idx_start; + + /* "gensim/models/word2vec_corpusfile.pyx":414 + * for i in range(idx_start, idx_end): + * j = i - c.window + c.reduced_windows[i] + * if j < idx_start: # 
<<<<<<<<<<<<<< + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":416 + * if j < idx_start: + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< + * if k > idx_end: + * k = idx_end + */ + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); + + /* "gensim/models/word2vec_corpusfile.pyx":417 + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: # <<<<<<<<<<<<<< + * k = idx_end + * if c.hs: + */ + __pyx_t_8 = ((__pyx_v_k > __pyx_v_idx_end) != 0); + if (__pyx_t_8) { + + /* "gensim/models/word2vec_corpusfile.pyx":418 + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: + * k = idx_end # <<<<<<<<<<<<<< + * if c.hs: + * w2v_fast_sentence_cbow_hs( + */ + __pyx_v_k = __pyx_v_idx_end; + + /* "gensim/models/word2vec_corpusfile.pyx":417 + * j = idx_start + * k = i + c.window + 1 - c.reduced_windows[i] + * if k > idx_end: # <<<<<<<<<<<<<< + * k = idx_end + * if c.hs: + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":419 + * if k > idx_end: + * k = idx_end + * if c.hs: # <<<<<<<<<<<<<< + * w2v_fast_sentence_cbow_hs( + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, + */ + __pyx_t_8 = (__pyx_v_c.hs != 0); + if (__pyx_t_8) { + + /* "gensim/models/word2vec_corpusfile.pyx":420 + * k = idx_end + * if c.hs: + * w2v_fast_sentence_cbow_hs( # <<<<<<<<<<<<<< + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, + * c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) + */ + __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0, __pyx_v_c.syn1, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean, __pyx_v_c.word_locks, __pyx_v_c.compute_loss, (&__pyx_v_c.running_training_loss)); + + /* "gensim/models/word2vec_corpusfile.pyx":419 + * if k > idx_end: + * k = idx_end + * if c.hs: # <<<<<<<<<<<<<< + * w2v_fast_sentence_cbow_hs( + * c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, + */ + } + + /* "gensim/models/word2vec_corpusfile.pyx":424 + * c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) + * + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = w2v_fast_sentence_cbow_neg( + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, + */ + __pyx_t_8 = (__pyx_v_c.negative != 0); + if (__pyx_t_8) { + + /* "gensim/models/word2vec_corpusfile.pyx":425 + * + * if c.negative: + * c.next_random = w2v_fast_sentence_cbow_neg( # <<<<<<<<<<<<<< + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, + * c.syn1neg, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, + */ + __pyx_v_c.next_random = __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0, __pyx_v_c.syn1neg, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean, __pyx_v_c.next_random, __pyx_v_c.word_locks, __pyx_v_c.compute_loss, (&__pyx_v_c.running_training_loss)); + + /* "gensim/models/word2vec_corpusfile.pyx":424 + * c.work, i, j, k, c.cbow_mean, c.word_locks, 
c.compute_loss, &c.running_training_loss) + * + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = w2v_fast_sentence_cbow_neg( + * c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, + */ + } + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":430 + * c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) + * + * total_sentences += sentences.size() # <<<<<<<<<<<<<< + * total_effective_words += effective_words + * + */ + __pyx_v_total_sentences = (__pyx_v_total_sentences + __pyx_v_sentences.size()); + + /* "gensim/models/word2vec_corpusfile.pyx":431 + * + * total_sentences += sentences.size() + * total_effective_words += effective_words # <<<<<<<<<<<<<< + * + * c.alpha = get_next_alpha( + */ + __pyx_v_total_effective_words = (__pyx_v_total_effective_words + __pyx_v_effective_words); + + /* "gensim/models/word2vec_corpusfile.pyx":433 + * total_effective_words += effective_words + * + * c.alpha = get_next_alpha( # <<<<<<<<<<<<<< + * start_alpha, end_alpha, total_sentences, total_words, + * expected_examples, expected_words, cur_epoch, num_epochs) + */ + __pyx_v_c.alpha = __pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha(__pyx_v_start_alpha, __pyx_v_end_alpha, __pyx_v_total_sentences, __pyx_v_total_words, __pyx_v_expected_examples, __pyx_v_expected_words, __pyx_v_cur_epoch, __pyx_v_num_epochs); + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":396 + * cdef vector[vector[string]] sentences + * + * with nogil: # <<<<<<<<<<<<<< + * input_stream.reset() + * while not (input_stream.is_eof() or total_words > expected_words / c.workers): + */ + /*finally:*/ { + /*normal exit:*/{ + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L5; + } + __pyx_L4_error: { + #ifdef WITH_THREAD + __Pyx_FastGIL_Forget(); + Py_BLOCK_THREADS + #endif + goto __pyx_L1_error; + } + __pyx_L5:; + } + } + + /* "gensim/models/word2vec_corpusfile.pyx":437 + * expected_examples, expected_words, cur_epoch, num_epochs) + * + * model.running_training_loss = c.running_training_loss # <<<<<<<<<<<<<< + * return total_sentences, total_effective_words, total_words + * + */ + __pyx_t_2 = PyFloat_FromDouble(__pyx_v_c.running_training_loss); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 437, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss, __pyx_t_2) < 0) __PYX_ERR(1, 437, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":438 + * + * model.running_training_loss = c.running_training_loss + * return total_sentences, total_effective_words, total_words # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_total_sentences); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 438, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_total_effective_words); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 438, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_15 = __Pyx_PyInt_From_int(__pyx_v_total_words); if (unlikely(!__pyx_t_15)) __PYX_ERR(1, 438, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_16 = PyTuple_New(3); if (unlikely(!__pyx_t_16)) __PYX_ERR(1, 438, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_16, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_15); + PyTuple_SET_ITEM(__pyx_t_16, 2, __pyx_t_15); + __pyx_t_2 = 0; + __pyx_t_6 = 0; + __pyx_t_15 = 
0; + __pyx_r = __pyx_t_16; + __pyx_t_16 = 0; + goto __pyx_L0; + + /* "gensim/models/word2vec_corpusfile.pyx":346 + * + * + * def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1, compute_loss): + * """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_15); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_AddTraceback("gensim.models.word2vec_corpusfile.train_epoch_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_input_stream); + __Pyx_XDECREF((PyObject *)__pyx_v_vocab); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fulfill the PEP. + */ + +/* Python wrapper */ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); + __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_v_i; + int __pyx_v_ndim; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + int __pyx_v_t; + char *__pyx_v_f; + PyArray_Descr *__pyx_v_descr = 0; + int __pyx_v_offset; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + char *__pyx_t_8; + if (__pyx_v_info == NULL) { + PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete"); + return -1; + } + __Pyx_RefNannySetupContext("__getbuffer__", 0); + __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(__pyx_v_info->obj); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + * + * cdef int i, ndim + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + */ + __pyx_v_endian_detector = 1; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + * cdef int i, ndim + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * + * ndim = PyArray_NDIM(self) + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< + * + * if 
((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + */ + __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + __pyx_t_2 = (((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not C contiguous") + * + */ + __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_C_CONTIGUOUS) != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(3, 229, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + __pyx_t_2 = (((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L7_bool_binop_done; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not Fortran contiguous") + * + */ + __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_F_CONTIGUOUS) != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L7_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == 
pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(3, 233, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + * raise ValueError(u"ndarray is not Fortran contiguous") + * + * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + * + * info.buf = PyArray_DATA(self) + * info.ndim = ndim # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * # Allocate new buffer for strides and shape info. + */ + __pyx_v_info->ndim = __pyx_v_ndim; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< + * info.shape = info.strides + ndim + * for i in range(ndim): + */ + __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + * # This is allocated as one block, strides first. 
+ * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) + * info.shape = info.strides + ndim # <<<<<<<<<<<<<< + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + */ + __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) + * info.shape = info.strides + ndim + * for i in range(ndim): # <<<<<<<<<<<<<< + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] + */ + __pyx_t_4 = __pyx_v_ndim; + __pyx_t_5 = __pyx_t_4; + for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { + __pyx_v_i = __pyx_t_6; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + * info.shape = info.strides + ndim + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + */ + (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< + * else: + * info.strides = PyArray_STRIDES(self) + */ + (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + goto __pyx_L9; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + */ + /*else*/ { + __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + * else: + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + */ + __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self)); + } + __pyx_L9:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL # <<<<<<<<<<<<<< + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) + */ + __pyx_v_info->suboffsets = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< + * info.readonly = not PyArray_ISWRITEABLE(self) + * + */ + __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< + * + * cdef int t + */ + __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + * + * cdef int t + * cdef char* f = NULL # <<<<<<<<<<<<<< + * cdef dtype descr = self.descr + * cdef int offset + */ + __pyx_v_f = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + * cdef int t + * cdef char* f = NULL + * cdef dtype descr = self.descr # <<<<<<<<<<<<<< + * cdef int offset + * + */ + __pyx_t_3 = ((PyObject *)__pyx_v_self->descr); + __Pyx_INCREF(__pyx_t_3); + __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + * cdef int offset + * + * info.obj = self # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(descr): + */ + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = ((PyObject *)__pyx_v_self); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + * info.obj = self + * + * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + * + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num # <<<<<<<<<<<<<< + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + */ + __pyx_t_4 = __pyx_v_descr->type_num; + __pyx_v_t = __pyx_t_4; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_2 = ((__pyx_v_descr->byteorder == '>') != 0); + if (!__pyx_t_2) { + goto __pyx_L15_next_or; + } else { + } + __pyx_t_2 = (__pyx_v_little_endian != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L14_bool_binop_done; + } + __pyx_L15_next_or:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + */ + __pyx_t_2 = ((__pyx_v_descr->byteorder == '<') != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L14_bool_binop_done; + } + __pyx_t_2 = ((!(__pyx_v_little_endian != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L14_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + if (unlikely(__pyx_t_1)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f 
= "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(3, 263, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + */ + switch (__pyx_v_t) { + case NPY_BYTE: + __pyx_v_f = ((char *)"b"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + */ + case NPY_UBYTE: + __pyx_v_f = ((char *)"B"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + */ + case NPY_SHORT: + __pyx_v_f = ((char *)"h"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + */ + case NPY_USHORT: + __pyx_v_f = ((char *)"H"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + */ + case NPY_INT: + __pyx_v_f = ((char *)"i"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + */ + case NPY_UINT: + __pyx_v_f = ((char *)"I"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + */ + case NPY_LONG: + __pyx_v_f = ((char *)"l"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + */ + case NPY_ULONG: + __pyx_v_f = ((char *)"L"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + * elif t == NPY_LONG: f = "l" + * elif t == 
NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + */ + case NPY_LONGLONG: + __pyx_v_f = ((char *)"q"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + */ + case NPY_ULONGLONG: + __pyx_v_f = ((char *)"Q"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + */ + case NPY_FLOAT: + __pyx_v_f = ((char *)"f"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + */ + case NPY_DOUBLE: + __pyx_v_f = ((char *)"d"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + */ + case NPY_LONGDOUBLE: + __pyx_v_f = ((char *)"g"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + */ + case NPY_CFLOAT: + __pyx_v_f = ((char *)"Zf"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" + */ + case NPY_CDOUBLE: + __pyx_v_f = ((char *)"Zd"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f = "O" + * else: + */ + case NPY_CLONGDOUBLE: + __pyx_v_f = ((char *)"Zg"); + break; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + case NPY_OBJECT: + __pyx_v_f = ((char *)"O"); + break; + default: + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + * elif t == NPY_OBJECT: f = "O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * info.format = f + * return + */ + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(3, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_7); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(3, 282, __pyx_L1_error) + break; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f # <<<<<<<<<<<<<< + * return + * else: + */ + __pyx_v_info->format = __pyx_v_f; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f + * return # <<<<<<<<<<<<<< + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + * info.obj = self + * + * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + * return + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + */ + /*else*/ { + __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, + */ + (__pyx_v_info->format[0]) = '^'; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + * info.format = PyObject_Malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 # <<<<<<<<<<<<<< + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + */ + __pyx_v_offset = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< + * info.format + _buffer_format_string_len, + * &offset) + */ + __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(3, 289, __pyx_L1_error) + __pyx_v_f = __pyx_t_8; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + * info.format + _buffer_format_string_len, + * &offset) + * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + */ + (__pyx_v_f[0]) = '\x00'; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fulfill the PEP. 
+ */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (__pyx_v_info->obj != NULL) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0; + } + goto __pyx_L2; + __pyx_L0:; + if (__pyx_v_info->obj == Py_None) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0; + } + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_descr); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + */ + +/* Python wrapper */ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); + __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__releasebuffer__", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * PyObject_Free(info.strides) + */ + PyObject_Free(__pyx_v_info->format); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * PyObject_Free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * PyObject_Free(info.strides) # <<<<<<<<<<<<<< + * # info.shape was stored after info.strides in the same block + * + */ + PyObject_Free(__pyx_v_info->strides); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + * if PyArray_HASFIELDS(self): + * 
PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * PyObject_Free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + * + * cdef inline object PyArray_MultiIterNew1(a): + * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew2(a, b): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 776, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + * + * cdef inline object PyArray_MultiIterNew2(a, b): + * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 779, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 782, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 785, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object 
PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 788, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< + * return d.subarray.shape + * else: + */ + __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape # <<<<<<<<<<<<<< + * else: + * return () + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape)); + __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< + * return d.subarray.shape + * else: + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + * return d.subarray.shape + * else: + * return () # <<<<<<<<<<<<<< + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_empty_tuple); + __pyx_r = __pyx_empty_tuple; + goto __pyx_L0; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + * return PyArray_MultiIterNew(5, a, 
b, c, d, e) + * + * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + * return () + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. + */ + +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) { + PyArray_Descr *__pyx_v_child = 0; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + PyObject *__pyx_v_fields = 0; + PyObject *__pyx_v_childname = NULL; + PyObject *__pyx_v_new_offset = NULL; + PyObject *__pyx_v_t = NULL; + char *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + long __pyx_t_8; + char *__pyx_t_9; + __Pyx_RefNannySetupContext("_util_dtypestring", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + * + * cdef dtype child + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * cdef tuple fields + */ + __pyx_v_endian_detector = 1; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + * cdef dtype child + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * cdef tuple fields + * + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + if (unlikely(__pyx_v_descr->names == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(3, 805, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_descr->names; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(3, 805, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 805, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + * + * for childname in descr.names: + * fields = descr.fields[childname] # <<<<<<<<<<<<<< + * child, new_offset = fields + * + */ + if (unlikely(__pyx_v_descr->fields == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(3, 806, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_PyDict_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 806, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == 
Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(3, 806, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + * for childname in descr.names: + * fields = descr.fields[childname] + * child, new_offset = fields # <<<<<<<<<<<<<< + * + * if (end - f) - (new_offset - offset[0]) < 15: + */ + if (likely(__pyx_v_fields != Py_None)) { + PyObject* sequence = __pyx_v_fields; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(3, 807, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 807, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 807, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(3, 807, __pyx_L1_error) + } + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) __PYX_ERR(3, 807, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_child, ((PyArray_Descr *)__pyx_t_3)); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); + __pyx_t_4 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + __pyx_t_4 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 809, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 809, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(3, 809, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 810, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(3, 810, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + } + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_7 = ((__pyx_v_child->byteorder == '>') != 0); + if (!__pyx_t_7) { + goto __pyx_L8_next_or; + } else { + } + __pyx_t_7 = (__pyx_v_little_endian != 0); + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L7_bool_binop_done; + } + __pyx_L8_next_or:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + * + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * # One could encode it in the format string and have Cython + */ + __pyx_t_7 = ((__pyx_v_child->byteorder == '<') != 0); + if (__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_7 = ((!(__pyx_v_little_endian != 0)) != 0); + __pyx_t_6 = __pyx_t_7; + __pyx_L7_bool_binop_done:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 814, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(3, 814, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + * + * # Output padding bytes + * while offset[0] < new_offset: # <<<<<<<<<<<<<< + * f[0] = 120 # "x"; pad byte + * f += 1 + */ + while (1) { + __pyx_t_3 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 824, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_3, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 824, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 824, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!__pyx_t_6) break; + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + * # Output padding bytes + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< + * f += 1 + * offset[0] += 1 + */ + (__pyx_v_f[0]) = 0x78; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte + * f += 1 # <<<<<<<<<<<<<< + * offset[0] += 1 + * + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + * f[0] = 120 # "x"; pad byte + * f += 1 + * offset[0] += 1 # <<<<<<<<<<<<<< + * + * offset[0] += child.itemsize + */ + __pyx_t_8 = 0; + (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + * offset[0] += 1 + * + * offset[0] += child.itemsize # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(child): + */ + __pyx_t_8 = 0; + (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); + if (__pyx_t_6) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + * + * if not PyDataType_HASFIELDS(child): + * t = child.type_num # <<<<<<<<<<<<<< + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") + */ + __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_child->type_num); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 832, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); + __pyx_t_4 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); + if (unlikely(__pyx_t_6)) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 834, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(3, 834, __pyx_L1_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + * + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_BYTE); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 837, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, 
__pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 837, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 837, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 98; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UBYTE); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 838, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 838, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 838, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 66; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_SHORT); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 839, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 839, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 839, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x68; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_USHORT); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 840, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 840, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 840, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 72; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_INT); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 841, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(3, 841, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 841, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x69; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UINT); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 842, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 842, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 842, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 73; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 843, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 843, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 843, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x6C; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 844, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 844, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 844, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 76; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGLONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 845, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 845, __pyx_L1_error) + 
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 845, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x71; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONGLONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 846, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 846, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 846, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 81; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_FLOAT); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 847, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 847, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 847, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x66; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_DOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 848, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 848, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 848, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x64; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 849, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(3, 849, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 849, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x67; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CFLOAT); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 850, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 850, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 850, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x66; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 851, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 851, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 851, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x64; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CLONGDOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 852, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 852, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 852, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x67; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" # 
<<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_OBJECT); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 853, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 853, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(3, 853, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (likely(__pyx_t_6)) { + (__pyx_v_f[0]) = 79; + goto __pyx_L15; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * f += 1 + * else: + */ + /*else*/ { + __pyx_t_3 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(3, 855, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(3, 855, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(3, 855, __pyx_L1_error) + } + __pyx_L15:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * f += 1 # <<<<<<<<<<<<<< + * else: + * # Cython ignores struct boundary information ("T{...}"), + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + goto __pyx_L13; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + * # Cython ignores struct boundary information ("T{...}"), + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< + * return f + * + */ + /*else*/ { + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(3, 860, __pyx_L1_error) + __pyx_v_f = __pyx_t_9; + } + __pyx_L13:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) + * return f # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + * return () + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. 
+ */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_child); + __Pyx_XDECREF(__pyx_v_fields); + __Pyx_XDECREF(__pyx_v_childname); + __Pyx_XDECREF(__pyx_v_new_offset); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + +static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) { + PyObject *__pyx_v_baseptr; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + __Pyx_RefNannySetupContext("set_array_base", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + __pyx_t_1 = (__pyx_v_base == Py_None); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + * cdef PyObject* baseptr + * if base is None: + * baseptr = NULL # <<<<<<<<<<<<<< + * else: + * Py_INCREF(base) # important to do this before decref below! + */ + __pyx_v_baseptr = NULL; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + goto __pyx_L3; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + * baseptr = NULL + * else: + * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< + * baseptr = base + * Py_XDECREF(arr.base) + */ + /*else*/ { + Py_INCREF(__pyx_v_base); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + * else: + * Py_INCREF(base) # important to do this before decref below! + * baseptr = base # <<<<<<<<<<<<<< + * Py_XDECREF(arr.base) + * arr.base = baseptr + */ + __pyx_v_baseptr = ((PyObject *)__pyx_v_base); + } + __pyx_L3:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + * Py_INCREF(base) # important to do this before decref below! 
+ * baseptr = base + * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< + * arr.base = baseptr + * + */ + Py_XDECREF(__pyx_v_arr->base); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + * baseptr = base + * Py_XDECREF(arr.base) + * arr.base = baseptr # <<<<<<<<<<<<<< + * + * cdef inline object get_array_base(ndarray arr): + */ + __pyx_v_arr->base = __pyx_v_baseptr; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("get_array_base", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); + if (__pyx_t_1) { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: + * return None # <<<<<<<<<<<<<< + * else: + * return arr.base + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + * return None + * else: + * return arr.base # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_arr->base)); + __pyx_r = ((PyObject *)__pyx_v_arr->base); + goto __pyx_L0; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + * # Versions of the import_* functions which are more suitable for + * # Cython code. + * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< + * try: + * _import_array() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_array", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. 
+ * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + * cdef inline int import_array() except -1: + * try: + * _import_array() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") + */ + __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(3, 998, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. + * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + * try: + * _import_array() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.multiarray failed to import") + * + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(3, 999, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + * _import_array() + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_umath() except -1: + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(3, 1000, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(3, 1000, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + * # Cython code. + * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + * # Versions of the import_* functions which are more suitable for + * # Cython code. 
+ * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< + * try: + * _import_array() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + * raise ImportError("numpy.core.multiarray failed to import") + * + * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_umath", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + * cdef inline int import_umath() except -1: + * try: + * _import_umath() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(3, 1004, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + * try: + * _import_umath() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.umath failed to import") + * + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(3, 1005, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_ufunc() except -1: + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(3, 1006, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(3, 1006, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + * raise ImportError("numpy.core.multiarray failed to import") + * + * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_ufunc", 0); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + * cdef inline int import_ufunc() except -1: + * try: + * _import_umath() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(3, 1010, __pyx_L3_error) + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + * try: + * _import_umath() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(3, 1011, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + * 
_import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(3, 1012, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(3, 1012, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "string.from_py":13 + * + * @cname("__pyx_convert_string_from_py_std__in_string") + * cdef string __pyx_convert_string_from_py_std__in_string(object o) except *: # <<<<<<<<<<<<<< + * cdef Py_ssize_t length + * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) + */ + +static std::string __pyx_convert_string_from_py_std__in_string(PyObject *__pyx_v_o) { + Py_ssize_t __pyx_v_length; + char const *__pyx_v_data; + std::string __pyx_r; + __Pyx_RefNannyDeclarations + char const *__pyx_t_1; + __Pyx_RefNannySetupContext("__pyx_convert_string_from_py_std__in_string", 0); + + /* "string.from_py":15 + * cdef string __pyx_convert_string_from_py_std__in_string(object o) except *: + * cdef Py_ssize_t length + * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) # <<<<<<<<<<<<<< + * return string(data, length) + * + */ + __pyx_t_1 = __Pyx_PyObject_AsStringAndSize(__pyx_v_o, (&__pyx_v_length)); if (unlikely(__pyx_t_1 == ((char const *)NULL))) __PYX_ERR(0, 15, __pyx_L1_error) + __pyx_v_data = __pyx_t_1; + + /* "string.from_py":16 + * cdef Py_ssize_t length + * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) + * return string(data, length) # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = std::string(__pyx_v_data, __pyx_v_length); + goto __pyx_L0; + + /* "string.from_py":13 + * + * @cname("__pyx_convert_string_from_py_std__in_string") + * cdef string __pyx_convert_string_from_py_std__in_string(object o) except *: # <<<<<<<<<<<<<< + * cdef Py_ssize_t length + * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_AddTraceback("string.from_py.__pyx_convert_string_from_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_pretend_to_initialize(&__pyx_r); + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "vector.from_py":45 + * + * @cname("__pyx_convert_vector_from_py_std_3a__3a_string") + * cdef vector[X] __pyx_convert_vector_from_py_std_3a__3a_string(object o) except *: # 
<<<<<<<<<<<<<< + * cdef vector[X] v + * for item in o: + */ + +static std::vector __pyx_convert_vector_from_py_std_3a__3a_string(PyObject *__pyx_v_o) { + std::vector __pyx_v_v; + PyObject *__pyx_v_item = NULL; + std::vector __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *(*__pyx_t_3)(PyObject *); + PyObject *__pyx_t_4 = NULL; + std::string __pyx_t_5; + __Pyx_RefNannySetupContext("__pyx_convert_vector_from_py_std_3a__3a_string", 0); + + /* "vector.from_py":47 + * cdef vector[X] __pyx_convert_vector_from_py_std_3a__3a_string(object o) except *: + * cdef vector[X] v + * for item in o: # <<<<<<<<<<<<<< + * v.push_back(item) + * return v + */ + if (likely(PyList_CheckExact(__pyx_v_o)) || PyTuple_CheckExact(__pyx_v_o)) { + __pyx_t_1 = __pyx_v_o; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + __pyx_t_3 = NULL; + } else { + __pyx_t_2 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_o); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 47, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 47, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_3)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_4); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 47, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 47, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_4); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 47, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 47, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } + } else { + __pyx_t_4 = __pyx_t_3(__pyx_t_1); + if (unlikely(!__pyx_t_4)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 47, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_4); + } + __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_4); + __pyx_t_4 = 0; + + /* "vector.from_py":48 + * cdef vector[X] v + * for item in o: + * v.push_back(item) # <<<<<<<<<<<<<< + * return v + * + */ + __pyx_t_5 = __pyx_convert_string_from_py_std__in_string(__pyx_v_item); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 48, __pyx_L1_error) + __pyx_v_v.push_back(((std::string)__pyx_t_5)); + + /* "vector.from_py":47 + * cdef vector[X] __pyx_convert_vector_from_py_std_3a__3a_string(object o) except *: + * cdef vector[X] v + * for item in o: # <<<<<<<<<<<<<< + * v.push_back(item) + * return v + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "vector.from_py":49 + * for item in o: + * v.push_back(item) + * return v # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_v; + goto __pyx_L0; + + /* "vector.from_py":45 + * + * @cname("__pyx_convert_vector_from_py_std_3a__3a_string") + * cdef vector[X] __pyx_convert_vector_from_py_std_3a__3a_string(object o) except *: # <<<<<<<<<<<<<< + * cdef vector[X] v + * for item in o: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + 
__Pyx_AddTraceback("vector.from_py.__pyx_convert_vector_from_py_std_3a__3a_string", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_pretend_to_initialize(&__pyx_r); + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_item); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "string.to_py":31 + * + * @cname("__pyx_convert_PyObject_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyObject_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyObject_FromStringAndSize(s.data(), s.size()) + * cdef extern from *: + */ + +static CYTHON_INLINE PyObject *__pyx_convert_PyObject_string_to_py_std__in_string(std::string const &__pyx_v_s) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__pyx_convert_PyObject_string_to_py_std__in_string", 0); + + /* "string.to_py":32 + * @cname("__pyx_convert_PyObject_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyObject_string_to_py_std__in_string(const string& s): + * return __Pyx_PyObject_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< + * cdef extern from *: + * cdef object __Pyx_PyUnicode_FromStringAndSize(const char*, size_t) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 32, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "string.to_py":31 + * + * @cname("__pyx_convert_PyObject_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyObject_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyObject_FromStringAndSize(s.data(), s.size()) + * cdef extern from *: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("string.to_py.__pyx_convert_PyObject_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "string.to_py":37 + * + * @cname("__pyx_convert_PyUnicode_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyUnicode_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyUnicode_FromStringAndSize(s.data(), s.size()) + * cdef extern from *: + */ + +static CYTHON_INLINE PyObject *__pyx_convert_PyUnicode_string_to_py_std__in_string(std::string const &__pyx_v_s) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__pyx_convert_PyUnicode_string_to_py_std__in_string", 0); + + /* "string.to_py":38 + * @cname("__pyx_convert_PyUnicode_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyUnicode_string_to_py_std__in_string(const string& s): + * return __Pyx_PyUnicode_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< + * cdef extern from *: + * cdef object __Pyx_PyStr_FromStringAndSize(const char*, size_t) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyUnicode_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "string.to_py":37 + * + * @cname("__pyx_convert_PyUnicode_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyUnicode_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyUnicode_FromStringAndSize(s.data(), 
s.size()) + * cdef extern from *: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("string.to_py.__pyx_convert_PyUnicode_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "string.to_py":43 + * + * @cname("__pyx_convert_PyStr_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyStr_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyStr_FromStringAndSize(s.data(), s.size()) + * cdef extern from *: + */ + +static CYTHON_INLINE PyObject *__pyx_convert_PyStr_string_to_py_std__in_string(std::string const &__pyx_v_s) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__pyx_convert_PyStr_string_to_py_std__in_string", 0); + + /* "string.to_py":44 + * @cname("__pyx_convert_PyStr_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyStr_string_to_py_std__in_string(const string& s): + * return __Pyx_PyStr_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< + * cdef extern from *: + * cdef object __Pyx_PyBytes_FromStringAndSize(const char*, size_t) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyStr_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 44, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "string.to_py":43 + * + * @cname("__pyx_convert_PyStr_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyStr_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyStr_FromStringAndSize(s.data(), s.size()) + * cdef extern from *: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("string.to_py.__pyx_convert_PyStr_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "string.to_py":49 + * + * @cname("__pyx_convert_PyBytes_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyBytes_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyBytes_FromStringAndSize(s.data(), s.size()) + * cdef extern from *: + */ + +static CYTHON_INLINE PyObject *__pyx_convert_PyBytes_string_to_py_std__in_string(std::string const &__pyx_v_s) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__pyx_convert_PyBytes_string_to_py_std__in_string", 0); + + /* "string.to_py":50 + * @cname("__pyx_convert_PyBytes_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyBytes_string_to_py_std__in_string(const string& s): + * return __Pyx_PyBytes_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< + * cdef extern from *: + * cdef object __Pyx_PyByteArray_FromStringAndSize(const char*, size_t) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "string.to_py":49 + * + * @cname("__pyx_convert_PyBytes_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyBytes_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return 
__Pyx_PyBytes_FromStringAndSize(s.data(), s.size()) + * cdef extern from *: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("string.to_py.__pyx_convert_PyBytes_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "string.to_py":55 + * + * @cname("__pyx_convert_PyByteArray_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyByteArray_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyByteArray_FromStringAndSize(s.data(), s.size()) + * + */ + +static CYTHON_INLINE PyObject *__pyx_convert_PyByteArray_string_to_py_std__in_string(std::string const &__pyx_v_s) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__pyx_convert_PyByteArray_string_to_py_std__in_string", 0); + + /* "string.to_py":56 + * @cname("__pyx_convert_PyByteArray_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyByteArray_string_to_py_std__in_string(const string& s): + * return __Pyx_PyByteArray_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyByteArray_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "string.to_py":55 + * + * @cname("__pyx_convert_PyByteArray_string_to_py_std__in_string") + * cdef inline object __pyx_convert_PyByteArray_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< + * return __Pyx_PyByteArray_FromStringAndSize(s.data(), s.size()) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("string.to_py.__pyx_convert_PyByteArray_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "vector.to_py":60 + * + * @cname("__pyx_convert_vector_to_py_std_3a__3a_string") + * cdef object __pyx_convert_vector_to_py_std_3a__3a_string(vector[X]& v): # <<<<<<<<<<<<<< + * return [v[i] for i in range(v.size())] + * + */ + +static PyObject *__pyx_convert_vector_to_py_std_3a__3a_string(const std::vector &__pyx_v_v) { + size_t __pyx_v_i; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + size_t __pyx_t_2; + size_t __pyx_t_3; + size_t __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("__pyx_convert_vector_to_py_std_3a__3a_string", 0); + + /* "vector.to_py":61 + * @cname("__pyx_convert_vector_to_py_std_3a__3a_string") + * cdef object __pyx_convert_vector_to_py_std_3a__3a_string(vector[X]& v): + * return [v[i] for i in range(v.size())] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __pyx_v_v.size(); + __pyx_t_3 = __pyx_t_2; + for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) { + __pyx_v_i = __pyx_t_4; + __pyx_t_5 = __pyx_convert_PyBytes_string_to_py_std__in_string((__pyx_v_v[__pyx_v_i])); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_t_5))) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); 
__pyx_t_5 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "vector.to_py":60 + * + * @cname("__pyx_convert_vector_to_py_std_3a__3a_string") + * cdef object __pyx_convert_vector_to_py_std_3a__3a_string(vector[X]& v): # <<<<<<<<<<<<<< + * return [v[i] for i in range(v.size())] + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("vector.to_py.__pyx_convert_vector_to_py_std_3a__3a_string", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___(const std::vector > &__pyx_v_v) { + size_t __pyx_v_i; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + size_t __pyx_t_2; + size_t __pyx_t_3; + size_t __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("__pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___", 0); + + /* "vector.to_py":61 + * @cname("__pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___") + * cdef object __pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___(vector[X]& v): + * return [v[i] for i in range(v.size())] # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __pyx_v_v.size(); + __pyx_t_3 = __pyx_t_2; + for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) { + __pyx_v_i = __pyx_t_4; + __pyx_t_5 = __pyx_convert_vector_to_py_std_3a__3a_string((__pyx_v_v[__pyx_v_i])); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_t_5))) __PYX_ERR(0, 61, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "vector.to_py":60 + * + * @cname("__pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___") + * cdef object __pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___(vector[X]& v): # <<<<<<<<<<<<<< + * return [v[i] for i in range(v.size())] + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("vector.to_py.__pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonLineSentence __pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; + +static PyObject *__pyx_tp_new_6gensim_6models_19word2vec_corpusfile_CythonLineSentence(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *p; + PyObject *o; + o = (*t->tp_alloc)(t, 0); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)o); + p->__pyx_vtab = __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; + new((void*)&(p->buf_data)) std::vector > (); + p->source = ((PyObject*)Py_None); Py_INCREF(Py_None); + if (unlikely(__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_1__cinit__(o, a, k) < 0)) goto bad; + return o; + bad: + 
Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_6gensim_6models_19word2vec_corpusfile_CythonLineSentence(PyObject *o) { + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *p = (struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *)o; + { + PyObject *etype, *eval, *etb; + PyErr_Fetch(&etype, &eval, &etb); + ++Py_REFCNT(o); + __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_5__dealloc__(o); + --Py_REFCNT(o); + PyErr_Restore(etype, eval, etb); + } + __Pyx_call_destructor(p->buf_data); + Py_CLEAR(p->source); + (*Py_TYPE(o)->tp_free)(o); +} + +static PyObject *__pyx_getprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_source(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_1__get__(o); +} + +static int __pyx_setprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_source(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_3__set__(o, v); + } + else { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6source_5__del__(o); + } +} + +static PyObject *__pyx_getprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_max_sentence_length(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_1__get__(o); +} + +static int __pyx_setprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_max_sentence_length(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19max_sentence_length_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_max_words_in_batch(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_1__get__(o); +} + +static int __pyx_setprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_max_words_in_batch(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_18max_words_in_batch_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_offset(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_1__get__(o); +} + +static int __pyx_setprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_offset(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6offset_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyMethodDef __pyx_methods_6gensim_6models_19word2vec_corpusfile_CythonLineSentence[] = { + {"is_eof", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_7is_eof, METH_NOARGS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_6is_eof}, + {"read_sentence", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_9read_sentence, METH_NOARGS, 
__pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_8read_sentence}, + {"_read_chunked_sentence", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_11_read_chunked_sentence, METH_NOARGS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_10_read_chunked_sentence}, + {"_chunk_sentence", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_13_chunk_sentence, METH_O, __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_12_chunk_sentence}, + {"reset", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_15reset, METH_NOARGS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_14reset}, + {"__reduce__", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_20__reduce__, METH_NOARGS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_19__reduce__}, + {"next_batch", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_22next_batch, METH_NOARGS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_21next_batch}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_6gensim_6models_19word2vec_corpusfile_CythonLineSentence[] = { + {(char *)"source", __pyx_getprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_source, __pyx_setprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_source, (char *)"source: bytes", 0}, + {(char *)"max_sentence_length", __pyx_getprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_max_sentence_length, __pyx_setprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_max_sentence_length, (char *)"max_sentence_length: 'size_t'", 0}, + {(char *)"max_words_in_batch", __pyx_getprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_max_words_in_batch, __pyx_setprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_max_words_in_batch, (char *)"max_words_in_batch: 'size_t'", 0}, + {(char *)"offset", __pyx_getprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_offset, __pyx_setprop_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_offset, (char *)"offset: 'size_t'", 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = { + PyVarObject_HEAD_INIT(0, 0) + "gensim.models.word2vec_corpusfile.CythonLineSentence", /*tp_name*/ + sizeof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_6gensim_6models_19word2vec_corpusfile_CythonLineSentence, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER, /*tp_flags*/ + "CythonLineSentence(source, offset=0, max_sentence_length=10000)", /*tp_doc*/ + 0, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_17__iter__, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_6gensim_6models_19word2vec_corpusfile_CythonLineSentence, /*tp_methods*/ + 0, /*tp_members*/ + 
__pyx_getsets_6gensim_6models_19word2vec_corpusfile_CythonLineSentence, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_3__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_6gensim_6models_19word2vec_corpusfile_CythonLineSentence, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_6gensim_6models_19word2vec_corpusfile_CythonVocab __pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonVocab; + +static PyObject *__pyx_tp_new_6gensim_6models_19word2vec_corpusfile_CythonVocab(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *p; + PyObject *o; + o = (*t->tp_alloc)(t, 0); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)o); + p->__pyx_vtab = __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab; + new((void*)&(p->vocab)) __pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t(); + p->subword_arrays = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_6gensim_6models_19word2vec_corpusfile_CythonVocab(PyObject *o) { + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *p = (struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)o; + PyObject_GC_UnTrack(o); + __Pyx_call_destructor(p->vocab); + Py_CLEAR(p->subword_arrays); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_6gensim_6models_19word2vec_corpusfile_CythonVocab(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *p = (struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)o; + if (p->subword_arrays) { + e = (*v)(p->subword_arrays, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_6gensim_6models_19word2vec_corpusfile_CythonVocab(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *p = (struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *)o; + tmp = ((PyObject*)p->subword_arrays); + p->subword_arrays = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_6gensim_6models_19word2vec_corpusfile_CythonVocab[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_3__reduce_cython__, METH_NOARGS, __pyx_doc_6gensim_6models_19word2vec_corpusfile_11CythonVocab_2__reduce_cython__}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_5__setstate_cython__, METH_O, __pyx_doc_6gensim_6models_19word2vec_corpusfile_11CythonVocab_4__setstate_cython__}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab = { + PyVarObject_HEAD_INIT(0, 0) + "gensim.models.word2vec_corpusfile.CythonVocab", /*tp_name*/ + sizeof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_6gensim_6models_19word2vec_corpusfile_CythonVocab, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if 
PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + "CythonVocab(wv, hs=0, fasttext=0)", /*tp_doc*/ + __pyx_tp_traverse_6gensim_6models_19word2vec_corpusfile_CythonVocab, /*tp_traverse*/ + __pyx_tp_clear_6gensim_6models_19word2vec_corpusfile_CythonVocab, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_6gensim_6models_19word2vec_corpusfile_CythonVocab, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_6gensim_6models_19word2vec_corpusfile_11CythonVocab_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_6gensim_6models_19word2vec_corpusfile_CythonVocab, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *__pyx_freelist_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__[8]; +static int __pyx_freecount_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ = 0; + +static PyObject *__pyx_tp_new_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *p; + PyObject *o; + if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__)))) { + o = (PyObject*)__pyx_freelist_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__[--__pyx_freecount_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__]; + memset(o, 0, sizeof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__)); + (void) PyObject_INIT(o, t); + PyObject_GC_Track(o); + } else { + o = (*t->tp_alloc)(t, 0); + if (unlikely(!o)) return 0; + } + p = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *)o); + new((void*)&(p->__pyx_v_chunk)) std::vector (); + new((void*)&(p->__pyx_v_chunked_sentence)) std::vector > (); + new((void*)&(p->__pyx_t_0)) std::vector > ::iterator(); + return o; +} + +static void __pyx_tp_dealloc_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__(PyObject *o) { + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *p = (struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *)o; + PyObject_GC_UnTrack(o); + __Pyx_call_destructor(p->__pyx_v_chunk); + __Pyx_call_destructor(p->__pyx_v_chunked_sentence); + __Pyx_call_destructor(p->__pyx_t_0); + Py_CLEAR(p->__pyx_v_self); + if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ < 8) & (Py_TYPE(o)->tp_basicsize == sizeof(struct 
__pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__)))) { + __pyx_freelist_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__[__pyx_freecount_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__++] = ((struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *)o); + } else { + (*Py_TYPE(o)->tp_free)(o); + } +} + +static int __pyx_tp_traverse_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *p = (struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ *)o; + if (p->__pyx_v_self) { + e = (*v)(((PyObject *)p->__pyx_v_self), a); if (e) return e; + } + return 0; +} + +static PyTypeObject __pyx_type_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ = { + PyVarObject_HEAD_INIT(0, 0) + "gensim.models.word2vec_corpusfile.__pyx_scope_struct____iter__", /*tp_name*/ + sizeof(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__, /*tp_traverse*/ + 0, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_word2vec_corpusfile(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_word2vec_corpusfile}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "word2vec_corpusfile", + __pyx_k_Optimized_cython_functions_for_f, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + 
{&__pyx_n_s_CORPUSFILE_VERSION, __pyx_k_CORPUSFILE_VERSION, sizeof(__pyx_k_CORPUSFILE_VERSION), 0, 0, 1, 1}, + {&__pyx_n_s_CythonLineSentence___iter, __pyx_k_CythonLineSentence___iter, sizeof(__pyx_k_CythonLineSentence___iter), 0, 0, 1, 1}, + {&__pyx_kp_u_Format_string_allocated_too_shor, __pyx_k_Format_string_allocated_too_shor, sizeof(__pyx_k_Format_string_allocated_too_shor), 0, 1, 0, 0}, + {&__pyx_kp_u_Format_string_allocated_too_shor_2, __pyx_k_Format_string_allocated_too_shor_2, sizeof(__pyx_k_Format_string_allocated_too_shor_2), 0, 1, 0, 0}, + {&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1}, + {&__pyx_kp_u_Non_native_byte_order_not_suppor, __pyx_k_Non_native_byte_order_not_suppor, sizeof(__pyx_k_Non_native_byte_order_not_suppor), 0, 1, 0, 0}, + {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1}, + {&__pyx_n_s_alpha, __pyx_k_alpha, sizeof(__pyx_k_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_alpha_2, __pyx_k_alpha_2, sizeof(__pyx_k_alpha_2), 0, 0, 1, 1}, + {&__pyx_n_s_any2utf8, __pyx_k_any2utf8, sizeof(__pyx_k_any2utf8), 0, 0, 1, 1}, + {&__pyx_n_s_args, __pyx_k_args, sizeof(__pyx_k_args), 0, 0, 1, 1}, + {&__pyx_n_s_buckets_word, __pyx_k_buckets_word, sizeof(__pyx_k_buckets_word), 0, 0, 1, 1}, + {&__pyx_n_s_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_close, __pyx_k_close, sizeof(__pyx_k_close), 0, 0, 1, 1}, + {&__pyx_n_s_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 0, 1, 1}, + {&__pyx_n_s_compute_loss, __pyx_k_compute_loss, sizeof(__pyx_k_compute_loss), 0, 0, 1, 1}, + {&__pyx_n_s_corpus_file, __pyx_k_corpus_file, sizeof(__pyx_k_corpus_file), 0, 0, 1, 1}, + {&__pyx_n_s_cur_epoch, __pyx_k_cur_epoch, sizeof(__pyx_k_cur_epoch), 0, 0, 1, 1}, + {&__pyx_n_s_cur_epoch_2, __pyx_k_cur_epoch_2, sizeof(__pyx_k_cur_epoch_2), 0, 0, 1, 1}, + {&__pyx_n_s_cython_vocab, __pyx_k_cython_vocab, sizeof(__pyx_k_cython_vocab), 0, 0, 1, 1}, + {&__pyx_n_s_effective_sentences, __pyx_k_effective_sentences, sizeof(__pyx_k_effective_sentences), 0, 0, 1, 1}, + {&__pyx_n_s_effective_words, __pyx_k_effective_words, sizeof(__pyx_k_effective_words), 0, 0, 1, 1}, + {&__pyx_n_s_encode, __pyx_k_encode, sizeof(__pyx_k_encode), 0, 0, 1, 1}, + {&__pyx_n_s_end_alpha, __pyx_k_end_alpha, sizeof(__pyx_k_end_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_epochs, __pyx_k_epochs, sizeof(__pyx_k_epochs), 0, 0, 1, 1}, + {&__pyx_n_s_expected_examples, __pyx_k_expected_examples, sizeof(__pyx_k_expected_examples), 0, 0, 1, 1}, + {&__pyx_n_s_expected_examples_2, __pyx_k_expected_examples_2, sizeof(__pyx_k_expected_examples_2), 0, 0, 1, 1}, + {&__pyx_n_s_expected_words, __pyx_k_expected_words, sizeof(__pyx_k_expected_words), 0, 0, 1, 1}, + {&__pyx_n_s_expected_words_2, __pyx_k_expected_words_2, sizeof(__pyx_k_expected_words_2), 0, 0, 1, 1}, + {&__pyx_n_s_fasttext, __pyx_k_fasttext, sizeof(__pyx_k_fasttext), 0, 0, 1, 1}, + {&__pyx_n_s_gensim_models_word2vec_corpusfil, __pyx_k_gensim_models_word2vec_corpusfil, sizeof(__pyx_k_gensim_models_word2vec_corpusfil), 0, 0, 1, 1}, + {&__pyx_kp_s_gensim_models_word2vec_corpusfil_2, __pyx_k_gensim_models_word2vec_corpusfil_2, sizeof(__pyx_k_gensim_models_word2vec_corpusfil_2), 0, 0, 1, 0}, + {&__pyx_n_s_gensim_utils, __pyx_k_gensim_utils, 
sizeof(__pyx_k_gensim_utils), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_hs, __pyx_k_hs, sizeof(__pyx_k_hs), 0, 0, 1, 1}, + {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, + {&__pyx_n_s_idx_end, __pyx_k_idx_end, sizeof(__pyx_k_idx_end), 0, 0, 1, 1}, + {&__pyx_n_s_idx_start, __pyx_k_idx_start, sizeof(__pyx_k_idx_start), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1}, + {&__pyx_n_s_input_stream, __pyx_k_input_stream, sizeof(__pyx_k_input_stream), 0, 0, 1, 1}, + {&__pyx_n_s_iter, __pyx_k_iter, sizeof(__pyx_k_iter), 0, 0, 1, 1}, + {&__pyx_n_s_iteritems, __pyx_k_iteritems, sizeof(__pyx_k_iteritems), 0, 0, 1, 1}, + {&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1}, + {&__pyx_n_s_k, __pyx_k_k, sizeof(__pyx_k_k), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_max_sentence_length, __pyx_k_max_sentence_length, sizeof(__pyx_k_max_sentence_length), 0, 0, 1, 1}, + {&__pyx_n_s_min_alpha, __pyx_k_min_alpha, sizeof(__pyx_k_min_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_model, __pyx_k_model, sizeof(__pyx_k_model), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_kp_u_ndarray_is_not_C_contiguous, __pyx_k_ndarray_is_not_C_contiguous, sizeof(__pyx_k_ndarray_is_not_C_contiguous), 0, 1, 0, 0}, + {&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0}, + {&__pyx_n_s_neu1, __pyx_k_neu1, sizeof(__pyx_k_neu1), 0, 0, 1, 1}, + {&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1}, + {&__pyx_n_s_num_epochs, __pyx_k_num_epochs, sizeof(__pyx_k_num_epochs), 0, 0, 1, 1}, + {&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1}, + {&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0}, + {&__pyx_kp_s_numpy_core_umath_failed_to_impor, __pyx_k_numpy_core_umath_failed_to_impor, sizeof(__pyx_k_numpy_core_umath_failed_to_impor), 0, 0, 1, 0}, + {&__pyx_n_s_offset, __pyx_k_offset, sizeof(__pyx_k_offset), 0, 0, 1, 1}, + {&__pyx_n_s_point, __pyx_k_point, sizeof(__pyx_k_point), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + {&__pyx_n_s_rebuild_cython_line_sentence, __pyx_k_rebuild_cython_line_sentence, sizeof(__pyx_k_rebuild_cython_line_sentence), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_running_training_loss, __pyx_k_running_training_loss, sizeof(__pyx_k_running_training_loss), 0, 0, 1, 1}, + {&__pyx_n_s_sample_int, __pyx_k_sample_int, sizeof(__pyx_k_sample_int), 0, 0, 1, 1}, + {&__pyx_kp_s_self_vocab_cannot_be_converted_t, __pyx_k_self_vocab_cannot_be_converted_t, sizeof(__pyx_k_self_vocab_cannot_be_converted_t), 0, 0, 1, 0}, + {&__pyx_n_s_send, __pyx_k_send, sizeof(__pyx_k_send), 0, 0, 1, 1}, + {&__pyx_n_s_sent_idx, __pyx_k_sent_idx, sizeof(__pyx_k_sent_idx), 0, 0, 1, 1}, + {&__pyx_n_s_sentences, __pyx_k_sentences, sizeof(__pyx_k_sentences), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, 
__pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_six, __pyx_k_six, sizeof(__pyx_k_six), 0, 0, 1, 1}, + {&__pyx_n_s_source, __pyx_k_source, sizeof(__pyx_k_source), 0, 0, 1, 1}, + {&__pyx_n_s_start_alpha, __pyx_k_start_alpha, sizeof(__pyx_k_start_alpha), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_throw, __pyx_k_throw, sizeof(__pyx_k_throw), 0, 0, 1, 1}, + {&__pyx_n_s_total_effective_words, __pyx_k_total_effective_words, sizeof(__pyx_k_total_effective_words), 0, 0, 1, 1}, + {&__pyx_n_s_total_sentences, __pyx_k_total_sentences, sizeof(__pyx_k_total_sentences), 0, 0, 1, 1}, + {&__pyx_n_s_total_words, __pyx_k_total_words, sizeof(__pyx_k_total_words), 0, 0, 1, 1}, + {&__pyx_n_s_train_epoch_cbow, __pyx_k_train_epoch_cbow, sizeof(__pyx_k_train_epoch_cbow), 0, 0, 1, 1}, + {&__pyx_n_s_train_epoch_sg, __pyx_k_train_epoch_sg, sizeof(__pyx_k_train_epoch_sg), 0, 0, 1, 1}, + {&__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_k_unknown_dtype_code_in_numpy_pxd, sizeof(__pyx_k_unknown_dtype_code_in_numpy_pxd), 0, 1, 0, 0}, + {&__pyx_n_s_utf8, __pyx_k_utf8, sizeof(__pyx_k_utf8), 0, 0, 1, 1}, + {&__pyx_n_s_vocab, __pyx_k_vocab, sizeof(__pyx_k_vocab), 0, 0, 1, 1}, + {&__pyx_n_s_work, __pyx_k_work, sizeof(__pyx_k_work), 0, 0, 1, 1}, + {&__pyx_n_s_wv, __pyx_k_wv, sizeof(__pyx_k_wv), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 2, __pyx_L1_error) + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(1, 109, __pyx_L1_error) + __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(3, 229, __pyx_L1_error) + __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(3, 810, __pyx_L1_error) + __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(3, 1000, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + */ + __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_self_vocab_cannot_be_converted_t); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "(tree fragment)":4 + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("self.vocab cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<< + */ + __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_s_self_vocab_cannot_be_converted_t); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "gensim/models/word2vec_corpusfile.pyx":73 + * return key + * else: + * return key.encode('utf8') # <<<<<<<<<<<<<< + * + * + */ + __pyx_tuple__3 = 
PyTuple_Pack(1, __pyx_n_s_utf8); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 73, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(3, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(3, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(3, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(3, 810, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(3, 814, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(3, 834, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + * _import_array() + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_umath() except -1: + */ + __pyx_tuple__10 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(3, 1000, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_ufunc() except -1: + */ + __pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(3, 1006, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); + + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + */ + __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(3, 1012, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + + /* "gensim/models/word2vec_corpusfile.pyx":65 + * + * + * def rebuild_cython_line_sentence(source, max_sentence_length): # <<<<<<<<<<<<<< + * return CythonLineSentence(source, max_sentence_length=max_sentence_length) + * + */ + __pyx_tuple__13 = PyTuple_Pack(2, __pyx_n_s_source, __pyx_n_s_max_sentence_length); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(1, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__13); + __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_corpusfil_2, __pyx_n_s_rebuild_cython_line_sentence, 65, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(1, 65, __pyx_L1_error) + + /* "gensim/models/word2vec_corpusfile.pyx":249 + * + * + * def train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1, compute_loss): + * """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode. 
+ */ + __pyx_tuple__15 = PyTuple_Pack(32, __pyx_n_s_model, __pyx_n_s_corpus_file, __pyx_n_s_offset, __pyx_n_s_cython_vocab, __pyx_n_s_cur_epoch, __pyx_n_s_expected_examples, __pyx_n_s_expected_words, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_compute_loss, __pyx_n_s_c, __pyx_n_s_cur_epoch_2, __pyx_n_s_num_epochs, __pyx_n_s_expected_examples_2, __pyx_n_s_expected_words_2, __pyx_n_s_start_alpha, __pyx_n_s_end_alpha, __pyx_n_s_alpha_2, __pyx_n_s_input_stream, __pyx_n_s_vocab, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_total_effective_words, __pyx_n_s_total_sentences, __pyx_n_s_total_words, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_sentences); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(1, 249, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); + __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(10, 0, 32, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_corpusfil_2, __pyx_n_s_train_epoch_sg, 249, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(1, 249, __pyx_L1_error) + + /* "gensim/models/word2vec_corpusfile.pyx":346 + * + * + * def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1, compute_loss): + * """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + __pyx_tuple__17 = PyTuple_Pack(32, __pyx_n_s_model, __pyx_n_s_corpus_file, __pyx_n_s_offset, __pyx_n_s_cython_vocab, __pyx_n_s_cur_epoch, __pyx_n_s_expected_examples, __pyx_n_s_expected_words, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_compute_loss, __pyx_n_s_c, __pyx_n_s_cur_epoch_2, __pyx_n_s_num_epochs, __pyx_n_s_expected_examples_2, __pyx_n_s_expected_words_2, __pyx_n_s_start_alpha, __pyx_n_s_end_alpha, __pyx_n_s_alpha_2, __pyx_n_s_input_stream, __pyx_n_s_vocab, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_total_effective_words, __pyx_n_s_total_sentences, __pyx_n_s_total_words, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_sentences); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(1, 346, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__17); + __Pyx_GIVEREF(__pyx_tuple__17); + __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(10, 0, 32, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_corpusfil_2, __pyx_n_s_train_epoch_cbow, 346, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(1, 346, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(1, 1, __pyx_L1_error); + __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_int_10000 = PyInt_FromLong(10000L); if (unlikely(!__pyx_int_10000)) __PYX_ERR(1, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_modinit_global_init_code(void); /*proto*/ +static int __Pyx_modinit_variable_export_code(void); /*proto*/ +static int 
__Pyx_modinit_function_export_code(void); /*proto*/ +static int __Pyx_modinit_type_init_code(void); /*proto*/ +static int __Pyx_modinit_type_import_code(void); /*proto*/ +static int __Pyx_modinit_variable_import_code(void); /*proto*/ +static int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + if (__Pyx_ExportFunction("get_alpha", (void (*)(void))__pyx_f_6gensim_6models_19word2vec_corpusfile_get_alpha, "__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("get_next_alpha", (void (*)(void))__pyx_f_6gensim_6models_19word2vec_corpusfile_get_next_alpha, "__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t (__pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, __pyx_t_6gensim_6models_19word2vec_corpusfile_REAL_t, int, int, int, int, int, int)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = &__pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; + __pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.is_eof = (bool (*)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch))__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_is_eof; + __pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.read_sentence = (std::vector (*)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch))__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_read_sentence; + __pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonLineSentence._read_chunked_sentence = (std::vector > (*)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch))__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__read_chunked_sentence; + __pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonLineSentence._chunk_sentence = (std::vector > (*)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, std::vector , int __pyx_skip_dispatch))__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence__chunk_sentence; + __pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.reset = (void (*)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch))__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_reset; + 
__pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.next_batch = (std::vector > (*)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence *, int __pyx_skip_dispatch))__pyx_f_6gensim_6models_19word2vec_corpusfile_18CythonLineSentence_next_batch; + if (PyType_Ready(&__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence) < 0) __PYX_ERR(1, 77, __pyx_L1_error) + __pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.tp_print = 0; + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.tp_dictoffset && __pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + } + if (__Pyx_SetVtable(__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence.tp_dict, __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonLineSentence) < 0) __PYX_ERR(1, 77, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "CythonLineSentence", (PyObject *)&__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence) < 0) __PYX_ERR(1, 77, __pyx_L1_error) + __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonLineSentence = &__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; + __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab = &__pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonVocab; + __pyx_vtable_6gensim_6models_19word2vec_corpusfile_CythonVocab.get_vocab_ptr = (__pyx_t_6gensim_6models_19word2vec_corpusfile_cvocab_t *(*)(struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab *))__pyx_f_6gensim_6models_19word2vec_corpusfile_11CythonVocab_get_vocab_ptr; + if (PyType_Ready(&__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab) < 0) __PYX_ERR(1, 40, __pyx_L1_error) + __pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab.tp_print = 0; + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab.tp_dictoffset && __pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + } + if (__Pyx_SetVtable(__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab.tp_dict, __pyx_vtabptr_6gensim_6models_19word2vec_corpusfile_CythonVocab) < 0) __PYX_ERR(1, 40, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "CythonVocab", (PyObject *)&__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab) < 0) __PYX_ERR(1, 40, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab) < 0) __PYX_ERR(1, 40, __pyx_L1_error) + __pyx_ptype_6gensim_6models_19word2vec_corpusfile_CythonVocab = &__pyx_type_6gensim_6models_19word2vec_corpusfile_CythonVocab; + if (PyType_Ready(&__pyx_type_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__) < 0) __PYX_ERR(1, 122, __pyx_L1_error) + __pyx_type_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__.tp_print = 0; + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__.tp_dictoffset && __pyx_type_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__.tp_getattro == PyObject_GenericGetAttr)) { + 
__pyx_type_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + } + __pyx_ptype_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__ = &__pyx_type_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) __PYX_ERR(4, 9, __pyx_L1_error) + __pyx_ptype_5numpy_dtype = __Pyx_ImportType("numpy", "dtype", sizeof(PyArray_Descr), 0); if (unlikely(!__pyx_ptype_5numpy_dtype)) __PYX_ERR(3, 164, __pyx_L1_error) + __pyx_ptype_5numpy_flatiter = __Pyx_ImportType("numpy", "flatiter", sizeof(PyArrayIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_flatiter)) __PYX_ERR(3, 186, __pyx_L1_error) + __pyx_ptype_5numpy_broadcast = __Pyx_ImportType("numpy", "broadcast", sizeof(PyArrayMultiIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_broadcast)) __PYX_ERR(3, 190, __pyx_L1_error) + __pyx_ptype_5numpy_ndarray = __Pyx_ImportType("numpy", "ndarray", sizeof(PyArrayObject), 0); if (unlikely(!__pyx_ptype_5numpy_ndarray)) __PYX_ERR(3, 199, __pyx_L1_error) + __pyx_ptype_5numpy_ufunc = __Pyx_ImportType("numpy", "ufunc", sizeof(PyUFuncObject), 0); if (unlikely(!__pyx_ptype_5numpy_ufunc)) __PYX_ERR(3, 872, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __pyx_t_1 = __Pyx_ImportModule("gensim.models.word2vec_inner"); if (!__pyx_t_1) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "scopy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_scopy, "__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "saxpy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_saxpy, "__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "sdot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_sdot, "__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "dsdot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_dsdot, "__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "snrm2", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_snrm2, "__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "sscal", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_sscal, "__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "EXP_TABLE", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_EXP_TABLE, "__pyx_t_6gensim_6models_14word2vec_inner_REAL_t 
[0x3E8]") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "our_dot", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_our_dot, "__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportVoidPtr(__pyx_t_1, "our_saxpy", (void **)&__pyx_vp_6gensim_6models_14word2vec_inner_our_saxpy, "__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __pyx_t_1 = __Pyx_ImportModule("gensim.models.word2vec_inner"); if (!__pyx_t_1) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_1, "random_int32", (void (**)(void))&__pyx_f_6gensim_6models_14word2vec_inner_random_int32, "unsigned PY_LONG_LONG (unsigned PY_LONG_LONG *)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_1, "w2v_fast_sentence_sg_hs", (void (**)(void))&__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_1, "w2v_fast_sentence_sg_neg", (void (**)(void))&__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_1, "w2v_fast_sentence_cbow_hs", (void (**)(void))&__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_1, "w2v_fast_sentence_cbow_neg", (void (**)(void))&__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + if (__Pyx_ImportFunction(__pyx_t_1, "init_w2v_config", (void (**)(void))&__pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config, "PyObject *(struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig *, PyObject *, PyObject *, PyObject *, PyObject *, struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config *__pyx_optional_args)") < 0) __PYX_ERR(1, 1, __pyx_L1_error) + Py_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + + +#if PY_MAJOR_VERSION < 3 +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) + #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initword2vec_corpusfile(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initword2vec_corpusfile(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_word2vec_corpusfile(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_word2vec_corpusfile(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + result = PyDict_SetItemString(moddict, to_name, value); + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static int __pyx_pymod_exec_word2vec_corpusfile(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m && __pyx_m == __pyx_pyinit_module) return 0; 
+ #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_word2vec_corpusfile(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(1, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("word2vec_corpusfile", __pyx_methods, __pyx_k_Optimized_cython_functions_for_f, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(1, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(1, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(1, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_gensim__models__word2vec_corpusfile) { + if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(1, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "gensim.models.word2vec_corpusfile")) { + if (unlikely(PyDict_SetItemString(modules, "gensim.models.word2vec_corpusfile", __pyx_m) < 0)) __PYX_ERR(1, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + if (unlikely(__Pyx_modinit_function_export_code() != 0)) goto __pyx_L1_error; + if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error; + if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error; + if (unlikely(__Pyx_modinit_variable_import_code() != 0)) goto __pyx_L1_error; + if (unlikely(__Pyx_modinit_function_import_code() != 0)) goto __pyx_L1_error; + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + + /* "gensim/models/word2vec_corpusfile.pyx":15 + * + * import cython + * import numpy as np # <<<<<<<<<<<<<< + * + * from gensim.utils import any2utf8 + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_numpy, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_np, __pyx_t_1) < 0) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":17 + * import numpy as np + * + * from gensim.utils import any2utf8 # <<<<<<<<<<<<<< + * from six import iteritems + * + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_any2utf8); + __Pyx_GIVEREF(__pyx_n_s_any2utf8); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_any2utf8); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_gensim_utils, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_any2utf8); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_any2utf8, __pyx_t_1) < 0) __PYX_ERR(1, 17, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":18 + * + * from gensim.utils import any2utf8 + * from six import iteritems # <<<<<<<<<<<<<< + * + * cimport numpy as np + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_iteritems); + __Pyx_GIVEREF(__pyx_n_s_iteritems); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_iteritems); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_six, __pyx_t_2, -1); 
if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_iteritems); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_iteritems, __pyx_t_2) < 0) __PYX_ERR(1, 18, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":65 + * + * + * def rebuild_cython_line_sentence(source, max_sentence_length): # <<<<<<<<<<<<<< + * return CythonLineSentence(source, max_sentence_length=max_sentence_length) + * + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_19word2vec_corpusfile_1rebuild_cython_line_sentence, NULL, __pyx_n_s_gensim_models_word2vec_corpusfil); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 65, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_rebuild_cython_line_sentence, __pyx_t_1) < 0) __PYX_ERR(1, 65, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":249 + * + * + * def train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1, compute_loss): + * """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode. + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_19word2vec_corpusfile_3train_epoch_sg, NULL, __pyx_n_s_gensim_models_word2vec_corpusfil); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 249, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_epoch_sg, __pyx_t_1) < 0) __PYX_ERR(1, 249, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":346 + * + * + * def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, # <<<<<<<<<<<<<< + * _neu1, compute_loss): + * """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. 
+ */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_19word2vec_corpusfile_5train_epoch_cbow, NULL, __pyx_n_s_gensim_models_word2vec_corpusfil); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 346, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_epoch_cbow, __pyx_t_1) < 0) __PYX_ERR(1, 346, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_corpusfile.pyx":441 + * + * + * CORPUSFILE_VERSION = 1 # <<<<<<<<<<<<<< + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CORPUSFILE_VERSION, __pyx_int_1) < 0) __PYX_ERR(1, 441, __pyx_L1_error) + + /* "gensim/models/word2vec_corpusfile.pyx":1 + * #!/usr/bin/env cython # <<<<<<<<<<<<<< + * # distutils: language = c++ + * # cython: boundscheck=False + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "vector.to_py":60 + * + * @cname("__pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___") + * cdef object __pyx_convert_vector_to_py_std_3a__3a_vector_3c_std_3a__3a_string_3e___(vector[X]& v): # <<<<<<<<<<<<<< + * return [v[i] for i in range(v.size())] + * + */ + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init gensim.models.word2vec_corpusfile", 0, __pyx_lineno, __pyx_filename); + } + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init gensim.models.word2vec_corpusfile"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +/* GetModuleGlobalName */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + if (likely(result)) { + Py_INCREF(result); + } else if (unlikely(PyErr_Occurred())) { + result = NULL; + } else { +#else + result = PyDict_GetItem(__pyx_d, name); + if (likely(result)) { + Py_INCREF(result); + } else { +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + if (!result) { + PyErr_Clear(); +#endif + result = __Pyx_GetBuiltinName(name); + } + return result; +} + +/* PyCFunctionFastCall */ + #if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs); + } +} +#endif + +/* PyFunctionFastCall */ + #if CYTHON_FAST_PYCALL +#include "frameobject.h" +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals 
!= NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = f->f_localsplus; + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCall */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* 
__Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ + #if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* RaiseTooManyValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? 
"" : "s"); +} + +/* IterFinish */ + static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +/* UnpackItemEndCheck */ + static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/* ExtTypeTest */ + static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* GetItemInt */ + static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely((n >= 0) & (n < PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* PyErrFetchRestore */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* RaiseException */ + #if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int 
is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* WriteUnraisableException */ + static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, + CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, + int full_traceback, CYTHON_UNUSED int nogil) { + PyObject *old_exc, *old_val, *old_tb; + PyObject *ctx; + __Pyx_PyThreadState_declare +#ifdef WITH_THREAD + PyGILState_STATE state; + if (nogil) + state = PyGILState_Ensure(); +#ifdef _MSC_VER + else state = (PyGILState_STATE)-1; +#endif +#endif + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); + if (full_traceback) { + Py_XINCREF(old_exc); + Py_XINCREF(old_val); + Py_XINCREF(old_tb); + __Pyx_ErrRestore(old_exc, old_val, old_tb); + PyErr_PrintEx(1); + } + #if PY_MAJOR_VERSION < 3 + ctx = PyString_FromString(name); + #else + ctx = PyUnicode_FromString(name); + #endif + __Pyx_ErrRestore(old_exc, old_val, old_tb); + if (!ctx) { + PyErr_WriteUnraisable(Py_None); + } else { + PyErr_WriteUnraisable(ctx); + Py_DECREF(ctx); + } +#ifdef WITH_THREAD + if (nogil) + PyGILState_Release(state); +#endif +} + +/* PyObjectSetAttrStr */ + #if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_setattro)) + return tp->tp_setattro(obj, attr_name, value); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_setattr)) + return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value); +#endif + return PyObject_SetAttr(obj, attr_name, value); +} +#endif + +/* DictGetItem */ + #if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject 
*d, PyObject* key) {
+    PyObject *value;
+    value = PyDict_GetItemWithError(d, key);
+    if (unlikely(!value)) {
+        if (!PyErr_Occurred()) {
+            PyObject* args = PyTuple_Pack(1, key);
+            if (likely(args))
+                PyErr_SetObject(PyExc_KeyError, args);
+            Py_XDECREF(args);
+        }
+        return NULL;
+    }
+    Py_INCREF(value);
+    return value;
+}
+#endif
+
+/* RaiseNoneIterError */
+ static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) {
+    PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable");
+}
+
+/* SaveResetException */
+ #if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+    #if PY_VERSION_HEX >= 0x030700A2
+    *type = tstate->exc_state.exc_type;
+    *value = tstate->exc_state.exc_value;
+    *tb = tstate->exc_state.exc_traceback;
+    #else
+    *type = tstate->exc_type;
+    *value = tstate->exc_value;
+    *tb = tstate->exc_traceback;
+    #endif
+    Py_XINCREF(*type);
+    Py_XINCREF(*value);
+    Py_XINCREF(*tb);
+}
+static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+    PyObject *tmp_type, *tmp_value, *tmp_tb;
+    #if PY_VERSION_HEX >= 0x030700A2
+    tmp_type = tstate->exc_state.exc_type;
+    tmp_value = tstate->exc_state.exc_value;
+    tmp_tb = tstate->exc_state.exc_traceback;
+    tstate->exc_state.exc_type = type;
+    tstate->exc_state.exc_value = value;
+    tstate->exc_state.exc_traceback = tb;
+    #else
+    tmp_type = tstate->exc_type;
+    tmp_value = tstate->exc_value;
+    tmp_tb = tstate->exc_traceback;
+    tstate->exc_type = type;
+    tstate->exc_value = value;
+    tstate->exc_traceback = tb;
+    #endif
+    Py_XDECREF(tmp_type);
+    Py_XDECREF(tmp_value);
+    Py_XDECREF(tmp_tb);
+}
+#endif
+
+/* PyErrExceptionMatches */
+ #if CYTHON_FAST_THREAD_STATE
+static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+    Py_ssize_t i, n;
+    n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+    for (i=0; i<n; i++) {
+        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+    }
+#endif
+    for (i=0; i<n; i++) {
+        if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
+    }
+    return 0;
+}
+static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
+    PyObject *exc_type = tstate->curexc_type;
+    if (exc_type == err) return 1;
+    if (unlikely(!exc_type)) return 0;
+    if (unlikely(PyTuple_Check(err)))
+        return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
+    return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
+}
+#endif
+
+/* GetException */
+ #if CYTHON_FAST_THREAD_STATE
+static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+#else
+static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) {
+#endif
+    PyObject *local_type, *local_value, *local_tb;
+#if CYTHON_FAST_THREAD_STATE
+    PyObject *tmp_type, *tmp_value, *tmp_tb;
+    local_type = tstate->curexc_type;
+    local_value = tstate->curexc_value;
+    local_tb = tstate->curexc_traceback;
+    tstate->curexc_type = 0;
+    tstate->curexc_value = 0;
+    tstate->curexc_traceback = 0;
+#else
+    PyErr_Fetch(&local_type, &local_value, &local_tb);
+#endif
+    PyErr_NormalizeException(&local_type, &local_value, &local_tb);
+#if CYTHON_FAST_THREAD_STATE
+    if (unlikely(tstate->curexc_type))
+#else
+    if (unlikely(PyErr_Occurred()))
+#endif
+        goto bad;
+    #if PY_MAJOR_VERSION >= 3
+    if (local_tb) {
+        if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0))
+            goto bad;
+    }
+    #endif
+    Py_XINCREF(local_tb);
+    Py_XINCREF(local_type);
+    Py_XINCREF(local_value);
+    *type = local_type;
+    *value = local_value;
+    *tb = local_tb;
+#if CYTHON_FAST_THREAD_STATE
+    #if PY_VERSION_HEX >= 0x030700A2
+    tmp_type = tstate->exc_state.exc_type;
+    tmp_value = tstate->exc_state.exc_value;
+    tmp_tb = tstate->exc_state.exc_traceback;
+
tstate->exc_state.exc_type = local_type; + tstate->exc_state.exc_value = local_value; + tstate->exc_state.exc_traceback = local_tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* PyObject_GenericGetAttrNoDict */ + #if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* SetVTable */ + static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* SetupReduce */ + static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD; +#else + if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD; +#endif +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD; + if (reduce_ex 
== object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD; + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD; + setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD; + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD; + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto GOOD; +BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* Import */ + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ + static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && 
PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* CLineInTraceback */ + #ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + use_cline = __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback); + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (PyObject_Not(use_cline) != 0) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ + static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + 
__pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ + #include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntFromPyVerify */ + #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_npy_uint32(npy_uint32 value) { + const npy_uint32 neg_one = (npy_uint32) -1, const_zero = (npy_uint32) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(npy_uint32) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(npy_uint32) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(npy_uint32) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(npy_uint32) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(npy_uint32) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(npy_uint32), + little, !is_unsigned); + } +} + +/* None */ + static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void) { + int err; + #ifdef WITH_THREAD + PyGILState_STATE _save = PyGILState_Ensure(); + #endif + err = !!PyErr_Occurred(); + #ifdef WITH_THREAD + PyGILState_Release(_save); + #endif + return err; +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* Declarations */ + #if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE 
__pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return ::std::complex< float >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return x + y*(__pyx_t_float_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + __pyx_t_float_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +/* Arithmetic */ + #if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + #if 1 + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + if (b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabsf(b.real) >= fabsf(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.imag); + } else { + float r = b.imag / b.real; + float s = 1.0 / (b.real + b.imag * r); + return __pyx_t_float_complex_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + float r = b.real / b.imag; + float s = 1.0 / (b.imag + b.real * r); + return __pyx_t_float_complex_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + if (b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real); + } else { + float denom = b.real * b.real + b.imag * b.imag; + return __pyx_t_float_complex_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + (a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrtf(z.real*z.real + z.imag*z.imag); + #else + return hypotf(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + float denom = a.real * a.real + a.imag * a.imag; + a.real = 
a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(a, a); + case 3: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(z, a); + case 4: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = powf(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2f(0, -1); + } + } else { + r = __Pyx_c_abs_float(a); + theta = atan2f(a.imag, a.real); + } + lnr = logf(r); + z_r = expf(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cosf(z_theta); + z.imag = z_r * sinf(z_theta); + return z; + } + #endif +#endif + +/* Declarations */ + #if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return ::std::complex< double >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return x + y*(__pyx_t_double_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + __pyx_t_double_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +/* Arithmetic */ + #if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + #if 1 + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + if (b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabs(b.real) >= fabs(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.imag); + } else { + double r = b.imag / b.real; + double s = 1.0 / (b.real + b.imag * r); + return __pyx_t_double_complex_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + double r = b.real / b.imag; + double s = 1.0 / (b.imag + b.real * r); + return __pyx_t_double_complex_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + if (b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real); + } else { + double denom = b.real * b.real + b.imag * b.imag; + return __pyx_t_double_complex_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + 
(a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt(z.real*z.real + z.imag*z.imag); + #else + return hypot(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + double denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(a, a); + case 3: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(z, a); + case 4: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = pow(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2(0, -1); + } + } else { + r = __Pyx_c_abs_double(a); + theta = atan2(a.imag, a.real); + } + lnr = log(r); + z_r = exp(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos(z_theta); + z.imag = z_r * sin(z_theta); + return z; + } + #endif +#endif + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value) { + const enum NPY_TYPES neg_one = (enum NPY_TYPES) -1, const_zero = (enum NPY_TYPES) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(enum NPY_TYPES) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(enum NPY_TYPES) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum NPY_TYPES) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(enum NPY_TYPES), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE npy_uint32 __Pyx_PyInt_As_npy_uint32(PyObject *x) { + const npy_uint32 neg_one = (npy_uint32) -1, const_zero = (npy_uint32) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(npy_uint32) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(npy_uint32, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if 
(is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (npy_uint32) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (npy_uint32) 0; + case 1: __PYX_VERIFY_RETURN_INT(npy_uint32, digit, digits[0]) + case 2: + if (8 * sizeof(npy_uint32) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) >= 2 * PyLong_SHIFT) { + return (npy_uint32) (((((npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(npy_uint32) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) >= 3 * PyLong_SHIFT) { + return (npy_uint32) (((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(npy_uint32) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) >= 4 * PyLong_SHIFT) { + return (npy_uint32) (((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | (npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (npy_uint32) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(npy_uint32) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(npy_uint32) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (npy_uint32) 0; + case -1: __PYX_VERIFY_RETURN_INT(npy_uint32, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(npy_uint32, digit, +digits[0]) + case -2: + if (8 * sizeof(npy_uint32) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { + return (npy_uint32) (((npy_uint32)-1)*(((((npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(npy_uint32) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * 
sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { + return (npy_uint32) ((((((npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(npy_uint32) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { + return (npy_uint32) (((npy_uint32)-1)*(((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(npy_uint32) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { + return (npy_uint32) ((((((((npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(npy_uint32) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 4 * PyLong_SHIFT) { + return (npy_uint32) (((npy_uint32)-1)*(((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | (npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(npy_uint32) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(npy_uint32, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(npy_uint32) - 1 > 4 * PyLong_SHIFT) { + return (npy_uint32) ((((((((((npy_uint32)digits[3]) << PyLong_SHIFT) | (npy_uint32)digits[2]) << PyLong_SHIFT) | (npy_uint32)digits[1]) << PyLong_SHIFT) | (npy_uint32)digits[0]))); + } + } + break; + } +#endif + if (sizeof(npy_uint32) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(npy_uint32) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(npy_uint32, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + npy_uint32 val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (npy_uint32) -1; + } + } else { + npy_uint32 val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (npy_uint32) -1; + 
val = __Pyx_PyInt_As_npy_uint32(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to npy_uint32"); + return (npy_uint32) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to npy_uint32"); + return (npy_uint32) -1; +} + +/* CIntFromPy */ + static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_As_PY_LONG_LONG(PyObject *x) { + const PY_LONG_LONG neg_one = (PY_LONG_LONG) -1, const_zero = (PY_LONG_LONG) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(PY_LONG_LONG) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (PY_LONG_LONG) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (PY_LONG_LONG) 0; + case 1: __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, digit, digits[0]) + case 2: + if (8 * sizeof(PY_LONG_LONG) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) >= 2 * PyLong_SHIFT) { + return (PY_LONG_LONG) (((((PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(PY_LONG_LONG) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) >= 3 * PyLong_SHIFT) { + return (PY_LONG_LONG) (((((((PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(PY_LONG_LONG) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) >= 4 * PyLong_SHIFT) { + return (PY_LONG_LONG) (((((((((PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (PY_LONG_LONG) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(PY_LONG_LONG) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(PY_LONG_LONG, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(PY_LONG_LONG, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (PY_LONG_LONG) 0; + case -1: 
__PYX_VERIFY_RETURN_INT(PY_LONG_LONG, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, digit, +digits[0]) + case -2: + if (8 * sizeof(PY_LONG_LONG) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + return (PY_LONG_LONG) (((PY_LONG_LONG)-1)*(((((PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(PY_LONG_LONG) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + return (PY_LONG_LONG) ((((((PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + return (PY_LONG_LONG) (((PY_LONG_LONG)-1)*(((((((PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(PY_LONG_LONG) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + return (PY_LONG_LONG) ((((((((PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + return (PY_LONG_LONG) (((PY_LONG_LONG)-1)*(((((((((PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(PY_LONG_LONG) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(PY_LONG_LONG, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + return (PY_LONG_LONG) ((((((((((PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (PY_LONG_LONG)digits[0]))); + } + } + break; + } +#endif + if (sizeof(PY_LONG_LONG) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(PY_LONG_LONG, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(PY_LONG_LONG) <= 
sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(PY_LONG_LONG, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + PY_LONG_LONG val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (PY_LONG_LONG) -1; + } + } else { + PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (PY_LONG_LONG) -1; + val = __Pyx_PyInt_As_PY_LONG_LONG(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to PY_LONG_LONG"); + return (PY_LONG_LONG) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG) -1; +} + +/* CIntFromPy */ + static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *x) { + const size_t neg_one = (size_t) -1, const_zero = (size_t) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(size_t) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(size_t, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (size_t) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (size_t) 0; + case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, digits[0]) + case 2: + if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) >= 2 * PyLong_SHIFT) { + return (size_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) >= 3 * PyLong_SHIFT) { + return (size_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) >= 4 * PyLong_SHIFT) { + return (size_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON 
+ if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (size_t) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(size_t) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(size_t) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (size_t) 0; + case -1: __PYX_VERIFY_RETURN_INT(size_t, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, +digits[0]) + case -2: + if (8 * sizeof(size_t) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { + return (size_t) (((size_t)-1)*(((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { + return (size_t) ((((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { + return (size_t) (((size_t)-1)*(((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { + return (size_t) ((((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) { + return (size_t) (((size_t)-1)*(((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned 
long)digits[0]))) + } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) { + return (size_t) ((((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); + } + } + break; + } +#endif + if (sizeof(size_t) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(size_t, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(size_t) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(size_t, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + size_t val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (size_t) -1; + } + } else { + size_t val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (size_t) -1; + val = __Pyx_PyInt_As_size_t(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to size_t"); + return (size_t) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to size_t"); + return (size_t) -1; +} + +/* CIntFromPy */ + static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned 
long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, 
(((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * 
PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * 
PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* FastTypeChecks */ + #if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int 
__Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { + if (likely(err == exc_type)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); + } + return PyErr_GivenExceptionMatches(err, exc_type); +} +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { + if (likely(err == exc_type1 || err == exc_type2)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); + } + return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); +} +#endif + +/* FetchCommonType */ + static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) { + PyObject* fake_module; + PyTypeObject* cached_type = NULL; + fake_module = PyImport_AddModule((char*) "_cython_" CYTHON_ABI); + if (!fake_module) return NULL; + Py_INCREF(fake_module); + cached_type = (PyTypeObject*) PyObject_GetAttrString(fake_module, type->tp_name); + if (cached_type) { + if (!PyType_Check((PyObject*)cached_type)) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s is not a type object", + type->tp_name); + goto bad; + } + if (cached_type->tp_basicsize != type->tp_basicsize) { + PyErr_Format(PyExc_TypeError, + "Shared Cython type %.200s has the wrong size, try recompiling", + type->tp_name); + goto bad; + } + } else { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad; + PyErr_Clear(); + if (PyType_Ready(type) < 0) goto bad; + if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0) + goto bad; + Py_INCREF(type); + cached_type = type; + } +done: + Py_DECREF(fake_module); + return cached_type; +bad: + Py_XDECREF(cached_type); + cached_type = NULL; + goto done; +} + +/* SwapException */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if PY_VERSION_HEX >= 0x030700A2 + tmp_type = tstate->exc_state.exc_type; + tmp_value = tstate->exc_state.exc_value; + tmp_tb = tstate->exc_state.exc_traceback; + tstate->exc_state.exc_type = *type; + tstate->exc_state.exc_value = *value; + tstate->exc_state.exc_traceback = *tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + #endif + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; 
+} +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/* PyObjectCallMethod1 */ + static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) { + PyObject *result = NULL; +#if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(method))) { + PyObject *self = PyMethod_GET_SELF(method); + if (likely(self)) { + PyObject *args; + PyObject *function = PyMethod_GET_FUNCTION(method); + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {self, arg}; + result = __Pyx_PyFunction_FastCall(function, args, 2); + goto done; + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {self, arg}; + result = __Pyx_PyCFunction_FastCall(function, args, 2); + goto done; + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); + return result; + } + } +#endif + result = __Pyx_PyObject_CallOneArg(method, arg); + goto done; +done: + return result; +} +static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) { + PyObject *method, *result; + method = __Pyx_PyObject_GetAttrStr(obj, method_name); + if (unlikely(!method)) return NULL; + result = __Pyx__PyObject_CallMethod1(method, arg); + Py_DECREF(method); + return result; +} + +/* CoroutineBase */ + #include <structmember.h> +#include <frameobject.h> +#define __Pyx_Coroutine_Undelegate(gen) Py_CLEAR((gen)->yieldfrom) +static int __Pyx_PyGen__FetchStopIterationValue(CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject **pvalue) { + PyObject *et, *ev, *tb; + PyObject *value = NULL; + __Pyx_ErrFetch(&et, &ev, &tb); + if (!et) { + Py_XDECREF(tb); + Py_XDECREF(ev); + Py_INCREF(Py_None); + *pvalue = Py_None; + return 0; + } + if (likely(et == PyExc_StopIteration)) { + if (!ev) { + Py_INCREF(Py_None); + value = Py_None; + } +#if PY_VERSION_HEX >= 0x030300A0 + else if (Py_TYPE(ev) == (PyTypeObject*)PyExc_StopIteration) { + value = ((PyStopIterationObject *)ev)->value; + Py_INCREF(value); + Py_DECREF(ev); + } +#endif + else if (unlikely(PyTuple_Check(ev))) { + if (PyTuple_GET_SIZE(ev) >= 1) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + value = PyTuple_GET_ITEM(ev, 0); + Py_INCREF(value); +#else + value = PySequence_ITEM(ev, 0); +#endif + } else { + Py_INCREF(Py_None); + value = Py_None; + } + Py_DECREF(ev); + } + else if (!__Pyx_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration)) { + value = ev; + } + if (likely(value)) { + Py_XDECREF(tb); + Py_DECREF(et); + *pvalue = value; + return 0; + } + } else if (!__Pyx_PyErr_GivenExceptionMatches(et, PyExc_StopIteration)) { + __Pyx_ErrRestore(et, ev, tb); + return -1; + } + PyErr_NormalizeException(&et, &ev, &tb); + if (unlikely(!PyObject_TypeCheck(ev, (PyTypeObject*)PyExc_StopIteration))) { + __Pyx_ErrRestore(et, ev, tb); + return -1; + } + Py_XDECREF(tb); + Py_DECREF(et); +#if PY_VERSION_HEX >= 0x030300A0 + value = ((PyStopIterationObject *)ev)->value; + Py_INCREF(value); + Py_DECREF(ev); +#else + { + PyObject* args = __Pyx_PyObject_GetAttrStr(ev, __pyx_n_s_args); + Py_DECREF(ev); + if (likely(args)) { + value =
PySequence_GetItem(args, 0); + Py_DECREF(args); + } + if (unlikely(!value)) { + __Pyx_ErrRestore(NULL, NULL, NULL); + Py_INCREF(Py_None); + value = Py_None; + } + } +#endif + *pvalue = value; + return 0; +} +static CYTHON_INLINE +void __Pyx_Coroutine_ExceptionClear(__pyx_CoroutineObject *self) { + PyObject *exc_type = self->exc_type; + PyObject *exc_value = self->exc_value; + PyObject *exc_traceback = self->exc_traceback; + self->exc_type = NULL; + self->exc_value = NULL; + self->exc_traceback = NULL; + Py_XDECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_traceback); +} +#define __Pyx_Coroutine_AlreadyRunningError(gen) (__Pyx__Coroutine_AlreadyRunningError(gen), (PyObject*)NULL) +static void __Pyx__Coroutine_AlreadyRunningError(CYTHON_UNUSED __pyx_CoroutineObject *gen) { + const char *msg; + if (0) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check((PyObject*)gen)) { + msg = "coroutine already executing"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if (__Pyx_AsyncGen_CheckExact((PyObject*)gen)) { + msg = "async generator already executing"; + #endif + } else { + msg = "generator already executing"; + } + PyErr_SetString(PyExc_ValueError, msg); +} +#define __Pyx_Coroutine_NotStartedError(gen) (__Pyx__Coroutine_NotStartedError(gen), (PyObject*)NULL) +static void __Pyx__Coroutine_NotStartedError(CYTHON_UNUSED PyObject *gen) { + const char *msg; + if (0) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check(gen)) { + msg = "can't send non-None value to a just-started coroutine"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if (__Pyx_AsyncGen_CheckExact(gen)) { + msg = "can't send non-None value to a just-started async generator"; + #endif + } else { + msg = "can't send non-None value to a just-started generator"; + } + PyErr_SetString(PyExc_TypeError, msg); +} +#define __Pyx_Coroutine_AlreadyTerminatedError(gen, value, closing) (__Pyx__Coroutine_AlreadyTerminatedError(gen, value, closing), (PyObject*)NULL) +static void __Pyx__Coroutine_AlreadyTerminatedError(CYTHON_UNUSED PyObject *gen, PyObject *value, CYTHON_UNUSED int closing) { + #ifdef __Pyx_Coroutine_USED + if (!closing && __Pyx_Coroutine_Check(gen)) { + PyErr_SetString(PyExc_RuntimeError, "cannot reuse already awaited coroutine"); + } else + #endif + if (value) { + #ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(gen)) + PyErr_SetNone(__Pyx_PyExc_StopAsyncIteration); + else + #endif + PyErr_SetNone(PyExc_StopIteration); + } +} +static +PyObject *__Pyx_Coroutine_SendEx(__pyx_CoroutineObject *self, PyObject *value, int closing) { + __Pyx_PyThreadState_declare + PyThreadState *tstate; + PyObject *retval; + assert(!self->is_running); + if (unlikely(self->resume_label == 0)) { + if (unlikely(value && value != Py_None)) { + return __Pyx_Coroutine_NotStartedError((PyObject*)self); + } + } + if (unlikely(self->resume_label == -1)) { + return __Pyx_Coroutine_AlreadyTerminatedError((PyObject*)self, value, closing); + } +#if CYTHON_FAST_THREAD_STATE + __Pyx_PyThreadState_assign + tstate = __pyx_tstate; +#else + tstate = __Pyx_PyThreadState_Current; +#endif + if (self->exc_type) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON +#else + if (self->exc_traceback) { + PyTracebackObject *tb = (PyTracebackObject *) self->exc_traceback; + PyFrameObject *f = tb->tb_frame; + Py_XINCREF(tstate->frame); + assert(f->f_back == NULL); + f->f_back = tstate->frame; + } +#endif + __Pyx_ExceptionSwap(&self->exc_type, &self->exc_value, + &self->exc_traceback); + } else { + 
__Pyx_Coroutine_ExceptionClear(self); + __Pyx_ExceptionSave(&self->exc_type, &self->exc_value, &self->exc_traceback); + } + self->is_running = 1; + retval = self->body((PyObject *) self, tstate, value); + self->is_running = 0; + return retval; +} +static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__pyx_CoroutineObject *self) { + if (likely(self->exc_traceback)) { +#if CYTHON_COMPILING_IN_PYPY || CYTHON_COMPILING_IN_PYSTON +#else + PyTracebackObject *tb = (PyTracebackObject *) self->exc_traceback; + PyFrameObject *f = tb->tb_frame; + Py_CLEAR(f->f_back); +#endif + } +} +static CYTHON_INLINE +PyObject *__Pyx_Coroutine_MethodReturn(CYTHON_UNUSED PyObject* gen, PyObject *retval) { + if (unlikely(!retval)) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (!__Pyx_PyErr_Occurred()) { + PyObject *exc = PyExc_StopIteration; + #ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(gen)) + exc = __Pyx_PyExc_StopAsyncIteration; + #endif + __Pyx_PyErr_SetNone(exc); + } + } + return retval; +} +static CYTHON_INLINE +PyObject *__Pyx_Coroutine_FinishDelegation(__pyx_CoroutineObject *gen) { + PyObject *ret; + PyObject *val = NULL; + __Pyx_Coroutine_Undelegate(gen); + __Pyx_PyGen__FetchStopIterationValue(__Pyx_PyThreadState_Current, &val); + ret = __Pyx_Coroutine_SendEx(gen, val, 0); + Py_XDECREF(val); + return ret; +} +static PyObject *__Pyx_Coroutine_Send(PyObject *self, PyObject *value) { + PyObject *retval; + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; + PyObject *yf = gen->yieldfrom; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + PyObject *ret; + gen->is_running = 1; + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + ret = __Pyx_Coroutine_Send(yf, value); + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + ret = __Pyx_Coroutine_Send(yf, value); + } else + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_PyAsyncGenASend_CheckExact(yf)) { + ret = __Pyx_async_gen_asend_send(yf, value); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + if (PyGen_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, value == Py_None ? NULL : value); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03050000 && defined(PyCoro_CheckExact) && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + if (PyCoro_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, value == Py_None ? 
NULL : value); + } else + #endif + { + if (value == Py_None) + ret = Py_TYPE(yf)->tp_iternext(yf); + else + ret = __Pyx_PyObject_CallMethod1(yf, __pyx_n_s_send, value); + } + gen->is_running = 0; + if (likely(ret)) { + return ret; + } + retval = __Pyx_Coroutine_FinishDelegation(gen); + } else { + retval = __Pyx_Coroutine_SendEx(gen, value, 0); + } + return __Pyx_Coroutine_MethodReturn(self, retval); +} +static int __Pyx_Coroutine_CloseIter(__pyx_CoroutineObject *gen, PyObject *yf) { + PyObject *retval = NULL; + int err = 0; + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + retval = __Pyx_Coroutine_Close(yf); + if (!retval) + return -1; + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + retval = __Pyx_Coroutine_Close(yf); + if (!retval) + return -1; + } else + if (__Pyx_CoroutineAwait_CheckExact(yf)) { + retval = __Pyx_CoroutineAwait_Close((__pyx_CoroutineAwaitObject*)yf); + if (!retval) + return -1; + } else + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_PyAsyncGenASend_CheckExact(yf)) { + retval = __Pyx_async_gen_asend_close(yf, NULL); + } else + if (__pyx_PyAsyncGenAThrow_CheckExact(yf)) { + retval = __Pyx_async_gen_athrow_close(yf, NULL); + } else + #endif + { + PyObject *meth; + gen->is_running = 1; + meth = __Pyx_PyObject_GetAttrStr(yf, __pyx_n_s_close); + if (unlikely(!meth)) { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_WriteUnraisable(yf); + } + PyErr_Clear(); + } else { + retval = PyObject_CallFunction(meth, NULL); + Py_DECREF(meth); + if (!retval) + err = -1; + } + gen->is_running = 0; + } + Py_XDECREF(retval); + return err; +} +static PyObject *__Pyx_Generator_Next(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject*) self; + PyObject *yf = gen->yieldfrom; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + PyObject *ret; + gen->is_running = 1; + #ifdef __Pyx_Generator_USED + if (__Pyx_Generator_CheckExact(yf)) { + ret = __Pyx_Generator_Next(yf); + } else + #endif + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03030000 && (defined(__linux__) || PY_VERSION_HEX >= 0x030600B3) + if (PyGen_CheckExact(yf)) { + ret = _PyGen_Send((PyGenObject*)yf, NULL); + } else + #endif + #ifdef __Pyx_Coroutine_USED + if (__Pyx_Coroutine_Check(yf)) { + ret = __Pyx_Coroutine_Send(yf, Py_None); + } else + #endif + ret = Py_TYPE(yf)->tp_iternext(yf); + gen->is_running = 0; + if (likely(ret)) { + return ret; + } + return __Pyx_Coroutine_FinishDelegation(gen); + } + return __Pyx_Coroutine_SendEx(gen, Py_None, 0); +} +static PyObject *__Pyx_Coroutine_Close(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + PyObject *retval, *raised_exception; + PyObject *yf = gen->yieldfrom; + int err = 0; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + Py_INCREF(yf); + err = __Pyx_Coroutine_CloseIter(gen, yf); + __Pyx_Coroutine_Undelegate(gen); + Py_DECREF(yf); + } + if (err == 0) + PyErr_SetNone(PyExc_GeneratorExit); + retval = __Pyx_Coroutine_SendEx(gen, NULL, 1); + if (unlikely(retval)) { + const char *msg; + Py_DECREF(retval); + if ((0)) { + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_Coroutine_Check(self)) { + msg = "coroutine ignored GeneratorExit"; + #endif + #ifdef __Pyx_AsyncGen_USED + } else if (__Pyx_AsyncGen_CheckExact(self)) { +#if PY_VERSION_HEX < 0x03060000 + msg = "async generator ignored GeneratorExit - might require Python 3.6+ finalisation (PEP 525)"; +#else + msg = 
"async generator ignored GeneratorExit"; +#endif + #endif + } else { + msg = "generator ignored GeneratorExit"; + } + PyErr_SetString(PyExc_RuntimeError, msg); + return NULL; + } + raised_exception = PyErr_Occurred(); + if (likely(!raised_exception || __Pyx_PyErr_GivenExceptionMatches2(raised_exception, PyExc_GeneratorExit, PyExc_StopIteration))) { + if (raised_exception) PyErr_Clear(); + Py_INCREF(Py_None); + return Py_None; + } + return NULL; +} +static PyObject *__Pyx__Coroutine_Throw(PyObject *self, PyObject *typ, PyObject *val, PyObject *tb, + PyObject *args, int close_on_genexit) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + PyObject *yf = gen->yieldfrom; + if (unlikely(gen->is_running)) + return __Pyx_Coroutine_AlreadyRunningError(gen); + if (yf) { + PyObject *ret; + Py_INCREF(yf); + if (__Pyx_PyErr_GivenExceptionMatches(typ, PyExc_GeneratorExit) && close_on_genexit) { + int err = __Pyx_Coroutine_CloseIter(gen, yf); + Py_DECREF(yf); + __Pyx_Coroutine_Undelegate(gen); + if (err < 0) + return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0)); + goto throw_here; + } + gen->is_running = 1; + if (0 + #ifdef __Pyx_Generator_USED + || __Pyx_Generator_CheckExact(yf) + #endif + #ifdef __Pyx_Coroutine_USED + || __Pyx_Coroutine_Check(yf) + #endif + ) { + ret = __Pyx__Coroutine_Throw(yf, typ, val, tb, args, close_on_genexit); + #ifdef __Pyx_Coroutine_USED + } else if (__Pyx_CoroutineAwait_CheckExact(yf)) { + ret = __Pyx__Coroutine_Throw(((__pyx_CoroutineAwaitObject*)yf)->coroutine, typ, val, tb, args, close_on_genexit); + #endif + } else { + PyObject *meth = __Pyx_PyObject_GetAttrStr(yf, __pyx_n_s_throw); + if (unlikely(!meth)) { + Py_DECREF(yf); + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { + gen->is_running = 0; + return NULL; + } + PyErr_Clear(); + __Pyx_Coroutine_Undelegate(gen); + gen->is_running = 0; + goto throw_here; + } + if (likely(args)) { + ret = PyObject_CallObject(meth, args); + } else { + ret = PyObject_CallFunctionObjArgs(meth, typ, val, tb, NULL); + } + Py_DECREF(meth); + } + gen->is_running = 0; + Py_DECREF(yf); + if (!ret) { + ret = __Pyx_Coroutine_FinishDelegation(gen); + } + return __Pyx_Coroutine_MethodReturn(self, ret); + } +throw_here: + __Pyx_Raise(typ, val, tb, NULL); + return __Pyx_Coroutine_MethodReturn(self, __Pyx_Coroutine_SendEx(gen, NULL, 0)); +} +static PyObject *__Pyx_Coroutine_Throw(PyObject *self, PyObject *args) { + PyObject *typ; + PyObject *val = NULL; + PyObject *tb = NULL; + if (!PyArg_UnpackTuple(args, (char *)"throw", 1, 3, &typ, &val, &tb)) + return NULL; + return __Pyx__Coroutine_Throw(self, typ, val, tb, args, 1); +} +static int __Pyx_Coroutine_traverse(__pyx_CoroutineObject *gen, visitproc visit, void *arg) { + Py_VISIT(gen->closure); + Py_VISIT(gen->classobj); + Py_VISIT(gen->yieldfrom); + Py_VISIT(gen->exc_type); + Py_VISIT(gen->exc_value); + Py_VISIT(gen->exc_traceback); + return 0; +} +static int __Pyx_Coroutine_clear(PyObject *self) { + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + Py_CLEAR(gen->closure); + Py_CLEAR(gen->classobj); + Py_CLEAR(gen->yieldfrom); + Py_CLEAR(gen->exc_type); + Py_CLEAR(gen->exc_value); + Py_CLEAR(gen->exc_traceback); +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + Py_CLEAR(((__pyx_PyAsyncGenObject*)gen)->ag_finalizer); + } +#endif + Py_CLEAR(gen->gi_code); + Py_CLEAR(gen->gi_name); + Py_CLEAR(gen->gi_qualname); + Py_CLEAR(gen->gi_modulename); + return 0; +} +static void __Pyx_Coroutine_dealloc(PyObject *self) { + 
__pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + PyObject_GC_UnTrack(gen); + if (gen->gi_weakreflist != NULL) + PyObject_ClearWeakRefs(self); + if (gen->resume_label >= 0) { + PyObject_GC_Track(self); +#if PY_VERSION_HEX >= 0x030400a1 && CYTHON_USE_TP_FINALIZE + if (PyObject_CallFinalizerFromDealloc(self)) +#else + Py_TYPE(gen)->tp_del(self); + if (self->ob_refcnt > 0) +#endif + { + return; + } + PyObject_GC_UnTrack(self); + } +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + /* We have to handle this case for asynchronous generators + right here, because this code has to be between UNTRACK + and GC_Del. */ + Py_CLEAR(((__pyx_PyAsyncGenObject*)self)->ag_finalizer); + } +#endif + __Pyx_Coroutine_clear(self); + PyObject_GC_Del(gen); +} +static void __Pyx_Coroutine_del(PyObject *self) { + PyObject *error_type, *error_value, *error_traceback; + __pyx_CoroutineObject *gen = (__pyx_CoroutineObject *) self; + __Pyx_PyThreadState_declare + if (gen->resume_label < 0) { + return; + } +#if !CYTHON_USE_TP_FINALIZE + assert(self->ob_refcnt == 0); + self->ob_refcnt = 1; +#endif + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&error_type, &error_value, &error_traceback); +#ifdef __Pyx_AsyncGen_USED + if (__Pyx_AsyncGen_CheckExact(self)) { + __pyx_PyAsyncGenObject *agen = (__pyx_PyAsyncGenObject*)self; + PyObject *finalizer = agen->ag_finalizer; + if (finalizer && !agen->ag_closed) { + PyObject *res = __Pyx_PyObject_CallOneArg(finalizer, self); + if (unlikely(!res)) { + PyErr_WriteUnraisable(self); + } else { + Py_DECREF(res); + } + __Pyx_ErrRestore(error_type, error_value, error_traceback); + return; + } + } +#endif + if (unlikely(gen->resume_label == 0 && !error_value)) { +#ifdef __Pyx_Coroutine_USED +#ifdef __Pyx_Generator_USED + if (!__Pyx_Generator_CheckExact(self)) +#endif + { + PyObject_GC_UnTrack(self); +#if PY_MAJOR_VERSION >= 3 || defined(PyErr_WarnFormat) + if (unlikely(PyErr_WarnFormat(PyExc_RuntimeWarning, 1, "coroutine '%.50S' was never awaited", gen->gi_qualname) < 0)) + PyErr_WriteUnraisable(self); +#else + {PyObject *msg; + char *cmsg; + #if CYTHON_COMPILING_IN_PYPY + msg = NULL; + cmsg = (char*) "coroutine was never awaited"; + #else + char *cname; + PyObject *qualname; + qualname = gen->gi_qualname; + cname = PyString_AS_STRING(qualname); + msg = PyString_FromFormat("coroutine '%.50s' was never awaited", cname); + if (unlikely(!msg)) { + PyErr_Clear(); + cmsg = (char*) "coroutine was never awaited"; + } else { + cmsg = PyString_AS_STRING(msg); + } + #endif + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, cmsg, 1) < 0)) + PyErr_WriteUnraisable(self); + Py_XDECREF(msg);} +#endif + PyObject_GC_Track(self); + } +#endif + } else { + PyObject *res = __Pyx_Coroutine_Close(self); + if (unlikely(!res)) { + if (PyErr_Occurred()) + PyErr_WriteUnraisable(self); + } else { + Py_DECREF(res); + } + } + __Pyx_ErrRestore(error_type, error_value, error_traceback); +#if !CYTHON_USE_TP_FINALIZE + assert(self->ob_refcnt > 0); + if (--self->ob_refcnt == 0) { + return; + } + { + Py_ssize_t refcnt = self->ob_refcnt; + _Py_NewReference(self); + self->ob_refcnt = refcnt; + } +#if CYTHON_COMPILING_IN_CPYTHON + assert(PyType_IS_GC(self->ob_type) && + _Py_AS_GC(self)->gc.gc_refs != _PyGC_REFS_UNTRACKED); + _Py_DEC_REFTOTAL; +#endif +#ifdef COUNT_ALLOCS + --Py_TYPE(self)->tp_frees; + --Py_TYPE(self)->tp_allocs; +#endif +#endif +} +static PyObject * +__Pyx_Coroutine_get_name(__pyx_CoroutineObject *self) +{ + PyObject *name = self->gi_name; + if (unlikely(!name)) name = Py_None; + 
Py_INCREF(name); + return name; +} +static int +__Pyx_Coroutine_set_name(__pyx_CoroutineObject *self, PyObject *value) +{ + PyObject *tmp; +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) { +#else + if (unlikely(value == NULL || !PyString_Check(value))) { +#endif + PyErr_SetString(PyExc_TypeError, + "__name__ must be set to a string object"); + return -1; + } + tmp = self->gi_name; + Py_INCREF(value); + self->gi_name = value; + Py_XDECREF(tmp); + return 0; +} +static PyObject * +__Pyx_Coroutine_get_qualname(__pyx_CoroutineObject *self) +{ + PyObject *name = self->gi_qualname; + if (unlikely(!name)) name = Py_None; + Py_INCREF(name); + return name; +} +static int +__Pyx_Coroutine_set_qualname(__pyx_CoroutineObject *self, PyObject *value) +{ + PyObject *tmp; +#if PY_MAJOR_VERSION >= 3 + if (unlikely(value == NULL || !PyUnicode_Check(value))) { +#else + if (unlikely(value == NULL || !PyString_Check(value))) { +#endif + PyErr_SetString(PyExc_TypeError, + "__qualname__ must be set to a string object"); + return -1; + } + tmp = self->gi_qualname; + Py_INCREF(value); + self->gi_qualname = value; + Py_XDECREF(tmp); + return 0; +} +static __pyx_CoroutineObject *__Pyx__Coroutine_New( + PyTypeObject* type, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name) { + __pyx_CoroutineObject *gen = PyObject_GC_New(__pyx_CoroutineObject, type); + if (unlikely(!gen)) + return NULL; + return __Pyx__Coroutine_NewInit(gen, body, code, closure, name, qualname, module_name); +} +static __pyx_CoroutineObject *__Pyx__Coroutine_NewInit( + __pyx_CoroutineObject *gen, __pyx_coroutine_body_t body, PyObject *code, PyObject *closure, + PyObject *name, PyObject *qualname, PyObject *module_name) { + gen->body = body; + gen->closure = closure; + Py_XINCREF(closure); + gen->is_running = 0; + gen->resume_label = 0; + gen->classobj = NULL; + gen->yieldfrom = NULL; + gen->exc_type = NULL; + gen->exc_value = NULL; + gen->exc_traceback = NULL; + gen->gi_weakreflist = NULL; + Py_XINCREF(qualname); + gen->gi_qualname = qualname; + Py_XINCREF(name); + gen->gi_name = name; + Py_XINCREF(module_name); + gen->gi_modulename = module_name; + Py_XINCREF(code); + gen->gi_code = code; + PyObject_GC_Track(gen); + return gen; +} + +/* PatchModuleWithCoroutine */ + static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code) { +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + int result; + PyObject *globals, *result_obj; + globals = PyDict_New(); if (unlikely(!globals)) goto ignore; + result = PyDict_SetItemString(globals, "_cython_coroutine_type", + #ifdef __Pyx_Coroutine_USED + (PyObject*)__pyx_CoroutineType); + #else + Py_None); + #endif + if (unlikely(result < 0)) goto ignore; + result = PyDict_SetItemString(globals, "_cython_generator_type", + #ifdef __Pyx_Generator_USED + (PyObject*)__pyx_GeneratorType); + #else + Py_None); + #endif + if (unlikely(result < 0)) goto ignore; + if (unlikely(PyDict_SetItemString(globals, "_module", module) < 0)) goto ignore; + if (unlikely(PyDict_SetItemString(globals, "__builtins__", __pyx_b) < 0)) goto ignore; + result_obj = PyRun_String(py_code, Py_file_input, globals, globals); + if (unlikely(!result_obj)) goto ignore; + Py_DECREF(result_obj); + Py_DECREF(globals); + return module; +ignore: + Py_XDECREF(globals); + PyErr_WriteUnraisable(module); + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, "Cython module failed to patch module with custom type", 1) < 
0)) { + Py_DECREF(module); + module = NULL; + } +#else + py_code++; +#endif + return module; +} + +/* PatchGeneratorABC */ + #ifndef CYTHON_REGISTER_ABCS +#define CYTHON_REGISTER_ABCS 1 +#endif +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) +static PyObject* __Pyx_patch_abc_module(PyObject *module); +static PyObject* __Pyx_patch_abc_module(PyObject *module) { + module = __Pyx_Coroutine_patch_module( + module, "" +"if _cython_generator_type is not None:\n" +" try: Generator = _module.Generator\n" +" except AttributeError: pass\n" +" else: Generator.register(_cython_generator_type)\n" +"if _cython_coroutine_type is not None:\n" +" try: Coroutine = _module.Coroutine\n" +" except AttributeError: pass\n" +" else: Coroutine.register(_cython_coroutine_type)\n" + ); + return module; +} +#endif +static int __Pyx_patch_abc(void) { +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + static int abc_patched = 0; + if (CYTHON_REGISTER_ABCS && !abc_patched) { + PyObject *module; + module = PyImport_ImportModule((PY_MAJOR_VERSION >= 3) ? "collections.abc" : "collections"); + if (!module) { + PyErr_WriteUnraisable(NULL); + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, + ((PY_MAJOR_VERSION >= 3) ? + "Cython module failed to register with collections.abc module" : + "Cython module failed to register with collections module"), 1) < 0)) { + return -1; + } + } else { + module = __Pyx_patch_abc_module(module); + abc_patched = 1; + if (unlikely(!module)) + return -1; + Py_DECREF(module); + } + module = PyImport_ImportModule("backports_abc"); + if (module) { + module = __Pyx_patch_abc_module(module); + Py_XDECREF(module); + } + if (!module) { + PyErr_Clear(); + } + } +#else + if ((0)) __Pyx_Coroutine_patch_module(NULL, NULL); +#endif + return 0; +} + +/* Generator */ + static PyMethodDef __pyx_Generator_methods[] = { + {"send", (PyCFunction) __Pyx_Coroutine_Send, METH_O, + (char*) PyDoc_STR("send(arg) -> send 'arg' into generator,\nreturn next yielded value or raise StopIteration.")}, + {"throw", (PyCFunction) __Pyx_Coroutine_Throw, METH_VARARGS, + (char*) PyDoc_STR("throw(typ[,val[,tb]]) -> raise exception in generator,\nreturn next yielded value or raise StopIteration.")}, + {"close", (PyCFunction) __Pyx_Coroutine_Close, METH_NOARGS, + (char*) PyDoc_STR("close() -> raise GeneratorExit inside generator.")}, + {0, 0, 0, 0} +}; +static PyMemberDef __pyx_Generator_memberlist[] = { + {(char *) "gi_running", T_BOOL, offsetof(__pyx_CoroutineObject, is_running), READONLY, NULL}, + {(char*) "gi_yieldfrom", T_OBJECT, offsetof(__pyx_CoroutineObject, yieldfrom), READONLY, + (char*) PyDoc_STR("object being iterated by 'yield from', or None")}, + {(char*) "gi_code", T_OBJECT, offsetof(__pyx_CoroutineObject, gi_code), READONLY, NULL}, + {0, 0, 0, 0, 0} +}; +static PyGetSetDef __pyx_Generator_getsets[] = { + {(char *) "__name__", (getter)__Pyx_Coroutine_get_name, (setter)__Pyx_Coroutine_set_name, + (char*) PyDoc_STR("name of the generator"), 0}, + {(char *) "__qualname__", (getter)__Pyx_Coroutine_get_qualname, (setter)__Pyx_Coroutine_set_qualname, + (char*) PyDoc_STR("qualified name of the generator"), 0}, + {0, 0, 0, 0, 0} +}; +static PyTypeObject __pyx_GeneratorType_type = { + PyVarObject_HEAD_INIT(0, 0) + "generator", + sizeof(__pyx_CoroutineObject), + 0, + (destructor) __Pyx_Coroutine_dealloc, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_FINALIZE, + 0, + (traverseproc) __Pyx_Coroutine_traverse, + 0, 
+ 0, + offsetof(__pyx_CoroutineObject, gi_weakreflist), + 0, + (iternextfunc) __Pyx_Generator_Next, + __pyx_Generator_methods, + __pyx_Generator_memberlist, + __pyx_Generator_getsets, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +#if CYTHON_USE_TP_FINALIZE + 0, +#else + __Pyx_Coroutine_del, +#endif + 0, +#if CYTHON_USE_TP_FINALIZE + __Pyx_Coroutine_del, +#elif PY_VERSION_HEX >= 0x030400a1 + 0, +#endif +}; +static int __pyx_Generator_init(void) { + __pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict; + __pyx_GeneratorType_type.tp_iter = PyObject_SelfIter; + __pyx_GeneratorType = __Pyx_FetchCommonType(&__pyx_GeneratorType_type); + if (unlikely(!__pyx_GeneratorType)) { + return -1; + } + return 0; +} + +/* CheckBinaryVersion */ + static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* FunctionExport */ + static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + d = PyObject_GetAttrString(__pyx_m, (char *)"__pyx_capi__"); + if (!d) { + PyErr_Clear(); + d = PyDict_New(); + if (!d) + goto bad; + Py_INCREF(d); + if (PyModule_AddObject(__pyx_m, (char *)"__pyx_capi__", d) < 0) + goto bad; + } + tmp.fp = f; +#if PY_VERSION_HEX >= 0x02070000 + cobj = PyCapsule_New(tmp.p, sig, 0); +#else + cobj = PyCObject_FromVoidPtrAndDesc(tmp.p, (void *)sig, 0); +#endif + if (!cobj) + goto bad; + if (PyDict_SetItemString(d, name, cobj) < 0) + goto bad; + Py_DECREF(cobj); + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(cobj); + Py_XDECREF(d); + return -1; +} + +/* ModuleImport */ + #ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +/* TypeImport */ + #ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto 
bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s has the wrong size, try recompiling. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +/* VoidPtrImport */ + #ifndef __PYX_HAVE_RT_ImportVoidPtr +#define __PYX_HAVE_RT_ImportVoidPtr +static int __Pyx_ImportVoidPtr(PyObject *module, const char *name, void **p, const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); + if (!d) + goto bad; + cobj = PyDict_GetItemString(d, name); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C variable %.200s", + PyModule_GetName(module), name); + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), name, sig, PyCapsule_GetName(cobj)); + goto bad; + } + *p = PyCapsule_GetPointer(cobj, sig); +#else + {const char *desc, *s1, *s2; + desc = (const char *)PyCObject_GetDesc(cobj); + if (!desc) + goto bad; + s1 = desc; s2 = sig; + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + if (*s1 != *s2) { + PyErr_Format(PyExc_TypeError, + "C variable %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), name, sig, desc); + goto bad; + } + *p = PyCObject_AsVoidPtr(cobj);} +#endif + if (!(*p)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} +#endif + +/* FunctionImport */ + #ifndef __PYX_HAVE_RT_ImportFunction +#define __PYX_HAVE_RT_ImportFunction +static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig) { + PyObject *d = 0; + PyObject *cobj = 0; + union { + void (*fp)(void); + void *p; + } tmp; + d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); + if (!d) + goto bad; + cobj = PyDict_GetItemString(d, funcname); + if (!cobj) { + PyErr_Format(PyExc_ImportError, + "%.200s does not export expected C function %.200s", + PyModule_GetName(module), funcname); + goto bad; + } +#if PY_VERSION_HEX >= 0x02070000 + if (!PyCapsule_IsValid(cobj, sig)) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); + goto bad; + } + tmp.p = PyCapsule_GetPointer(cobj, sig); +#else + {const char *desc, *s1, *s2; + desc = (const char *)PyCObject_GetDesc(cobj); + if (!desc) + goto bad; + s1 = desc; s2 = sig; + while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } + if (*s1 != *s2) { + PyErr_Format(PyExc_TypeError, + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", + PyModule_GetName(module), funcname, sig, desc); + goto bad; + } + tmp.p = PyCObject_AsVoidPtr(cobj);} +#endif + *f = tmp.fp; + if (!(*f)) + goto bad; + Py_DECREF(d); + return 0; +bad: + Py_XDECREF(d); + return -1; +} 
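+
+/* Note: the Export/Import helpers above implement Cython's cross-module C API sharing.
+ * __Pyx_ExportFunction wraps a C function pointer in a PyCapsule (keyed by the function
+ * name, with the C signature string as the capsule name) and stores it in the exporting
+ * module's `__pyx_capi__` dict; __Pyx_ImportFunction looks the capsule up on the imported
+ * module, checks that the signature strings match, and unwraps the pointer. This is the
+ * mechanism the new corpusfile modules use to reach the nogil training routines declared
+ * in word2vec_inner.pxd (e.g. w2v_fast_sentence_sg_hs) without going through Python-level
+ * calls. A rough, illustrative check from Python, with the function name assumed from this
+ * patch's pxd:
+ *
+ *     from gensim.models import word2vec_inner
+ *     print('w2v_fast_sentence_sg_hs' in word2vec_inner.__pyx_capi__)
+ */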
+#endif + +/* InitStrings */ + static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). 
" + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(x); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/gensim/models/word2vec_corpusfile.pxd b/gensim/models/word2vec_corpusfile.pxd new file mode 100644 index 0000000000..c4b203d39d --- /dev/null +++ b/gensim/models/word2vec_corpusfile.pxd @@ -0,0 +1,69 @@ +# distutils: language = c++ +# cython: boundscheck=False +# cython: wraparound=False +# cython: cdivision=True +# cython: embedsignature=True +# coding: utf-8 +# +# shared type definitions for word2vec_corpusfile +# also used from fasttext_corpusfile and doc2vec_corpusfile +# +# Copyright (C) 2018 Dmitry Persiyanov +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +from libcpp.string cimport string +from libcpp.vector cimport vector +from libcpp.unordered_map cimport unordered_map +from libcpp cimport bool as bool_t + +cimport numpy as np + +ctypedef np.float32_t REAL_t + + +cdef extern from "fast_line_sentence.h": + cdef cppclass FastLineSentence: + FastLineSentence() except + + FastLineSentence(string&, size_t) except + + vector[string] ReadSentence() nogil except + + bool_t IsEof() nogil + void Reset() nogil + + +cdef class CythonLineSentence: + cdef FastLineSentence* _thisptr + cdef public bytes source + cdef public size_t max_sentence_length, max_words_in_batch, offset + cdef vector[vector[string]] buf_data + + cpdef bool_t is_eof(self) nogil + cpdef vector[string] read_sentence(self) nogil except * + cpdef vector[vector[string]] _read_chunked_sentence(self) nogil except * + cpdef vector[vector[string]] _chunk_sentence(self, vector[string] sent) nogil + cpdef void reset(self) nogil + cpdef vector[vector[string]] next_batch(self) nogil except * + + +cdef struct VocabItem: + long long sample_int + np.uint32_t index + np.uint8_t *code + int code_len + np.uint32_t *point + + # for FastText + int subword_idx_len + np.uint32_t *subword_idx + + +ctypedef unordered_map[string, VocabItem] cvocab_t + +cdef class CythonVocab: + cdef cvocab_t vocab + cdef subword_arrays + cdef cvocab_t* 
get_vocab_ptr(self) nogil except * + + +cdef REAL_t get_alpha(REAL_t alpha, REAL_t end_alpha, int cur_epoch, int num_epochs) nogil +cdef REAL_t get_next_alpha(REAL_t start_alpha, REAL_t end_alpha, int total_examples, int total_words, + int expected_examples, int expected_words, int cur_epoch, int num_epochs) nogil diff --git a/gensim/models/word2vec_corpusfile.pyx b/gensim/models/word2vec_corpusfile.pyx new file mode 100644 index 0000000000..4861fbd923 --- /dev/null +++ b/gensim/models/word2vec_corpusfile.pyx @@ -0,0 +1,441 @@ +#!/usr/bin/env cython +# distutils: language = c++ +# cython: boundscheck=False +# cython: wraparound=False +# cython: cdivision=True +# cython: embedsignature=True +# coding: utf-8 +# +# Copyright (C) 2018 Dmitry Persiyanov +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +"""Optimized cython functions for file-based training :class:`~gensim.models.word2vec.Word2Vec` model.""" + +import cython +import numpy as np + +from gensim.utils import any2utf8 +from six import iteritems + +cimport numpy as np + +from libcpp.string cimport string +from libcpp.vector cimport vector +from libcpp cimport bool as bool_t + +from gensim.models.word2vec_inner cimport ( + w2v_fast_sentence_sg_hs, + w2v_fast_sentence_sg_neg, + w2v_fast_sentence_cbow_hs, + w2v_fast_sentence_cbow_neg, + random_int32, + init_w2v_config, + Word2VecConfig +) + +DEF MAX_SENTENCE_LEN = 10000 + + +@cython.final +cdef class CythonVocab: + def __init__(self, wv, hs=0, fasttext=0): + cdef VocabItem word + + for py_token, vocab_item in iteritems(wv.vocab): + token = any2utf8(py_token) + word.index = vocab_item.index + word.sample_int = vocab_item.sample_int + + if hs: + word.code = np.PyArray_DATA(vocab_item.code) + word.code_len = len(vocab_item.code) + word.point = np.PyArray_DATA(vocab_item.point) + + # subwords information, used only in FastText model + if fasttext: + word.subword_idx_len = (len(wv.buckets_word[word.index])) + word.subword_idx = np.PyArray_DATA(wv.buckets_word[word.index]) + + self.vocab[token] = word + + cdef cvocab_t* get_vocab_ptr(self) nogil except *: + return &self.vocab + + +def rebuild_cython_line_sentence(source, max_sentence_length): + return CythonLineSentence(source, max_sentence_length=max_sentence_length) + + +cdef bytes to_bytes(key): + if isinstance(key, bytes): + return key + else: + return key.encode('utf8') + + +@cython.final +cdef class CythonLineSentence: + def __cinit__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): + self._thisptr = new FastLineSentence(to_bytes(source), offset) + + def __init__(self, source, offset=0, max_sentence_length=MAX_SENTENCE_LEN): + self.source = to_bytes(source) + self.offset = offset + self.max_sentence_length = max_sentence_length + self.max_words_in_batch = max_sentence_length + + def __dealloc__(self): + if self._thisptr != NULL: + del self._thisptr + + cpdef bool_t is_eof(self) nogil: + return self._thisptr.IsEof() + + cpdef vector[string] read_sentence(self) nogil except *: + return self._thisptr.ReadSentence() + + cpdef vector[vector[string]] _read_chunked_sentence(self) nogil except *: + cdef vector[string] sent = self.read_sentence() + return self._chunk_sentence(sent) + + cpdef vector[vector[string]] _chunk_sentence(self, vector[string] sent) nogil: + cdef vector[vector[string]] res + cdef vector[string] chunk + cdef size_t cur_idx = 0 + + if sent.size() > self.max_sentence_length: + while cur_idx < sent.size(): + chunk.clear() + for i in range(cur_idx, min(cur_idx + 
self.max_sentence_length, sent.size())): + chunk.push_back(sent[i]) + + res.push_back(chunk) + cur_idx += chunk.size() + else: + res.push_back(sent) + + return res + + cpdef void reset(self) nogil: + self._thisptr.Reset() + + def __iter__(self): + self.reset() + while not self.is_eof(): + chunked_sentence = self._read_chunked_sentence() + for chunk in chunked_sentence: + if not chunk.empty(): + yield chunk + + def __reduce__(self): + # This function helps pickle to correctly serialize objects of this class. + return rebuild_cython_line_sentence, (self.source, self.max_sentence_length) + + cpdef vector[vector[string]] next_batch(self) nogil except *: + cdef: + vector[vector[string]] job_batch + vector[vector[string]] chunked_sentence + vector[string] data + size_t batch_size = 0 + size_t last_idx = 0 + size_t tmp = 0 + int idx + + # Try to read data from previous calls which was not returned + if not self.buf_data.empty(): + job_batch = self.buf_data + self.buf_data.clear() + + for sent in job_batch: + batch_size += sent.size() + + while not self.is_eof() and batch_size <= self.max_words_in_batch: + data = self.read_sentence() + + chunked_sentence = self._chunk_sentence(data) + for chunk in chunked_sentence: + job_batch.push_back(chunk) + batch_size += chunk.size() + + if batch_size > self.max_words_in_batch: + # Save data which doesn't fit in batch in order to return it later. + self.buf_data.clear() + + tmp = batch_size + idx = job_batch.size() - 1 + while idx >= 0: + if tmp - job_batch[idx].size() <= self.max_words_in_batch: + last_idx = idx + 1 + break + else: + tmp -= job_batch[idx].size() + + idx -= 1 + + for i in range(last_idx, job_batch.size()): + self.buf_data.push_back(job_batch[i]) + job_batch.resize(last_idx) + + return job_batch + + +cdef void prepare_c_structures_for_batch( + vector[vector[string]] &sentences, int sample, int hs, int window, int *total_words, + int *effective_words, int *effective_sentences, unsigned long long *next_random, + cvocab_t *vocab, int *sentence_idx, np.uint32_t *indexes, int *codelens, + np.uint8_t **codes, np.uint32_t **points, np.uint32_t *reduced_windows) nogil: + cdef VocabItem word + cdef string token + cdef vector[string] sent + + sentence_idx[0] = 0 # indices of the first sentence always start at 0 + for sent in sentences: + if sent.empty(): + continue # ignore empty sentences; leave effective_sentences unchanged + total_words[0] += sent.size() + + for token in sent: + # leaving `effective_words` unchanged = shortening the sentence = expanding the window + if vocab[0].find(token) == vocab[0].end(): + continue + + word = vocab[0][token] + if sample and word.sample_int < random_int32(next_random): + continue + indexes[effective_words[0]] = word.index + if hs: + codelens[effective_words[0]] = word.code_len + codes[effective_words[0]] = word.code + points[effective_words[0]] = word.point + effective_words[0] += 1 + if effective_words[0] == MAX_SENTENCE_LEN: + break # TODO: log warning, tally overflow? + + # keep track of which words go into which sentence, so we don't train + # across sentence boundaries. 
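+        # (Rough sketch of this bookkeeping in plain Python terms, with subsampling and the
+        # empty-sentence / MAX_SENTENCE_LEN checks omitted: sentence_idx behaves like a
+        # running prefix sum over `indexes`, recording the word count after each sentence,
+        #     sentence_idx = [0]
+        #     for sent in sentences:
+        #         indexes.extend(vocab[tok].index for tok in sent if tok in vocab)
+        #         sentence_idx.append(len(indexes))
+        # so train_epoch_sg / train_epoch_cbow below can slice the words of sentence X as
+        # indexes[sentence_idx[X]:sentence_idx[X + 1]] via idx_start / idx_end.)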
+ # indices of sentence number X are between cur_epoch) / num_epochs) + + +cdef REAL_t get_next_alpha( + REAL_t start_alpha, REAL_t end_alpha, int total_examples, int total_words, + int expected_examples, int expected_words, int cur_epoch, int num_epochs) nogil: + cdef REAL_t epoch_progress + + if expected_examples != -1: + # examples-based decay + epoch_progress = ( total_examples) / expected_examples + else: + # word-based decay + epoch_progress = ( total_words) / expected_words + + cdef REAL_t progress = (cur_epoch + epoch_progress) / num_epochs + cdef REAL_t next_alpha = start_alpha - (start_alpha - end_alpha) * progress + return max(end_alpha, next_alpha) + + +def train_epoch_sg(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, + _neu1, compute_loss): + """Train Skipgram model for one epoch by training on an input stream. This function is used only in multistream mode. + + Called internally from :meth:`~gensim.models.word2vec.Word2Vec.train`. + + Parameters + ---------- + model : :class:`~gensim.models.word2vec.Word2Vec` + The Word2Vec model instance to train. + input_stream : iterable of list of str + The corpus used to train the model. + _cur_epoch : int + Current epoch number. Used for calculating and decaying learning rate. + _work : np.ndarray + Private working memory for each worker. + _neu1 : np.ndarray + Private working memory for each worker. + compute_loss : bool + Whether or not the training loss should be computed in this batch. + + Returns + ------- + int + Number of words in the vocabulary actually used for training (They already existed in the vocabulary + and were not discarded by negative sampling). + """ + cdef Word2VecConfig c + + # For learning rate updates + cdef int cur_epoch = _cur_epoch + cdef int num_epochs = model.epochs + cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + cdef int expected_words = (-1 if _expected_words is None else _expected_words) + cdef REAL_t start_alpha = model.alpha + cdef REAL_t end_alpha = model.min_alpha + cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + + cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + cdef CythonVocab vocab = _cython_vocab + + cdef int i, j, k + cdef int effective_words = 0, effective_sentences = 0 + cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 + cdef int sent_idx, idx_start, idx_end + + init_w2v_config(&c, model, _alpha, compute_loss, _work) + + cdef vector[vector[string]] sentences + + with nogil: + input_stream.reset() + while not (input_stream.is_eof() or total_words > expected_words / c.workers): + effective_sentences = 0 + effective_words = 0 + + sentences = input_stream.next_batch() + + prepare_c_structures_for_batch( + sentences, c.sample, c.hs, c.window, &total_words, &effective_words, &effective_sentences, + &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, c.indexes, + c.codelens, c.codes, c.points, c.reduced_windows) + + for sent_idx in range(effective_sentences): + idx_start = c.sentence_idx[sent_idx] + idx_end = c.sentence_idx[sent_idx + 1] + for i in range(idx_start, idx_end): + j = i - c.window + c.reduced_windows[i] + if j < idx_start: + j = idx_start + k = i + c.window + 1 - c.reduced_windows[i] + if k > idx_end: + k = idx_end + for j in range(j, k): + if j == i: + continue + if c.hs: + w2v_fast_sentence_sg_hs( + c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], + c.alpha, c.work, 
c.word_locks, c.compute_loss, &c.running_training_loss) + if c.negative: + c.next_random = w2v_fast_sentence_sg_neg( + c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, + c.indexes[i], c.indexes[j], c.alpha, c.work, c.next_random, c.word_locks, + c.compute_loss, &c.running_training_loss) + + total_sentences += sentences.size() + total_effective_words += effective_words + + c.alpha = get_next_alpha( + start_alpha, end_alpha, total_sentences, total_words, + expected_examples, expected_words, cur_epoch, num_epochs) + + model.running_training_loss = c.running_training_loss + return total_sentences, total_effective_words, total_words + + +def train_epoch_cbow(model, corpus_file, offset, _cython_vocab, _cur_epoch, _expected_examples, _expected_words, _work, + _neu1, compute_loss): + """Train CBOW model for one epoch by training on an input stream. This function is used only in multistream mode. + + Called internally from :meth:`~gensim.models.word2vec.Word2Vec.train`. + + Parameters + ---------- + model : :class:`~gensim.models.word2vec.Word2Vec` + The Word2Vec model instance to train. + input_stream : iterable of list of str + The corpus used to train the model. + _cur_epoch : int + Current epoch number. Used for calculating and decaying learning rate. + _work : np.ndarray + Private working memory for each worker. + _neu1 : np.ndarray + Private working memory for each worker. + compute_loss : bool + Whether or not the training loss should be computed in this batch. + + Returns + ------- + int + Number of words in the vocabulary actually used for training (They already existed in the vocabulary + and were not discarded by negative sampling). + """ + cdef Word2VecConfig c + + # For learning rate updates + cdef int cur_epoch = _cur_epoch + cdef int num_epochs = model.epochs + cdef int expected_examples = (-1 if _expected_examples is None else _expected_examples) + cdef int expected_words = (-1 if _expected_words is None else _expected_words) + cdef REAL_t start_alpha = model.alpha + cdef REAL_t end_alpha = model.min_alpha + cdef REAL_t _alpha = get_alpha(model.alpha, end_alpha, cur_epoch, num_epochs) + + cdef CythonLineSentence input_stream = CythonLineSentence(corpus_file, offset) + cdef CythonVocab vocab = _cython_vocab + + cdef int i, j, k + cdef int effective_words = 0, effective_sentences = 0 + cdef int total_effective_words = 0, total_sentences = 0, total_words = 0 + cdef int sent_idx, idx_start, idx_end + + init_w2v_config(&c, model, _alpha, compute_loss, _work, _neu1) + + cdef vector[vector[string]] sentences + + with nogil: + input_stream.reset() + while not (input_stream.is_eof() or total_words > expected_words / c.workers): + effective_sentences = 0 + effective_words = 0 + + sentences = input_stream.next_batch() + + prepare_c_structures_for_batch( + sentences, c.sample, c.hs, c.window, &total_words, &effective_words, + &effective_sentences, &c.next_random, vocab.get_vocab_ptr(), c.sentence_idx, + c.indexes, c.codelens, c.codes, c.points, c.reduced_windows) + + for sent_idx in range(effective_sentences): + idx_start = c.sentence_idx[sent_idx] + idx_end = c.sentence_idx[sent_idx + 1] + for i in range(idx_start, idx_end): + j = i - c.window + c.reduced_windows[i] + if j < idx_start: + j = idx_start + k = i + c.window + 1 - c.reduced_windows[i] + if k > idx_end: + k = idx_end + if c.hs: + w2v_fast_sentence_cbow_hs( + c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, + c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, 
&c.running_training_loss) + + if c.negative: + c.next_random = w2v_fast_sentence_cbow_neg( + c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, + c.syn1neg, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, + c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) + + total_sentences += sentences.size() + total_effective_words += effective_words + + c.alpha = get_next_alpha( + start_alpha, end_alpha, total_sentences, total_words, + expected_examples, expected_words, cur_epoch, num_epochs) + + model.running_training_loss = c.running_training_loss + return total_sentences, total_effective_words, total_words + + +CORPUSFILE_VERSION = 1 diff --git a/gensim/models/word2vec_inner.c b/gensim/models/word2vec_inner.c index a164655853..79ec298beb 100644 --- a/gensim/models/word2vec_inner.c +++ b/gensim/models/word2vec_inner.c @@ -1,4 +1,4 @@ -/* Generated by Cython 0.28.3 */ +/* Generated by Cython 0.28.2 */ #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -7,7 +7,7 @@ #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. #else -#define CYTHON_ABI "0_28_3" +#define CYTHON_ABI "0_28_2" #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof @@ -453,7 +453,6 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact - #define PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -562,11 +561,11 @@ static CYTHON_INLINE float __PYX_NAN() { #define __PYX_HAVE__gensim__models__word2vec_inner #define __PYX_HAVE_API__gensim__models__word2vec_inner /* Early includes */ -#include "voidptr.h" #include #include #include "numpy/arrayobject.h" #include "numpy/ufuncobject.h" +#include "voidptr.h" #include #ifdef _OPENMP #include @@ -652,7 +651,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -760,7 +759,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_cython_runtime; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -810,7 +809,7 @@ static const char *__pyx_f[] = { #endif -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. 
* * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -819,7 +818,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -828,7 +827,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -837,7 +836,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -846,7 +845,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -855,7 +854,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -864,7 +863,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -873,7 +872,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -882,7 +881,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -891,7 +890,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -900,7 
+899,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -909,7 +908,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -918,7 +917,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -927,7 +926,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -936,7 +935,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -945,7 +944,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -954,7 +953,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -963,7 +962,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -972,7 +971,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * 
ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -981,7 +980,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -990,7 +989,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -999,9 +998,9 @@ typedef npy_double __pyx_t_5numpy_double_t; */ typedef npy_longdouble __pyx_t_5numpy_longdouble_t; -/* "gensim/models/word2vec_inner.pxd":12 +/* "gensim/models/word2vec_inner.pxd":19 + * void* PyCObject_AsVoidPtr(object obj) * - * cimport numpy as np * ctypedef np.float32_t REAL_t # <<<<<<<<<<<<<< * * # BLAS routine signatures @@ -1034,7 +1033,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do /*--- Type declarations ---*/ -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1043,7 +1042,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1052,7 +1051,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1061,7 +1060,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -1069,8 +1068,10 @@ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; * cdef inline object PyArray_MultiIterNew1(a): */ typedef npy_cdouble __pyx_t_5numpy_complex_t; +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig; +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config; -/* "gensim/models/word2vec_inner.pxd":15 +/* "gensim/models/word2vec_inner.pxd":22 * * # BLAS routine signatures * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1079,7 +1080,7 @@ typedef npy_cdouble __pyx_t_5numpy_complex_t; */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const 
*, float const *, int const *, float *, int const *); -/* "gensim/models/word2vec_inner.pxd":16 +/* "gensim/models/word2vec_inner.pxd":23 * # BLAS routine signatures * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1088,7 +1089,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)(int const *, */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); -/* "gensim/models/word2vec_inner.pxd":17 +/* "gensim/models/word2vec_inner.pxd":24 * ctypedef void (*scopy_ptr) (const int *N, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1097,7 +1098,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)(int const *, */ typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "gensim/models/word2vec_inner.pxd":18 +/* "gensim/models/word2vec_inner.pxd":25 * ctypedef void (*saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1106,7 +1107,7 @@ typedef float (*__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)(int const *, */ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "gensim/models/word2vec_inner.pxd":19 +/* "gensim/models/word2vec_inner.pxd":26 * ctypedef float (*sdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< @@ -1115,7 +1116,7 @@ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)(int const * */ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const *, float const *, int const *); -/* "gensim/models/word2vec_inner.pxd":20 +/* "gensim/models/word2vec_inner.pxd":27 * ctypedef double (*dsdot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef double (*snrm2_ptr) (const int *N, const float *X, const int *incX) nogil * ctypedef void (*sscal_ptr) (const int *N, const float *alpha, const float *X, const int *incX) nogil # <<<<<<<<<<<<<< @@ -1124,7 +1125,7 @@ typedef double (*__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)(int const * */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, float const *, float const *, int const *); -/* "gensim/models/word2vec_inner.pxd":35 +/* "gensim/models/word2vec_inner.pxd":44 * * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() * ctypedef REAL_t 
(*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1133,7 +1134,7 @@ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)(int const *, */ typedef __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6models_14word2vec_inner_our_dot_ptr)(int const *, float const *, int const *, float const *, int const *); -/* "gensim/models/word2vec_inner.pxd":36 +/* "gensim/models/word2vec_inner.pxd":45 * # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() * ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil * ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil # <<<<<<<<<<<<<< @@ -1142,6 +1143,51 @@ typedef __pyx_t_6gensim_6models_14word2vec_inner_REAL_t (*__pyx_t_6gensim_6model */ typedef void (*__pyx_t_6gensim_6models_14word2vec_inner_our_saxpy_ptr)(int const *, float const *, float const *, int const *, float *, int const *); +/* "gensim/models/word2vec_inner.pxd":51 + * + * + * cdef struct Word2VecConfig: # <<<<<<<<<<<<<< + * int hs, negative, sample, compute_loss, size, window, cbow_mean, workers + * REAL_t running_training_loss, alpha + */ +struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig { + int hs; + int negative; + int sample; + int compute_loss; + int size; + int window; + int cbow_mean; + int workers; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t running_training_loss; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t alpha; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn0; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *word_locks; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *work; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *neu1; + int codelens[0x2710]; + __pyx_t_5numpy_uint32_t indexes[0x2710]; + __pyx_t_5numpy_uint32_t reduced_windows[0x2710]; + int sentence_idx[(0x2710 + 1)]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1; + __pyx_t_5numpy_uint32_t *points[0x2710]; + __pyx_t_5numpy_uint8_t *codes[0x2710]; + __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *syn1neg; + __pyx_t_5numpy_uint32_t *cum_table; + unsigned PY_LONG_LONG cum_table_len; + unsigned PY_LONG_LONG next_random; +}; + +/* "gensim/models/word2vec_inner.pxd":125 + * + * + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=*) # <<<<<<<<<<<<<< + */ +struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config { + int __pyx_n; + PyObject *_neu1; +}; + /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY @@ -1216,6 +1262,16 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + /* RaiseArgTupleInvalid.proto */ static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); @@ -1228,16 +1284,6 @@ static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ PyObject *kwds2, PyObject 
*values[], Py_ssize_t num_pos_args,\ const char* function_name); -/* ExtTypeTest.proto */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - /* PySequenceContains.proto */ static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { int result = PySequence_Contains(seq, item); @@ -1657,10 +1703,11 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 static void __pyx_f_6gensim_6models_14word2vec_inner_our_saxpy_noblas(int const *, float const *, float const *, int const *, float *, int const *); /*proto*/ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_bisect_left(__pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG); /*proto*/ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_random_int32(unsigned PY_LONG_LONG *); /*proto*/ -static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ -static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ -static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ -static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static void 
__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static void __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_neg(int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ +static PyObject *__pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config(struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig *, PyObject *, PyObject *, PyObject *, PyObject *, struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config *__pyx_optional_args); /*proto*/ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *); /*proto*/ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int); /*proto*/ #define __Pyx_MODULE_NAME "gensim.models.word2vec_inner" @@ -1673,6 +1720,7 @@ static PyObject *__pyx_builtin_range; static 
PyObject *__pyx_builtin_enumerate; static PyObject *__pyx_builtin_ValueError; static PyObject *__pyx_builtin_RuntimeError; +static const char __pyx_k_c[] = "c"; static const char __pyx_k_i[] = "i"; static const char __pyx_k_j[] = "j"; static const char __pyx_k_k[] = "k"; @@ -1681,7 +1729,7 @@ static const char __pyx_k_y[] = "y"; static const char __pyx_k_hs[] = "hs"; static const char __pyx_k_np[] = "np"; static const char __pyx_k_wv[] = "wv"; -static const char __pyx_k__14[] = "*"; +static const char __pyx_k__12[] = "*"; static const char __pyx_k_REAL[] = "REAL"; static const char __pyx_k_code[] = "code"; static const char __pyx_k_init[] = "init"; @@ -1691,13 +1739,11 @@ static const char __pyx_k_neu1[] = "_neu1"; static const char __pyx_k_sdot[] = "sdot"; static const char __pyx_k_sent[] = "sent"; static const char __pyx_k_size[] = "size"; -static const char __pyx_k_syn0[] = "syn0"; static const char __pyx_k_syn1[] = "syn1"; static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_word[] = "word"; static const char __pyx_k_work[] = "_work"; static const char __pyx_k_alpha[] = "alpha"; -static const char __pyx_k_codes[] = "codes"; static const char __pyx_k_d_res[] = "d_res"; static const char __pyx_k_dsdot[] = "dsdot"; static const char __pyx_k_fblas[] = "fblas"; @@ -1714,23 +1760,18 @@ static const char __pyx_k_sscal[] = "sscal"; static const char __pyx_k_token[] = "token"; static const char __pyx_k_vocab[] = "vocab"; static const char __pyx_k_import[] = "__import__"; -static const char __pyx_k_neu1_2[] = "neu1"; -static const char __pyx_k_points[] = "points"; static const char __pyx_k_random[] = "random"; static const char __pyx_k_result[] = "result"; static const char __pyx_k_sample[] = "sample"; static const char __pyx_k_window[] = "window"; -static const char __pyx_k_work_2[] = "work"; -static const char __pyx_k_alpha_2[] = "_alpha"; static const char __pyx_k_float32[] = "float32"; static const char __pyx_k_idx_end[] = "idx_end"; -static const char __pyx_k_indexes[] = "indexes"; static const char __pyx_k_our_dot[] = "our_dot"; static const char __pyx_k_randint[] = "randint"; static const char __pyx_k_syn1neg[] = "syn1neg"; static const char __pyx_k_vectors[] = "vectors"; static const char __pyx_k_vlookup[] = "vlookup"; -static const char __pyx_k_codelens[] = "codelens"; +static const char __pyx_k_workers[] = "workers"; static const char __pyx_k_cpointer[] = "_cpointer"; static const char __pyx_k_expected[] = "expected"; static const char __pyx_k_negative[] = "negative"; @@ -1748,21 +1789,15 @@ static const char __pyx_k_ValueError[] = "ValueError"; static const char __pyx_k_sample_int[] = "sample_int"; static const char __pyx_k_trainables[] = "trainables"; static const char __pyx_k_vocabulary[] = "vocabulary"; -static const char __pyx_k_word_locks[] = "word_locks"; static const char __pyx_k_ImportError[] = "ImportError"; -static const char __pyx_k_next_random[] = "next_random"; static const char __pyx_k_vector_size[] = "vector_size"; static const char __pyx_k_FAST_VERSION[] = "FAST_VERSION"; static const char __pyx_k_RuntimeError[] = "RuntimeError"; static const char __pyx_k_compute_loss[] = "compute_loss"; -static const char __pyx_k_sentence_idx[] = "sentence_idx"; static const char __pyx_k_sentence_len[] = "sentence_len"; -static const char __pyx_k_cum_table_len[] = "cum_table_len"; static const char __pyx_k_vectors_lockf[] = "vectors_lockf"; -static const char __pyx_k_compute_loss_2[] = "_compute_loss"; static const char __pyx_k_train_batch_sg[] = "train_batch_sg"; 
static const char __pyx_k_effective_words[] = "effective_words"; -static const char __pyx_k_reduced_windows[] = "reduced_windows"; static const char __pyx_k_train_batch_cbow[] = "train_batch_cbow"; static const char __pyx_k_scipy_linalg_blas[] = "scipy.linalg.blas"; static const char __pyx_k_score_sentence_sg[] = "score_sentence_sg"; @@ -1771,7 +1806,6 @@ static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static const char __pyx_k_effective_sentences[] = "effective_sentences"; static const char __pyx_k_score_sentence_cbow[] = "score_sentence_cbow"; static const char __pyx_k_running_training_loss[] = "running_training_loss"; -static const char __pyx_k_running_training_loss_2[] = "_running_training_loss"; static const char __pyx_k_ndarray_is_not_C_contiguous[] = "ndarray is not C contiguous"; static const char __pyx_k_gensim_models_word2vec_inner[] = "gensim.models.word2vec_inner"; static const char __pyx_k_numpy_core_multiarray_failed_to[] = "numpy.core.multiarray failed to import"; @@ -1793,19 +1827,15 @@ static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor; static PyObject *__pyx_n_s_REAL; static PyObject *__pyx_n_s_RuntimeError; static PyObject *__pyx_n_s_ValueError; -static PyObject *__pyx_n_s__14; +static PyObject *__pyx_n_s__12; static PyObject *__pyx_n_s_alpha; -static PyObject *__pyx_n_s_alpha_2; +static PyObject *__pyx_n_s_c; static PyObject *__pyx_n_s_cbow_mean; static PyObject *__pyx_n_s_cline_in_traceback; static PyObject *__pyx_n_s_code; -static PyObject *__pyx_n_s_codelens; -static PyObject *__pyx_n_s_codes; static PyObject *__pyx_n_s_compute_loss; -static PyObject *__pyx_n_s_compute_loss_2; static PyObject *__pyx_n_s_cpointer; static PyObject *__pyx_n_s_cum_table; -static PyObject *__pyx_n_s_cum_table_len; static PyObject *__pyx_n_s_d_res; static PyObject *__pyx_n_s_dsdot; static PyObject *__pyx_n_s_effective_sentences; @@ -1822,7 +1852,6 @@ static PyObject *__pyx_n_s_idx_end; static PyObject *__pyx_n_s_idx_start; static PyObject *__pyx_n_s_import; static PyObject *__pyx_n_s_index; -static PyObject *__pyx_n_s_indexes; static PyObject *__pyx_n_s_init; static PyObject *__pyx_n_s_item; static PyObject *__pyx_n_s_j; @@ -1833,8 +1862,6 @@ static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous; static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou; static PyObject *__pyx_n_s_negative; static PyObject *__pyx_n_s_neu1; -static PyObject *__pyx_n_s_neu1_2; -static PyObject *__pyx_n_s_next_random; static PyObject *__pyx_n_s_np; static PyObject *__pyx_n_s_numpy; static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to; @@ -1843,15 +1870,12 @@ static PyObject *__pyx_n_s_our_dot; static PyObject *__pyx_n_s_our_saxpy; static PyObject *__pyx_n_s_p_res; static PyObject *__pyx_n_s_point; -static PyObject *__pyx_n_s_points; static PyObject *__pyx_n_s_pyx_capi; static PyObject *__pyx_n_s_randint; static PyObject *__pyx_n_s_random; static PyObject *__pyx_n_s_range; -static PyObject *__pyx_n_s_reduced_windows; static PyObject *__pyx_n_s_result; static PyObject *__pyx_n_s_running_training_loss; -static PyObject *__pyx_n_s_running_training_loss_2; static PyObject *__pyx_n_s_sample; static PyObject *__pyx_n_s_sample_int; static PyObject *__pyx_n_s_saxpy; @@ -1863,13 +1887,11 @@ static PyObject *__pyx_n_s_sdot; static PyObject *__pyx_n_s_sent; static PyObject *__pyx_n_s_sent_idx; static PyObject *__pyx_n_s_sentence; -static PyObject *__pyx_n_s_sentence_idx; static PyObject *__pyx_n_s_sentence_len; static PyObject *__pyx_n_s_sentences; static PyObject 
*__pyx_n_s_size; static PyObject *__pyx_n_s_snrm2; static PyObject *__pyx_n_s_sscal; -static PyObject *__pyx_n_s_syn0; static PyObject *__pyx_n_s_syn1; static PyObject *__pyx_n_s_syn1neg; static PyObject *__pyx_n_s_test; @@ -1886,9 +1908,8 @@ static PyObject *__pyx_n_s_vocab; static PyObject *__pyx_n_s_vocabulary; static PyObject *__pyx_n_s_window; static PyObject *__pyx_n_s_word; -static PyObject *__pyx_n_s_word_locks; static PyObject *__pyx_n_s_work; -static PyObject *__pyx_n_s_work_2; +static PyObject *__pyx_n_s_workers; static PyObject *__pyx_n_s_wv; static PyObject *__pyx_n_s_x; static PyObject *__pyx_n_s_y; @@ -1915,21 +1936,19 @@ static PyObject *__pyx_tuple__8; static PyObject *__pyx_tuple__9; static PyObject *__pyx_tuple__10; static PyObject *__pyx_tuple__11; -static PyObject *__pyx_tuple__12; static PyObject *__pyx_tuple__13; static PyObject *__pyx_tuple__15; static PyObject *__pyx_tuple__17; static PyObject *__pyx_tuple__19; static PyObject *__pyx_tuple__21; -static PyObject *__pyx_tuple__23; +static PyObject *__pyx_codeobj__14; static PyObject *__pyx_codeobj__16; static PyObject *__pyx_codeobj__18; static PyObject *__pyx_codeobj__20; static PyObject *__pyx_codeobj__22; -static PyObject *__pyx_codeobj__24; /* Late includes */ -/* "gensim/models/word2vec_inner.pyx":49 +/* "gensim/models/word2vec_inner.pyx":51 * * # for when fblas.sdot returns a double * cdef REAL_t our_dot_double(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: # <<<<<<<<<<<<<< @@ -1940,7 +1959,7 @@ static PyObject *__pyx_codeobj__24; static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_14word2vec_inner_our_dot_double(int const *__pyx_v_N, float const *__pyx_v_X, int const *__pyx_v_incX, float const *__pyx_v_Y, int const *__pyx_v_incY) { __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_r; - /* "gensim/models/word2vec_inner.pyx":50 + /* "gensim/models/word2vec_inner.pyx":52 * # for when fblas.sdot returns a double * cdef REAL_t our_dot_double(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: * return dsdot(N, X, incX, Y, incY) # <<<<<<<<<<<<<< @@ -1950,7 +1969,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 __pyx_r = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)__pyx_v_6gensim_6models_14word2vec_inner_dsdot(__pyx_v_N, __pyx_v_X, __pyx_v_incX, __pyx_v_Y, __pyx_v_incY)); goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":49 + /* "gensim/models/word2vec_inner.pyx":51 * * # for when fblas.sdot returns a double * cdef REAL_t our_dot_double(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: # <<<<<<<<<<<<<< @@ -1963,7 +1982,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":53 +/* "gensim/models/word2vec_inner.pyx":55 * * # for when fblas.sdot returns a float * cdef REAL_t our_dot_float(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: # <<<<<<<<<<<<<< @@ -1974,7 +1993,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_14word2vec_inner_our_dot_float(int const *__pyx_v_N, float const *__pyx_v_X, int const *__pyx_v_incX, float const *__pyx_v_Y, int const *__pyx_v_incY) { __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_r; - /* "gensim/models/word2vec_inner.pyx":54 + /* 
"gensim/models/word2vec_inner.pyx":56 * # for when fblas.sdot returns a float * cdef REAL_t our_dot_float(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: * return sdot(N, X, incX, Y, incY) # <<<<<<<<<<<<<< @@ -1984,7 +2003,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 __pyx_r = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)__pyx_v_6gensim_6models_14word2vec_inner_sdot(__pyx_v_N, __pyx_v_X, __pyx_v_incX, __pyx_v_Y, __pyx_v_incY)); goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":53 + /* "gensim/models/word2vec_inner.pyx":55 * * # for when fblas.sdot returns a float * cdef REAL_t our_dot_float(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: # <<<<<<<<<<<<<< @@ -1997,7 +2016,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":57 +/* "gensim/models/word2vec_inner.pyx":59 * * # for when no blas available * cdef REAL_t our_dot_noblas(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: # <<<<<<<<<<<<<< @@ -2011,7 +2030,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_r; int __pyx_t_1; - /* "gensim/models/word2vec_inner.pyx":61 + /* "gensim/models/word2vec_inner.pyx":63 * cdef int i * cdef REAL_t a * a = 0.0 # <<<<<<<<<<<<<< @@ -2020,7 +2039,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 */ __pyx_v_a = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.0); - /* "gensim/models/word2vec_inner.pyx":62 + /* "gensim/models/word2vec_inner.pyx":64 * cdef REAL_t a * a = 0.0 * for i from 0 <= i < N[0] by 1: # <<<<<<<<<<<<<< @@ -2030,7 +2049,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 __pyx_t_1 = (__pyx_v_N[0]); for (__pyx_v_i = 0; __pyx_v_i < __pyx_t_1; __pyx_v_i+=1) { - /* "gensim/models/word2vec_inner.pyx":63 + /* "gensim/models/word2vec_inner.pyx":65 * a = 0.0 * for i from 0 <= i < N[0] by 1: * a += X[i] * Y[i] # <<<<<<<<<<<<<< @@ -2040,7 +2059,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 __pyx_v_a = (__pyx_v_a + ((__pyx_v_X[__pyx_v_i]) * (__pyx_v_Y[__pyx_v_i]))); } - /* "gensim/models/word2vec_inner.pyx":64 + /* "gensim/models/word2vec_inner.pyx":66 * for i from 0 <= i < N[0] by 1: * a += X[i] * Y[i] * return a # <<<<<<<<<<<<<< @@ -2050,7 +2069,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 __pyx_r = __pyx_v_a; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":57 + /* "gensim/models/word2vec_inner.pyx":59 * * # for when no blas available * cdef REAL_t our_dot_noblas(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: # <<<<<<<<<<<<<< @@ -2063,7 +2082,7 @@ static __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_f_6gensim_6models_1 return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":67 +/* "gensim/models/word2vec_inner.pyx":69 * * # for when no blas available * cdef void our_saxpy_noblas(const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil: # <<<<<<<<<<<<<< @@ -2075,7 +2094,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_our_saxpy_noblas(int const int __pyx_v_i; int __pyx_t_1; - /* "gensim/models/word2vec_inner.pyx":69 + /* "gensim/models/word2vec_inner.pyx":71 * cdef void our_saxpy_noblas(const int 
*N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil: * cdef int i * for i from 0 <= i < N[0] by 1: # <<<<<<<<<<<<<< @@ -2085,17 +2104,17 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_our_saxpy_noblas(int const __pyx_t_1 = (__pyx_v_N[0]); for (__pyx_v_i = 0; __pyx_v_i < __pyx_t_1; __pyx_v_i+=1) { - /* "gensim/models/word2vec_inner.pyx":70 + /* "gensim/models/word2vec_inner.pyx":72 * cdef int i * for i from 0 <= i < N[0] by 1: * Y[i * (incY[0])] = (alpha[0]) * X[i * (incX[0])] + Y[i * (incY[0])] # <<<<<<<<<<<<<< * - * cdef void fast_sentence_sg_hs( + * cdef void w2v_fast_sentence_sg_hs( */ (__pyx_v_Y[(__pyx_v_i * (__pyx_v_incY[0]))]) = (((__pyx_v_alpha[0]) * (__pyx_v_X[(__pyx_v_i * (__pyx_v_incX[0]))])) + (__pyx_v_Y[(__pyx_v_i * (__pyx_v_incY[0]))])); } - /* "gensim/models/word2vec_inner.pyx":67 + /* "gensim/models/word2vec_inner.pyx":69 * * # for when no blas available * cdef void our_saxpy_noblas(const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil: # <<<<<<<<<<<<<< @@ -2106,15 +2125,15 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_our_saxpy_noblas(int const /* function exit code */ } -/* "gensim/models/word2vec_inner.pyx":72 +/* "gensim/models/word2vec_inner.pyx":74 * Y[i * (incY[0])] = (alpha[0]) * X[i * (incX[0])] + Y[i * (incY[0])] * - * cdef void fast_sentence_sg_hs( # <<<<<<<<<<<<<< + * cdef void w2v_fast_sentence_sg_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, * REAL_t *syn0, REAL_t *syn1, const int size, */ -static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t_5numpy_uint32_t const *__pyx_v_word_point, __pyx_t_5numpy_uint8_t const *__pyx_v_word_code, int const __pyx_v_codelen, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const __pyx_v_word2_index, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks, int const __pyx_v__compute_loss, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__running_training_loss_param) { +static void __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_hs(__pyx_t_5numpy_uint32_t const *__pyx_v_word_point, __pyx_t_5numpy_uint8_t const *__pyx_v_word_code, int const __pyx_v_codelen, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const __pyx_v_word2_index, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks, int const __pyx_v__compute_loss, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__running_training_loss_param) { PY_LONG_LONG __pyx_v_b; PY_LONG_LONG __pyx_v_row1; PY_LONG_LONG __pyx_v_row2; @@ -2129,7 +2148,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t int __pyx_t_4; int __pyx_t_5; - /* "gensim/models/word2vec_inner.pyx":113 + /* "gensim/models/word2vec_inner.pyx":115 * * cdef long long a, b * cdef long long row1 = word2_index * size, row2, sgn # <<<<<<<<<<<<<< @@ -2138,7 +2157,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ 
__pyx_v_row1 = (__pyx_v_word2_index * __pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":116 + /* "gensim/models/word2vec_inner.pyx":118 * cdef REAL_t f, g, f_dot, lprob * * memset(work, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -2147,7 +2166,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ (void)(memset(__pyx_v_work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/word2vec_inner.pyx":117 + /* "gensim/models/word2vec_inner.pyx":119 * * memset(work, 0, size * cython.sizeof(REAL_t)) * for b in range(codelen): # <<<<<<<<<<<<<< @@ -2159,7 +2178,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_b = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":118 + /* "gensim/models/word2vec_inner.pyx":120 * memset(work, 0, size * cython.sizeof(REAL_t)) * for b in range(codelen): * row2 = word_point[b] * size # <<<<<<<<<<<<<< @@ -2168,7 +2187,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_row2 = ((__pyx_v_word_point[__pyx_v_b]) * __pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":119 + /* "gensim/models/word2vec_inner.pyx":121 * for b in range(codelen): * row2 = word_point[b] * size * f_dot = our_dot(&size, &syn0[row1], &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -2177,7 +2196,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_f_dot = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), (&(__pyx_v_syn0[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":120 + /* "gensim/models/word2vec_inner.pyx":122 * row2 = word_point[b] * size * f_dot = our_dot(&size, &syn0[row1], &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2195,7 +2214,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t __pyx_L6_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":121 + /* "gensim/models/word2vec_inner.pyx":123 * f_dot = our_dot(&size, &syn0[row1], &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -2204,7 +2223,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":120 + /* "gensim/models/word2vec_inner.pyx":122 * row2 = word_point[b] * size * f_dot = our_dot(&size, &syn0[row1], &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2213,7 +2232,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ } - /* "gensim/models/word2vec_inner.pyx":122 + /* "gensim/models/word2vec_inner.pyx":124 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -2222,7 +2241,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":123 + /* "gensim/models/word2vec_inner.pyx":125 * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * g = (1 - word_code[b] - f) * alpha # <<<<<<<<<<<<<< @@ -2231,7 
+2250,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_g = (((1 - (__pyx_v_word_code[__pyx_v_b])) - __pyx_v_f) * __pyx_v_alpha); - /* "gensim/models/word2vec_inner.pyx":125 + /* "gensim/models/word2vec_inner.pyx":127 * g = (1 - word_code[b] - f) * alpha * * if _compute_loss == 1: # <<<<<<<<<<<<<< @@ -2241,7 +2260,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t __pyx_t_4 = ((__pyx_v__compute_loss == 1) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":126 + /* "gensim/models/word2vec_inner.pyx":128 * * if _compute_loss == 1: * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 # <<<<<<<<<<<<<< @@ -2250,7 +2269,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_sgn = __Pyx_pow_long(-1L, ((long)(__pyx_v_word_code[__pyx_v_b]))); - /* "gensim/models/word2vec_inner.pyx":127 + /* "gensim/models/word2vec_inner.pyx":129 * if _compute_loss == 1: * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * lprob = -1*sgn*f_dot # <<<<<<<<<<<<<< @@ -2259,7 +2278,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_lprob = ((-1LL * __pyx_v_sgn) * __pyx_v_f_dot); - /* "gensim/models/word2vec_inner.pyx":128 + /* "gensim/models/word2vec_inner.pyx":130 * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * lprob = -1*sgn*f_dot * if lprob <= -MAX_EXP or lprob >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2277,7 +2296,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t __pyx_L10_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":129 + /* "gensim/models/word2vec_inner.pyx":131 * lprob = -1*sgn*f_dot * if lprob <= -MAX_EXP or lprob >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -2286,7 +2305,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":128 + /* "gensim/models/word2vec_inner.pyx":130 * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * lprob = -1*sgn*f_dot * if lprob <= -MAX_EXP or lprob >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2295,7 +2314,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ } - /* "gensim/models/word2vec_inner.pyx":130 + /* "gensim/models/word2vec_inner.pyx":132 * if lprob <= -MAX_EXP or lprob >= MAX_EXP: * continue * lprob = LOG_TABLE[((lprob + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -2304,7 +2323,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_lprob = (__pyx_v_6gensim_6models_14word2vec_inner_LOG_TABLE[((int)((__pyx_v_lprob + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":131 + /* "gensim/models/word2vec_inner.pyx":133 * continue * lprob = LOG_TABLE[((lprob + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * _running_training_loss_param[0] = _running_training_loss_param[0] - lprob # <<<<<<<<<<<<<< @@ -2313,7 +2332,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ (__pyx_v__running_training_loss_param[0]) = ((__pyx_v__running_training_loss_param[0]) - __pyx_v_lprob); - /* "gensim/models/word2vec_inner.pyx":125 + /* "gensim/models/word2vec_inner.pyx":127 * g = (1 - word_code[b] - f) * alpha * * if _compute_loss == 1: # <<<<<<<<<<<<<< @@ -2322,7 +2341,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ } - /* "gensim/models/word2vec_inner.pyx":133 + /* 
"gensim/models/word2vec_inner.pyx":135 * _running_training_loss_param[0] = _running_training_loss_param[0] - lprob * * our_saxpy(&size, &g, &syn1[row2], &ONE, work, &ONE) # <<<<<<<<<<<<<< @@ -2331,7 +2350,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), __pyx_v_work, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":134 + /* "gensim/models/word2vec_inner.pyx":136 * * our_saxpy(&size, &g, &syn1[row2], &ONE, work, &ONE) * our_saxpy(&size, &g, &syn0[row1], &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -2342,7 +2361,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t __pyx_L3_continue:; } - /* "gensim/models/word2vec_inner.pyx":136 + /* "gensim/models/word2vec_inner.pyx":138 * our_saxpy(&size, &g, &syn0[row1], &ONE, &syn1[row2], &ONE) * * our_saxpy(&size, &word_locks[word2_index], work, &ONE, &syn0[row1], &ONE) # <<<<<<<<<<<<<< @@ -2351,10 +2370,10 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v_word_locks[__pyx_v_word2_index])), __pyx_v_work, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), (&(__pyx_v_syn0[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":72 + /* "gensim/models/word2vec_inner.pyx":74 * Y[i * (incY[0])] = (alpha[0]) * X[i * (incX[0])] + Y[i * (incY[0])] * - * cdef void fast_sentence_sg_hs( # <<<<<<<<<<<<<< + * cdef void w2v_fast_sentence_sg_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, * REAL_t *syn0, REAL_t *syn1, const int size, */ @@ -2362,7 +2381,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs(__pyx_t /* function exit code */ } -/* "gensim/models/word2vec_inner.pyx":140 +/* "gensim/models/word2vec_inner.pyx":142 * * # to support random draws from negative-sampling cum_table * cdef inline unsigned long long bisect_left(np.uint32_t *a, unsigned long long x, unsigned long long lo, unsigned long long hi) nogil: # <<<<<<<<<<<<<< @@ -2375,7 +2394,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in unsigned PY_LONG_LONG __pyx_r; int __pyx_t_1; - /* "gensim/models/word2vec_inner.pyx":142 + /* "gensim/models/word2vec_inner.pyx":144 * cdef inline unsigned long long bisect_left(np.uint32_t *a, unsigned long long x, unsigned long long lo, unsigned long long hi) nogil: * cdef unsigned long long mid * while hi > lo: # <<<<<<<<<<<<<< @@ -2386,7 +2405,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in __pyx_t_1 = ((__pyx_v_hi > __pyx_v_lo) != 0); if (!__pyx_t_1) break; - /* "gensim/models/word2vec_inner.pyx":143 + /* "gensim/models/word2vec_inner.pyx":145 * cdef unsigned long long mid * while hi > lo: * mid = (lo + hi) >> 1 # <<<<<<<<<<<<<< @@ -2395,7 +2414,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in */ __pyx_v_mid = ((__pyx_v_lo + __pyx_v_hi) >> 1); - /* "gensim/models/word2vec_inner.pyx":144 + /* "gensim/models/word2vec_inner.pyx":146 * while hi > lo: * mid = (lo + hi) >> 1 * if a[mid] >= x: # <<<<<<<<<<<<<< @@ -2405,7 +2424,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in __pyx_t_1 = (((__pyx_v_a[__pyx_v_mid]) >= 
__pyx_v_x) != 0); if (__pyx_t_1) { - /* "gensim/models/word2vec_inner.pyx":145 + /* "gensim/models/word2vec_inner.pyx":147 * mid = (lo + hi) >> 1 * if a[mid] >= x: * hi = mid # <<<<<<<<<<<<<< @@ -2414,7 +2433,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in */ __pyx_v_hi = __pyx_v_mid; - /* "gensim/models/word2vec_inner.pyx":144 + /* "gensim/models/word2vec_inner.pyx":146 * while hi > lo: * mid = (lo + hi) >> 1 * if a[mid] >= x: # <<<<<<<<<<<<<< @@ -2424,7 +2443,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in goto __pyx_L5; } - /* "gensim/models/word2vec_inner.pyx":147 + /* "gensim/models/word2vec_inner.pyx":149 * hi = mid * else: * lo = mid + 1 # <<<<<<<<<<<<<< @@ -2437,7 +2456,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in __pyx_L5:; } - /* "gensim/models/word2vec_inner.pyx":148 + /* "gensim/models/word2vec_inner.pyx":150 * else: * lo = mid + 1 * return lo # <<<<<<<<<<<<<< @@ -2447,7 +2466,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in __pyx_r = __pyx_v_lo; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":140 + /* "gensim/models/word2vec_inner.pyx":142 * * # to support random draws from negative-sampling cum_table * cdef inline unsigned long long bisect_left(np.uint32_t *a, unsigned long long x, unsigned long long lo, unsigned long long hi) nogil: # <<<<<<<<<<<<<< @@ -2460,7 +2479,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":152 +/* "gensim/models/word2vec_inner.pyx":154 * # this quick & dirty RNG apparently matches Java's (non-Secure)Random * # note this function side-effects next_random to set up the next number * cdef inline unsigned long long random_int32(unsigned long long *next_random) nogil: # <<<<<<<<<<<<<< @@ -2472,7 +2491,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in unsigned PY_LONG_LONG __pyx_v_this_random; unsigned PY_LONG_LONG __pyx_r; - /* "gensim/models/word2vec_inner.pyx":153 + /* "gensim/models/word2vec_inner.pyx":155 * # note this function side-effects next_random to set up the next number * cdef inline unsigned long long random_int32(unsigned long long *next_random) nogil: * cdef unsigned long long this_random = next_random[0] >> 16 # <<<<<<<<<<<<<< @@ -2481,7 +2500,7 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in */ __pyx_v_this_random = ((__pyx_v_next_random[0]) >> 16); - /* "gensim/models/word2vec_inner.pyx":154 + /* "gensim/models/word2vec_inner.pyx":156 * cdef inline unsigned long long random_int32(unsigned long long *next_random) nogil: * cdef unsigned long long this_random = next_random[0] >> 16 * next_random[0] = (next_random[0] * 25214903917ULL + 11) & 281474976710655ULL # <<<<<<<<<<<<<< @@ -2490,17 +2509,17 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in */ (__pyx_v_next_random[0]) = ((((__pyx_v_next_random[0]) * ((unsigned PY_LONG_LONG)25214903917ULL)) + 11) & 281474976710655ULL); - /* "gensim/models/word2vec_inner.pyx":155 + /* "gensim/models/word2vec_inner.pyx":157 * cdef unsigned long long this_random = next_random[0] >> 16 * next_random[0] = (next_random[0] * 25214903917ULL + 11) & 281474976710655ULL * return this_random # <<<<<<<<<<<<<< * - * cdef unsigned long long fast_sentence_sg_neg( + * cdef unsigned long long w2v_fast_sentence_sg_neg( */ __pyx_r = __pyx_v_this_random; goto __pyx_L0; 
- /* "gensim/models/word2vec_inner.pyx":152 + /* "gensim/models/word2vec_inner.pyx":154 * # this quick & dirty RNG apparently matches Java's (non-Secure)Random * # note this function side-effects next_random to set up the next number * cdef inline unsigned long long random_int32(unsigned long long *next_random) nogil: # <<<<<<<<<<<<<< @@ -2513,15 +2532,15 @@ static CYTHON_INLINE unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_in return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":157 +/* "gensim/models/word2vec_inner.pyx":159 * return this_random * - * cdef unsigned long long fast_sentence_sg_neg( # <<<<<<<<<<<<<< + * cdef unsigned long long w2v_fast_sentence_sg_neg( # <<<<<<<<<<<<<< * const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, * REAL_t *syn0, REAL_t *syn1neg, const int size, const np.uint32_t word_index, */ -static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_neg(int const __pyx_v_negative, __pyx_t_5numpy_uint32_t *__pyx_v_cum_table, unsigned PY_LONG_LONG __pyx_v_cum_table_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const __pyx_v_word_index, __pyx_t_5numpy_uint32_t const __pyx_v_word2_index, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, unsigned PY_LONG_LONG __pyx_v_next_random, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks, int const __pyx_v__compute_loss, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__running_training_loss_param) { +static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_neg(int const __pyx_v_negative, __pyx_t_5numpy_uint32_t *__pyx_v_cum_table, unsigned PY_LONG_LONG __pyx_v_cum_table_len, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const __pyx_v_word_index, __pyx_t_5numpy_uint32_t const __pyx_v_word2_index, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, unsigned PY_LONG_LONG __pyx_v_next_random, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks, int const __pyx_v__compute_loss, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__running_training_loss_param) { PY_LONG_LONG __pyx_v_row1; PY_LONG_LONG __pyx_v_row2; unsigned PY_LONG_LONG __pyx_v_modulo; @@ -2540,7 +2559,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente int __pyx_t_5; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_6; - /* "gensim/models/word2vec_inner.pyx":207 + /* "gensim/models/word2vec_inner.pyx":209 * """ * cdef long long a * cdef long long row1 = word2_index * size, row2 # <<<<<<<<<<<<<< @@ -2549,7 +2568,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_row1 = (__pyx_v_word2_index * __pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":208 + /* "gensim/models/word2vec_inner.pyx":210 * cdef long long a * cdef long long row1 = word2_index * size, row2 * cdef unsigned long long modulo = 281474976710655ULL # <<<<<<<<<<<<<< @@ -2558,7 +2577,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_modulo = 281474976710655ULL; - /* "gensim/models/word2vec_inner.pyx":213 + 
/* "gensim/models/word2vec_inner.pyx":215 * cdef int d * * memset(work, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -2567,7 +2586,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ (void)(memset(__pyx_v_work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/word2vec_inner.pyx":215 + /* "gensim/models/word2vec_inner.pyx":217 * memset(work, 0, size * cython.sizeof(REAL_t)) * * for d in range(negative+1): # <<<<<<<<<<<<<< @@ -2579,7 +2598,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_d = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":216 + /* "gensim/models/word2vec_inner.pyx":218 * * for d in range(negative+1): * if d == 0: # <<<<<<<<<<<<<< @@ -2589,7 +2608,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v_d == 0) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":217 + /* "gensim/models/word2vec_inner.pyx":219 * for d in range(negative+1): * if d == 0: * target_index = word_index # <<<<<<<<<<<<<< @@ -2598,7 +2617,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_target_index = __pyx_v_word_index; - /* "gensim/models/word2vec_inner.pyx":218 + /* "gensim/models/word2vec_inner.pyx":220 * if d == 0: * target_index = word_index * label = ONEF # <<<<<<<<<<<<<< @@ -2607,7 +2626,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_label = __pyx_v_6gensim_6models_14word2vec_inner_ONEF; - /* "gensim/models/word2vec_inner.pyx":216 + /* "gensim/models/word2vec_inner.pyx":218 * * for d in range(negative+1): * if d == 0: # <<<<<<<<<<<<<< @@ -2617,7 +2636,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente goto __pyx_L5; } - /* "gensim/models/word2vec_inner.pyx":220 + /* "gensim/models/word2vec_inner.pyx":222 * label = ONEF * else: * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) # <<<<<<<<<<<<<< @@ -2627,7 +2646,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente /*else*/ { __pyx_v_target_index = __pyx_f_6gensim_6models_14word2vec_inner_bisect_left(__pyx_v_cum_table, ((__pyx_v_next_random >> 16) % (__pyx_v_cum_table[(__pyx_v_cum_table_len - 1)])), 0, __pyx_v_cum_table_len); - /* "gensim/models/word2vec_inner.pyx":221 + /* "gensim/models/word2vec_inner.pyx":223 * else: * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo # <<<<<<<<<<<<<< @@ -2636,7 +2655,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_next_random = (((__pyx_v_next_random * ((unsigned PY_LONG_LONG)25214903917ULL)) + 11) & __pyx_v_modulo); - /* "gensim/models/word2vec_inner.pyx":222 + /* "gensim/models/word2vec_inner.pyx":224 * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: # <<<<<<<<<<<<<< @@ -2646,7 +2665,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v_target_index == __pyx_v_word_index) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":223 + /* 
"gensim/models/word2vec_inner.pyx":225 * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: * continue # <<<<<<<<<<<<<< @@ -2655,7 +2674,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":222 + /* "gensim/models/word2vec_inner.pyx":224 * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: # <<<<<<<<<<<<<< @@ -2664,7 +2683,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":224 + /* "gensim/models/word2vec_inner.pyx":226 * if target_index == word_index: * continue * label = 0.0 # <<<<<<<<<<<<<< @@ -2675,7 +2694,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente } __pyx_L5:; - /* "gensim/models/word2vec_inner.pyx":226 + /* "gensim/models/word2vec_inner.pyx":228 * label = 0.0 * * row2 = target_index * size # <<<<<<<<<<<<<< @@ -2684,7 +2703,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_row2 = (__pyx_v_target_index * __pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":227 + /* "gensim/models/word2vec_inner.pyx":229 * * row2 = target_index * size * f_dot = our_dot(&size, &syn0[row1], &ONE, &syn1neg[row2], &ONE) # <<<<<<<<<<<<<< @@ -2693,7 +2712,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_f_dot = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), (&(__pyx_v_syn0[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), (&(__pyx_v_syn1neg[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":228 + /* "gensim/models/word2vec_inner.pyx":230 * row2 = target_index * size * f_dot = our_dot(&size, &syn0[row1], &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2711,7 +2730,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_L8_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":229 + /* "gensim/models/word2vec_inner.pyx":231 * f_dot = our_dot(&size, &syn0[row1], &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -2720,7 +2739,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":228 + /* "gensim/models/word2vec_inner.pyx":230 * row2 = target_index * size * f_dot = our_dot(&size, &syn0[row1], &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2729,7 +2748,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":230 + /* "gensim/models/word2vec_inner.pyx":232 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -2738,7 +2757,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":231 + /* "gensim/models/word2vec_inner.pyx":233 * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP 
/ 2))] * g = (label - f) * alpha # <<<<<<<<<<<<<< @@ -2747,7 +2766,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_g = ((__pyx_v_label - __pyx_v_f) * __pyx_v_alpha); - /* "gensim/models/word2vec_inner.pyx":233 + /* "gensim/models/word2vec_inner.pyx":235 * g = (label - f) * alpha * * if _compute_loss == 1: # <<<<<<<<<<<<<< @@ -2757,7 +2776,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v__compute_loss == 1) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":234 + /* "gensim/models/word2vec_inner.pyx":236 * * if _compute_loss == 1: * f_dot = (f_dot if d == 0 else -f_dot) # <<<<<<<<<<<<<< @@ -2771,7 +2790,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente } __pyx_v_f_dot = __pyx_t_6; - /* "gensim/models/word2vec_inner.pyx":235 + /* "gensim/models/word2vec_inner.pyx":237 * if _compute_loss == 1: * f_dot = (f_dot if d == 0 else -f_dot) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2789,7 +2808,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_L12_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":236 + /* "gensim/models/word2vec_inner.pyx":238 * f_dot = (f_dot if d == 0 else -f_dot) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -2798,7 +2817,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":235 + /* "gensim/models/word2vec_inner.pyx":237 * if _compute_loss == 1: * f_dot = (f_dot if d == 0 else -f_dot) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -2807,7 +2826,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":237 + /* "gensim/models/word2vec_inner.pyx":239 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * log_e_f_dot = LOG_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -2816,7 +2835,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_log_e_f_dot = (__pyx_v_6gensim_6models_14word2vec_inner_LOG_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":238 + /* "gensim/models/word2vec_inner.pyx":240 * continue * log_e_f_dot = LOG_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * _running_training_loss_param[0] = _running_training_loss_param[0] - log_e_f_dot # <<<<<<<<<<<<<< @@ -2825,7 +2844,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ (__pyx_v__running_training_loss_param[0]) = ((__pyx_v__running_training_loss_param[0]) - __pyx_v_log_e_f_dot); - /* "gensim/models/word2vec_inner.pyx":233 + /* "gensim/models/word2vec_inner.pyx":235 * g = (label - f) * alpha * * if _compute_loss == 1: # <<<<<<<<<<<<<< @@ -2834,7 +2853,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":240 + /* "gensim/models/word2vec_inner.pyx":242 * _running_training_loss_param[0] = _running_training_loss_param[0] - log_e_f_dot * * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) # <<<<<<<<<<<<<< @@ -2843,7 +2862,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), (&(__pyx_v_syn1neg[__pyx_v_row2])), 
(&__pyx_v_6gensim_6models_14word2vec_inner_ONE), __pyx_v_work, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":241 + /* "gensim/models/word2vec_inner.pyx":243 * * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) * our_saxpy(&size, &g, &syn0[row1], &ONE, &syn1neg[row2], &ONE) # <<<<<<<<<<<<<< @@ -2854,7 +2873,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_L3_continue:; } - /* "gensim/models/word2vec_inner.pyx":243 + /* "gensim/models/word2vec_inner.pyx":245 * our_saxpy(&size, &g, &syn0[row1], &ONE, &syn1neg[row2], &ONE) * * our_saxpy(&size, &word_locks[word2_index], work, &ONE, &syn0[row1], &ONE) # <<<<<<<<<<<<<< @@ -2863,7 +2882,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&(__pyx_v_word_locks[__pyx_v_word2_index])), __pyx_v_work, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), (&(__pyx_v_syn0[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":245 + /* "gensim/models/word2vec_inner.pyx":247 * our_saxpy(&size, &word_locks[word2_index], work, &ONE, &syn0[row1], &ONE) * * return next_random # <<<<<<<<<<<<<< @@ -2873,10 +2892,10 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_r = __pyx_v_next_random; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":157 + /* "gensim/models/word2vec_inner.pyx":159 * return this_random * - * cdef unsigned long long fast_sentence_sg_neg( # <<<<<<<<<<<<<< + * cdef unsigned long long w2v_fast_sentence_sg_neg( # <<<<<<<<<<<<<< * const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, * REAL_t *syn0, REAL_t *syn1neg, const int size, const np.uint32_t word_index, */ @@ -2886,15 +2905,15 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":248 +/* "gensim/models/word2vec_inner.pyx":250 * * - * cdef void fast_sentence_cbow_hs( # <<<<<<<<<<<<<< + * cdef void w2v_fast_sentence_cbow_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], * REAL_t *neu1, REAL_t *syn0, REAL_t *syn1, const int size, */ -static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx_t_5numpy_uint32_t const *__pyx_v_word_point, __pyx_t_5numpy_uint8_t const *__pyx_v_word_code, int *__pyx_v_codelens, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_indexes, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, int __pyx_v_i, int __pyx_v_j, int __pyx_v_k, int __pyx_v_cbow_mean, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks, int const __pyx_v__compute_loss, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__running_training_loss_param) { +static void __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_hs(__pyx_t_5numpy_uint32_t const *__pyx_v_word_point, __pyx_t_5numpy_uint8_t const *__pyx_v_word_code, int *__pyx_v_codelens, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t 
*__pyx_v_syn1, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_indexes, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, int __pyx_v_i, int __pyx_v_j, int __pyx_v_k, int __pyx_v_cbow_mean, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks, int const __pyx_v__compute_loss, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__running_training_loss_param) { PY_LONG_LONG __pyx_v_b; PY_LONG_LONG __pyx_v_row2; PY_LONG_LONG __pyx_v_sgn; @@ -2912,7 +2931,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx PY_LONG_LONG __pyx_t_5; int __pyx_t_6; - /* "gensim/models/word2vec_inner.pyx":300 + /* "gensim/models/word2vec_inner.pyx":302 * cdef long long a, b * cdef long long row2, sgn * cdef REAL_t f, g, count, inv_count = 1.0, f_dot, lprob # <<<<<<<<<<<<<< @@ -2921,7 +2940,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_inv_count = 1.0; - /* "gensim/models/word2vec_inner.pyx":303 + /* "gensim/models/word2vec_inner.pyx":305 * cdef int m * * memset(neu1, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -2930,7 +2949,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ (void)(memset(__pyx_v_neu1, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/word2vec_inner.pyx":304 + /* "gensim/models/word2vec_inner.pyx":306 * * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 # <<<<<<<<<<<<<< @@ -2939,7 +2958,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_count = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.0); - /* "gensim/models/word2vec_inner.pyx":305 + /* "gensim/models/word2vec_inner.pyx":307 * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 * for m in range(j, k): # <<<<<<<<<<<<<< @@ -2951,7 +2970,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":306 + /* "gensim/models/word2vec_inner.pyx":308 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -2961,7 +2980,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((__pyx_v_m == __pyx_v_i) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":307 + /* "gensim/models/word2vec_inner.pyx":309 * for m in range(j, k): * if m == i: * continue # <<<<<<<<<<<<<< @@ -2970,7 +2989,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":306 + /* "gensim/models/word2vec_inner.pyx":308 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -2979,7 +2998,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/word2vec_inner.pyx":309 + /* "gensim/models/word2vec_inner.pyx":311 * continue * else: * count += ONEF # <<<<<<<<<<<<<< @@ -2989,7 +3008,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx /*else*/ { __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_14word2vec_inner_ONEF); - /* "gensim/models/word2vec_inner.pyx":310 + /* "gensim/models/word2vec_inner.pyx":312 * else: * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) # <<<<<<<<<<<<<< 
@@ -3001,7 +3020,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_L3_continue:; } - /* "gensim/models/word2vec_inner.pyx":311 + /* "gensim/models/word2vec_inner.pyx":313 * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< @@ -3011,7 +3030,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((__pyx_v_count > ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.5)) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":312 + /* "gensim/models/word2vec_inner.pyx":314 * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): * inv_count = ONEF/count # <<<<<<<<<<<<<< @@ -3020,7 +3039,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_inv_count = (__pyx_v_6gensim_6models_14word2vec_inner_ONEF / __pyx_v_count); - /* "gensim/models/word2vec_inner.pyx":311 + /* "gensim/models/word2vec_inner.pyx":313 * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< @@ -3029,7 +3048,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/word2vec_inner.pyx":313 + /* "gensim/models/word2vec_inner.pyx":315 * if count > (0.5): * inv_count = ONEF/count * if cbow_mean: # <<<<<<<<<<<<<< @@ -3039,7 +3058,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = (__pyx_v_cbow_mean != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":314 + /* "gensim/models/word2vec_inner.pyx":316 * inv_count = ONEF/count * if cbow_mean: * sscal(&size, &inv_count, neu1, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< @@ -3048,7 +3067,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":313 + /* "gensim/models/word2vec_inner.pyx":315 * if count > (0.5): * inv_count = ONEF/count * if cbow_mean: # <<<<<<<<<<<<<< @@ -3057,7 +3076,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/word2vec_inner.pyx":316 + /* "gensim/models/word2vec_inner.pyx":318 * sscal(&size, &inv_count, neu1, &ONE) # (does this need BLAS-variants like saxpy?) 
* * memset(work, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -3066,7 +3085,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ (void)(memset(__pyx_v_work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/word2vec_inner.pyx":317 + /* "gensim/models/word2vec_inner.pyx":319 * * memset(work, 0, size * cython.sizeof(REAL_t)) * for b in range(codelens[i]): # <<<<<<<<<<<<<< @@ -3078,7 +3097,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_2; __pyx_t_5+=1) { __pyx_v_b = __pyx_t_5; - /* "gensim/models/word2vec_inner.pyx":318 + /* "gensim/models/word2vec_inner.pyx":320 * memset(work, 0, size * cython.sizeof(REAL_t)) * for b in range(codelens[i]): * row2 = word_point[b] * size # <<<<<<<<<<<<<< @@ -3087,7 +3106,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_row2 = ((__pyx_v_word_point[__pyx_v_b]) * __pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":319 + /* "gensim/models/word2vec_inner.pyx":321 * for b in range(codelens[i]): * row2 = word_point[b] * size * f_dot = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -3096,7 +3115,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_f_dot = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":320 + /* "gensim/models/word2vec_inner.pyx":322 * row2 = word_point[b] * size * f_dot = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3114,7 +3133,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_L11_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":321 + /* "gensim/models/word2vec_inner.pyx":323 * f_dot = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -3123,7 +3142,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ goto __pyx_L8_continue; - /* "gensim/models/word2vec_inner.pyx":320 + /* "gensim/models/word2vec_inner.pyx":322 * row2 = word_point[b] * size * f_dot = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3132,7 +3151,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/word2vec_inner.pyx":322 + /* "gensim/models/word2vec_inner.pyx":324 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -3141,7 +3160,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":323 + /* "gensim/models/word2vec_inner.pyx":325 * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * g = (1 - word_code[b] - f) * alpha # <<<<<<<<<<<<<< @@ -3150,7 +3169,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_g = (((1 - (__pyx_v_word_code[__pyx_v_b])) - __pyx_v_f) * __pyx_v_alpha); - /* 
"gensim/models/word2vec_inner.pyx":325 + /* "gensim/models/word2vec_inner.pyx":327 * g = (1 - word_code[b] - f) * alpha * * if _compute_loss == 1: # <<<<<<<<<<<<<< @@ -3160,7 +3179,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((__pyx_v__compute_loss == 1) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":326 + /* "gensim/models/word2vec_inner.pyx":328 * * if _compute_loss == 1: * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 # <<<<<<<<<<<<<< @@ -3169,7 +3188,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_sgn = __Pyx_pow_long(-1L, ((long)(__pyx_v_word_code[__pyx_v_b]))); - /* "gensim/models/word2vec_inner.pyx":327 + /* "gensim/models/word2vec_inner.pyx":329 * if _compute_loss == 1: * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * lprob = -1*sgn*f_dot # <<<<<<<<<<<<<< @@ -3178,7 +3197,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_lprob = ((-1LL * __pyx_v_sgn) * __pyx_v_f_dot); - /* "gensim/models/word2vec_inner.pyx":328 + /* "gensim/models/word2vec_inner.pyx":330 * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * lprob = -1*sgn*f_dot * if lprob <= -MAX_EXP or lprob >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3196,7 +3215,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_L15_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":329 + /* "gensim/models/word2vec_inner.pyx":331 * lprob = -1*sgn*f_dot * if lprob <= -MAX_EXP or lprob >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -3205,7 +3224,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ goto __pyx_L8_continue; - /* "gensim/models/word2vec_inner.pyx":328 + /* "gensim/models/word2vec_inner.pyx":330 * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * lprob = -1*sgn*f_dot * if lprob <= -MAX_EXP or lprob >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3214,7 +3233,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/word2vec_inner.pyx":330 + /* "gensim/models/word2vec_inner.pyx":332 * if lprob <= -MAX_EXP or lprob >= MAX_EXP: * continue * lprob = LOG_TABLE[((lprob + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -3223,7 +3242,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_lprob = (__pyx_v_6gensim_6models_14word2vec_inner_LOG_TABLE[((int)((__pyx_v_lprob + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":331 + /* "gensim/models/word2vec_inner.pyx":333 * continue * lprob = LOG_TABLE[((lprob + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * _running_training_loss_param[0] = _running_training_loss_param[0] - lprob # <<<<<<<<<<<<<< @@ -3232,7 +3251,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ (__pyx_v__running_training_loss_param[0]) = ((__pyx_v__running_training_loss_param[0]) - __pyx_v_lprob); - /* "gensim/models/word2vec_inner.pyx":325 + /* "gensim/models/word2vec_inner.pyx":327 * g = (1 - word_code[b] - f) * alpha * * if _compute_loss == 1: # <<<<<<<<<<<<<< @@ -3241,7 +3260,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/word2vec_inner.pyx":333 + /* "gensim/models/word2vec_inner.pyx":335 * _running_training_loss_param[0] = _running_training_loss_param[0] - lprob * * our_saxpy(&size, &g, &syn1[row2], &ONE, work, &ONE) # <<<<<<<<<<<<<< @@ -3250,7 
+3269,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), __pyx_v_work, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":334 + /* "gensim/models/word2vec_inner.pyx":336 * * our_saxpy(&size, &g, &syn1[row2], &ONE, work, &ONE) * our_saxpy(&size, &g, neu1, &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -3261,7 +3280,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_L8_continue:; } - /* "gensim/models/word2vec_inner.pyx":336 + /* "gensim/models/word2vec_inner.pyx":338 * our_saxpy(&size, &g, neu1, &ONE, &syn1[row2], &ONE) * * if not cbow_mean: # divide error over summed window vectors # <<<<<<<<<<<<<< @@ -3271,7 +3290,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((!(__pyx_v_cbow_mean != 0)) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":337 + /* "gensim/models/word2vec_inner.pyx":339 * * if not cbow_mean: # divide error over summed window vectors * sscal(&size, &inv_count, work, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< @@ -3280,7 +3299,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_work, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":336 + /* "gensim/models/word2vec_inner.pyx":338 * our_saxpy(&size, &g, neu1, &ONE, &syn1[row2], &ONE) * * if not cbow_mean: # divide error over summed window vectors # <<<<<<<<<<<<<< @@ -3289,7 +3308,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/word2vec_inner.pyx":339 + /* "gensim/models/word2vec_inner.pyx":341 * sscal(&size, &inv_count, work, &ONE) # (does this need BLAS-variants like saxpy?) 
* * for m in range(j, k): # <<<<<<<<<<<<<< @@ -3301,7 +3320,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":340 + /* "gensim/models/word2vec_inner.pyx":342 * * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -3311,7 +3330,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_t_4 = ((__pyx_v_m == __pyx_v_i) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":341 + /* "gensim/models/word2vec_inner.pyx":343 * for m in range(j, k): * if m == i: * continue # <<<<<<<<<<<<<< @@ -3320,7 +3339,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ goto __pyx_L18_continue; - /* "gensim/models/word2vec_inner.pyx":340 + /* "gensim/models/word2vec_inner.pyx":342 * * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -3329,7 +3348,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx */ } - /* "gensim/models/word2vec_inner.pyx":343 + /* "gensim/models/word2vec_inner.pyx":345 * continue * else: * our_saxpy(&size, &word_locks[indexes[m]], work, &ONE, &syn0[indexes[m] * size], &ONE) # <<<<<<<<<<<<<< @@ -3342,10 +3361,10 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx __pyx_L18_continue:; } - /* "gensim/models/word2vec_inner.pyx":248 + /* "gensim/models/word2vec_inner.pyx":250 * * - * cdef void fast_sentence_cbow_hs( # <<<<<<<<<<<<<< + * cdef void w2v_fast_sentence_cbow_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], * REAL_t *neu1, REAL_t *syn0, REAL_t *syn1, const int size, */ @@ -3353,15 +3372,15 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs(__pyx /* function exit code */ } -/* "gensim/models/word2vec_inner.pyx":346 +/* "gensim/models/word2vec_inner.pyx":348 * * - * cdef unsigned long long fast_sentence_cbow_neg( # <<<<<<<<<<<<<< + * cdef unsigned long long w2v_fast_sentence_cbow_neg( # <<<<<<<<<<<<<< * const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, int codelens[MAX_SENTENCE_LEN], * REAL_t *neu1, REAL_t *syn0, REAL_t *syn1neg, const int size, */ -static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_neg(int const __pyx_v_negative, __pyx_t_5numpy_uint32_t *__pyx_v_cum_table, unsigned PY_LONG_LONG __pyx_v_cum_table_len, CYTHON_UNUSED int *__pyx_v_codelens, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_indexes, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, int __pyx_v_i, int __pyx_v_j, int __pyx_v_k, int __pyx_v_cbow_mean, unsigned PY_LONG_LONG __pyx_v_next_random, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks, int const __pyx_v__compute_loss, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__running_training_loss_param) { +static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_neg(int const __pyx_v_negative, __pyx_t_5numpy_uint32_t *__pyx_v_cum_table, unsigned PY_LONG_LONG __pyx_v_cum_table_len, CYTHON_UNUSED int *__pyx_v_codelens, 
__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg, int const __pyx_v_size, __pyx_t_5numpy_uint32_t const *__pyx_v_indexes, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const __pyx_v_alpha, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work, int __pyx_v_i, int __pyx_v_j, int __pyx_v_k, int __pyx_v_cbow_mean, unsigned PY_LONG_LONG __pyx_v_next_random, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks, int const __pyx_v__compute_loss, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v__running_training_loss_param) { PY_LONG_LONG __pyx_v_row2; unsigned PY_LONG_LONG __pyx_v_modulo; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v_f; @@ -3385,7 +3404,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente int __pyx_t_7; __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_8; - /* "gensim/models/word2vec_inner.pyx":403 + /* "gensim/models/word2vec_inner.pyx":405 * cdef long long a * cdef long long row2 * cdef unsigned long long modulo = 281474976710655ULL # <<<<<<<<<<<<<< @@ -3394,7 +3413,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_modulo = 281474976710655ULL; - /* "gensim/models/word2vec_inner.pyx":404 + /* "gensim/models/word2vec_inner.pyx":406 * cdef long long row2 * cdef unsigned long long modulo = 281474976710655ULL * cdef REAL_t f, g, count, inv_count = 1.0, label, log_e_f_dot, f_dot # <<<<<<<<<<<<<< @@ -3403,7 +3422,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_inv_count = 1.0; - /* "gensim/models/word2vec_inner.pyx":408 + /* "gensim/models/word2vec_inner.pyx":410 * cdef int d, m * * word_index = indexes[i] # <<<<<<<<<<<<<< @@ -3412,7 +3431,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_word_index = (__pyx_v_indexes[__pyx_v_i]); - /* "gensim/models/word2vec_inner.pyx":410 + /* "gensim/models/word2vec_inner.pyx":412 * word_index = indexes[i] * * memset(neu1, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -3421,7 +3440,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ (void)(memset(__pyx_v_neu1, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/word2vec_inner.pyx":411 + /* "gensim/models/word2vec_inner.pyx":413 * * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 # <<<<<<<<<<<<<< @@ -3430,7 +3449,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_count = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.0); - /* "gensim/models/word2vec_inner.pyx":412 + /* "gensim/models/word2vec_inner.pyx":414 * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 * for m in range(j, k): # <<<<<<<<<<<<<< @@ -3442,7 +3461,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":413 + /* "gensim/models/word2vec_inner.pyx":415 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -3452,7 +3471,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v_m == __pyx_v_i) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":414 + /* 
"gensim/models/word2vec_inner.pyx":416 * for m in range(j, k): * if m == i: * continue # <<<<<<<<<<<<<< @@ -3461,7 +3480,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":413 + /* "gensim/models/word2vec_inner.pyx":415 * count = 0.0 * for m in range(j, k): * if m == i: # <<<<<<<<<<<<<< @@ -3470,7 +3489,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":416 + /* "gensim/models/word2vec_inner.pyx":418 * continue * else: * count += ONEF # <<<<<<<<<<<<<< @@ -3480,7 +3499,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente /*else*/ { __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_14word2vec_inner_ONEF); - /* "gensim/models/word2vec_inner.pyx":417 + /* "gensim/models/word2vec_inner.pyx":419 * else: * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) # <<<<<<<<<<<<<< @@ -3492,7 +3511,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_L3_continue:; } - /* "gensim/models/word2vec_inner.pyx":418 + /* "gensim/models/word2vec_inner.pyx":420 * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< @@ -3502,7 +3521,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v_count > ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.5)) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":419 + /* "gensim/models/word2vec_inner.pyx":421 * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): * inv_count = ONEF/count # <<<<<<<<<<<<<< @@ -3511,7 +3530,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_inv_count = (__pyx_v_6gensim_6models_14word2vec_inner_ONEF / __pyx_v_count); - /* "gensim/models/word2vec_inner.pyx":418 + /* "gensim/models/word2vec_inner.pyx":420 * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< @@ -3520,7 +3539,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":420 + /* "gensim/models/word2vec_inner.pyx":422 * if count > (0.5): * inv_count = ONEF/count * if cbow_mean: # <<<<<<<<<<<<<< @@ -3530,7 +3549,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = (__pyx_v_cbow_mean != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":421 + /* "gensim/models/word2vec_inner.pyx":423 * inv_count = ONEF/count * if cbow_mean: * sscal(&size, &inv_count, neu1, &ONE) # (does this need BLAS-variants like saxpy?) 
# <<<<<<<<<<<<<< @@ -3539,7 +3558,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":420 + /* "gensim/models/word2vec_inner.pyx":422 * if count > (0.5): * inv_count = ONEF/count * if cbow_mean: # <<<<<<<<<<<<<< @@ -3548,7 +3567,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":423 + /* "gensim/models/word2vec_inner.pyx":425 * sscal(&size, &inv_count, neu1, &ONE) # (does this need BLAS-variants like saxpy?) * * memset(work, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -3557,7 +3576,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ (void)(memset(__pyx_v_work, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/word2vec_inner.pyx":425 + /* "gensim/models/word2vec_inner.pyx":427 * memset(work, 0, size * cython.sizeof(REAL_t)) * * for d in range(negative+1): # <<<<<<<<<<<<<< @@ -3569,7 +3588,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente for (__pyx_t_1 = 0; __pyx_t_1 < __pyx_t_6; __pyx_t_1+=1) { __pyx_v_d = __pyx_t_1; - /* "gensim/models/word2vec_inner.pyx":426 + /* "gensim/models/word2vec_inner.pyx":428 * * for d in range(negative+1): * if d == 0: # <<<<<<<<<<<<<< @@ -3579,7 +3598,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v_d == 0) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":427 + /* "gensim/models/word2vec_inner.pyx":429 * for d in range(negative+1): * if d == 0: * target_index = word_index # <<<<<<<<<<<<<< @@ -3588,7 +3607,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_target_index = __pyx_v_word_index; - /* "gensim/models/word2vec_inner.pyx":428 + /* "gensim/models/word2vec_inner.pyx":430 * if d == 0: * target_index = word_index * label = ONEF # <<<<<<<<<<<<<< @@ -3597,7 +3616,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_label = __pyx_v_6gensim_6models_14word2vec_inner_ONEF; - /* "gensim/models/word2vec_inner.pyx":426 + /* "gensim/models/word2vec_inner.pyx":428 * * for d in range(negative+1): * if d == 0: # <<<<<<<<<<<<<< @@ -3607,7 +3626,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente goto __pyx_L10; } - /* "gensim/models/word2vec_inner.pyx":430 + /* "gensim/models/word2vec_inner.pyx":432 * label = ONEF * else: * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) # <<<<<<<<<<<<<< @@ -3617,7 +3636,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente /*else*/ { __pyx_v_target_index = __pyx_f_6gensim_6models_14word2vec_inner_bisect_left(__pyx_v_cum_table, ((__pyx_v_next_random >> 16) % (__pyx_v_cum_table[(__pyx_v_cum_table_len - 1)])), 0, __pyx_v_cum_table_len); - /* "gensim/models/word2vec_inner.pyx":431 + /* "gensim/models/word2vec_inner.pyx":433 * else: * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo # <<<<<<<<<<<<<< @@ -3626,7 +3645,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ 
__pyx_v_next_random = (((__pyx_v_next_random * ((unsigned PY_LONG_LONG)25214903917ULL)) + 11) & __pyx_v_modulo); - /* "gensim/models/word2vec_inner.pyx":432 + /* "gensim/models/word2vec_inner.pyx":434 * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: # <<<<<<<<<<<<<< @@ -3636,7 +3655,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v_target_index == __pyx_v_word_index) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":433 + /* "gensim/models/word2vec_inner.pyx":435 * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: * continue # <<<<<<<<<<<<<< @@ -3645,7 +3664,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ goto __pyx_L8_continue; - /* "gensim/models/word2vec_inner.pyx":432 + /* "gensim/models/word2vec_inner.pyx":434 * target_index = bisect_left(cum_table, (next_random >> 16) % cum_table[cum_table_len-1], 0, cum_table_len) * next_random = (next_random * 25214903917ULL + 11) & modulo * if target_index == word_index: # <<<<<<<<<<<<<< @@ -3654,7 +3673,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":434 + /* "gensim/models/word2vec_inner.pyx":436 * if target_index == word_index: * continue * label = 0.0 # <<<<<<<<<<<<<< @@ -3665,7 +3684,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente } __pyx_L10:; - /* "gensim/models/word2vec_inner.pyx":436 + /* "gensim/models/word2vec_inner.pyx":438 * label = 0.0 * * row2 = target_index * size # <<<<<<<<<<<<<< @@ -3674,7 +3693,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_row2 = (__pyx_v_target_index * __pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":437 + /* "gensim/models/word2vec_inner.pyx":439 * * row2 = target_index * size * f_dot = our_dot(&size, neu1, &ONE, &syn1neg[row2], &ONE) # <<<<<<<<<<<<<< @@ -3683,7 +3702,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_f_dot = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), (&(__pyx_v_syn1neg[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":438 + /* "gensim/models/word2vec_inner.pyx":440 * row2 = target_index * size * f_dot = our_dot(&size, neu1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3701,7 +3720,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_L13_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":439 + /* "gensim/models/word2vec_inner.pyx":441 * f_dot = our_dot(&size, neu1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -3710,7 +3729,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ goto __pyx_L8_continue; - /* "gensim/models/word2vec_inner.pyx":438 + /* "gensim/models/word2vec_inner.pyx":440 * row2 = target_index * size * f_dot = our_dot(&size, neu1, &ONE, &syn1neg[row2], &ONE) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3719,7 +3738,7 @@ static unsigned PY_LONG_LONG 
__pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":440 + /* "gensim/models/word2vec_inner.pyx":442 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -3728,7 +3747,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":441 + /* "gensim/models/word2vec_inner.pyx":443 * continue * f = EXP_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * g = (label - f) * alpha # <<<<<<<<<<<<<< @@ -3737,7 +3756,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_g = ((__pyx_v_label - __pyx_v_f) * __pyx_v_alpha); - /* "gensim/models/word2vec_inner.pyx":443 + /* "gensim/models/word2vec_inner.pyx":445 * g = (label - f) * alpha * * if _compute_loss == 1: # <<<<<<<<<<<<<< @@ -3747,7 +3766,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v__compute_loss == 1) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":444 + /* "gensim/models/word2vec_inner.pyx":446 * * if _compute_loss == 1: * f_dot = (f_dot if d == 0 else -f_dot) # <<<<<<<<<<<<<< @@ -3761,7 +3780,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente } __pyx_v_f_dot = __pyx_t_8; - /* "gensim/models/word2vec_inner.pyx":445 + /* "gensim/models/word2vec_inner.pyx":447 * if _compute_loss == 1: * f_dot = (f_dot if d == 0 else -f_dot) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3779,7 +3798,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_L17_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":446 + /* "gensim/models/word2vec_inner.pyx":448 * f_dot = (f_dot if d == 0 else -f_dot) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -3788,7 +3807,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ goto __pyx_L8_continue; - /* "gensim/models/word2vec_inner.pyx":445 + /* "gensim/models/word2vec_inner.pyx":447 * if _compute_loss == 1: * f_dot = (f_dot if d == 0 else -f_dot) * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: # <<<<<<<<<<<<<< @@ -3797,7 +3816,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":447 + /* "gensim/models/word2vec_inner.pyx":449 * if f_dot <= -MAX_EXP or f_dot >= MAX_EXP: * continue * log_e_f_dot = LOG_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -3806,7 +3825,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_log_e_f_dot = (__pyx_v_6gensim_6models_14word2vec_inner_LOG_TABLE[((int)((__pyx_v_f_dot + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":448 + /* "gensim/models/word2vec_inner.pyx":450 * continue * log_e_f_dot = LOG_TABLE[((f_dot + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * _running_training_loss_param[0] = _running_training_loss_param[0] - log_e_f_dot # <<<<<<<<<<<<<< @@ -3815,7 +3834,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ (__pyx_v__running_training_loss_param[0]) = ((__pyx_v__running_training_loss_param[0]) - __pyx_v_log_e_f_dot); - /* "gensim/models/word2vec_inner.pyx":443 + /* 
"gensim/models/word2vec_inner.pyx":445 * g = (label - f) * alpha * * if _compute_loss == 1: # <<<<<<<<<<<<<< @@ -3824,7 +3843,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":450 + /* "gensim/models/word2vec_inner.pyx":452 * _running_training_loss_param[0] = _running_training_loss_param[0] - log_e_f_dot * * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) # <<<<<<<<<<<<<< @@ -3833,7 +3852,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy((&__pyx_v_size), (&__pyx_v_g), (&(__pyx_v_syn1neg[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), __pyx_v_work, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":451 + /* "gensim/models/word2vec_inner.pyx":453 * * our_saxpy(&size, &g, &syn1neg[row2], &ONE, work, &ONE) * our_saxpy(&size, &g, neu1, &ONE, &syn1neg[row2], &ONE) # <<<<<<<<<<<<<< @@ -3844,7 +3863,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_L8_continue:; } - /* "gensim/models/word2vec_inner.pyx":453 + /* "gensim/models/word2vec_inner.pyx":455 * our_saxpy(&size, &g, neu1, &ONE, &syn1neg[row2], &ONE) * * if not cbow_mean: # divide error over summed window vectors # <<<<<<<<<<<<<< @@ -3854,7 +3873,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((!(__pyx_v_cbow_mean != 0)) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":454 + /* "gensim/models/word2vec_inner.pyx":456 * * if not cbow_mean: # divide error over summed window vectors * sscal(&size, &inv_count, work, &ONE) # (does this need BLAS-variants like saxpy?) # <<<<<<<<<<<<<< @@ -3863,7 +3882,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_work, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":453 + /* "gensim/models/word2vec_inner.pyx":455 * our_saxpy(&size, &g, neu1, &ONE, &syn1neg[row2], &ONE) * * if not cbow_mean: # divide error over summed window vectors # <<<<<<<<<<<<<< @@ -3872,7 +3891,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":456 + /* "gensim/models/word2vec_inner.pyx":458 * sscal(&size, &inv_count, work, &ONE) # (does this need BLAS-variants like saxpy?) 
* * for m in range(j,k): # <<<<<<<<<<<<<< @@ -3884,7 +3903,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":457 + /* "gensim/models/word2vec_inner.pyx":459 * * for m in range(j,k): * if m == i: # <<<<<<<<<<<<<< @@ -3894,7 +3913,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_t_4 = ((__pyx_v_m == __pyx_v_i) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":458 + /* "gensim/models/word2vec_inner.pyx":460 * for m in range(j,k): * if m == i: * continue # <<<<<<<<<<<<<< @@ -3903,7 +3922,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ goto __pyx_L20_continue; - /* "gensim/models/word2vec_inner.pyx":457 + /* "gensim/models/word2vec_inner.pyx":459 * * for m in range(j,k): * if m == i: # <<<<<<<<<<<<<< @@ -3912,7 +3931,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente */ } - /* "gensim/models/word2vec_inner.pyx":460 + /* "gensim/models/word2vec_inner.pyx":462 * continue * else: * our_saxpy(&size, &word_locks[indexes[m]], work, &ONE, &syn0[indexes[m]*size], &ONE) # <<<<<<<<<<<<<< @@ -3925,7 +3944,7 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_L20_continue:; } - /* "gensim/models/word2vec_inner.pyx":462 + /* "gensim/models/word2vec_inner.pyx":464 * our_saxpy(&size, &word_locks[indexes[m]], work, &ONE, &syn0[indexes[m]*size], &ONE) * * return next_random # <<<<<<<<<<<<<< @@ -3935,10 +3954,10 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente __pyx_r = __pyx_v_next_random; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":346 + /* "gensim/models/word2vec_inner.pyx":348 * * - * cdef unsigned long long fast_sentence_cbow_neg( # <<<<<<<<<<<<<< + * cdef unsigned long long w2v_fast_sentence_cbow_neg( # <<<<<<<<<<<<<< * const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, int codelens[MAX_SENTENCE_LEN], * REAL_t *neu1, REAL_t *syn0, REAL_t *syn1neg, const int size, */ @@ -3948,146 +3967,16 @@ static unsigned PY_LONG_LONG __pyx_f_6gensim_6models_14word2vec_inner_fast_sente return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":465 +/* "gensim/models/word2vec_inner.pyx":467 * * - * def train_batch_sg(model, sentences, alpha, _work, compute_loss): # <<<<<<<<<<<<<< - * """Update skip-gram model by training on a batch of sentences. 
- * + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=None): # <<<<<<<<<<<<<< + * c[0].hs = model.hs + * c[0].negative = model.negative */ -/* Python wrapper */ -static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_1train_batch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_6gensim_6models_14word2vec_inner_train_batch_sg[] = "train_batch_sg(model, sentences, alpha, _work, compute_loss)\nUpdate skip-gram model by training on a batch of sentences.\n\n Called internally from :meth:`~gensim.models.word2vec.Word2Vec.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.word2Vec.Word2Vec`\n The Word2Vec model instance to train.\n sentences : iterable of list of str\n The corpus used to train the model.\n alpha : float\n The learning rate\n _work : np.ndarray\n Private working memory for each worker.\n compute_loss : bool\n Whether or not the training loss should be computed in this batch.\n\n Returns\n -------\n int\n Number of words in the vocabulary actually used for training (They already existed in the vocabulary\n and were not discarded by negative sampling).\n\n "; -static PyMethodDef __pyx_mdef_6gensim_6models_14word2vec_inner_1train_batch_sg = {"train_batch_sg", (PyCFunction)__pyx_pw_6gensim_6models_14word2vec_inner_1train_batch_sg, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_14word2vec_inner_train_batch_sg}; -static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_1train_batch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_model = 0; - PyObject *__pyx_v_sentences = 0; - PyObject *__pyx_v_alpha = 0; - PyObject *__pyx_v__work = 0; - PyObject *__pyx_v_compute_loss = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("train_batch_sg (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_sentences,&__pyx_n_s_alpha,&__pyx_n_s_work,&__pyx_n_s_compute_loss,0}; - PyObject* values[5] = {0,0,0,0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - CYTHON_FALLTHROUGH; - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - CYTHON_FALLTHROUGH; - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - CYTHON_FALLTHROUGH; - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; - else goto __pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_sentences)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 1); __PYX_ERR(0, 465, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 2: - if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 2); __PYX_ERR(0, 465, __pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 3: - if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 3); __PYX_ERR(0, 465, 
__pyx_L3_error) - } - CYTHON_FALLTHROUGH; - case 4: - if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compute_loss)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 4); __PYX_ERR(0, 465, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_batch_sg") < 0)) __PYX_ERR(0, 465, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 5) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - } - __pyx_v_model = values[0]; - __pyx_v_sentences = values[1]; - __pyx_v_alpha = values[2]; - __pyx_v__work = values[3]; - __pyx_v_compute_loss = values[4]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 465, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("gensim.models.word2vec_inner.train_batch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(__pyx_self, __pyx_v_model, __pyx_v_sentences, __pyx_v_alpha, __pyx_v__work, __pyx_v_compute_loss); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_sentences, PyObject *__pyx_v_alpha, PyObject *__pyx_v__work, PyObject *__pyx_v_compute_loss) { - int __pyx_v_hs; - int __pyx_v_negative; - int __pyx_v_sample; - int __pyx_v__compute_loss; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__running_training_loss; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; - int __pyx_v_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_reduced_windows[0x2710]; - int __pyx_v_sentence_idx[(0x2710 + 1)]; - int __pyx_v_window; - int __pyx_v_i; - int __pyx_v_j; - int __pyx_v_k; - int __pyx_v_effective_words; - int __pyx_v_effective_sentences; - int __pyx_v_sent_idx; - int __pyx_v_idx_start; - int __pyx_v_idx_end; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg; - __pyx_t_5numpy_uint32_t *__pyx_v_cum_table; - unsigned PY_LONG_LONG __pyx_v_cum_table_len; - unsigned PY_LONG_LONG __pyx_v_next_random; - PyObject *__pyx_v_vlookup = NULL; - PyObject *__pyx_v_sent = NULL; - PyObject *__pyx_v_token = NULL; - PyObject *__pyx_v_word = NULL; - PyObject *__pyx_v_item = NULL; +static PyObject *__pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config(struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig *__pyx_v_c, PyObject *__pyx_v_model, PyObject *__pyx_v_alpha, PyObject *__pyx_v_compute_loss, PyObject *__pyx_v__work, struct 
__pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config *__pyx_optional_args) { + PyObject *__pyx_v__neu1 = ((PyObject *)Py_None); PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; @@ -4099,445 +3988,670 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON int __pyx_t_7; PyObject *__pyx_t_8 = NULL; unsigned PY_LONG_LONG __pyx_t_9; - PyObject *(*__pyx_t_10)(PyObject *); - Py_ssize_t __pyx_t_11; - PyObject *(*__pyx_t_12)(PyObject *); - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - __pyx_t_5numpy_uint32_t __pyx_t_15; - Py_ssize_t __pyx_t_16; - int __pyx_t_17; - PyObject *__pyx_t_18 = NULL; - int __pyx_t_19; - int __pyx_t_20; - int __pyx_t_21; - int __pyx_t_22; - int __pyx_t_23; - int __pyx_t_24; - int __pyx_t_25; - __Pyx_RefNannySetupContext("train_batch_sg", 0); + __Pyx_RefNannySetupContext("init_w2v_config", 0); + if (__pyx_optional_args) { + if (__pyx_optional_args->__pyx_n > 0) { + __pyx_v__neu1 = __pyx_optional_args->_neu1; + } + } - /* "gensim/models/word2vec_inner.pyx":490 + /* "gensim/models/word2vec_inner.pyx":468 * - * """ - * cdef int hs = model.hs # <<<<<<<<<<<<<< - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=None): + * c[0].hs = model.hs # <<<<<<<<<<<<<< + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 490, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 468, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 490, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 468, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_hs = __pyx_t_2; + (__pyx_v_c[0]).hs = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":491 - * """ - * cdef int hs = model.hs - * cdef int negative = model.negative # <<<<<<<<<<<<<< - * cdef int sample = (model.vocabulary.sample != 0) - * + /* "gensim/models/word2vec_inner.pyx":469 + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=None): + * c[0].hs = model.hs + * c[0].negative = model.negative # <<<<<<<<<<<<<< + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 491, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 469, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 491, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 469, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_negative = __pyx_t_2; + (__pyx_v_c[0]).negative = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":492 - * cdef int hs = model.hs - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< - * - * cdef int _compute_loss = (1 if compute_loss 
else 0) + /* "gensim/models/word2vec_inner.pyx":470 + * c[0].hs = model.hs + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< + * c[0].cbow_mean = model.cbow_mean + * c[0].window = model.window */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 492, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 470, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 492, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 470, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 492, __pyx_L1_error) + __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 470, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 492, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 470, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_sample = __pyx_t_2; + (__pyx_v_c[0]).sample = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":494 - * cdef int sample = (model.vocabulary.sample != 0) - * - * cdef int _compute_loss = (1 if compute_loss else 0) # <<<<<<<<<<<<<< - * cdef REAL_t _running_training_loss = model.running_training_loss - * + /* "gensim/models/word2vec_inner.pyx":471 + * c[0].negative = model.negative + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean # <<<<<<<<<<<<<< + * c[0].window = model.window + * c[0].workers = model.workers */ - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_compute_loss); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 494, __pyx_L1_error) - if (__pyx_t_4) { - __pyx_t_2 = 1; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_cbow_mean); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 471, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 471, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).cbow_mean = __pyx_t_2; + + /* "gensim/models/word2vec_inner.pyx":472 + * c[0].sample = (model.vocabulary.sample != 0) + * c[0].cbow_mean = model.cbow_mean + * c[0].window = model.window # <<<<<<<<<<<<<< + * c[0].workers = model.workers + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 472, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 472, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).window = __pyx_t_2; + + /* "gensim/models/word2vec_inner.pyx":473 + * c[0].cbow_mean = model.cbow_mean + * c[0].window = model.window + * c[0].workers = model.workers # <<<<<<<<<<<<<< + * + * c[0].compute_loss = (1 if compute_loss else 0) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_workers); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 473, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + (__pyx_v_c[0]).workers = __pyx_t_2; + + /* "gensim/models/word2vec_inner.pyx":475 + * c[0].workers = model.workers + * + * c[0].compute_loss = (1 if compute_loss else 0) # <<<<<<<<<<<<<< + * c[0].running_training_loss = model.running_training_loss + * + */ + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_compute_loss); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 475, __pyx_L1_error) + if (__pyx_t_4) { + __pyx_t_2 = 1; } else { __pyx_t_2 = 0; } - __pyx_v__compute_loss = __pyx_t_2; + (__pyx_v_c[0]).compute_loss = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":495 + /* "gensim/models/word2vec_inner.pyx":476 * - * cdef int _compute_loss = (1 if compute_loss else 0) - * cdef REAL_t _running_training_loss = model.running_training_loss # <<<<<<<<<<<<<< + * c[0].compute_loss = (1 if compute_loss else 0) + * c[0].running_training_loss = model.running_training_loss # <<<<<<<<<<<<<< * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) + * c[0].syn0 = (np.PyArray_DATA(model.wv.vectors)) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 495, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 476, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_1); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 495, __pyx_L1_error) + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_1); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 476, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v__running_training_loss = __pyx_t_5; + (__pyx_v_c[0]).running_training_loss = __pyx_t_5; - /* "gensim/models/word2vec_inner.pyx":497 - * cdef REAL_t _running_training_loss = model.running_training_loss + /* "gensim/models/word2vec_inner.pyx":478 + * c[0].running_training_loss = model.running_training_loss * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) # <<<<<<<<<<<<<< - * cdef REAL_t *word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) - * cdef REAL_t *work + * c[0].syn0 = (np.PyArray_DATA(model.wv.vectors)) # <<<<<<<<<<<<<< + * c[0].word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) + * c[0].alpha = alpha */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 497, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 478, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 497, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 478, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 497, __pyx_L1_error) - __pyx_v_syn0 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, 
__pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 478, __pyx_L1_error) + (__pyx_v_c[0]).syn0 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":498 + /* "gensim/models/word2vec_inner.pyx":479 * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) - * cdef REAL_t *word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) # <<<<<<<<<<<<<< - * cdef REAL_t *work - * cdef REAL_t _alpha = alpha + * c[0].syn0 = (np.PyArray_DATA(model.wv.vectors)) + * c[0].word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) # <<<<<<<<<<<<<< + * c[0].alpha = alpha + * c[0].size = model.wv.vector_size */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 498, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 479, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 498, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 479, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 498, __pyx_L1_error) - __pyx_v_word_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 479, __pyx_L1_error) + (__pyx_v_c[0]).word_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":500 - * cdef REAL_t *word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) - * cdef REAL_t *work - * cdef REAL_t _alpha = alpha # <<<<<<<<<<<<<< - * cdef int size = model.wv.vector_size + /* "gensim/models/word2vec_inner.pyx":480 + * c[0].syn0 = (np.PyArray_DATA(model.wv.vectors)) + * c[0].word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) + * c[0].alpha = alpha # <<<<<<<<<<<<<< + * c[0].size = model.wv.vector_size * */ - __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 500, __pyx_L1_error) - __pyx_v__alpha = __pyx_t_5; + __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 480, __pyx_L1_error) + (__pyx_v_c[0]).alpha = __pyx_t_5; - /* "gensim/models/word2vec_inner.pyx":501 - * cdef REAL_t *work - * cdef REAL_t _alpha = alpha - * cdef int size = model.wv.vector_size # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":481 + * c[0].word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) + * c[0].alpha = alpha + * c[0].size = model.wv.vector_size # <<<<<<<<<<<<<< * - * cdef int codelens[MAX_SENTENCE_LEN] + * if c[0].hs: */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 501, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 481, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, 
__pyx_n_s_vector_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 501, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 481, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 501, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 481, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_size = __pyx_t_2; - - /* "gensim/models/word2vec_inner.pyx":507 - * cdef np.uint32_t reduced_windows[MAX_SENTENCE_LEN] - * cdef int sentence_idx[MAX_SENTENCE_LEN + 1] - * cdef int window = model.window # <<<<<<<<<<<<<< - * - * cdef int i, j, k - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 507, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 507, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_window = __pyx_t_2; - - /* "gensim/models/word2vec_inner.pyx":510 - * - * cdef int i, j, k - * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< - * cdef int sent_idx, idx_start, idx_end - * - */ - __pyx_v_effective_words = 0; - __pyx_v_effective_sentences = 0; + (__pyx_v_c[0]).size = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":525 - * cdef unsigned long long next_random + /* "gensim/models/word2vec_inner.pyx":483 + * c[0].size = model.wv.vector_size * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + * if c[0].hs: # <<<<<<<<<<<<<< + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * */ - __pyx_t_4 = (__pyx_v_hs != 0); + __pyx_t_4 = ((__pyx_v_c[0]).hs != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":526 + /* "gensim/models/word2vec_inner.pyx":484 * - * if hs: - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< + * if c[0].hs: + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< * - * if negative: + * if c[0].negative: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 526, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 484, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 526, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 484, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 526, __pyx_L1_error) - __pyx_v_syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 484, __pyx_L1_error) + (__pyx_v_c[0]).syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":525 - * cdef unsigned long long next_random + /* 
"gensim/models/word2vec_inner.pyx":483 + * c[0].size = model.wv.vector_size * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + * if c[0].hs: # <<<<<<<<<<<<<< + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * */ } - /* "gensim/models/word2vec_inner.pyx":528 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/word2vec_inner.pyx":486 + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * if c[0].negative: # <<<<<<<<<<<<<< + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) */ - __pyx_t_4 = (__pyx_v_negative != 0); + __pyx_t_4 = ((__pyx_v_c[0]).negative != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":529 + /* "gensim/models/word2vec_inner.pyx":487 * - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative: + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 529, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 487, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 529, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 487, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 529, __pyx_L1_error) - __pyx_v_syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 487, __pyx_L1_error) + (__pyx_v_c[0]).syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":530 - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: + /* "gensim/models/word2vec_inner.pyx":488 + * if c[0].negative: + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 530, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 488, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, 
__pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 530, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 488, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 530, __pyx_L1_error) - __pyx_v_cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 488, __pyx_L1_error) + (__pyx_v_c[0]).cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":531 - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/word2vec_inner.pyx":489 + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 531, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 489, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 531, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 489, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_6 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 531, __pyx_L1_error) + __pyx_t_6 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 489, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_cum_table_len = __pyx_t_6; + (__pyx_v_c[0]).cum_table_len = __pyx_t_6; - /* "gensim/models/word2vec_inner.pyx":528 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) + /* "gensim/models/word2vec_inner.pyx":486 + * c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * if c[0].negative: # <<<<<<<<<<<<<< + * c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) */ } - /* "gensim/models/word2vec_inner.pyx":532 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/word2vec_inner.pyx":490 + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or 
c[0].sample: # <<<<<<<<<<<<<< + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) * */ - __pyx_t_7 = (__pyx_v_negative != 0); + __pyx_t_7 = ((__pyx_v_c[0]).negative != 0); if (!__pyx_t_7) { } else { __pyx_t_4 = __pyx_t_7; goto __pyx_L6_bool_binop_done; } - __pyx_t_7 = (__pyx_v_sample != 0); + __pyx_t_7 = ((__pyx_v_c[0]).sample != 0); __pyx_t_4 = __pyx_t_7; __pyx_L6_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":533 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":491 + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< * * # convert Python structures to primitive types, so we can release the GIL */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_8 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_9 = 
__Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_t_9 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_next_random = __pyx_t_9; + (__pyx_v_c[0]).next_random = __pyx_t_9; - /* "gensim/models/word2vec_inner.pyx":532 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + /* "gensim/models/word2vec_inner.pyx":490 + * c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: # <<<<<<<<<<<<<< + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) * */ } - /* "gensim/models/word2vec_inner.pyx":536 + /* "gensim/models/word2vec_inner.pyx":494 * * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< + * c[0].work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< + * + * if _neu1 is not None: + */ + if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 494, __pyx_L1_error) + (__pyx_v_c[0]).work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); + + /* "gensim/models/word2vec_inner.pyx":496 + * c[0].work = np.PyArray_DATA(_work) + * + * if _neu1 is not None: # <<<<<<<<<<<<<< + * c[0].neu1 = np.PyArray_DATA(_neu1) + * + */ + __pyx_t_4 = (__pyx_v__neu1 != Py_None); + __pyx_t_7 = (__pyx_t_4 != 0); + if (__pyx_t_7) { + + /* "gensim/models/word2vec_inner.pyx":497 + * + * if _neu1 is not None: + * c[0].neu1 = np.PyArray_DATA(_neu1) # <<<<<<<<<<<<<< + * * - * # prepare C structures so we can go "full C" and release the Python GIL */ - if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 536, __pyx_L1_error) - __pyx_v_work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); + if (!(likely(((__pyx_v__neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 497, __pyx_L1_error) + (__pyx_v_c[0]).neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__neu1))); - /* "gensim/models/word2vec_inner.pyx":539 + /* "gensim/models/word2vec_inner.pyx":496 + * c[0].work = np.PyArray_DATA(_work) + * + * if _neu1 is not None: # <<<<<<<<<<<<<< + * c[0].neu1 = np.PyArray_DATA(_neu1) + * + */ + } + + /* "gensim/models/word2vec_inner.pyx":467 + * + * + * cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=None): # <<<<<<<<<<<<<< + * c[0].hs = model.hs + * c[0].negative = model.negative + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("gensim.models.word2vec_inner.init_w2v_config", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} 
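
The hunks above replace the old scattered locals of the training batch functions (`hs`, `negative`, `syn1`, `cum_table`, `next_random`, ...) with a single `Word2VecConfig` struct `c` that the new `cdef init_w2v_config(...)` helper fills in once per call. The generated C obscures the idea, so here is a plain-Python sketch of that pattern, reconstructed from the embedded `.pyx` source comments in the diff. The class, helper, and field names mirror the ones visible in the hunks, but the real code is Cython and stores raw C pointers obtained via `np.PyArray_DATA` rather than NumPy arrays, so treat this only as an illustration of the refactoring, not as the actual gensim implementation.

```python
from dataclasses import dataclass
from typing import Optional

import numpy as np


@dataclass
class Word2VecConfig:
    # flags / scalars copied from the model once per batch
    hs: int = 0
    negative: int = 0
    sample: float = 0.0          # set by the real helper; implied by later reads of c.sample
    size: int = 0
    next_random: int = 0
    cum_table_len: int = 0
    # array views (raw REAL_t* / uint32_t* pointers in the Cython version)
    syn1: Optional[np.ndarray] = None
    syn1neg: Optional[np.ndarray] = None
    cum_table: Optional[np.ndarray] = None
    work: Optional[np.ndarray] = None
    neu1: Optional[np.ndarray] = None


def init_w2v_config(c: Word2VecConfig, model, alpha, compute_loss, _work, _neu1=None) -> None:
    """Fill the shared config once, mirroring the assignments visible in the hunks above."""
    c.hs = model.hs
    c.negative = model.negative
    c.size = model.wv.vector_size
    if c.hs:
        c.syn1 = model.trainables.syn1
    if c.negative:
        c.syn1neg = model.trainables.syn1neg
        c.cum_table = model.vocabulary.cum_table
        c.cum_table_len = len(model.vocabulary.cum_table)
    if c.negative or c.sample:
        # same 48-bit seed construction as in the diff
        c.next_random = (2 ** 24) * model.random.randint(0, 2 ** 24) + model.random.randint(0, 2 ** 24)
    c.work = _work
    if _neu1 is not None:
        c.neu1 = _neu1
    # The real helper also records alpha, compute_loss, sample, window, word_locks, etc.,
    # which the later hunks read back as c.alpha, c.window, c.word_locks, ...; those
    # assignments are not shown in this part of the diff.
```

With this helper in place, `train_batch_sg` simply declares `cdef Word2VecConfig c` and calls `init_w2v_config(&c, model, alpha, compute_loss, _work)`, so both the sentence-batching loop and the nogil training loop below read everything from `c` instead of a dozen separate locals.
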
+ +/* "gensim/models/word2vec_inner.pyx":500 + * + * + * def train_batch_sg(model, sentences, alpha, _work, compute_loss): # <<<<<<<<<<<<<< + * """Update skip-gram model by training on a batch of sentences. + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_1train_batch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_6gensim_6models_14word2vec_inner_train_batch_sg[] = "train_batch_sg(model, sentences, alpha, _work, compute_loss)\nUpdate skip-gram model by training on a batch of sentences.\n\n Called internally from :meth:`~gensim.models.word2vec.Word2Vec.train`.\n\n Parameters\n ----------\n model : :class:`~gensim.models.word2Vec.Word2Vec`\n The Word2Vec model instance to train.\n sentences : iterable of list of str\n The corpus used to train the model.\n alpha : float\n The learning rate\n _work : np.ndarray\n Private working memory for each worker.\n compute_loss : bool\n Whether or not the training loss should be computed in this batch.\n\n Returns\n -------\n int\n Number of words in the vocabulary actually used for training (They already existed in the vocabulary\n and were not discarded by negative sampling).\n\n "; +static PyMethodDef __pyx_mdef_6gensim_6models_14word2vec_inner_1train_batch_sg = {"train_batch_sg", (PyCFunction)__pyx_pw_6gensim_6models_14word2vec_inner_1train_batch_sg, METH_VARARGS|METH_KEYWORDS, __pyx_doc_6gensim_6models_14word2vec_inner_train_batch_sg}; +static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_1train_batch_sg(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_model = 0; + PyObject *__pyx_v_sentences = 0; + PyObject *__pyx_v_alpha = 0; + PyObject *__pyx_v__work = 0; + PyObject *__pyx_v_compute_loss = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("train_batch_sg (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_model,&__pyx_n_s_sentences,&__pyx_n_s_alpha,&__pyx_n_s_work,&__pyx_n_s_compute_loss,0}; + PyObject* values[5] = {0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_model)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_sentences)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 1); __PYX_ERR(0, 500, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 2); __PYX_ERR(0, 500, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 
1, 5, 5, 3); __PYX_ERR(0, 500, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compute_loss)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, 4); __PYX_ERR(0, 500, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_batch_sg") < 0)) __PYX_ERR(0, 500, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 5) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + } + __pyx_v_model = values[0]; + __pyx_v_sentences = values[1]; + __pyx_v_alpha = values[2]; + __pyx_v__work = values[3]; + __pyx_v_compute_loss = values[4]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("train_batch_sg", 1, 5, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 500, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("gensim.models.word2vec_inner.train_batch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(__pyx_self, __pyx_v_model, __pyx_v_sentences, __pyx_v_alpha, __pyx_v__work, __pyx_v_compute_loss); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_sentences, PyObject *__pyx_v_alpha, PyObject *__pyx_v__work, PyObject *__pyx_v_compute_loss) { + struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig __pyx_v_c; + int __pyx_v_i; + int __pyx_v_j; + int __pyx_v_k; + int __pyx_v_effective_words; + int __pyx_v_effective_sentences; + int __pyx_v_sent_idx; + int __pyx_v_idx_start; + int __pyx_v_idx_end; + PyObject *__pyx_v_vlookup = NULL; + PyObject *__pyx_v_sent = NULL; + PyObject *__pyx_v_token = NULL; + PyObject *__pyx_v_word = NULL; + PyObject *__pyx_v_item = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + PyObject *(*__pyx_t_4)(PyObject *); + int __pyx_t_5; + int __pyx_t_6; + Py_ssize_t __pyx_t_7; + PyObject *(*__pyx_t_8)(PyObject *); + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + __pyx_t_5numpy_uint32_t __pyx_t_12; + Py_ssize_t __pyx_t_13; + int __pyx_t_14; + int __pyx_t_15; + PyObject *__pyx_t_16 = NULL; + int __pyx_t_17; + int __pyx_t_18; + int __pyx_t_19; + int __pyx_t_20; + int __pyx_t_21; + int __pyx_t_22; + int __pyx_t_23; + __Pyx_RefNannySetupContext("train_batch_sg", 0); + + /* "gensim/models/word2vec_inner.pyx":527 + * cdef Word2VecConfig c + * cdef int i, j, k + * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< + * cdef int sent_idx, idx_start, idx_end + * + */ + __pyx_v_effective_words = 0; + __pyx_v_effective_sentences = 0; + + /* "gensim/models/word2vec_inner.pyx":530 + * cdef int sent_idx, idx_start, idx_end + * + * init_w2v_config(&c, model, alpha, compute_loss, _work) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config((&__pyx_v_c), 
__pyx_v_model, __pyx_v_alpha, __pyx_v_compute_loss, __pyx_v__work, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 530, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "gensim/models/word2vec_inner.pyx":534 * * # prepare C structures so we can go "full C" and release the Python GIL * vlookup = model.wv.vocab # <<<<<<<<<<<<<< - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 539, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 539, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_vlookup = __pyx_t_3; - __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 534, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 534, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_vlookup = __pyx_t_2; + __pyx_t_2 = 0; - /* "gensim/models/word2vec_inner.pyx":540 + /* "gensim/models/word2vec_inner.pyx":535 * # prepare C structures so we can go "full C" and release the Python GIL * vlookup = model.wv.vocab - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< * for sent in sentences: * if not sent: */ - (__pyx_v_sentence_idx[0]) = 0; + (__pyx_v_c.sentence_idx[0]) = 0; - /* "gensim/models/word2vec_inner.pyx":541 + /* "gensim/models/word2vec_inner.pyx":536 * vlookup = model.wv.vocab - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: # <<<<<<<<<<<<<< * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged */ if (likely(PyList_CheckExact(__pyx_v_sentences)) || PyTuple_CheckExact(__pyx_v_sentences)) { - __pyx_t_3 = __pyx_v_sentences; __Pyx_INCREF(__pyx_t_3); __pyx_t_6 = 0; - __pyx_t_10 = NULL; + __pyx_t_2 = __pyx_v_sentences; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0; + __pyx_t_4 = NULL; } else { - __pyx_t_6 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_sentences); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 541, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_10 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 541, __pyx_L1_error) + __pyx_t_3 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_sentences); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 536, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = Py_TYPE(__pyx_t_2)->tp_iternext; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 536, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_10)) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_3)) break; + if (likely(!__pyx_t_4)) { + if (likely(PyList_CheckExact(__pyx_t_2))) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_2)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_6); __Pyx_INCREF(__pyx_t_8); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 541, __pyx_L1_error) + __pyx_t_1 = 
PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_1); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 536, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 541, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 536, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } else { - if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + if (__pyx_t_3 >= PyTuple_GET_SIZE(__pyx_t_2)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_6); __Pyx_INCREF(__pyx_t_8); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 541, __pyx_L1_error) + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_1); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 536, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 541, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 536, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } } else { - __pyx_t_8 = __pyx_t_10(__pyx_t_3); - if (unlikely(!__pyx_t_8)) { + __pyx_t_1 = __pyx_t_4(__pyx_t_2); + if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 541, __pyx_L1_error) + else __PYX_ERR(0, 536, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_1); } - __Pyx_XDECREF_SET(__pyx_v_sent, __pyx_t_8); - __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_sent, __pyx_t_1); + __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":542 - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + /* "gensim/models/word2vec_inner.pyx":537 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: * if not sent: # <<<<<<<<<<<<<< * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: */ - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_sent); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 542, __pyx_L1_error) - __pyx_t_7 = ((!__pyx_t_4) != 0); - if (__pyx_t_7) { + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_v_sent); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 537, __pyx_L1_error) + __pyx_t_6 = ((!__pyx_t_5) != 0); + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":543 + /* "gensim/models/word2vec_inner.pyx":538 * for sent in sentences: * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged # <<<<<<<<<<<<<< * for token in sent: * word = vlookup[token] if token in vlookup else None */ - goto __pyx_L8_continue; + goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":542 - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + /* "gensim/models/word2vec_inner.pyx":537 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: * if not sent: # <<<<<<<<<<<<<< * continue # ignore empty sentences; leave effective_sentences unchanged @@ -4545,7 +4659,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON */ } - /* "gensim/models/word2vec_inner.pyx":544 + /* "gensim/models/word2vec_inner.pyx":539 * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged * for token in 
sent: # <<<<<<<<<<<<<< @@ -4553,241 +4667,241 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON * if word is None: */ if (likely(PyList_CheckExact(__pyx_v_sent)) || PyTuple_CheckExact(__pyx_v_sent)) { - __pyx_t_8 = __pyx_v_sent; __Pyx_INCREF(__pyx_t_8); __pyx_t_11 = 0; - __pyx_t_12 = NULL; + __pyx_t_1 = __pyx_v_sent; __Pyx_INCREF(__pyx_t_1); __pyx_t_7 = 0; + __pyx_t_8 = NULL; } else { - __pyx_t_11 = -1; __pyx_t_8 = PyObject_GetIter(__pyx_v_sent); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_12 = Py_TYPE(__pyx_t_8)->tp_iternext; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 544, __pyx_L1_error) + __pyx_t_7 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_sent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 539, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 539, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_12)) { - if (likely(PyList_CheckExact(__pyx_t_8))) { - if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_8)) break; + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_8, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 544, __pyx_L1_error) + __pyx_t_9 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 539, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_8, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 539, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); #endif } else { - if (__pyx_t_11 >= PyTuple_GET_SIZE(__pyx_t_8)) break; + if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_8, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 544, __pyx_L1_error) + __pyx_t_9 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 539, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_8, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 544, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = PySequence_ITEM(__pyx_t_1, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 539, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); #endif } } else { - __pyx_t_1 = __pyx_t_12(__pyx_t_8); - if (unlikely(!__pyx_t_1)) { + __pyx_t_9 = __pyx_t_8(__pyx_t_1); + if (unlikely(!__pyx_t_9)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 544, __pyx_L1_error) + else __PYX_ERR(0, 539, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_9); } - __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_1); - __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_9); + __pyx_t_9 = 0; - /* "gensim/models/word2vec_inner.pyx":545 + /* "gensim/models/word2vec_inner.pyx":540 * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: * word = vlookup[token] if token in vlookup else None # <<<<<<<<<<<<<< * if word is None: * continue # leaving 
`effective_words` unchanged = shortening the sentence = expanding the window */ - __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 545, __pyx_L1_error) - if ((__pyx_t_7 != 0)) { - __pyx_t_13 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 545, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_1 = __pyx_t_13; - __pyx_t_13 = 0; + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 540, __pyx_L1_error) + if ((__pyx_t_6 != 0)) { + __pyx_t_10 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 540, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_9 = __pyx_t_10; + __pyx_t_10 = 0; } else { __Pyx_INCREF(Py_None); - __pyx_t_1 = Py_None; + __pyx_t_9 = Py_None; } - __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_1); - __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_9); + __pyx_t_9 = 0; - /* "gensim/models/word2vec_inner.pyx":546 + /* "gensim/models/word2vec_inner.pyx":541 * for token in sent: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): */ - __pyx_t_7 = (__pyx_v_word == Py_None); - __pyx_t_4 = (__pyx_t_7 != 0); - if (__pyx_t_4) { + __pyx_t_6 = (__pyx_v_word == Py_None); + __pyx_t_5 = (__pyx_t_6 != 0); + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":547 + /* "gensim/models/word2vec_inner.pyx":542 * word = vlookup[token] if token in vlookup else None * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window # <<<<<<<<<<<<<< - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue */ - goto __pyx_L11_continue; + goto __pyx_L6_continue; - /* "gensim/models/word2vec_inner.pyx":546 + /* "gensim/models/word2vec_inner.pyx":541 * for token in sent: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): */ } - /* "gensim/models/word2vec_inner.pyx":548 + /* "gensim/models/word2vec_inner.pyx":543 * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index */ - __pyx_t_7 = (__pyx_v_sample != 0); - if (__pyx_t_7) { + __pyx_t_6 = (__pyx_v_c.sample != 0); + if (__pyx_t_6) { } else { - __pyx_t_4 = __pyx_t_7; - goto __pyx_L15_bool_binop_done; + __pyx_t_5 = __pyx_t_6; + goto __pyx_L10_bool_binop_done; } - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 548, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_13 = 
__Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_next_random))); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 548, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_14 = PyObject_RichCompare(__pyx_t_1, __pyx_t_13, Py_LT); __Pyx_XGOTREF(__pyx_t_14); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 548, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_14); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 548, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __pyx_t_4 = __pyx_t_7; - __pyx_L15_bool_binop_done:; - if (__pyx_t_4) { + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 543, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_c.next_random))); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 543, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = PyObject_RichCompare(__pyx_t_9, __pyx_t_10, Py_LT); __Pyx_XGOTREF(__pyx_t_11); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 543, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_11); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 543, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_5 = __pyx_t_6; + __pyx_L10_bool_binop_done:; + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":549 + /* "gensim/models/word2vec_inner.pyx":544 * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue # <<<<<<<<<<<<<< - * indexes[effective_words] = word.index - * if hs: + * c.indexes[effective_words] = word.index + * if c.hs: */ - goto __pyx_L11_continue; + goto __pyx_L6_continue; - /* "gensim/models/word2vec_inner.pyx":548 + /* "gensim/models/word2vec_inner.pyx":543 * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index */ } - /* "gensim/models/word2vec_inner.pyx":550 - * if sample and word.sample_int < random_int32(&next_random): + /* "gensim/models/word2vec_inner.pyx":545 + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue - * indexes[effective_words] = word.index # <<<<<<<<<<<<<< - * if hs: - * codelens[effective_words] = len(word.code) + * c.indexes[effective_words] = word.index # <<<<<<<<<<<<<< + * if c.hs: + * c.codelens[effective_words] = len(word.code) */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 550, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_15 = __Pyx_PyInt_As_npy_uint32(__pyx_t_14); if (unlikely((__pyx_t_15 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 550, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - (__pyx_v_indexes[__pyx_v_effective_words]) = __pyx_t_15; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 545, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_t_11); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 545, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.indexes[__pyx_v_effective_words]) = __pyx_t_12; - /* "gensim/models/word2vec_inner.pyx":551 + /* "gensim/models/word2vec_inner.pyx":546 * continue - * indexes[effective_words] = word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) + * c.indexes[effective_words] = word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) */ - __pyx_t_4 = (__pyx_v_hs != 0); - if (__pyx_t_4) { + __pyx_t_5 = (__pyx_v_c.hs != 0); + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":552 - * indexes[effective_words] = word.index - * if hs: - * codelens[effective_words] = len(word.code) # <<<<<<<<<<<<<< - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) - */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 552, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_16 = PyObject_Length(__pyx_t_14); if (unlikely(__pyx_t_16 == ((Py_ssize_t)-1))) __PYX_ERR(0, 552, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - (__pyx_v_codelens[__pyx_v_effective_words]) = ((int)__pyx_t_16); - - /* "gensim/models/word2vec_inner.pyx":553 - * if hs: - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":547 + * c.indexes[effective_words] = word.index + * if c.hs: + * c.codelens[effective_words] = len(word.code) # <<<<<<<<<<<<<< + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) + */ + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 547, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_13 = PyObject_Length(__pyx_t_11); if (unlikely(__pyx_t_13 == ((Py_ssize_t)-1))) __PYX_ERR(0, 547, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + (__pyx_v_c.codelens[__pyx_v_effective_words]) = ((int)__pyx_t_13); + + /* "gensim/models/word2vec_inner.pyx":548 + * if c.hs: + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 553, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - if (!(likely(((__pyx_t_14) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_14, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 553, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_14))); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 548, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 548, __pyx_L1_error) + (__pyx_v_c.codes[__pyx_v_effective_words]) = 
((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/word2vec_inner.pyx":554 - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":549 + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 554, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - if (!(likely(((__pyx_t_14) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_14, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 554, __pyx_L1_error) - (__pyx_v_points[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_14))); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 549, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (!(likely(((__pyx_t_11) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_11, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 549, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_11))); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/word2vec_inner.pyx":551 + /* "gensim/models/word2vec_inner.pyx":546 * continue - * indexes[effective_words] = word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) + * c.indexes[effective_words] = word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) */ } - /* "gensim/models/word2vec_inner.pyx":555 - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":550 + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 # <<<<<<<<<<<<<< * if effective_words == MAX_SENTENCE_LEN: * break # TODO: log warning, tally overflow? */ __pyx_v_effective_words = (__pyx_v_effective_words + 1); - /* "gensim/models/word2vec_inner.pyx":556 - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":551 + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? * */ - __pyx_t_4 = ((__pyx_v_effective_words == 0x2710) != 0); - if (__pyx_t_4) { + __pyx_t_5 = ((__pyx_v_effective_words == 0x2710) != 0); + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":557 + /* "gensim/models/word2vec_inner.pyx":552 * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: * break # TODO: log warning, tally overflow? 
# <<<<<<<<<<<<<< * * # keep track of which words go into which sentence, so we don't train */ - goto __pyx_L12_break; + goto __pyx_L7_break; - /* "gensim/models/word2vec_inner.pyx":556 - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":551 + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? @@ -4795,57 +4909,57 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON */ } - /* "gensim/models/word2vec_inner.pyx":544 + /* "gensim/models/word2vec_inner.pyx":539 * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: # <<<<<<<<<<<<<< * word = vlookup[token] if token in vlookup else None * if word is None: */ - __pyx_L11_continue:; + __pyx_L6_continue:; } - __pyx_L12_break:; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_L7_break:; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":562 + /* "gensim/models/word2vec_inner.pyx":557 * # across sentence boundaries. * # indices of sentence number X are between tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 569, __pyx_L1_error) + __pyx_t_3 = -1; __pyx_t_11 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 564, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_4 = Py_TYPE(__pyx_t_11)->tp_iternext; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 564, __pyx_L1_error) } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; for (;;) { - if (likely(!__pyx_t_10)) { - if (likely(PyList_CheckExact(__pyx_t_14))) { - if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_14)) break; + if (likely(!__pyx_t_4)) { + if (likely(PyList_CheckExact(__pyx_t_11))) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_11)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_14, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 569, __pyx_L1_error) + __pyx_t_2 = PyList_GET_ITEM(__pyx_t_11, __pyx_t_3); __Pyx_INCREF(__pyx_t_2); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 564, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_14, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 569, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = PySequence_ITEM(__pyx_t_11, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 564, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); #endif } else { - if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_14)) break; + if (__pyx_t_3 >= PyTuple_GET_SIZE(__pyx_t_11)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_14, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 569, __pyx_L1_error) + __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_11, __pyx_t_3); __Pyx_INCREF(__pyx_t_2); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 564, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_14, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 569, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = PySequence_ITEM(__pyx_t_11, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 564, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); #endif } } else { - __pyx_t_3 = __pyx_t_10(__pyx_t_14); - if (unlikely(!__pyx_t_3)) { + __pyx_t_2 = __pyx_t_4(__pyx_t_11); + if (unlikely(!__pyx_t_2)) { PyObject* exc_type = 
PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 569, __pyx_L1_error) + else __PYX_ERR(0, 564, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_2); } - __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_3); - __pyx_t_3 = 0; - __pyx_v_i = __pyx_t_2; - __pyx_t_2 = (__pyx_t_2 + 1); + __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_2); + __pyx_t_2 = 0; + __pyx_v_i = __pyx_t_14; + __pyx_t_14 = (__pyx_t_14 + 1); - /* "gensim/models/word2vec_inner.pyx":570 + /* "gensim/models/word2vec_inner.pyx":565 * # precompute "reduced window" offsets in a single randint() call - * for i, item in enumerate(model.random.randint(0, window, effective_words)): - * reduced_windows[i] = item # <<<<<<<<<<<<<< + * for i, item in enumerate(model.random.randint(0, c.window, effective_words)): + * c.reduced_windows[i] = item # <<<<<<<<<<<<<< * * # release GIL & train on all sentences */ - __pyx_t_15 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_15 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 570, __pyx_L1_error) - (__pyx_v_reduced_windows[__pyx_v_i]) = __pyx_t_15; + __pyx_t_12 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_12 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 565, __pyx_L1_error) + (__pyx_v_c.reduced_windows[__pyx_v_i]) = __pyx_t_12; - /* "gensim/models/word2vec_inner.pyx":569 + /* "gensim/models/word2vec_inner.pyx":564 * * # precompute "reduced window" offsets in a single randint() call - * for i, item in enumerate(model.random.randint(0, window, effective_words)): # <<<<<<<<<<<<<< - * reduced_windows[i] = item + * for i, item in enumerate(model.random.randint(0, c.window, effective_words)): # <<<<<<<<<<<<<< + * c.reduced_windows[i] = item * */ } - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/word2vec_inner.pyx":573 + /* "gensim/models/word2vec_inner.pyx":568 * * # release GIL & train on all sentences * with nogil: # <<<<<<<<<<<<<< * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] + * idx_start = c.sentence_idx[sent_idx] */ { #ifdef WITH_THREAD @@ -5014,106 +5128,106 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON #endif /*try:*/ { - /* "gensim/models/word2vec_inner.pyx":574 + /* "gensim/models/word2vec_inner.pyx":569 * # release GIL & train on all sentences * with nogil: * for sent_idx in range(effective_sentences): # <<<<<<<<<<<<<< - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] */ - __pyx_t_2 = __pyx_v_effective_sentences; - __pyx_t_17 = __pyx_t_2; - for (__pyx_t_19 = 0; __pyx_t_19 < __pyx_t_17; __pyx_t_19+=1) { - __pyx_v_sent_idx = __pyx_t_19; + __pyx_t_14 = __pyx_v_effective_sentences; + __pyx_t_15 = __pyx_t_14; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_15; __pyx_t_17+=1) { + __pyx_v_sent_idx = __pyx_t_17; - /* "gensim/models/word2vec_inner.pyx":575 + /* "gensim/models/word2vec_inner.pyx":570 * with nogil: * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] # <<<<<<<<<<<<<< - * idx_end = sentence_idx[sent_idx + 1] + * idx_start = c.sentence_idx[sent_idx] # <<<<<<<<<<<<<< + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): */ - __pyx_v_idx_start = (__pyx_v_sentence_idx[__pyx_v_sent_idx]); + __pyx_v_idx_start = 
(__pyx_v_c.sentence_idx[__pyx_v_sent_idx]); - /* "gensim/models/word2vec_inner.pyx":576 + /* "gensim/models/word2vec_inner.pyx":571 * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] */ - __pyx_v_idx_end = (__pyx_v_sentence_idx[(__pyx_v_sent_idx + 1)]); + __pyx_v_idx_end = (__pyx_v_c.sentence_idx[(__pyx_v_sent_idx + 1)]); - /* "gensim/models/word2vec_inner.pyx":577 - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] + /* "gensim/models/word2vec_inner.pyx":572 + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): # <<<<<<<<<<<<<< - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: */ - __pyx_t_20 = __pyx_v_idx_end; - __pyx_t_21 = __pyx_t_20; - for (__pyx_t_22 = __pyx_v_idx_start; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { - __pyx_v_i = __pyx_t_22; + __pyx_t_18 = __pyx_v_idx_end; + __pyx_t_19 = __pyx_t_18; + for (__pyx_t_20 = __pyx_v_idx_start; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { + __pyx_v_i = __pyx_t_20; - /* "gensim/models/word2vec_inner.pyx":578 - * idx_end = sentence_idx[sent_idx + 1] + /* "gensim/models/word2vec_inner.pyx":573 + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< * if j < idx_start: * j = idx_start */ - __pyx_v_j = ((__pyx_v_i - __pyx_v_window) + (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/word2vec_inner.pyx":579 + /* "gensim/models/word2vec_inner.pyx":574 * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: # <<<<<<<<<<<<<< * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ - __pyx_t_4 = ((__pyx_v_j < __pyx_v_idx_start) != 0); - if (__pyx_t_4) { + __pyx_t_5 = ((__pyx_v_j < __pyx_v_idx_start) != 0); + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":580 - * j = i - window + reduced_windows[i] + /* "gensim/models/word2vec_inner.pyx":575 + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: * j = idx_start # <<<<<<<<<<<<<< - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: */ __pyx_v_j = __pyx_v_idx_start; - /* "gensim/models/word2vec_inner.pyx":579 + /* "gensim/models/word2vec_inner.pyx":574 * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: # <<<<<<<<<<<<<< * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ } - /* "gensim/models/word2vec_inner.pyx":581 + /* "gensim/models/word2vec_inner.pyx":576 * if j < idx_start: * j = idx_start - * k = i + window + 1 - reduced_windows[i] # <<<<<<<<<<<<<< + * k = i + c.window + 1 - c.reduced_windows[i] # <<<<<<<<<<<<<< * if k > idx_end: * k = idx_end */ - __pyx_v_k = (((__pyx_v_i + __pyx_v_window) + 1) - (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_k = 
(((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/word2vec_inner.pyx":582 + /* "gensim/models/word2vec_inner.pyx":577 * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: # <<<<<<<<<<<<<< * k = idx_end * for j in range(j, k): */ - __pyx_t_4 = ((__pyx_v_k > __pyx_v_idx_end) != 0); - if (__pyx_t_4) { + __pyx_t_5 = ((__pyx_v_k > __pyx_v_idx_end) != 0); + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":583 - * k = i + window + 1 - reduced_windows[i] + /* "gensim/models/word2vec_inner.pyx":578 + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: * k = idx_end # <<<<<<<<<<<<<< * for j in range(j, k): @@ -5121,122 +5235,122 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON */ __pyx_v_k = __pyx_v_idx_end; - /* "gensim/models/word2vec_inner.pyx":582 + /* "gensim/models/word2vec_inner.pyx":577 * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: # <<<<<<<<<<<<<< * k = idx_end * for j in range(j, k): */ } - /* "gensim/models/word2vec_inner.pyx":584 + /* "gensim/models/word2vec_inner.pyx":579 * if k > idx_end: * k = idx_end * for j in range(j, k): # <<<<<<<<<<<<<< * if j == i: * continue */ - __pyx_t_23 = __pyx_v_k; - __pyx_t_24 = __pyx_t_23; - for (__pyx_t_25 = __pyx_v_j; __pyx_t_25 < __pyx_t_24; __pyx_t_25+=1) { - __pyx_v_j = __pyx_t_25; + __pyx_t_21 = __pyx_v_k; + __pyx_t_22 = __pyx_t_21; + for (__pyx_t_23 = __pyx_v_j; __pyx_t_23 < __pyx_t_22; __pyx_t_23+=1) { + __pyx_v_j = __pyx_t_23; - /* "gensim/models/word2vec_inner.pyx":585 + /* "gensim/models/word2vec_inner.pyx":580 * k = idx_end * for j in range(j, k): * if j == i: # <<<<<<<<<<<<<< * continue - * if hs: + * if c.hs: */ - __pyx_t_4 = ((__pyx_v_j == __pyx_v_i) != 0); - if (__pyx_t_4) { + __pyx_t_5 = ((__pyx_v_j == __pyx_v_i) != 0); + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":586 + /* "gensim/models/word2vec_inner.pyx":581 * for j in range(j, k): * if j == i: * continue # <<<<<<<<<<<<<< - * if hs: - * fast_sentence_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], _alpha, work, word_locks, _compute_loss, &_running_training_loss) + * if c.hs: + * w2v_fast_sentence_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) */ - goto __pyx_L31_continue; + goto __pyx_L26_continue; - /* "gensim/models/word2vec_inner.pyx":585 + /* "gensim/models/word2vec_inner.pyx":580 * k = idx_end * for j in range(j, k): * if j == i: # <<<<<<<<<<<<<< * continue - * if hs: + * if c.hs: */ } - /* "gensim/models/word2vec_inner.pyx":587 + /* "gensim/models/word2vec_inner.pyx":582 * if j == i: * continue - * if hs: # <<<<<<<<<<<<<< - * fast_sentence_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], _alpha, work, word_locks, _compute_loss, &_running_training_loss) - * if negative: + * if c.hs: # <<<<<<<<<<<<<< + * w2v_fast_sentence_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: */ - __pyx_t_4 = (__pyx_v_hs != 0); - if (__pyx_t_4) { + __pyx_t_5 = (__pyx_v_c.hs != 0); + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":588 + /* "gensim/models/word2vec_inner.pyx":583 * continue - * if hs: - * fast_sentence_sg_hs(points[i], codes[i], 
codelens[i], syn0, syn1, size, indexes[j], _alpha, work, word_locks, _compute_loss, &_running_training_loss) # <<<<<<<<<<<<<< - * if negative: - * next_random = fast_sentence_sg_neg(negative, cum_table, cum_table_len, syn0, syn1neg, size, indexes[i], indexes[j], _alpha, work, next_random, word_locks, _compute_loss, &_running_training_loss) + * if c.hs: + * w2v_fast_sentence_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) # <<<<<<<<<<<<<< + * if c.negative: + * c.next_random = w2v_fast_sentence_sg_neg(c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, c.indexes[i], c.indexes[j], c.alpha, c.work, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) */ - __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), (__pyx_v_codelens[__pyx_v_i]), __pyx_v_syn0, __pyx_v_syn1, __pyx_v_size, (__pyx_v_indexes[__pyx_v_j]), __pyx_v__alpha, __pyx_v_work, __pyx_v_word_locks, __pyx_v__compute_loss, (&__pyx_v__running_training_loss)); + __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.syn0, __pyx_v_c.syn1, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.word_locks, __pyx_v_c.compute_loss, (&__pyx_v_c.running_training_loss)); - /* "gensim/models/word2vec_inner.pyx":587 + /* "gensim/models/word2vec_inner.pyx":582 * if j == i: * continue - * if hs: # <<<<<<<<<<<<<< - * fast_sentence_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], _alpha, work, word_locks, _compute_loss, &_running_training_loss) - * if negative: + * if c.hs: # <<<<<<<<<<<<<< + * w2v_fast_sentence_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: */ } - /* "gensim/models/word2vec_inner.pyx":589 - * if hs: - * fast_sentence_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], _alpha, work, word_locks, _compute_loss, &_running_training_loss) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_sentence_sg_neg(negative, cum_table, cum_table_len, syn0, syn1neg, size, indexes[i], indexes[j], _alpha, work, next_random, word_locks, _compute_loss, &_running_training_loss) + /* "gensim/models/word2vec_inner.pyx":584 + * if c.hs: + * w2v_fast_sentence_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = w2v_fast_sentence_sg_neg(c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, c.indexes[i], c.indexes[j], c.alpha, c.work, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) * */ - __pyx_t_4 = (__pyx_v_negative != 0); - if (__pyx_t_4) { + __pyx_t_5 = (__pyx_v_c.negative != 0); + if (__pyx_t_5) { - /* "gensim/models/word2vec_inner.pyx":590 - * fast_sentence_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], _alpha, work, word_locks, _compute_loss, &_running_training_loss) - * if negative: - * next_random = fast_sentence_sg_neg(negative, cum_table, cum_table_len, syn0, syn1neg, size, indexes[i], indexes[j], _alpha, work, next_random, word_locks, _compute_loss, &_running_training_loss) # <<<<<<<<<<<<<< + /* 
"gensim/models/word2vec_inner.pyx":585 + * w2v_fast_sentence_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: + * c.next_random = w2v_fast_sentence_sg_neg(c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, c.indexes[i], c.indexes[j], c.alpha, c.work, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) # <<<<<<<<<<<<<< * - * model.running_training_loss = _running_training_loss + * model.running_training_loss = c.running_training_loss */ - __pyx_v_next_random = __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_sg_neg(__pyx_v_negative, __pyx_v_cum_table, __pyx_v_cum_table_len, __pyx_v_syn0, __pyx_v_syn1neg, __pyx_v_size, (__pyx_v_indexes[__pyx_v_i]), (__pyx_v_indexes[__pyx_v_j]), __pyx_v__alpha, __pyx_v_work, __pyx_v_next_random, __pyx_v_word_locks, __pyx_v__compute_loss, (&__pyx_v__running_training_loss)); + __pyx_v_c.next_random = __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.syn0, __pyx_v_c.syn1neg, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_i]), (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_c.next_random, __pyx_v_c.word_locks, __pyx_v_c.compute_loss, (&__pyx_v_c.running_training_loss)); - /* "gensim/models/word2vec_inner.pyx":589 - * if hs: - * fast_sentence_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], _alpha, work, word_locks, _compute_loss, &_running_training_loss) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_sentence_sg_neg(negative, cum_table, cum_table_len, syn0, syn1neg, size, indexes[i], indexes[j], _alpha, work, next_random, word_locks, _compute_loss, &_running_training_loss) + /* "gensim/models/word2vec_inner.pyx":584 + * if c.hs: + * w2v_fast_sentence_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = w2v_fast_sentence_sg_neg(c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, c.indexes[i], c.indexes[j], c.alpha, c.work, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) * */ } - __pyx_L31_continue:; + __pyx_L26_continue:; } } } } - /* "gensim/models/word2vec_inner.pyx":573 + /* "gensim/models/word2vec_inner.pyx":568 * * # release GIL & train on all sentences * with nogil: # <<<<<<<<<<<<<< * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] + * idx_start = c.sentence_idx[sent_idx] */ /*finally:*/ { /*normal exit:*/{ @@ -5244,39 +5358,39 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON __Pyx_FastGIL_Forget(); Py_BLOCK_THREADS #endif - goto __pyx_L24; + goto __pyx_L19; } - __pyx_L24:; + __pyx_L19:; } } - /* "gensim/models/word2vec_inner.pyx":592 - * next_random = fast_sentence_sg_neg(negative, cum_table, cum_table_len, syn0, syn1neg, size, indexes[i], indexes[j], _alpha, work, next_random, word_locks, _compute_loss, &_running_training_loss) + /* "gensim/models/word2vec_inner.pyx":587 + * c.next_random = w2v_fast_sentence_sg_neg(c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, c.indexes[i], c.indexes[j], c.alpha, c.work, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) * - * model.running_training_loss = _running_training_loss # <<<<<<<<<<<<<< + * 
model.running_training_loss = c.running_training_loss # <<<<<<<<<<<<<< * return effective_words * */ - __pyx_t_14 = PyFloat_FromDouble(__pyx_v__running_training_loss); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 592, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - if (__Pyx_PyObject_SetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss, __pyx_t_14) < 0) __PYX_ERR(0, 592, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_11 = PyFloat_FromDouble(__pyx_v_c.running_training_loss); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 587, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss, __pyx_t_11) < 0) __PYX_ERR(0, 587, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; - /* "gensim/models/word2vec_inner.pyx":593 + /* "gensim/models/word2vec_inner.pyx":588 * - * model.running_training_loss = _running_training_loss + * model.running_training_loss = c.running_training_loss * return effective_words # <<<<<<<<<<<<<< * * */ __Pyx_XDECREF(__pyx_r); - __pyx_t_14 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 593, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_r = __pyx_t_14; - __pyx_t_14 = 0; + __pyx_t_11 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 588, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_r = __pyx_t_11; + __pyx_t_11 = 0; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":465 + /* "gensim/models/word2vec_inner.pyx":500 * * * def train_batch_sg(model, sentences, alpha, _work, compute_loss): # <<<<<<<<<<<<<< @@ -5287,11 +5401,11 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_14); - __Pyx_XDECREF(__pyx_t_18); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_16); __Pyx_AddTraceback("gensim.models.word2vec_inner.train_batch_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; @@ -5305,7 +5419,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_train_batch_sg(CYTHON return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":596 +/* "gensim/models/word2vec_inner.pyx":591 * * * def train_batch_cbow(model, sentences, alpha, _work, _neu1, compute_loss): # <<<<<<<<<<<<<< @@ -5358,35 +5472,35 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_3train_batch_cbow(PyO case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_sentences)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 1); __PYX_ERR(0, 596, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 1); __PYX_ERR(0, 591, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_alpha)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 2); __PYX_ERR(0, 596, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 2); __PYX_ERR(0, 591, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 3); __PYX_ERR(0, 596, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 3); 
__PYX_ERR(0, 591, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 4); __PYX_ERR(0, 596, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 4); __PYX_ERR(0, 591, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 5: if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_compute_loss)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 5); __PYX_ERR(0, 596, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, 5); __PYX_ERR(0, 591, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_batch_cbow") < 0)) __PYX_ERR(0, 596, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "train_batch_cbow") < 0)) __PYX_ERR(0, 591, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 6) { goto __pyx_L5_argtuple_error; @@ -5407,7 +5521,7 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_3train_batch_cbow(PyO } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 596, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("train_batch_cbow", 1, 6, 6, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 591, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("gensim.models.word2vec_inner.train_batch_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); @@ -5421,22 +5535,7 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_3train_batch_cbow(PyO } static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_sentences, PyObject *__pyx_v_alpha, PyObject *__pyx_v__work, PyObject *__pyx_v__neu1, PyObject *__pyx_v_compute_loss) { - int __pyx_v_hs; - int __pyx_v_negative; - int __pyx_v_sample; - int __pyx_v_cbow_mean; - int __pyx_v__compute_loss; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__running_training_loss; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_word_locks; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_v__alpha; - int __pyx_v_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_reduced_windows[0x2710]; - int __pyx_v_sentence_idx[(0x2710 + 1)]; - int __pyx_v_window; + struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig __pyx_v_c; int __pyx_v_i; int __pyx_v_j; int __pyx_v_k; @@ -5445,14 +5544,6 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT int __pyx_v_sent_idx; int __pyx_v_idx_start; int __pyx_v_idx_end; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1neg; - __pyx_t_5numpy_uint32_t *__pyx_v_cum_table; - unsigned PY_LONG_LONG __pyx_v_cum_table_len; - unsigned PY_LONG_LONG __pyx_v_next_random; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1; PyObject *__pyx_v_vlookup = NULL; PyObject *__pyx_v_sent = 
NULL; PyObject *__pyx_v_token = NULL; @@ -5461,189 +5552,30 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; + struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config __pyx_t_2; PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t __pyx_t_5; - Py_ssize_t __pyx_t_6; + Py_ssize_t __pyx_t_4; + PyObject *(*__pyx_t_5)(PyObject *); + int __pyx_t_6; int __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - unsigned PY_LONG_LONG __pyx_t_9; - PyObject *(*__pyx_t_10)(PyObject *); - Py_ssize_t __pyx_t_11; - PyObject *(*__pyx_t_12)(PyObject *); - PyObject *__pyx_t_13 = NULL; - PyObject *__pyx_t_14 = NULL; - __pyx_t_5numpy_uint32_t __pyx_t_15; - Py_ssize_t __pyx_t_16; - int __pyx_t_17; - PyObject *__pyx_t_18 = NULL; + Py_ssize_t __pyx_t_8; + PyObject *(*__pyx_t_9)(PyObject *); + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + __pyx_t_5numpy_uint32_t __pyx_t_13; + Py_ssize_t __pyx_t_14; + int __pyx_t_15; + int __pyx_t_16; + PyObject *__pyx_t_17 = NULL; + int __pyx_t_18; int __pyx_t_19; int __pyx_t_20; int __pyx_t_21; - int __pyx_t_22; __Pyx_RefNannySetupContext("train_batch_cbow", 0); - /* "gensim/models/word2vec_inner.pyx":622 - * and were not discarded by negative sampling). - * """ - * cdef int hs = model.hs # <<<<<<<<<<<<<< - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_hs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 622, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 622, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_hs = __pyx_t_2; - - /* "gensim/models/word2vec_inner.pyx":623 - * """ - * cdef int hs = model.hs - * cdef int negative = model.negative # <<<<<<<<<<<<<< - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int cbow_mean = model.cbow_mean - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_negative); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 623, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 623, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_negative = __pyx_t_2; - - /* "gensim/models/word2vec_inner.pyx":624 - * cdef int hs = model.hs - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) # <<<<<<<<<<<<<< - * cdef int cbow_mean = model.cbow_mean - * - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 624, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sample); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 624, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_int_0, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 624, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 624, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_sample = __pyx_t_2; - - /* 
"gensim/models/word2vec_inner.pyx":625 - * cdef int negative = model.negative - * cdef int sample = (model.vocabulary.sample != 0) - * cdef int cbow_mean = model.cbow_mean # <<<<<<<<<<<<<< - * - * cdef int _compute_loss = (1 if compute_loss == True else 0) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_cbow_mean); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 625, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 625, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_cbow_mean = __pyx_t_2; - - /* "gensim/models/word2vec_inner.pyx":627 - * cdef int cbow_mean = model.cbow_mean - * - * cdef int _compute_loss = (1 if compute_loss == True else 0) # <<<<<<<<<<<<<< - * cdef REAL_t _running_training_loss = model.running_training_loss - * - */ - __pyx_t_1 = PyObject_RichCompare(__pyx_v_compute_loss, Py_True, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 627, __pyx_L1_error) - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 627, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_4) { - __pyx_t_2 = 1; - } else { - __pyx_t_2 = 0; - } - __pyx_v__compute_loss = __pyx_t_2; - - /* "gensim/models/word2vec_inner.pyx":628 - * - * cdef int _compute_loss = (1 if compute_loss == True else 0) - * cdef REAL_t _running_training_loss = model.running_training_loss # <<<<<<<<<<<<<< - * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 628, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_t_1); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 628, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v__running_training_loss = __pyx_t_5; - - /* "gensim/models/word2vec_inner.pyx":630 - * cdef REAL_t _running_training_loss = model.running_training_loss - * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) # <<<<<<<<<<<<<< - * cdef REAL_t *word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) - * cdef REAL_t *work - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 630, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 630, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 630, __pyx_L1_error) - __pyx_v_syn0 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "gensim/models/word2vec_inner.pyx":631 - * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) - * cdef REAL_t *word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) # <<<<<<<<<<<<<< - * cdef REAL_t *work - * cdef REAL_t _alpha = alpha - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 631, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vectors_lockf); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 631, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 631, __pyx_L1_error) - __pyx_v_word_locks = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "gensim/models/word2vec_inner.pyx":633 - * cdef REAL_t *word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) - * cdef REAL_t *work - * cdef REAL_t _alpha = alpha # <<<<<<<<<<<<<< - * cdef int size = model.wv.vector_size - * - */ - __pyx_t_5 = __pyx_PyFloat_AsFloat(__pyx_v_alpha); if (unlikely((__pyx_t_5 == ((npy_float32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 633, __pyx_L1_error) - __pyx_v__alpha = __pyx_t_5; - - /* "gensim/models/word2vec_inner.pyx":634 - * cdef REAL_t *work - * cdef REAL_t _alpha = alpha - * cdef int size = model.wv.vector_size # <<<<<<<<<<<<<< - * - * cdef int codelens[MAX_SENTENCE_LEN] - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 634, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 634, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 634, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_size = __pyx_t_2; - - /* "gensim/models/word2vec_inner.pyx":640 - * cdef np.uint32_t reduced_windows[MAX_SENTENCE_LEN] - * cdef int sentence_idx[MAX_SENTENCE_LEN + 1] - * cdef int window = model.window # <<<<<<<<<<<<<< - * - * cdef int i, j, k - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 640, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 640, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_window = __pyx_t_2; - - /* "gensim/models/word2vec_inner.pyx":643 - * + /* "gensim/models/word2vec_inner.pyx":619 + * cdef Word2VecConfig c * cdef int i, j, k * cdef int effective_words = 0, effective_sentences = 0 # <<<<<<<<<<<<<< * cdef int sent_idx, idx_start, idx_end @@ -5652,284 +5584,114 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT __pyx_v_effective_words = 0; __pyx_v_effective_sentences = 0; - /* "gensim/models/word2vec_inner.pyx":658 - * cdef unsigned long long next_random - * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - */ - __pyx_t_4 = (__pyx_v_hs != 0); - if (__pyx_t_4) { - - /* "gensim/models/word2vec_inner.pyx":659 - * - * if hs: - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< - * - * if negative: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 659, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_syn1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 659, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 659, __pyx_L1_error) - __pyx_v_syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t 
*)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "gensim/models/word2vec_inner.pyx":658 - * cdef unsigned long long next_random - * - * if hs: # <<<<<<<<<<<<<< - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - */ - } - - /* "gensim/models/word2vec_inner.pyx":661 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - */ - __pyx_t_4 = (__pyx_v_negative != 0); - if (__pyx_t_4) { - - /* "gensim/models/word2vec_inner.pyx":662 - * - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) # <<<<<<<<<<<<<< - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 662, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1neg); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 662, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 662, __pyx_L1_error) - __pyx_v_syn1neg = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "gensim/models/word2vec_inner.pyx":663 - * if negative: - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) # <<<<<<<<<<<<<< - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 663, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 663, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 663, __pyx_L1_error) - __pyx_v_cum_table = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_1))); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "gensim/models/word2vec_inner.pyx":664 - * syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) # <<<<<<<<<<<<<< - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_vocabulary); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 664, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_cum_table); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 664, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_6 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 664, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_cum_table_len = __pyx_t_6; - - /* "gensim/models/word2vec_inner.pyx":661 - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) - * - * if negative: # <<<<<<<<<<<<<< - * syn1neg = 
(np.PyArray_DATA(model.trainables.syn1neg)) - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - */ - } - - /* "gensim/models/word2vec_inner.pyx":665 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - * - */ - __pyx_t_7 = (__pyx_v_negative != 0); - if (!__pyx_t_7) { - } else { - __pyx_t_4 = __pyx_t_7; - goto __pyx_L6_bool_binop_done; - } - __pyx_t_7 = (__pyx_v_sample != 0); - __pyx_t_4 = __pyx_t_7; - __pyx_L6_bool_binop_done:; - if (__pyx_t_4) { - - /* "gensim/models/word2vec_inner.pyx":666 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< - * - * # convert Python structures to primitive types, so we can release the GIL - */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyNumber_Multiply(__pyx_int_16777216, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_random); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_randint); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_8 = PyNumber_Add(__pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_9 = __Pyx_PyInt_As_unsigned_PY_LONG_LONG(__pyx_t_8); if (unlikely((__pyx_t_9 == (unsigned PY_LONG_LONG)-1) && PyErr_Occurred())) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_next_random = __pyx_t_9; - - /* "gensim/models/word2vec_inner.pyx":665 - * cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: # <<<<<<<<<<<<<< - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - * - */ - } - - /* "gensim/models/word2vec_inner.pyx":669 - * - * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< - * neu1 = np.PyArray_DATA(_neu1) + /* "gensim/models/word2vec_inner.pyx":622 + * cdef int sent_idx, idx_start, idx_end * - */ - if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 669, __pyx_L1_error) - __pyx_v_work = 
((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); - - /* "gensim/models/word2vec_inner.pyx":670 - * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) - * neu1 = np.PyArray_DATA(_neu1) # <<<<<<<<<<<<<< + * init_w2v_config(&c, model, alpha, compute_loss, _work, _neu1) # <<<<<<<<<<<<<< * * # prepare C structures so we can go "full C" and release the Python GIL */ - if (!(likely(((__pyx_v__neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 670, __pyx_L1_error) - __pyx_v_neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__neu1))); + __pyx_t_2.__pyx_n = 1; + __pyx_t_2._neu1 = __pyx_v__neu1; + __pyx_t_1 = __pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config((&__pyx_v_c), __pyx_v_model, __pyx_v_alpha, __pyx_v_compute_loss, __pyx_v__work, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 622, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":673 + /* "gensim/models/word2vec_inner.pyx":625 * * # prepare C structures so we can go "full C" and release the Python GIL * vlookup = model.wv.vocab # <<<<<<<<<<<<<< - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: */ - __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 673, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 673, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 625, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vocab); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 625, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_v_vlookup = __pyx_t_3; __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":674 + /* "gensim/models/word2vec_inner.pyx":626 * # prepare C structures so we can go "full C" and release the Python GIL * vlookup = model.wv.vocab - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 # <<<<<<<<<<<<<< * for sent in sentences: * if not sent: */ - (__pyx_v_sentence_idx[0]) = 0; + (__pyx_v_c.sentence_idx[0]) = 0; - /* "gensim/models/word2vec_inner.pyx":675 + /* "gensim/models/word2vec_inner.pyx":627 * vlookup = model.wv.vocab - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: # <<<<<<<<<<<<<< * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged */ if (likely(PyList_CheckExact(__pyx_v_sentences)) || PyTuple_CheckExact(__pyx_v_sentences)) { - __pyx_t_3 = __pyx_v_sentences; __Pyx_INCREF(__pyx_t_3); __pyx_t_6 = 0; - __pyx_t_10 = NULL; + __pyx_t_3 = __pyx_v_sentences; __Pyx_INCREF(__pyx_t_3); __pyx_t_4 = 0; + __pyx_t_5 = NULL; } else { - __pyx_t_6 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_sentences); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 675, __pyx_L1_error) + __pyx_t_4 = -1; __pyx_t_3 = 
PyObject_GetIter(__pyx_v_sentences); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 627, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_10 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 675, __pyx_L1_error) + __pyx_t_5 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 627, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_10)) { + if (likely(!__pyx_t_5)) { if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_3)) break; + if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_3)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_6); __Pyx_INCREF(__pyx_t_8); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 675, __pyx_L1_error) + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 627, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 675, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 627, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } else { - if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_3)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_8 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_6); __Pyx_INCREF(__pyx_t_8); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 675, __pyx_L1_error) + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_1); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 627, __pyx_L1_error) #else - __pyx_t_8 = PySequence_ITEM(__pyx_t_3, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 675, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = PySequence_ITEM(__pyx_t_3, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 627, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); #endif } } else { - __pyx_t_8 = __pyx_t_10(__pyx_t_3); - if (unlikely(!__pyx_t_8)) { + __pyx_t_1 = __pyx_t_5(__pyx_t_3); + if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 675, __pyx_L1_error) + else __PYX_ERR(0, 627, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_1); } - __Pyx_XDECREF_SET(__pyx_v_sent, __pyx_t_8); - __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_sent, __pyx_t_1); + __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":676 - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + /* "gensim/models/word2vec_inner.pyx":628 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: * if not sent: # <<<<<<<<<<<<<< * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: */ - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_sent); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 676, __pyx_L1_error) - __pyx_t_7 = ((!__pyx_t_4) != 0); + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_v_sent); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 628, __pyx_L1_error) + __pyx_t_7 = ((!__pyx_t_6) != 0); if (__pyx_t_7) { - /* "gensim/models/word2vec_inner.pyx":677 + /* "gensim/models/word2vec_inner.pyx":629 * for sent in sentences: * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged # <<<<<<<<<<<<<< * for token in sent: * word = 
vlookup[token] if token in vlookup else None */ - goto __pyx_L8_continue; + goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":676 - * sentence_idx[0] = 0 # indices of the first sentence always start at 0 + /* "gensim/models/word2vec_inner.pyx":628 + * c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 * for sent in sentences: * if not sent: # <<<<<<<<<<<<<< * continue # ignore empty sentences; leave effective_sentences unchanged @@ -5937,7 +5699,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT */ } - /* "gensim/models/word2vec_inner.pyx":678 + /* "gensim/models/word2vec_inner.pyx":630 * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: # <<<<<<<<<<<<<< @@ -5945,241 +5707,241 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT * if word is None: */ if (likely(PyList_CheckExact(__pyx_v_sent)) || PyTuple_CheckExact(__pyx_v_sent)) { - __pyx_t_8 = __pyx_v_sent; __Pyx_INCREF(__pyx_t_8); __pyx_t_11 = 0; - __pyx_t_12 = NULL; + __pyx_t_1 = __pyx_v_sent; __Pyx_INCREF(__pyx_t_1); __pyx_t_8 = 0; + __pyx_t_9 = NULL; } else { - __pyx_t_11 = -1; __pyx_t_8 = PyObject_GetIter(__pyx_v_sent); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 678, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_12 = Py_TYPE(__pyx_t_8)->tp_iternext; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 678, __pyx_L1_error) + __pyx_t_8 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_sent); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 630, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 630, __pyx_L1_error) } for (;;) { - if (likely(!__pyx_t_12)) { - if (likely(PyList_CheckExact(__pyx_t_8))) { - if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_8)) break; + if (likely(!__pyx_t_9)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_8 >= PyList_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_8, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 678, __pyx_L1_error) + __pyx_t_10 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_8); __Pyx_INCREF(__pyx_t_10); __pyx_t_8++; if (unlikely(0 < 0)) __PYX_ERR(0, 630, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_8, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 678, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = PySequence_ITEM(__pyx_t_1, __pyx_t_8); __pyx_t_8++; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 630, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); #endif } else { - if (__pyx_t_11 >= PyTuple_GET_SIZE(__pyx_t_8)) break; + if (__pyx_t_8 >= PyTuple_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_8, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 678, __pyx_L1_error) + __pyx_t_10 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_8); __Pyx_INCREF(__pyx_t_10); __pyx_t_8++; if (unlikely(0 < 0)) __PYX_ERR(0, 630, __pyx_L1_error) #else - __pyx_t_1 = PySequence_ITEM(__pyx_t_8, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 678, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = PySequence_ITEM(__pyx_t_1, __pyx_t_8); __pyx_t_8++; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 630, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); #endif } } else { - __pyx_t_1 = __pyx_t_12(__pyx_t_8); - if (unlikely(!__pyx_t_1)) { + __pyx_t_10 = 
__pyx_t_9(__pyx_t_1); + if (unlikely(!__pyx_t_10)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 678, __pyx_L1_error) + else __PYX_ERR(0, 630, __pyx_L1_error) } break; } - __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_10); } - __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_1); - __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_10); + __pyx_t_10 = 0; - /* "gensim/models/word2vec_inner.pyx":679 + /* "gensim/models/word2vec_inner.pyx":631 * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: * word = vlookup[token] if token in vlookup else None # <<<<<<<<<<<<<< * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window */ - __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 679, __pyx_L1_error) + __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 631, __pyx_L1_error) if ((__pyx_t_7 != 0)) { - __pyx_t_13 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 679, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_1 = __pyx_t_13; - __pyx_t_13 = 0; + __pyx_t_11 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 631, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_10 = __pyx_t_11; + __pyx_t_11 = 0; } else { __Pyx_INCREF(Py_None); - __pyx_t_1 = Py_None; + __pyx_t_10 = Py_None; } - __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_1); - __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_10); + __pyx_t_10 = 0; - /* "gensim/models/word2vec_inner.pyx":680 + /* "gensim/models/word2vec_inner.pyx":632 * for token in sent: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): */ __pyx_t_7 = (__pyx_v_word == Py_None); - __pyx_t_4 = (__pyx_t_7 != 0); - if (__pyx_t_4) { + __pyx_t_6 = (__pyx_t_7 != 0); + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":681 + /* "gensim/models/word2vec_inner.pyx":633 * word = vlookup[token] if token in vlookup else None * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window # <<<<<<<<<<<<<< - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue */ - goto __pyx_L11_continue; + goto __pyx_L6_continue; - /* "gensim/models/word2vec_inner.pyx":680 + /* "gensim/models/word2vec_inner.pyx":632 * for token in sent: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): */ } - /* "gensim/models/word2vec_inner.pyx":682 + /* "gensim/models/word2vec_inner.pyx":634 * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): # 
<<<<<<<<<<<<<< + * if c.sample and word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index */ - __pyx_t_7 = (__pyx_v_sample != 0); + __pyx_t_7 = (__pyx_v_c.sample != 0); if (__pyx_t_7) { } else { - __pyx_t_4 = __pyx_t_7; - goto __pyx_L15_bool_binop_done; + __pyx_t_6 = __pyx_t_7; + goto __pyx_L10_bool_binop_done; } - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_13 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_next_random))); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_13); - __pyx_t_14 = PyObject_RichCompare(__pyx_t_1, __pyx_t_13, Py_LT); __Pyx_XGOTREF(__pyx_t_14); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; - __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_14); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 682, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - __pyx_t_4 = __pyx_t_7; - __pyx_L15_bool_binop_done:; - if (__pyx_t_4) { + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_sample_int); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 634, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_f_6gensim_6models_14word2vec_inner_random_int32((&__pyx_v_c.next_random))); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 634, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = PyObject_RichCompare(__pyx_t_10, __pyx_t_11, Py_LT); __Pyx_XGOTREF(__pyx_t_12); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 634, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_12); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 634, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_6 = __pyx_t_7; + __pyx_L10_bool_binop_done:; + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":683 + /* "gensim/models/word2vec_inner.pyx":635 * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue # <<<<<<<<<<<<<< - * indexes[effective_words] = word.index - * if hs: + * c.indexes[effective_words] = word.index + * if c.hs: */ - goto __pyx_L11_continue; + goto __pyx_L6_continue; - /* "gensim/models/word2vec_inner.pyx":682 + /* "gensim/models/word2vec_inner.pyx":634 * if word is None: * continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - * if sample and word.sample_int < random_int32(&next_random): # <<<<<<<<<<<<<< + * if c.sample and word.sample_int < random_int32(&c.next_random): # <<<<<<<<<<<<<< * continue - * indexes[effective_words] = word.index + * c.indexes[effective_words] = word.index */ } - /* "gensim/models/word2vec_inner.pyx":684 - * if sample and word.sample_int < random_int32(&next_random): + /* "gensim/models/word2vec_inner.pyx":636 + * if c.sample and word.sample_int < random_int32(&c.next_random): * continue - * indexes[effective_words] = word.index # <<<<<<<<<<<<<< - * if hs: - * codelens[effective_words] = len(word.code) + * c.indexes[effective_words] = word.index # <<<<<<<<<<<<<< + * if c.hs: + 
* c.codelens[effective_words] = len(word.code) */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 684, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_15 = __Pyx_PyInt_As_npy_uint32(__pyx_t_14); if (unlikely((__pyx_t_15 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 684, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - (__pyx_v_indexes[__pyx_v_effective_words]) = __pyx_t_15; + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 636, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_13 = __Pyx_PyInt_As_npy_uint32(__pyx_t_12); if (unlikely((__pyx_t_13 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 636, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + (__pyx_v_c.indexes[__pyx_v_effective_words]) = __pyx_t_13; - /* "gensim/models/word2vec_inner.pyx":685 + /* "gensim/models/word2vec_inner.pyx":637 * continue - * indexes[effective_words] = word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) + * c.indexes[effective_words] = word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) */ - __pyx_t_4 = (__pyx_v_hs != 0); - if (__pyx_t_4) { + __pyx_t_6 = (__pyx_v_c.hs != 0); + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":686 - * indexes[effective_words] = word.index - * if hs: - * codelens[effective_words] = len(word.code) # <<<<<<<<<<<<<< - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) - */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 686, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_t_16 = PyObject_Length(__pyx_t_14); if (unlikely(__pyx_t_16 == ((Py_ssize_t)-1))) __PYX_ERR(0, 686, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; - (__pyx_v_codelens[__pyx_v_effective_words]) = ((int)__pyx_t_16); - - /* "gensim/models/word2vec_inner.pyx":687 - * if hs: - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":638 + * c.indexes[effective_words] = word.index + * if c.hs: + * c.codelens[effective_words] = len(word.code) # <<<<<<<<<<<<<< + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) + */ + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 638, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_14 = PyObject_Length(__pyx_t_12); if (unlikely(__pyx_t_14 == ((Py_ssize_t)-1))) __PYX_ERR(0, 638, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + (__pyx_v_c.codelens[__pyx_v_effective_words]) = ((int)__pyx_t_14); + + /* "gensim/models/word2vec_inner.pyx":639 + * if c.hs: + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 687, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - if (!(likely(((__pyx_t_14) == Py_None) || 
likely(__Pyx_TypeTest(__pyx_t_14, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 687, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_14))); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 639, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + if (!(likely(((__pyx_t_12) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_12, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 639, __pyx_L1_error) + (__pyx_v_c.codes[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_12))); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - /* "gensim/models/word2vec_inner.pyx":688 - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":640 + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: */ - __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 688, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - if (!(likely(((__pyx_t_14) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_14, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 688, __pyx_L1_error) - (__pyx_v_points[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_14))); - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 640, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + if (!(likely(((__pyx_t_12) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_12, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 640, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_effective_words]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_12))); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - /* "gensim/models/word2vec_inner.pyx":685 + /* "gensim/models/word2vec_inner.pyx":637 * continue - * indexes[effective_words] = word.index - * if hs: # <<<<<<<<<<<<<< - * codelens[effective_words] = len(word.code) - * codes[effective_words] = np.PyArray_DATA(word.code) + * c.indexes[effective_words] = word.index + * if c.hs: # <<<<<<<<<<<<<< + * c.codelens[effective_words] = len(word.code) + * c.codes[effective_words] = np.PyArray_DATA(word.code) */ } - /* "gensim/models/word2vec_inner.pyx":689 - * codes[effective_words] = np.PyArray_DATA(word.code) - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":641 + * c.codes[effective_words] = np.PyArray_DATA(word.code) + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 # <<<<<<<<<<<<<< * if effective_words == MAX_SENTENCE_LEN: * break # TODO: log warning, tally overflow? */ __pyx_v_effective_words = (__pyx_v_effective_words + 1); - /* "gensim/models/word2vec_inner.pyx":690 - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":642 + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? 
* */ - __pyx_t_4 = ((__pyx_v_effective_words == 0x2710) != 0); - if (__pyx_t_4) { + __pyx_t_6 = ((__pyx_v_effective_words == 0x2710) != 0); + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":691 + /* "gensim/models/word2vec_inner.pyx":643 * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: * break # TODO: log warning, tally overflow? # <<<<<<<<<<<<<< * * # keep track of which words go into which sentence, so we don't train */ - goto __pyx_L12_break; + goto __pyx_L7_break; - /* "gensim/models/word2vec_inner.pyx":690 - * points[effective_words] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":642 + * c.points[effective_words] = np.PyArray_DATA(word.point) * effective_words += 1 * if effective_words == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< * break # TODO: log warning, tally overflow? @@ -6187,57 +5949,57 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT */ } - /* "gensim/models/word2vec_inner.pyx":678 + /* "gensim/models/word2vec_inner.pyx":630 * if not sent: * continue # ignore empty sentences; leave effective_sentences unchanged * for token in sent: # <<<<<<<<<<<<<< * word = vlookup[token] if token in vlookup else None * if word is None: */ - __pyx_L11_continue:; + __pyx_L6_continue:; } - __pyx_L12_break:; - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_L7_break:; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":696 + /* "gensim/models/word2vec_inner.pyx":648 * # across sentence boundaries. * # indices of sentence number X are between tp_iternext; if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 703, __pyx_L1_error) + __pyx_t_4 = -1; __pyx_t_12 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 655, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_5 = Py_TYPE(__pyx_t_12)->tp_iternext; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 655, __pyx_L1_error) } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; for (;;) { - if (likely(!__pyx_t_10)) { - if (likely(PyList_CheckExact(__pyx_t_14))) { - if (__pyx_t_6 >= PyList_GET_SIZE(__pyx_t_14)) break; + if (likely(!__pyx_t_5)) { + if (likely(PyList_CheckExact(__pyx_t_12))) { + if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_12)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_14, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 703, __pyx_L1_error) + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_12, __pyx_t_4); __Pyx_INCREF(__pyx_t_3); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 655, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_14, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 703, __pyx_L1_error) + __pyx_t_3 = PySequence_ITEM(__pyx_t_12, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 655, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); #endif } else { - if (__pyx_t_6 >= PyTuple_GET_SIZE(__pyx_t_14)) break; + if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_12)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_14, __pyx_t_6); __Pyx_INCREF(__pyx_t_3); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 703, __pyx_L1_error) + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_12, __pyx_t_4); __Pyx_INCREF(__pyx_t_3); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 655, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_14, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 703, __pyx_L1_error) + __pyx_t_3 = PySequence_ITEM(__pyx_t_12, __pyx_t_4); __pyx_t_4++; if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 655, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); #endif } } else { - __pyx_t_3 = __pyx_t_10(__pyx_t_14); + __pyx_t_3 = __pyx_t_5(__pyx_t_12); if (unlikely(!__pyx_t_3)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 703, __pyx_L1_error) + else __PYX_ERR(0, 655, __pyx_L1_error) } break; } @@ -6368,35 +6130,35 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT } __Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_3); __pyx_t_3 = 0; - __pyx_v_i = __pyx_t_2; - __pyx_t_2 = (__pyx_t_2 + 1); + __pyx_v_i = __pyx_t_15; + __pyx_t_15 = (__pyx_t_15 + 1); - /* "gensim/models/word2vec_inner.pyx":704 + /* "gensim/models/word2vec_inner.pyx":656 * # precompute "reduced window" offsets in a single randint() call - * for i, item in enumerate(model.random.randint(0, window, effective_words)): - * reduced_windows[i] = item # <<<<<<<<<<<<<< + * for i, item in enumerate(model.random.randint(0, c.window, effective_words)): + * c.reduced_windows[i] = item # <<<<<<<<<<<<<< * * # release GIL & train on all sentences */ - __pyx_t_15 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_15 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 704, __pyx_L1_error) - (__pyx_v_reduced_windows[__pyx_v_i]) = __pyx_t_15; + __pyx_t_13 = __Pyx_PyInt_As_npy_uint32(__pyx_v_item); if (unlikely((__pyx_t_13 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 656, __pyx_L1_error) + (__pyx_v_c.reduced_windows[__pyx_v_i]) = __pyx_t_13; - /* "gensim/models/word2vec_inner.pyx":703 + /* "gensim/models/word2vec_inner.pyx":655 * * # precompute "reduced window" offsets in a single randint() call - * for i, item in enumerate(model.random.randint(0, window, effective_words)): # <<<<<<<<<<<<<< - * reduced_windows[i] = item + * for i, item in enumerate(model.random.randint(0, c.window, effective_words)): # <<<<<<<<<<<<<< + * c.reduced_windows[i] = item * */ } - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - /* "gensim/models/word2vec_inner.pyx":707 + /* "gensim/models/word2vec_inner.pyx":659 * * # release GIL & train on all sentences * with nogil: # <<<<<<<<<<<<<< * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] + * idx_start = c.sentence_idx[sent_idx] */ { #ifdef WITH_THREAD @@ -6406,174 +6168,174 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT #endif /*try:*/ { - /* "gensim/models/word2vec_inner.pyx":708 + /* "gensim/models/word2vec_inner.pyx":660 * # release GIL & train on all sentences * with nogil: * for sent_idx in range(effective_sentences): # <<<<<<<<<<<<<< - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] */ - __pyx_t_2 = __pyx_v_effective_sentences; - __pyx_t_17 = __pyx_t_2; - for (__pyx_t_19 = 0; __pyx_t_19 < __pyx_t_17; __pyx_t_19+=1) { - __pyx_v_sent_idx = __pyx_t_19; + __pyx_t_15 = __pyx_v_effective_sentences; + __pyx_t_16 = __pyx_t_15; + for (__pyx_t_18 = 0; __pyx_t_18 < __pyx_t_16; __pyx_t_18+=1) { + __pyx_v_sent_idx = __pyx_t_18; - /* "gensim/models/word2vec_inner.pyx":709 + /* "gensim/models/word2vec_inner.pyx":661 * with nogil: * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] # <<<<<<<<<<<<<< - * idx_end = sentence_idx[sent_idx + 1] + * idx_start = c.sentence_idx[sent_idx] # 
<<<<<<<<<<<<<< + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): */ - __pyx_v_idx_start = (__pyx_v_sentence_idx[__pyx_v_sent_idx]); + __pyx_v_idx_start = (__pyx_v_c.sentence_idx[__pyx_v_sent_idx]); - /* "gensim/models/word2vec_inner.pyx":710 + /* "gensim/models/word2vec_inner.pyx":662 * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] # <<<<<<<<<<<<<< * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] */ - __pyx_v_idx_end = (__pyx_v_sentence_idx[(__pyx_v_sent_idx + 1)]); + __pyx_v_idx_end = (__pyx_v_c.sentence_idx[(__pyx_v_sent_idx + 1)]); - /* "gensim/models/word2vec_inner.pyx":711 - * idx_start = sentence_idx[sent_idx] - * idx_end = sentence_idx[sent_idx + 1] + /* "gensim/models/word2vec_inner.pyx":663 + * idx_start = c.sentence_idx[sent_idx] + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): # <<<<<<<<<<<<<< - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: */ - __pyx_t_20 = __pyx_v_idx_end; - __pyx_t_21 = __pyx_t_20; - for (__pyx_t_22 = __pyx_v_idx_start; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { - __pyx_v_i = __pyx_t_22; + __pyx_t_19 = __pyx_v_idx_end; + __pyx_t_20 = __pyx_t_19; + for (__pyx_t_21 = __pyx_v_idx_start; __pyx_t_21 < __pyx_t_20; __pyx_t_21+=1) { + __pyx_v_i = __pyx_t_21; - /* "gensim/models/word2vec_inner.pyx":712 - * idx_end = sentence_idx[sent_idx + 1] + /* "gensim/models/word2vec_inner.pyx":664 + * idx_end = c.sentence_idx[sent_idx + 1] * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] # <<<<<<<<<<<<<< + * j = i - c.window + c.reduced_windows[i] # <<<<<<<<<<<<<< * if j < idx_start: * j = idx_start */ - __pyx_v_j = ((__pyx_v_i - __pyx_v_window) + (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_j = ((__pyx_v_i - __pyx_v_c.window) + (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/word2vec_inner.pyx":713 + /* "gensim/models/word2vec_inner.pyx":665 * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: # <<<<<<<<<<<<<< * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ - __pyx_t_4 = ((__pyx_v_j < __pyx_v_idx_start) != 0); - if (__pyx_t_4) { + __pyx_t_6 = ((__pyx_v_j < __pyx_v_idx_start) != 0); + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":714 - * j = i - window + reduced_windows[i] + /* "gensim/models/word2vec_inner.pyx":666 + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: * j = idx_start # <<<<<<<<<<<<<< - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: */ __pyx_v_j = __pyx_v_idx_start; - /* "gensim/models/word2vec_inner.pyx":713 + /* "gensim/models/word2vec_inner.pyx":665 * for i in range(idx_start, idx_end): - * j = i - window + reduced_windows[i] + * j = i - c.window + c.reduced_windows[i] * if j < idx_start: # <<<<<<<<<<<<<< * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] */ } - /* "gensim/models/word2vec_inner.pyx":715 + /* "gensim/models/word2vec_inner.pyx":667 * if j < idx_start: * j = idx_start - * k = i + window + 1 - reduced_windows[i] # <<<<<<<<<<<<<< + * k = i + c.window + 1 
- c.reduced_windows[i] # <<<<<<<<<<<<<< * if k > idx_end: * k = idx_end */ - __pyx_v_k = (((__pyx_v_i + __pyx_v_window) + 1) - (__pyx_v_reduced_windows[__pyx_v_i])); + __pyx_v_k = (((__pyx_v_i + __pyx_v_c.window) + 1) - (__pyx_v_c.reduced_windows[__pyx_v_i])); - /* "gensim/models/word2vec_inner.pyx":716 + /* "gensim/models/word2vec_inner.pyx":668 * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: # <<<<<<<<<<<<<< * k = idx_end - * if hs: + * if c.hs: */ - __pyx_t_4 = ((__pyx_v_k > __pyx_v_idx_end) != 0); - if (__pyx_t_4) { + __pyx_t_6 = ((__pyx_v_k > __pyx_v_idx_end) != 0); + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":717 - * k = i + window + 1 - reduced_windows[i] + /* "gensim/models/word2vec_inner.pyx":669 + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: * k = idx_end # <<<<<<<<<<<<<< - * if hs: - * fast_sentence_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, _alpha, work, i, j, k, cbow_mean, word_locks, _compute_loss, &_running_training_loss) + * if c.hs: + * w2v_fast_sentence_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) */ __pyx_v_k = __pyx_v_idx_end; - /* "gensim/models/word2vec_inner.pyx":716 + /* "gensim/models/word2vec_inner.pyx":668 * j = idx_start - * k = i + window + 1 - reduced_windows[i] + * k = i + c.window + 1 - c.reduced_windows[i] * if k > idx_end: # <<<<<<<<<<<<<< * k = idx_end - * if hs: + * if c.hs: */ } - /* "gensim/models/word2vec_inner.pyx":718 + /* "gensim/models/word2vec_inner.pyx":670 * if k > idx_end: * k = idx_end - * if hs: # <<<<<<<<<<<<<< - * fast_sentence_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, _alpha, work, i, j, k, cbow_mean, word_locks, _compute_loss, &_running_training_loss) - * if negative: + * if c.hs: # <<<<<<<<<<<<<< + * w2v_fast_sentence_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: */ - __pyx_t_4 = (__pyx_v_hs != 0); - if (__pyx_t_4) { + __pyx_t_6 = (__pyx_v_c.hs != 0); + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":719 + /* "gensim/models/word2vec_inner.pyx":671 * k = idx_end - * if hs: - * fast_sentence_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, _alpha, work, i, j, k, cbow_mean, word_locks, _compute_loss, &_running_training_loss) # <<<<<<<<<<<<<< - * if negative: - * next_random = fast_sentence_cbow_neg(negative, cum_table, cum_table_len, codelens, neu1, syn0, syn1neg, size, indexes, _alpha, work, i, j, k, cbow_mean, next_random, word_locks, _compute_loss, &_running_training_loss) + * if c.hs: + * w2v_fast_sentence_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) # <<<<<<<<<<<<<< + * if c.negative: + * c.next_random = w2v_fast_sentence_cbow_neg(c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, c.syn1neg, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) */ - __pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), __pyx_v_codelens, __pyx_v_neu1, __pyx_v_syn0, 
__pyx_v_syn1, __pyx_v_size, __pyx_v_indexes, __pyx_v__alpha, __pyx_v_work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_cbow_mean, __pyx_v_word_locks, __pyx_v__compute_loss, (&__pyx_v__running_training_loss)); + __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0, __pyx_v_c.syn1, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean, __pyx_v_c.word_locks, __pyx_v_c.compute_loss, (&__pyx_v_c.running_training_loss)); - /* "gensim/models/word2vec_inner.pyx":718 + /* "gensim/models/word2vec_inner.pyx":670 * if k > idx_end: * k = idx_end - * if hs: # <<<<<<<<<<<<<< - * fast_sentence_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, _alpha, work, i, j, k, cbow_mean, word_locks, _compute_loss, &_running_training_loss) - * if negative: + * if c.hs: # <<<<<<<<<<<<<< + * w2v_fast_sentence_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: */ } - /* "gensim/models/word2vec_inner.pyx":720 - * if hs: - * fast_sentence_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, _alpha, work, i, j, k, cbow_mean, word_locks, _compute_loss, &_running_training_loss) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_sentence_cbow_neg(negative, cum_table, cum_table_len, codelens, neu1, syn0, syn1neg, size, indexes, _alpha, work, i, j, k, cbow_mean, next_random, word_locks, _compute_loss, &_running_training_loss) + /* "gensim/models/word2vec_inner.pyx":672 + * if c.hs: + * w2v_fast_sentence_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = w2v_fast_sentence_cbow_neg(c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, c.syn1neg, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) * */ - __pyx_t_4 = (__pyx_v_negative != 0); - if (__pyx_t_4) { + __pyx_t_6 = (__pyx_v_c.negative != 0); + if (__pyx_t_6) { - /* "gensim/models/word2vec_inner.pyx":721 - * fast_sentence_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, _alpha, work, i, j, k, cbow_mean, word_locks, _compute_loss, &_running_training_loss) - * if negative: - * next_random = fast_sentence_cbow_neg(negative, cum_table, cum_table_len, codelens, neu1, syn0, syn1neg, size, indexes, _alpha, work, i, j, k, cbow_mean, next_random, word_locks, _compute_loss, &_running_training_loss) # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":673 + * w2v_fast_sentence_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: + * c.next_random = w2v_fast_sentence_cbow_neg(c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, c.syn1neg, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) # <<<<<<<<<<<<<< * - * model.running_training_loss = _running_training_loss + * model.running_training_loss = c.running_training_loss */ - __pyx_v_next_random = 
__pyx_f_6gensim_6models_14word2vec_inner_fast_sentence_cbow_neg(__pyx_v_negative, __pyx_v_cum_table, __pyx_v_cum_table_len, __pyx_v_codelens, __pyx_v_neu1, __pyx_v_syn0, __pyx_v_syn1neg, __pyx_v_size, __pyx_v_indexes, __pyx_v__alpha, __pyx_v_work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_cbow_mean, __pyx_v_next_random, __pyx_v_word_locks, __pyx_v__compute_loss, (&__pyx_v__running_training_loss)); + __pyx_v_c.next_random = __pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_neg(__pyx_v_c.negative, __pyx_v_c.cum_table, __pyx_v_c.cum_table_len, __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0, __pyx_v_c.syn1neg, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.alpha, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean, __pyx_v_c.next_random, __pyx_v_c.word_locks, __pyx_v_c.compute_loss, (&__pyx_v_c.running_training_loss)); - /* "gensim/models/word2vec_inner.pyx":720 - * if hs: - * fast_sentence_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, _alpha, work, i, j, k, cbow_mean, word_locks, _compute_loss, &_running_training_loss) - * if negative: # <<<<<<<<<<<<<< - * next_random = fast_sentence_cbow_neg(negative, cum_table, cum_table_len, codelens, neu1, syn0, syn1neg, size, indexes, _alpha, work, i, j, k, cbow_mean, next_random, word_locks, _compute_loss, &_running_training_loss) + /* "gensim/models/word2vec_inner.pyx":672 + * if c.hs: + * w2v_fast_sentence_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) + * if c.negative: # <<<<<<<<<<<<<< + * c.next_random = w2v_fast_sentence_cbow_neg(c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, c.syn1neg, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) * */ } @@ -6581,12 +6343,12 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT } } - /* "gensim/models/word2vec_inner.pyx":707 + /* "gensim/models/word2vec_inner.pyx":659 * * # release GIL & train on all sentences * with nogil: # <<<<<<<<<<<<<< * for sent_idx in range(effective_sentences): - * idx_start = sentence_idx[sent_idx] + * idx_start = c.sentence_idx[sent_idx] */ /*finally:*/ { /*normal exit:*/{ @@ -6594,39 +6356,39 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT __Pyx_FastGIL_Forget(); Py_BLOCK_THREADS #endif - goto __pyx_L24; + goto __pyx_L19; } - __pyx_L24:; + __pyx_L19:; } } - /* "gensim/models/word2vec_inner.pyx":723 - * next_random = fast_sentence_cbow_neg(negative, cum_table, cum_table_len, codelens, neu1, syn0, syn1neg, size, indexes, _alpha, work, i, j, k, cbow_mean, next_random, word_locks, _compute_loss, &_running_training_loss) + /* "gensim/models/word2vec_inner.pyx":675 + * c.next_random = w2v_fast_sentence_cbow_neg(c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, c.syn1neg, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) * - * model.running_training_loss = _running_training_loss # <<<<<<<<<<<<<< + * model.running_training_loss = c.running_training_loss # <<<<<<<<<<<<<< * return effective_words * */ - __pyx_t_14 = PyFloat_FromDouble(__pyx_v__running_training_loss); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 723, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - if (__Pyx_PyObject_SetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss, 
__pyx_t_14) < 0) __PYX_ERR(0, 723, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_12 = PyFloat_FromDouble(__pyx_v_c.running_training_loss); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 675, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_model, __pyx_n_s_running_training_loss, __pyx_t_12) < 0) __PYX_ERR(0, 675, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; - /* "gensim/models/word2vec_inner.pyx":724 + /* "gensim/models/word2vec_inner.pyx":676 * - * model.running_training_loss = _running_training_loss + * model.running_training_loss = c.running_training_loss * return effective_words # <<<<<<<<<<<<<< * * */ __Pyx_XDECREF(__pyx_r); - __pyx_t_14 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 724, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_14); - __pyx_r = __pyx_t_14; - __pyx_t_14 = 0; + __pyx_t_12 = __Pyx_PyInt_From_int(__pyx_v_effective_words); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 676, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_r = __pyx_t_12; + __pyx_t_12 = 0; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":596 + /* "gensim/models/word2vec_inner.pyx":591 * * * def train_batch_cbow(model, sentences, alpha, _work, _neu1, compute_loss): # <<<<<<<<<<<<<< @@ -6638,10 +6400,10 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_XDECREF(__pyx_t_13); - __Pyx_XDECREF(__pyx_t_14); - __Pyx_XDECREF(__pyx_t_18); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_17); __Pyx_AddTraceback("gensim.models.word2vec_inner.train_batch_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; @@ -6655,7 +6417,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_2train_batch_cbow(CYT return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":727 +/* "gensim/models/word2vec_inner.pyx":679 * * * def score_sentence_sg(model, sentence, _work): # <<<<<<<<<<<<<< @@ -6699,17 +6461,17 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_5score_sentence_sg(Py case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_sentence)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("score_sentence_sg", 1, 3, 3, 1); __PYX_ERR(0, 727, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("score_sentence_sg", 1, 3, 3, 1); __PYX_ERR(0, 679, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("score_sentence_sg", 1, 3, 3, 2); __PYX_ERR(0, 727, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("score_sentence_sg", 1, 3, 3, 2); __PYX_ERR(0, 679, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "score_sentence_sg") < 0)) __PYX_ERR(0, 727, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "score_sentence_sg") < 0)) __PYX_ERR(0, 679, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { goto __pyx_L5_argtuple_error; @@ -6724,7 +6486,7 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_5score_sentence_sg(Py } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("score_sentence_sg", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 727, 
__pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("score_sentence_sg", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 679, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("gensim.models.word2vec_inner.score_sentence_sg", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); @@ -6738,20 +6500,12 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_5score_sentence_sg(Py } static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_sentence, PyObject *__pyx_v__work) { - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work; - int __pyx_v_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - int __pyx_v_sentence_len; - int __pyx_v_window; + struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig __pyx_v_c; int __pyx_v_i; int __pyx_v_j; int __pyx_v_k; long __pyx_v_result; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; + int __pyx_v_sentence_len; PyObject *__pyx_v_vlookup = NULL; PyObject *__pyx_v_token = NULL; PyObject *__pyx_v_word = NULL; @@ -6774,102 +6528,102 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY int __pyx_t_15; __Pyx_RefNannySetupContext("score_sentence_sg", 0); - /* "gensim/models/word2vec_inner.pyx":751 + /* "gensim/models/word2vec_inner.pyx":703 * """ + * cdef Word2VecConfig c + * c.syn0 = (np.PyArray_DATA(model.wv.vectors)) # <<<<<<<<<<<<<< + * c.size = model.wv.vector_size * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) # <<<<<<<<<<<<<< - * cdef REAL_t *work - * cdef int size = model.wv.vector_size */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 751, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 703, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 751, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 703, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 751, __pyx_L1_error) - __pyx_v_syn0 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_2))); + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 703, __pyx_L1_error) + __pyx_v_c.syn0 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_2))); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "gensim/models/word2vec_inner.pyx":753 - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) - * cdef REAL_t *work - * cdef int size = model.wv.vector_size # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":704 + * cdef Word2VecConfig c + * c.syn0 = (np.PyArray_DATA(model.wv.vectors)) + * c.size = model.wv.vector_size # <<<<<<<<<<<<<< * - * cdef int codelens[MAX_SENTENCE_LEN] + * c.window = model.window */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_2)) 
__PYX_ERR(0, 753, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 704, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 753, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 704, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 753, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 704, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_size = __pyx_t_3; + __pyx_v_c.size = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":758 - * cdef np.uint32_t indexes[MAX_SENTENCE_LEN] - * cdef int sentence_len - * cdef int window = model.window # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":706 + * c.size = model.wv.vector_size + * + * c.window = model.window # <<<<<<<<<<<<<< * * cdef int i, j, k */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 758, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 706, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 758, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 706, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_window = __pyx_t_3; + __pyx_v_c.window = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":761 + /* "gensim/models/word2vec_inner.pyx":709 * * cdef int i, j, k * cdef long result = 0 # <<<<<<<<<<<<<< + * cdef int sentence_len * - * cdef REAL_t *syn1 */ __pyx_v_result = 0; - /* "gensim/models/word2vec_inner.pyx":767 - * cdef np.uint8_t *codes[MAX_SENTENCE_LEN] + /* "gensim/models/word2vec_inner.pyx":712 + * cdef int sentence_len * - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< + * c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< * * # convert Python structures to primitive types, so we can release the GIL */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 767, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 712, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 767, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 712, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 767, __pyx_L1_error) - __pyx_v_syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_2))); + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 712, __pyx_L1_error) + __pyx_v_c.syn1 = 
((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_2))); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "gensim/models/word2vec_inner.pyx":770 + /* "gensim/models/word2vec_inner.pyx":715 * * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< + * c.work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< * * vlookup = model.wv.vocab */ - if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 770, __pyx_L1_error) - __pyx_v_work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); + if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 715, __pyx_L1_error) + __pyx_v_c.work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); - /* "gensim/models/word2vec_inner.pyx":772 - * work = np.PyArray_DATA(_work) + /* "gensim/models/word2vec_inner.pyx":717 + * c.work = np.PyArray_DATA(_work) * * vlookup = model.wv.vocab # <<<<<<<<<<<<<< * i = 0 * for token in sentence: */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 772, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 717, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_vocab); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 772, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_vocab); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 717, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_v_vlookup = __pyx_t_1; __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":773 + /* "gensim/models/word2vec_inner.pyx":718 * * vlookup = model.wv.vocab * i = 0 # <<<<<<<<<<<<<< @@ -6878,7 +6632,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY */ __pyx_v_i = 0; - /* "gensim/models/word2vec_inner.pyx":774 + /* "gensim/models/word2vec_inner.pyx":719 * vlookup = model.wv.vocab * i = 0 * for token in sentence: # <<<<<<<<<<<<<< @@ -6889,26 +6643,26 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY __pyx_t_1 = __pyx_v_sentence; __Pyx_INCREF(__pyx_t_1); __pyx_t_4 = 0; __pyx_t_5 = NULL; } else { - __pyx_t_4 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_sentence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 774, __pyx_L1_error) + __pyx_t_4 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_sentence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 719, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_5 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 774, __pyx_L1_error) + __pyx_t_5 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 719, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_5)) { if (likely(PyList_CheckExact(__pyx_t_1))) { if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_2 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_2); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 774, __pyx_L1_error) + __pyx_t_2 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_2); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 719, __pyx_L1_error) #else - __pyx_t_2 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 774, __pyx_L1_error) + __pyx_t_2 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 719, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); #endif } else { if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_2); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 774, __pyx_L1_error) + __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_2); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 719, __pyx_L1_error) #else - __pyx_t_2 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 774, __pyx_L1_error) + __pyx_t_2 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 719, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); #endif } @@ -6918,7 +6672,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 774, __pyx_L1_error) + else __PYX_ERR(0, 719, __pyx_L1_error) } break; } @@ -6927,16 +6681,16 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_2); __pyx_t_2 = 0; - /* "gensim/models/word2vec_inner.pyx":775 + /* "gensim/models/word2vec_inner.pyx":720 * i = 0 * for token in sentence: * word = vlookup[token] if token in vlookup else None # <<<<<<<<<<<<<< * if word is None: * continue # should drop the */ - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 775, __pyx_L1_error) + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 720, __pyx_L1_error) if ((__pyx_t_6 != 0)) { - __pyx_t_7 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 775, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 720, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_2 = __pyx_t_7; __pyx_t_7 = 0; @@ -6947,98 +6701,98 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_2); __pyx_t_2 = 0; - /* "gensim/models/word2vec_inner.pyx":776 + /* "gensim/models/word2vec_inner.pyx":721 * for token in sentence: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # should drop the - * indexes[i] = word.index + * c.indexes[i] = word.index */ __pyx_t_6 = (__pyx_v_word == Py_None); __pyx_t_8 = (__pyx_t_6 != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":777 + /* "gensim/models/word2vec_inner.pyx":722 * word = vlookup[token] if token in vlookup else None * if word is None: * continue # should drop the # <<<<<<<<<<<<<< - * indexes[i] = word.index - * codelens[i] = len(word.code) + * c.indexes[i] = word.index + * c.codelens[i] = len(word.code) */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":776 + /* "gensim/models/word2vec_inner.pyx":721 * for token in sentence: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # should drop the - * indexes[i] = word.index + * c.indexes[i] = word.index */ } - /* 
"gensim/models/word2vec_inner.pyx":778 + /* "gensim/models/word2vec_inner.pyx":723 * if word is None: * continue # should drop the - * indexes[i] = word.index # <<<<<<<<<<<<<< - * codelens[i] = len(word.code) - * codes[i] = np.PyArray_DATA(word.code) + * c.indexes[i] = word.index # <<<<<<<<<<<<<< + * c.codelens[i] = len(word.code) + * c.codes[i] = np.PyArray_DATA(word.code) */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 778, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 723, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - __pyx_t_9 = __Pyx_PyInt_As_npy_uint32(__pyx_t_2); if (unlikely((__pyx_t_9 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 778, __pyx_L1_error) + __pyx_t_9 = __Pyx_PyInt_As_npy_uint32(__pyx_t_2); if (unlikely((__pyx_t_9 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 723, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - (__pyx_v_indexes[__pyx_v_i]) = __pyx_t_9; + (__pyx_v_c.indexes[__pyx_v_i]) = __pyx_t_9; - /* "gensim/models/word2vec_inner.pyx":779 + /* "gensim/models/word2vec_inner.pyx":724 * continue # should drop the - * indexes[i] = word.index - * codelens[i] = len(word.code) # <<<<<<<<<<<<<< - * codes[i] = np.PyArray_DATA(word.code) - * points[i] = np.PyArray_DATA(word.point) + * c.indexes[i] = word.index + * c.codelens[i] = len(word.code) # <<<<<<<<<<<<<< + * c.codes[i] = np.PyArray_DATA(word.code) + * c.points[i] = np.PyArray_DATA(word.point) */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 779, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 724, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - __pyx_t_10 = PyObject_Length(__pyx_t_2); if (unlikely(__pyx_t_10 == ((Py_ssize_t)-1))) __PYX_ERR(0, 779, __pyx_L1_error) + __pyx_t_10 = PyObject_Length(__pyx_t_2); if (unlikely(__pyx_t_10 == ((Py_ssize_t)-1))) __PYX_ERR(0, 724, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - (__pyx_v_codelens[__pyx_v_i]) = ((int)__pyx_t_10); + (__pyx_v_c.codelens[__pyx_v_i]) = ((int)__pyx_t_10); - /* "gensim/models/word2vec_inner.pyx":780 - * indexes[i] = word.index - * codelens[i] = len(word.code) - * codes[i] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< - * points[i] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":725 + * c.indexes[i] = word.index + * c.codelens[i] = len(word.code) + * c.codes[i] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< + * c.points[i] = np.PyArray_DATA(word.point) * result += 1 */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 780, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 725, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 780, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_2))); + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 725, __pyx_L1_error) + (__pyx_v_c.codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_2))); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "gensim/models/word2vec_inner.pyx":781 - * 
codelens[i] = len(word.code) - * codes[i] = np.PyArray_DATA(word.code) - * points[i] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":726 + * c.codelens[i] = len(word.code) + * c.codes[i] = np.PyArray_DATA(word.code) + * c.points[i] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< * result += 1 * i += 1 */ - __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 781, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 726, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); - if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 781, __pyx_L1_error) - (__pyx_v_points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_2))); + if (!(likely(((__pyx_t_2) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_2, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 726, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_2))); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - /* "gensim/models/word2vec_inner.pyx":782 - * codes[i] = np.PyArray_DATA(word.code) - * points[i] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":727 + * c.codes[i] = np.PyArray_DATA(word.code) + * c.points[i] = np.PyArray_DATA(word.point) * result += 1 # <<<<<<<<<<<<<< * i += 1 * if i == MAX_SENTENCE_LEN: */ __pyx_v_result = (__pyx_v_result + 1); - /* "gensim/models/word2vec_inner.pyx":783 - * points[i] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":728 + * c.points[i] = np.PyArray_DATA(word.point) * result += 1 * i += 1 # <<<<<<<<<<<<<< * if i == MAX_SENTENCE_LEN: @@ -7046,7 +6800,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY */ __pyx_v_i = (__pyx_v_i + 1); - /* "gensim/models/word2vec_inner.pyx":784 + /* "gensim/models/word2vec_inner.pyx":729 * result += 1 * i += 1 * if i == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< @@ -7056,7 +6810,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY __pyx_t_8 = ((__pyx_v_i == 0x2710) != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":785 + /* "gensim/models/word2vec_inner.pyx":730 * i += 1 * if i == MAX_SENTENCE_LEN: * break # TODO: log warning, tally overflow? # <<<<<<<<<<<<<< @@ -7065,7 +6819,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY */ goto __pyx_L4_break; - /* "gensim/models/word2vec_inner.pyx":784 + /* "gensim/models/word2vec_inner.pyx":729 * result += 1 * i += 1 * if i == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< @@ -7074,7 +6828,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY */ } - /* "gensim/models/word2vec_inner.pyx":774 + /* "gensim/models/word2vec_inner.pyx":719 * vlookup = model.wv.vocab * i = 0 * for token in sentence: # <<<<<<<<<<<<<< @@ -7086,7 +6840,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY __pyx_L4_break:; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":786 + /* "gensim/models/word2vec_inner.pyx":731 * if i == MAX_SENTENCE_LEN: * break # TODO: log warning, tally overflow? 
* sentence_len = i # <<<<<<<<<<<<<< @@ -7095,21 +6849,21 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY */ __pyx_v_sentence_len = __pyx_v_i; - /* "gensim/models/word2vec_inner.pyx":789 + /* "gensim/models/word2vec_inner.pyx":734 * * # release GIL & train on the sentence - * work[0] = 0.0 # <<<<<<<<<<<<<< + * c.work[0] = 0.0 # <<<<<<<<<<<<<< * * with nogil: */ - (__pyx_v_work[0]) = 0.0; + (__pyx_v_c.work[0]) = 0.0; - /* "gensim/models/word2vec_inner.pyx":791 - * work[0] = 0.0 + /* "gensim/models/word2vec_inner.pyx":736 + * c.work[0] = 0.0 * * with nogil: # <<<<<<<<<<<<<< * for i in range(sentence_len): - * if codelens[i] == 0: + * if c.codelens[i] == 0: */ { #ifdef WITH_THREAD @@ -7119,11 +6873,11 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY #endif /*try:*/ { - /* "gensim/models/word2vec_inner.pyx":792 + /* "gensim/models/word2vec_inner.pyx":737 * * with nogil: * for i in range(sentence_len): # <<<<<<<<<<<<<< - * if codelens[i] == 0: + * if c.codelens[i] == 0: * continue */ __pyx_t_3 = __pyx_v_sentence_len; @@ -7131,83 +6885,83 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY for (__pyx_t_12 = 0; __pyx_t_12 < __pyx_t_11; __pyx_t_12+=1) { __pyx_v_i = __pyx_t_12; - /* "gensim/models/word2vec_inner.pyx":793 + /* "gensim/models/word2vec_inner.pyx":738 * with nogil: * for i in range(sentence_len): - * if codelens[i] == 0: # <<<<<<<<<<<<<< + * if c.codelens[i] == 0: # <<<<<<<<<<<<<< * continue - * j = i - window + * j = i - c.window */ - __pyx_t_8 = (((__pyx_v_codelens[__pyx_v_i]) == 0) != 0); + __pyx_t_8 = (((__pyx_v_c.codelens[__pyx_v_i]) == 0) != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":794 + /* "gensim/models/word2vec_inner.pyx":739 * for i in range(sentence_len): - * if codelens[i] == 0: + * if c.codelens[i] == 0: * continue # <<<<<<<<<<<<<< - * j = i - window + * j = i - c.window * if j < 0: */ goto __pyx_L10_continue; - /* "gensim/models/word2vec_inner.pyx":793 + /* "gensim/models/word2vec_inner.pyx":738 * with nogil: * for i in range(sentence_len): - * if codelens[i] == 0: # <<<<<<<<<<<<<< + * if c.codelens[i] == 0: # <<<<<<<<<<<<<< * continue - * j = i - window + * j = i - c.window */ } - /* "gensim/models/word2vec_inner.pyx":795 - * if codelens[i] == 0: + /* "gensim/models/word2vec_inner.pyx":740 + * if c.codelens[i] == 0: * continue - * j = i - window # <<<<<<<<<<<<<< + * j = i - c.window # <<<<<<<<<<<<<< * if j < 0: * j = 0 */ - __pyx_v_j = (__pyx_v_i - __pyx_v_window); + __pyx_v_j = (__pyx_v_i - __pyx_v_c.window); - /* "gensim/models/word2vec_inner.pyx":796 + /* "gensim/models/word2vec_inner.pyx":741 * continue - * j = i - window + * j = i - c.window * if j < 0: # <<<<<<<<<<<<<< * j = 0 - * k = i + window + 1 + * k = i + c.window + 1 */ __pyx_t_8 = ((__pyx_v_j < 0) != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":797 - * j = i - window + /* "gensim/models/word2vec_inner.pyx":742 + * j = i - c.window * if j < 0: * j = 0 # <<<<<<<<<<<<<< - * k = i + window + 1 + * k = i + c.window + 1 * if k > sentence_len: */ __pyx_v_j = 0; - /* "gensim/models/word2vec_inner.pyx":796 + /* "gensim/models/word2vec_inner.pyx":741 * continue - * j = i - window + * j = i - c.window * if j < 0: # <<<<<<<<<<<<<< * j = 0 - * k = i + window + 1 + * k = i + c.window + 1 */ } - /* "gensim/models/word2vec_inner.pyx":798 + /* "gensim/models/word2vec_inner.pyx":743 * if j < 0: * j = 0 - * k = i + window + 1 # <<<<<<<<<<<<<< + * k = i + c.window + 1 # <<<<<<<<<<<<<< * if k 
> sentence_len: * k = sentence_len */ - __pyx_v_k = ((__pyx_v_i + __pyx_v_window) + 1); + __pyx_v_k = ((__pyx_v_i + __pyx_v_c.window) + 1); - /* "gensim/models/word2vec_inner.pyx":799 + /* "gensim/models/word2vec_inner.pyx":744 * j = 0 - * k = i + window + 1 + * k = i + c.window + 1 * if k > sentence_len: # <<<<<<<<<<<<<< * k = sentence_len * for j in range(j, k): @@ -7215,29 +6969,29 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY __pyx_t_8 = ((__pyx_v_k > __pyx_v_sentence_len) != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":800 - * k = i + window + 1 + /* "gensim/models/word2vec_inner.pyx":745 + * k = i + c.window + 1 * if k > sentence_len: * k = sentence_len # <<<<<<<<<<<<<< * for j in range(j, k): - * if j == i or codelens[j] == 0: + * if j == i or c.codelens[j] == 0: */ __pyx_v_k = __pyx_v_sentence_len; - /* "gensim/models/word2vec_inner.pyx":799 + /* "gensim/models/word2vec_inner.pyx":744 * j = 0 - * k = i + window + 1 + * k = i + c.window + 1 * if k > sentence_len: # <<<<<<<<<<<<<< * k = sentence_len * for j in range(j, k): */ } - /* "gensim/models/word2vec_inner.pyx":801 + /* "gensim/models/word2vec_inner.pyx":746 * if k > sentence_len: * k = sentence_len * for j in range(j, k): # <<<<<<<<<<<<<< - * if j == i or codelens[j] == 0: + * if j == i or c.codelens[j] == 0: * continue */ __pyx_t_13 = __pyx_v_k; @@ -7245,12 +6999,12 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY for (__pyx_t_15 = __pyx_v_j; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { __pyx_v_j = __pyx_t_15; - /* "gensim/models/word2vec_inner.pyx":802 + /* "gensim/models/word2vec_inner.pyx":747 * k = sentence_len * for j in range(j, k): - * if j == i or codelens[j] == 0: # <<<<<<<<<<<<<< + * if j == i or c.codelens[j] == 0: # <<<<<<<<<<<<<< * continue - * score_pair_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], work) + * score_pair_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.work) */ __pyx_t_6 = ((__pyx_v_j == __pyx_v_i) != 0); if (!__pyx_t_6) { @@ -7258,49 +7012,49 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY __pyx_t_8 = __pyx_t_6; goto __pyx_L18_bool_binop_done; } - __pyx_t_6 = (((__pyx_v_codelens[__pyx_v_j]) == 0) != 0); + __pyx_t_6 = (((__pyx_v_c.codelens[__pyx_v_j]) == 0) != 0); __pyx_t_8 = __pyx_t_6; __pyx_L18_bool_binop_done:; if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":803 + /* "gensim/models/word2vec_inner.pyx":748 * for j in range(j, k): - * if j == i or codelens[j] == 0: + * if j == i or c.codelens[j] == 0: * continue # <<<<<<<<<<<<<< - * score_pair_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], work) + * score_pair_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.work) * */ goto __pyx_L15_continue; - /* "gensim/models/word2vec_inner.pyx":802 + /* "gensim/models/word2vec_inner.pyx":747 * k = sentence_len * for j in range(j, k): - * if j == i or codelens[j] == 0: # <<<<<<<<<<<<<< + * if j == i or c.codelens[j] == 0: # <<<<<<<<<<<<<< * continue - * score_pair_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], work) + * score_pair_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.work) */ } - /* "gensim/models/word2vec_inner.pyx":804 - * if j == i or codelens[j] == 0: + /* "gensim/models/word2vec_inner.pyx":749 + * if j == i or c.codelens[j] == 0: * continue - * score_pair_sg_hs(points[i], codes[i], 
codelens[i], syn0, syn1, size, indexes[j], work) # <<<<<<<<<<<<<< + * score_pair_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.work) # <<<<<<<<<<<<<< * - * return work[0] + * return c.work[0] */ - __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), (__pyx_v_codelens[__pyx_v_i]), __pyx_v_syn0, __pyx_v_syn1, __pyx_v_size, (__pyx_v_indexes[__pyx_v_j]), __pyx_v_work); + __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), (__pyx_v_c.codelens[__pyx_v_i]), __pyx_v_c.syn0, __pyx_v_c.syn1, __pyx_v_c.size, (__pyx_v_c.indexes[__pyx_v_j]), __pyx_v_c.work); __pyx_L15_continue:; } __pyx_L10_continue:; } } - /* "gensim/models/word2vec_inner.pyx":791 - * work[0] = 0.0 + /* "gensim/models/word2vec_inner.pyx":736 + * c.work[0] = 0.0 * * with nogil: # <<<<<<<<<<<<<< * for i in range(sentence_len): - * if codelens[i] == 0: + * if c.codelens[i] == 0: */ /*finally:*/ { /*normal exit:*/{ @@ -7314,21 +7068,21 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY } } - /* "gensim/models/word2vec_inner.pyx":806 - * score_pair_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], work) + /* "gensim/models/word2vec_inner.pyx":751 + * score_pair_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.work) * - * return work[0] # <<<<<<<<<<<<<< + * return c.work[0] # <<<<<<<<<<<<<< * * cdef void score_pair_sg_hs( */ __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyFloat_FromDouble((__pyx_v_work[0])); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 806, __pyx_L1_error) + __pyx_t_1 = PyFloat_FromDouble((__pyx_v_c.work[0])); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 751, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":727 + /* "gensim/models/word2vec_inner.pyx":679 * * * def score_sentence_sg(model, sentence, _work): # <<<<<<<<<<<<<< @@ -7352,8 +7106,8 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_4score_sentence_sg(CY return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":808 - * return work[0] +/* "gensim/models/word2vec_inner.pyx":753 + * return c.work[0] * * cdef void score_pair_sg_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, @@ -7373,7 +7127,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n int __pyx_t_5; long __pyx_t_6; - /* "gensim/models/word2vec_inner.pyx":814 + /* "gensim/models/word2vec_inner.pyx":759 * * cdef long long b * cdef long long row1 = word2_index * size, row2, sgn # <<<<<<<<<<<<<< @@ -7382,7 +7136,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n */ __pyx_v_row1 = (__pyx_v_word2_index * __pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":817 + /* "gensim/models/word2vec_inner.pyx":762 * cdef REAL_t f * * for b in range(codelen): # <<<<<<<<<<<<<< @@ -7394,7 +7148,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_b = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":818 + /* "gensim/models/word2vec_inner.pyx":763 * * for b in range(codelen): * row2 = word_point[b] * size # <<<<<<<<<<<<<< @@ -7403,7 +7157,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n */ __pyx_v_row2 = ((__pyx_v_word_point[__pyx_v_b]) * 
__pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":819 + /* "gensim/models/word2vec_inner.pyx":764 * for b in range(codelen): * row2 = word_point[b] * size * f = our_dot(&size, &syn0[row1], &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -7412,7 +7166,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n */ __pyx_v_f = __pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), (&(__pyx_v_syn0[__pyx_v_row1])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":820 + /* "gensim/models/word2vec_inner.pyx":765 * row2 = word_point[b] * size * f = our_dot(&size, &syn0[row1], &ONE, &syn1[row2], &ONE) * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 # <<<<<<<<<<<<<< @@ -7421,7 +7175,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n */ __pyx_v_sgn = __Pyx_pow_long(-1L, ((long)(__pyx_v_word_code[__pyx_v_b]))); - /* "gensim/models/word2vec_inner.pyx":821 + /* "gensim/models/word2vec_inner.pyx":766 * f = our_dot(&size, &syn0[row1], &ONE, &syn1[row2], &ONE) * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * f *= sgn # <<<<<<<<<<<<<< @@ -7430,7 +7184,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n */ __pyx_v_f = (__pyx_v_f * __pyx_v_sgn); - /* "gensim/models/word2vec_inner.pyx":822 + /* "gensim/models/word2vec_inner.pyx":767 * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * f *= sgn * if f <= -MAX_EXP or f >= MAX_EXP: # <<<<<<<<<<<<<< @@ -7448,7 +7202,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n __pyx_L6_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":823 + /* "gensim/models/word2vec_inner.pyx":768 * f *= sgn * if f <= -MAX_EXP or f >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -7457,7 +7211,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":822 + /* "gensim/models/word2vec_inner.pyx":767 * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * f *= sgn * if f <= -MAX_EXP or f >= MAX_EXP: # <<<<<<<<<<<<<< @@ -7466,7 +7220,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n */ } - /* "gensim/models/word2vec_inner.pyx":824 + /* "gensim/models/word2vec_inner.pyx":769 * if f <= -MAX_EXP or f >= MAX_EXP: * continue * f = LOG_TABLE[((f + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -7475,7 +7229,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_LOG_TABLE[((int)((__pyx_v_f + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":825 + /* "gensim/models/word2vec_inner.pyx":770 * continue * f = LOG_TABLE[((f + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * work[0] += f # <<<<<<<<<<<<<< @@ -7487,8 +7241,8 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n __pyx_L3_continue:; } - /* "gensim/models/word2vec_inner.pyx":808 - * return work[0] + /* "gensim/models/word2vec_inner.pyx":753 + * return c.work[0] * * cdef void score_pair_sg_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, @@ -7498,7 +7252,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_sg_hs(__pyx_t_5n /* function exit code */ } -/* "gensim/models/word2vec_inner.pyx":827 +/* 
"gensim/models/word2vec_inner.pyx":772 * work[0] += f * * def score_sentence_cbow(model, sentence, _work, _neu1): # <<<<<<<<<<<<<< @@ -7545,23 +7299,23 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_7score_sentence_cbow( case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_sentence)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("score_sentence_cbow", 1, 4, 4, 1); __PYX_ERR(0, 827, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("score_sentence_cbow", 1, 4, 4, 1); __PYX_ERR(0, 772, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_work)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("score_sentence_cbow", 1, 4, 4, 2); __PYX_ERR(0, 827, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("score_sentence_cbow", 1, 4, 4, 2); __PYX_ERR(0, 772, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_neu1)) != 0)) kw_args--; else { - __Pyx_RaiseArgtupleInvalid("score_sentence_cbow", 1, 4, 4, 3); __PYX_ERR(0, 827, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("score_sentence_cbow", 1, 4, 4, 3); __PYX_ERR(0, 772, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "score_sentence_cbow") < 0)) __PYX_ERR(0, 827, __pyx_L3_error) + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "score_sentence_cbow") < 0)) __PYX_ERR(0, 772, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 4) { goto __pyx_L5_argtuple_error; @@ -7578,7 +7332,7 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_7score_sentence_cbow( } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("score_sentence_cbow", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 827, __pyx_L3_error) + __Pyx_RaiseArgtupleInvalid("score_sentence_cbow", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 772, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("gensim.models.word2vec_inner.score_sentence_cbow", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); @@ -7592,25 +7346,15 @@ static PyObject *__pyx_pw_6gensim_6models_14word2vec_inner_7score_sentence_cbow( } static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_model, PyObject *__pyx_v_sentence, PyObject *__pyx_v__work, PyObject *__pyx_v__neu1) { - int __pyx_v_cbow_mean; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn0; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_work; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_neu1; - int __pyx_v_size; - int __pyx_v_codelens[0x2710]; - __pyx_t_5numpy_uint32_t __pyx_v_indexes[0x2710]; - int __pyx_v_sentence_len; - int __pyx_v_window; + struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig __pyx_v_c; int __pyx_v_i; int __pyx_v_j; int __pyx_v_k; long __pyx_v_result; - __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *__pyx_v_syn1; - __pyx_t_5numpy_uint32_t *__pyx_v_points[0x2710]; - __pyx_t_5numpy_uint8_t *__pyx_v_codes[0x2710]; PyObject *__pyx_v_vlookup = NULL; PyObject *__pyx_v_token = NULL; PyObject *__pyx_v_word = NULL; + int __pyx_v_sentence_len; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; @@ -7627,125 +7371,125 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( int 
__pyx_t_12; __Pyx_RefNannySetupContext("score_sentence_cbow", 0); - /* "gensim/models/word2vec_inner.pyx":852 - * - * """ - * cdef int cbow_mean = model.cbow_mean # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":799 + * cdef Word2VecConfig c * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) + * c.cbow_mean = model.cbow_mean # <<<<<<<<<<<<<< + * c.syn0 = (np.PyArray_DATA(model.wv.vectors)) + * c.size = model.wv.vector_size */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_cbow_mean); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 852, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_cbow_mean); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 799, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 852, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 799, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_cbow_mean = __pyx_t_2; + __pyx_v_c.cbow_mean = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":854 - * cdef int cbow_mean = model.cbow_mean + /* "gensim/models/word2vec_inner.pyx":800 * - * cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) # <<<<<<<<<<<<<< - * cdef REAL_t *work - * cdef REAL_t *neu1 + * c.cbow_mean = model.cbow_mean + * c.syn0 = (np.PyArray_DATA(model.wv.vectors)) # <<<<<<<<<<<<<< + * c.size = model.wv.vector_size + * c.window = model.window */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 854, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 800, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 854, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_vectors); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 800, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 854, __pyx_L1_error) - __pyx_v_syn0 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 800, __pyx_L1_error) + __pyx_v_c.syn0 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":857 - * cdef REAL_t *work - * cdef REAL_t *neu1 - * cdef int size = model.wv.vector_size # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":801 + * c.cbow_mean = model.cbow_mean + * c.syn0 = (np.PyArray_DATA(model.wv.vectors)) + * c.size = model.wv.vector_size # <<<<<<<<<<<<<< + * c.window = model.window * - * cdef int codelens[MAX_SENTENCE_LEN] */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 857, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 801, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 857, __pyx_L1_error) + __pyx_t_1 = 
__Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vector_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 801, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 857, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 801, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_size = __pyx_t_2; + __pyx_v_c.size = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":862 - * cdef np.uint32_t indexes[MAX_SENTENCE_LEN] - * cdef int sentence_len - * cdef int window = model.window # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":802 + * c.syn0 = (np.PyArray_DATA(model.wv.vectors)) + * c.size = model.wv.vector_size + * c.window = model.window # <<<<<<<<<<<<<< * * cdef int i, j, k */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 862, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_window); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 802, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 862, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 802, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_window = __pyx_t_2; + __pyx_v_c.window = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":865 + /* "gensim/models/word2vec_inner.pyx":805 * * cdef int i, j, k * cdef long result = 0 # <<<<<<<<<<<<<< * - * # For hierarchical softmax + * c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) */ __pyx_v_result = 0; - /* "gensim/models/word2vec_inner.pyx":872 - * cdef np.uint8_t *codes[MAX_SENTENCE_LEN] + /* "gensim/models/word2vec_inner.pyx":807 + * cdef long result = 0 * - * syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< + * c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) # <<<<<<<<<<<<<< * * # convert Python structures to primitive types, so we can release the GIL */ - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 872, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_trainables); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 807, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 872, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_syn1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 807, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 872, __pyx_L1_error) - __pyx_v_syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 807, __pyx_L1_error) + __pyx_v_c.syn1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":875 + /* "gensim/models/word2vec_inner.pyx":810 * * # convert Python structures to primitive types, so we 
can release the GIL - * work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< - * neu1 = np.PyArray_DATA(_neu1) + * c.work = np.PyArray_DATA(_work) # <<<<<<<<<<<<<< + * c.neu1 = np.PyArray_DATA(_neu1) * */ - if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 875, __pyx_L1_error) - __pyx_v_work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); + if (!(likely(((__pyx_v__work) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__work, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 810, __pyx_L1_error) + __pyx_v_c.work = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__work))); - /* "gensim/models/word2vec_inner.pyx":876 + /* "gensim/models/word2vec_inner.pyx":811 * # convert Python structures to primitive types, so we can release the GIL - * work = np.PyArray_DATA(_work) - * neu1 = np.PyArray_DATA(_neu1) # <<<<<<<<<<<<<< + * c.work = np.PyArray_DATA(_work) + * c.neu1 = np.PyArray_DATA(_neu1) # <<<<<<<<<<<<<< * * vlookup = model.wv.vocab */ - if (!(likely(((__pyx_v__neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 876, __pyx_L1_error) - __pyx_v_neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__neu1))); + if (!(likely(((__pyx_v__neu1) == Py_None) || likely(__Pyx_TypeTest(__pyx_v__neu1, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 811, __pyx_L1_error) + __pyx_v_c.neu1 = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)PyArray_DATA(((PyArrayObject *)__pyx_v__neu1))); - /* "gensim/models/word2vec_inner.pyx":878 - * neu1 = np.PyArray_DATA(_neu1) + /* "gensim/models/word2vec_inner.pyx":813 + * c.neu1 = np.PyArray_DATA(_neu1) * * vlookup = model.wv.vocab # <<<<<<<<<<<<<< * i = 0 * for token in sentence: */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 878, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_model, __pyx_n_s_wv); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 813, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vocab); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 878, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_vocab); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 813, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_v_vlookup = __pyx_t_1; __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":879 + /* "gensim/models/word2vec_inner.pyx":814 * * vlookup = model.wv.vocab * i = 0 # <<<<<<<<<<<<<< @@ -7754,7 +7498,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( */ __pyx_v_i = 0; - /* "gensim/models/word2vec_inner.pyx":880 + /* "gensim/models/word2vec_inner.pyx":815 * vlookup = model.wv.vocab * i = 0 * for token in sentence: # <<<<<<<<<<<<<< @@ -7765,26 +7509,26 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( __pyx_t_1 = __pyx_v_sentence; __Pyx_INCREF(__pyx_t_1); __pyx_t_4 = 0; __pyx_t_5 = NULL; } else { - __pyx_t_4 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_sentence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 880, __pyx_L1_error) + __pyx_t_4 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_sentence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 815, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_5 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 880, __pyx_L1_error) + 
__pyx_t_5 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 815, __pyx_L1_error) } for (;;) { if (likely(!__pyx_t_5)) { if (likely(PyList_CheckExact(__pyx_t_1))) { if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_3); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 880, __pyx_L1_error) + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_3); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 815, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 880, __pyx_L1_error) + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 815, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); #endif } else { if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_1)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_3); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 880, __pyx_L1_error) + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_3); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 815, __pyx_L1_error) #else - __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 880, __pyx_L1_error) + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 815, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); #endif } @@ -7794,7 +7538,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); - else __PYX_ERR(0, 880, __pyx_L1_error) + else __PYX_ERR(0, 815, __pyx_L1_error) } break; } @@ -7803,16 +7547,16 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( __Pyx_XDECREF_SET(__pyx_v_token, __pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":881 + /* "gensim/models/word2vec_inner.pyx":816 * i = 0 * for token in sentence: * word = vlookup[token] if token in vlookup else None # <<<<<<<<<<<<<< * if word is None: * continue # for score, should this be a default negative value? */ - __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 881, __pyx_L1_error) + __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_token, __pyx_v_vlookup, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 816, __pyx_L1_error) if ((__pyx_t_6 != 0)) { - __pyx_t_7 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 881, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_GetItem(__pyx_v_vlookup, __pyx_v_token); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 816, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_3 = __pyx_t_7; __pyx_t_7 = 0; @@ -7823,98 +7567,98 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( __Pyx_XDECREF_SET(__pyx_v_word, __pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":882 + /* "gensim/models/word2vec_inner.pyx":817 * for token in sentence: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # for score, should this be a default negative value? 
- * indexes[i] = word.index + * c.indexes[i] = word.index */ __pyx_t_6 = (__pyx_v_word == Py_None); __pyx_t_8 = (__pyx_t_6 != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":883 + /* "gensim/models/word2vec_inner.pyx":818 * word = vlookup[token] if token in vlookup else None * if word is None: * continue # for score, should this be a default negative value? # <<<<<<<<<<<<<< - * indexes[i] = word.index - * codelens[i] = len(word.code) + * c.indexes[i] = word.index + * c.codelens[i] = len(word.code) */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":882 + /* "gensim/models/word2vec_inner.pyx":817 * for token in sentence: * word = vlookup[token] if token in vlookup else None * if word is None: # <<<<<<<<<<<<<< * continue # for score, should this be a default negative value? - * indexes[i] = word.index + * c.indexes[i] = word.index */ } - /* "gensim/models/word2vec_inner.pyx":884 + /* "gensim/models/word2vec_inner.pyx":819 * if word is None: * continue # for score, should this be a default negative value? - * indexes[i] = word.index # <<<<<<<<<<<<<< - * codelens[i] = len(word.code) - * codes[i] = np.PyArray_DATA(word.code) + * c.indexes[i] = word.index # <<<<<<<<<<<<<< + * c.codelens[i] = len(word.code) + * c.codes[i] = np.PyArray_DATA(word.code) */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 884, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_index); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 819, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_9 = __Pyx_PyInt_As_npy_uint32(__pyx_t_3); if (unlikely((__pyx_t_9 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 884, __pyx_L1_error) + __pyx_t_9 = __Pyx_PyInt_As_npy_uint32(__pyx_t_3); if (unlikely((__pyx_t_9 == ((npy_uint32)-1)) && PyErr_Occurred())) __PYX_ERR(0, 819, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - (__pyx_v_indexes[__pyx_v_i]) = __pyx_t_9; + (__pyx_v_c.indexes[__pyx_v_i]) = __pyx_t_9; - /* "gensim/models/word2vec_inner.pyx":885 + /* "gensim/models/word2vec_inner.pyx":820 * continue # for score, should this be a default negative value? 
- * indexes[i] = word.index - * codelens[i] = len(word.code) # <<<<<<<<<<<<<< - * codes[i] = np.PyArray_DATA(word.code) - * points[i] = np.PyArray_DATA(word.point) + * c.indexes[i] = word.index + * c.codelens[i] = len(word.code) # <<<<<<<<<<<<<< + * c.codes[i] = np.PyArray_DATA(word.code) + * c.points[i] = np.PyArray_DATA(word.point) */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 885, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 820, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - __pyx_t_10 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_10 == ((Py_ssize_t)-1))) __PYX_ERR(0, 885, __pyx_L1_error) + __pyx_t_10 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_10 == ((Py_ssize_t)-1))) __PYX_ERR(0, 820, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - (__pyx_v_codelens[__pyx_v_i]) = ((int)__pyx_t_10); + (__pyx_v_c.codelens[__pyx_v_i]) = ((int)__pyx_t_10); - /* "gensim/models/word2vec_inner.pyx":886 - * indexes[i] = word.index - * codelens[i] = len(word.code) - * codes[i] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< - * points[i] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":821 + * c.indexes[i] = word.index + * c.codelens[i] = len(word.code) + * c.codes[i] = np.PyArray_DATA(word.code) # <<<<<<<<<<<<<< + * c.points[i] = np.PyArray_DATA(word.point) * result += 1 */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 886, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 821, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 886, __pyx_L1_error) - (__pyx_v_codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 821, __pyx_L1_error) + (__pyx_v_c.codes[__pyx_v_i]) = ((__pyx_t_5numpy_uint8_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":887 - * codelens[i] = len(word.code) - * codes[i] = np.PyArray_DATA(word.code) - * points[i] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":822 + * c.codelens[i] = len(word.code) + * c.codes[i] = np.PyArray_DATA(word.code) + * c.points[i] = np.PyArray_DATA(word.point) # <<<<<<<<<<<<<< * result += 1 * i += 1 */ - __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 887, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_word, __pyx_n_s_point); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 822, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); - if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 887, __pyx_L1_error) - (__pyx_v_points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 822, __pyx_L1_error) + (__pyx_v_c.points[__pyx_v_i]) = ((__pyx_t_5numpy_uint32_t *)PyArray_DATA(((PyArrayObject *)__pyx_t_3))); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - /* "gensim/models/word2vec_inner.pyx":888 - * codes[i] 
= np.PyArray_DATA(word.code) - * points[i] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":823 + * c.codes[i] = np.PyArray_DATA(word.code) + * c.points[i] = np.PyArray_DATA(word.point) * result += 1 # <<<<<<<<<<<<<< * i += 1 * if i == MAX_SENTENCE_LEN: */ __pyx_v_result = (__pyx_v_result + 1); - /* "gensim/models/word2vec_inner.pyx":889 - * points[i] = np.PyArray_DATA(word.point) + /* "gensim/models/word2vec_inner.pyx":824 + * c.points[i] = np.PyArray_DATA(word.point) * result += 1 * i += 1 # <<<<<<<<<<<<<< * if i == MAX_SENTENCE_LEN: @@ -7922,7 +7666,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( */ __pyx_v_i = (__pyx_v_i + 1); - /* "gensim/models/word2vec_inner.pyx":890 + /* "gensim/models/word2vec_inner.pyx":825 * result += 1 * i += 1 * if i == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< @@ -7932,7 +7676,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( __pyx_t_8 = ((__pyx_v_i == 0x2710) != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":891 + /* "gensim/models/word2vec_inner.pyx":826 * i += 1 * if i == MAX_SENTENCE_LEN: * break # TODO: log warning, tally overflow? # <<<<<<<<<<<<<< @@ -7941,7 +7685,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( */ goto __pyx_L4_break; - /* "gensim/models/word2vec_inner.pyx":890 + /* "gensim/models/word2vec_inner.pyx":825 * result += 1 * i += 1 * if i == MAX_SENTENCE_LEN: # <<<<<<<<<<<<<< @@ -7950,7 +7694,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( */ } - /* "gensim/models/word2vec_inner.pyx":880 + /* "gensim/models/word2vec_inner.pyx":815 * vlookup = model.wv.vocab * i = 0 * for token in sentence: # <<<<<<<<<<<<<< @@ -7962,7 +7706,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( __pyx_L4_break:; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":892 + /* "gensim/models/word2vec_inner.pyx":827 * if i == MAX_SENTENCE_LEN: * break # TODO: log warning, tally overflow? 
* sentence_len = i # <<<<<<<<<<<<<< @@ -7971,21 +7715,21 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( */ __pyx_v_sentence_len = __pyx_v_i; - /* "gensim/models/word2vec_inner.pyx":895 + /* "gensim/models/word2vec_inner.pyx":830 * * # release GIL & train on the sentence - * work[0] = 0.0 # <<<<<<<<<<<<<< + * c.work[0] = 0.0 # <<<<<<<<<<<<<< * with nogil: * for i in range(sentence_len): */ - (__pyx_v_work[0]) = 0.0; + (__pyx_v_c.work[0]) = 0.0; - /* "gensim/models/word2vec_inner.pyx":896 + /* "gensim/models/word2vec_inner.pyx":831 * # release GIL & train on the sentence - * work[0] = 0.0 + * c.work[0] = 0.0 * with nogil: # <<<<<<<<<<<<<< * for i in range(sentence_len): - * if codelens[i] == 0: + * if c.codelens[i] == 0: */ { #ifdef WITH_THREAD @@ -7995,11 +7739,11 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( #endif /*try:*/ { - /* "gensim/models/word2vec_inner.pyx":897 - * work[0] = 0.0 + /* "gensim/models/word2vec_inner.pyx":832 + * c.work[0] = 0.0 * with nogil: * for i in range(sentence_len): # <<<<<<<<<<<<<< - * if codelens[i] == 0: + * if c.codelens[i] == 0: * continue */ __pyx_t_2 = __pyx_v_sentence_len; @@ -8007,126 +7751,126 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( for (__pyx_t_12 = 0; __pyx_t_12 < __pyx_t_11; __pyx_t_12+=1) { __pyx_v_i = __pyx_t_12; - /* "gensim/models/word2vec_inner.pyx":898 + /* "gensim/models/word2vec_inner.pyx":833 * with nogil: * for i in range(sentence_len): - * if codelens[i] == 0: # <<<<<<<<<<<<<< + * if c.codelens[i] == 0: # <<<<<<<<<<<<<< * continue - * j = i - window + * j = i - c.window */ - __pyx_t_8 = (((__pyx_v_codelens[__pyx_v_i]) == 0) != 0); + __pyx_t_8 = (((__pyx_v_c.codelens[__pyx_v_i]) == 0) != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":899 + /* "gensim/models/word2vec_inner.pyx":834 * for i in range(sentence_len): - * if codelens[i] == 0: + * if c.codelens[i] == 0: * continue # <<<<<<<<<<<<<< - * j = i - window + * j = i - c.window * if j < 0: */ goto __pyx_L10_continue; - /* "gensim/models/word2vec_inner.pyx":898 + /* "gensim/models/word2vec_inner.pyx":833 * with nogil: * for i in range(sentence_len): - * if codelens[i] == 0: # <<<<<<<<<<<<<< + * if c.codelens[i] == 0: # <<<<<<<<<<<<<< * continue - * j = i - window + * j = i - c.window */ } - /* "gensim/models/word2vec_inner.pyx":900 - * if codelens[i] == 0: + /* "gensim/models/word2vec_inner.pyx":835 + * if c.codelens[i] == 0: * continue - * j = i - window # <<<<<<<<<<<<<< + * j = i - c.window # <<<<<<<<<<<<<< * if j < 0: * j = 0 */ - __pyx_v_j = (__pyx_v_i - __pyx_v_window); + __pyx_v_j = (__pyx_v_i - __pyx_v_c.window); - /* "gensim/models/word2vec_inner.pyx":901 + /* "gensim/models/word2vec_inner.pyx":836 * continue - * j = i - window + * j = i - c.window * if j < 0: # <<<<<<<<<<<<<< * j = 0 - * k = i + window + 1 + * k = i + c.window + 1 */ __pyx_t_8 = ((__pyx_v_j < 0) != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":902 - * j = i - window + /* "gensim/models/word2vec_inner.pyx":837 + * j = i - c.window * if j < 0: * j = 0 # <<<<<<<<<<<<<< - * k = i + window + 1 + * k = i + c.window + 1 * if k > sentence_len: */ __pyx_v_j = 0; - /* "gensim/models/word2vec_inner.pyx":901 + /* "gensim/models/word2vec_inner.pyx":836 * continue - * j = i - window + * j = i - c.window * if j < 0: # <<<<<<<<<<<<<< * j = 0 - * k = i + window + 1 + * k = i + c.window + 1 */ } - /* "gensim/models/word2vec_inner.pyx":903 + /* "gensim/models/word2vec_inner.pyx":838 * 
if j < 0: * j = 0 - * k = i + window + 1 # <<<<<<<<<<<<<< + * k = i + c.window + 1 # <<<<<<<<<<<<<< * if k > sentence_len: * k = sentence_len */ - __pyx_v_k = ((__pyx_v_i + __pyx_v_window) + 1); + __pyx_v_k = ((__pyx_v_i + __pyx_v_c.window) + 1); - /* "gensim/models/word2vec_inner.pyx":904 + /* "gensim/models/word2vec_inner.pyx":839 * j = 0 - * k = i + window + 1 + * k = i + c.window + 1 * if k > sentence_len: # <<<<<<<<<<<<<< * k = sentence_len - * score_pair_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, work, i, j, k, cbow_mean) + * score_pair_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.work, i, j, k, c.cbow_mean) */ __pyx_t_8 = ((__pyx_v_k > __pyx_v_sentence_len) != 0); if (__pyx_t_8) { - /* "gensim/models/word2vec_inner.pyx":905 - * k = i + window + 1 + /* "gensim/models/word2vec_inner.pyx":840 + * k = i + c.window + 1 * if k > sentence_len: * k = sentence_len # <<<<<<<<<<<<<< - * score_pair_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, work, i, j, k, cbow_mean) + * score_pair_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.work, i, j, k, c.cbow_mean) * */ __pyx_v_k = __pyx_v_sentence_len; - /* "gensim/models/word2vec_inner.pyx":904 + /* "gensim/models/word2vec_inner.pyx":839 * j = 0 - * k = i + window + 1 + * k = i + c.window + 1 * if k > sentence_len: # <<<<<<<<<<<<<< * k = sentence_len - * score_pair_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, work, i, j, k, cbow_mean) + * score_pair_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.work, i, j, k, c.cbow_mean) */ } - /* "gensim/models/word2vec_inner.pyx":906 + /* "gensim/models/word2vec_inner.pyx":841 * if k > sentence_len: * k = sentence_len - * score_pair_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, work, i, j, k, cbow_mean) # <<<<<<<<<<<<<< + * score_pair_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.work, i, j, k, c.cbow_mean) # <<<<<<<<<<<<<< * - * return work[0] + * return c.work[0] */ - __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs((__pyx_v_points[__pyx_v_i]), (__pyx_v_codes[__pyx_v_i]), __pyx_v_codelens, __pyx_v_neu1, __pyx_v_syn0, __pyx_v_syn1, __pyx_v_size, __pyx_v_indexes, __pyx_v_work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_cbow_mean); + __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs((__pyx_v_c.points[__pyx_v_i]), (__pyx_v_c.codes[__pyx_v_i]), __pyx_v_c.codelens, __pyx_v_c.neu1, __pyx_v_c.syn0, __pyx_v_c.syn1, __pyx_v_c.size, __pyx_v_c.indexes, __pyx_v_c.work, __pyx_v_i, __pyx_v_j, __pyx_v_k, __pyx_v_c.cbow_mean); __pyx_L10_continue:; } } - /* "gensim/models/word2vec_inner.pyx":896 + /* "gensim/models/word2vec_inner.pyx":831 * # release GIL & train on the sentence - * work[0] = 0.0 + * c.work[0] = 0.0 * with nogil: # <<<<<<<<<<<<<< * for i in range(sentence_len): - * if codelens[i] == 0: + * if c.codelens[i] == 0: */ /*finally:*/ { /*normal exit:*/{ @@ -8140,21 +7884,21 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( } } - /* "gensim/models/word2vec_inner.pyx":908 - * score_pair_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, work, i, j, k, cbow_mean) + /* "gensim/models/word2vec_inner.pyx":843 + * score_pair_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.work, i, j, k, c.cbow_mean) * - * return work[0] # 
<<<<<<<<<<<<<< + * return c.work[0] # <<<<<<<<<<<<<< * * cdef void score_pair_cbow_hs( */ __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyFloat_FromDouble((__pyx_v_work[0])); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 908, __pyx_L1_error) + __pyx_t_1 = PyFloat_FromDouble((__pyx_v_c.work[0])); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 843, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":827 + /* "gensim/models/word2vec_inner.pyx":772 * work[0] += f * * def score_sentence_cbow(model, sentence, _work, _neu1): # <<<<<<<<<<<<<< @@ -8178,8 +7922,8 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_6score_sentence_cbow( return __pyx_r; } -/* "gensim/models/word2vec_inner.pyx":910 - * return work[0] +/* "gensim/models/word2vec_inner.pyx":845 + * return c.work[0] * * cdef void score_pair_cbow_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], @@ -8202,7 +7946,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ PY_LONG_LONG __pyx_t_6; long __pyx_t_7; - /* "gensim/models/word2vec_inner.pyx":921 + /* "gensim/models/word2vec_inner.pyx":856 * cdef int m * * memset(neu1, 0, size * cython.sizeof(REAL_t)) # <<<<<<<<<<<<<< @@ -8211,7 +7955,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ (void)(memset(__pyx_v_neu1, 0, (__pyx_v_size * (sizeof(__pyx_t_6gensim_6models_14word2vec_inner_REAL_t))))); - /* "gensim/models/word2vec_inner.pyx":922 + /* "gensim/models/word2vec_inner.pyx":857 * * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 # <<<<<<<<<<<<<< @@ -8220,7 +7964,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ __pyx_v_count = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.0); - /* "gensim/models/word2vec_inner.pyx":923 + /* "gensim/models/word2vec_inner.pyx":858 * memset(neu1, 0, size * cython.sizeof(REAL_t)) * count = 0.0 * for m in range(j, k): # <<<<<<<<<<<<<< @@ -8232,7 +7976,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ for (__pyx_t_3 = __pyx_v_j; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_m = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":924 + /* "gensim/models/word2vec_inner.pyx":859 * count = 0.0 * for m in range(j, k): * if m == i or codelens[m] == 0: # <<<<<<<<<<<<<< @@ -8250,7 +7994,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ __pyx_L6_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":925 + /* "gensim/models/word2vec_inner.pyx":860 * for m in range(j, k): * if m == i or codelens[m] == 0: * continue # <<<<<<<<<<<<<< @@ -8259,7 +8003,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ goto __pyx_L3_continue; - /* "gensim/models/word2vec_inner.pyx":924 + /* "gensim/models/word2vec_inner.pyx":859 * count = 0.0 * for m in range(j, k): * if m == i or codelens[m] == 0: # <<<<<<<<<<<<<< @@ -8268,7 +8012,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ } - /* "gensim/models/word2vec_inner.pyx":927 + /* "gensim/models/word2vec_inner.pyx":862 * continue * else: * count += ONEF # <<<<<<<<<<<<<< @@ -8278,7 +8022,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ /*else*/ { __pyx_v_count = (__pyx_v_count + __pyx_v_6gensim_6models_14word2vec_inner_ONEF); - /* "gensim/models/word2vec_inner.pyx":928 + /* 
"gensim/models/word2vec_inner.pyx":863 * else: * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) # <<<<<<<<<<<<<< @@ -8290,7 +8034,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ __pyx_L3_continue:; } - /* "gensim/models/word2vec_inner.pyx":929 + /* "gensim/models/word2vec_inner.pyx":864 * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< @@ -8300,7 +8044,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ __pyx_t_4 = ((__pyx_v_count > ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0.5)) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":930 + /* "gensim/models/word2vec_inner.pyx":865 * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): * inv_count = ONEF/count # <<<<<<<<<<<<<< @@ -8309,7 +8053,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ __pyx_v_inv_count = (__pyx_v_6gensim_6models_14word2vec_inner_ONEF / __pyx_v_count); - /* "gensim/models/word2vec_inner.pyx":929 + /* "gensim/models/word2vec_inner.pyx":864 * count += ONEF * our_saxpy(&size, &ONEF, &syn0[indexes[m] * size], &ONE, neu1, &ONE) * if count > (0.5): # <<<<<<<<<<<<<< @@ -8318,7 +8062,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ } - /* "gensim/models/word2vec_inner.pyx":931 + /* "gensim/models/word2vec_inner.pyx":866 * if count > (0.5): * inv_count = ONEF/count * if cbow_mean: # <<<<<<<<<<<<<< @@ -8328,7 +8072,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ __pyx_t_4 = (__pyx_v_cbow_mean != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":932 + /* "gensim/models/word2vec_inner.pyx":867 * inv_count = ONEF/count * if cbow_mean: * sscal(&size, &inv_count, neu1, &ONE) # <<<<<<<<<<<<<< @@ -8337,7 +8081,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ __pyx_v_6gensim_6models_14word2vec_inner_sscal((&__pyx_v_size), (&__pyx_v_inv_count), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":931 + /* "gensim/models/word2vec_inner.pyx":866 * if count > (0.5): * inv_count = ONEF/count * if cbow_mean: # <<<<<<<<<<<<<< @@ -8346,7 +8090,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ } - /* "gensim/models/word2vec_inner.pyx":934 + /* "gensim/models/word2vec_inner.pyx":869 * sscal(&size, &inv_count, neu1, &ONE) * * for b in range(codelens[i]): # <<<<<<<<<<<<<< @@ -8358,7 +8102,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_2; __pyx_t_6+=1) { __pyx_v_b = __pyx_t_6; - /* "gensim/models/word2vec_inner.pyx":935 + /* "gensim/models/word2vec_inner.pyx":870 * * for b in range(codelens[i]): * row2 = word_point[b] * size # <<<<<<<<<<<<<< @@ -8367,7 +8111,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ __pyx_v_row2 = ((__pyx_v_word_point[__pyx_v_b]) * __pyx_v_size); - /* "gensim/models/word2vec_inner.pyx":936 + /* "gensim/models/word2vec_inner.pyx":871 * for b in range(codelens[i]): * row2 = word_point[b] * size * f = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) # <<<<<<<<<<<<<< @@ -8376,7 +8120,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ __pyx_v_f = 
__pyx_v_6gensim_6models_14word2vec_inner_our_dot((&__pyx_v_size), __pyx_v_neu1, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), (&(__pyx_v_syn1[__pyx_v_row2])), (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":937 + /* "gensim/models/word2vec_inner.pyx":872 * row2 = word_point[b] * size * f = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 # <<<<<<<<<<<<<< @@ -8385,7 +8129,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ __pyx_v_sgn = __Pyx_pow_long(-1L, ((long)(__pyx_v_word_code[__pyx_v_b]))); - /* "gensim/models/word2vec_inner.pyx":938 + /* "gensim/models/word2vec_inner.pyx":873 * f = our_dot(&size, neu1, &ONE, &syn1[row2], &ONE) * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * f *= sgn # <<<<<<<<<<<<<< @@ -8394,7 +8138,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ __pyx_v_f = (__pyx_v_f * __pyx_v_sgn); - /* "gensim/models/word2vec_inner.pyx":939 + /* "gensim/models/word2vec_inner.pyx":874 * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * f *= sgn * if f <= -MAX_EXP or f >= MAX_EXP: # <<<<<<<<<<<<<< @@ -8412,7 +8156,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ __pyx_L13_bool_binop_done:; if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":940 + /* "gensim/models/word2vec_inner.pyx":875 * f *= sgn * if f <= -MAX_EXP or f >= MAX_EXP: * continue # <<<<<<<<<<<<<< @@ -8421,7 +8165,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ goto __pyx_L10_continue; - /* "gensim/models/word2vec_inner.pyx":939 + /* "gensim/models/word2vec_inner.pyx":874 * sgn = (-1)**word_code[b] # ch function: 0-> 1, 1 -> -1 * f *= sgn * if f <= -MAX_EXP or f >= MAX_EXP: # <<<<<<<<<<<<<< @@ -8430,7 +8174,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ } - /* "gensim/models/word2vec_inner.pyx":941 + /* "gensim/models/word2vec_inner.pyx":876 * if f <= -MAX_EXP or f >= MAX_EXP: * continue * f = LOG_TABLE[((f + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] # <<<<<<<<<<<<<< @@ -8439,7 +8183,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ */ __pyx_v_f = (__pyx_v_6gensim_6models_14word2vec_inner_LOG_TABLE[((int)((__pyx_v_f + 6.0) * 83.0))]); - /* "gensim/models/word2vec_inner.pyx":942 + /* "gensim/models/word2vec_inner.pyx":877 * continue * f = LOG_TABLE[((f + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))] * work[0] += f # <<<<<<<<<<<<<< @@ -8451,8 +8195,8 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ __pyx_L10_continue:; } - /* "gensim/models/word2vec_inner.pyx":910 - * return work[0] + /* "gensim/models/word2vec_inner.pyx":845 + * return c.work[0] * * cdef void score_pair_cbow_hs( # <<<<<<<<<<<<<< * const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], @@ -8462,7 +8206,7 @@ static void __pyx_f_6gensim_6models_14word2vec_inner_score_pair_cbow_hs(__pyx_t_ /* function exit code */ } -/* "gensim/models/word2vec_inner.pyx":945 +/* "gensim/models/word2vec_inner.pyx":880 * * * def init(): # <<<<<<<<<<<<<< @@ -8501,7 +8245,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P int __pyx_t_4; __Pyx_RefNannySetupContext("init", 0); - /* "gensim/models/word2vec_inner.pyx":961 + /* "gensim/models/word2vec_inner.pyx":896 * * cdef int i * cdef float *x = [10.0] # <<<<<<<<<<<<<< @@ 
-8511,7 +8255,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P __pyx_t_1[0] = ((float)10.0); __pyx_v_x = __pyx_t_1; - /* "gensim/models/word2vec_inner.pyx":962 + /* "gensim/models/word2vec_inner.pyx":897 * cdef int i * cdef float *x = [10.0] * cdef float *y = [0.01] # <<<<<<<<<<<<<< @@ -8521,7 +8265,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P __pyx_t_2[0] = ((float)0.01); __pyx_v_y = __pyx_t_2; - /* "gensim/models/word2vec_inner.pyx":963 + /* "gensim/models/word2vec_inner.pyx":898 * cdef float *x = [10.0] * cdef float *y = [0.01] * cdef float expected = 0.1 # <<<<<<<<<<<<<< @@ -8530,7 +8274,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_expected = ((float)0.1); - /* "gensim/models/word2vec_inner.pyx":964 + /* "gensim/models/word2vec_inner.pyx":899 * cdef float *y = [0.01] * cdef float expected = 0.1 * cdef int size = 1 # <<<<<<<<<<<<<< @@ -8539,7 +8283,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_size = 1; - /* "gensim/models/word2vec_inner.pyx":969 + /* "gensim/models/word2vec_inner.pyx":904 * * # build the sigmoid table * for i in range(EXP_TABLE_SIZE): # <<<<<<<<<<<<<< @@ -8549,7 +8293,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P for (__pyx_t_3 = 0; __pyx_t_3 < 0x3E8; __pyx_t_3+=1) { __pyx_v_i = __pyx_t_3; - /* "gensim/models/word2vec_inner.pyx":970 + /* "gensim/models/word2vec_inner.pyx":905 * # build the sigmoid table * for i in range(EXP_TABLE_SIZE): * EXP_TABLE[i] = exp((i / EXP_TABLE_SIZE * 2 - 1) * MAX_EXP) # <<<<<<<<<<<<<< @@ -8558,7 +8302,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]) = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)exp(((((__pyx_v_i / ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)0x3E8)) * 2.0) - 1.0) * 6.0))); - /* "gensim/models/word2vec_inner.pyx":971 + /* "gensim/models/word2vec_inner.pyx":906 * for i in range(EXP_TABLE_SIZE): * EXP_TABLE[i] = exp((i / EXP_TABLE_SIZE * 2 - 1) * MAX_EXP) * EXP_TABLE[i] = (EXP_TABLE[i] / (EXP_TABLE[i] + 1)) # <<<<<<<<<<<<<< @@ -8567,7 +8311,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ (__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]) = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)((__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]) / ((__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]) + 1.0))); - /* "gensim/models/word2vec_inner.pyx":972 + /* "gensim/models/word2vec_inner.pyx":907 * EXP_TABLE[i] = exp((i / EXP_TABLE_SIZE * 2 - 1) * MAX_EXP) * EXP_TABLE[i] = (EXP_TABLE[i] / (EXP_TABLE[i] + 1)) * LOG_TABLE[i] = log( EXP_TABLE[i] ) # <<<<<<<<<<<<<< @@ -8577,7 +8321,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P (__pyx_v_6gensim_6models_14word2vec_inner_LOG_TABLE[__pyx_v_i]) = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)log((__pyx_v_6gensim_6models_14word2vec_inner_EXP_TABLE[__pyx_v_i]))); } - /* "gensim/models/word2vec_inner.pyx":975 + /* "gensim/models/word2vec_inner.pyx":910 * * # check whether sdot returns double or float * d_res = dsdot(&size, x, &ONE, y, &ONE) # <<<<<<<<<<<<<< @@ -8586,7 +8330,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_d_res = __pyx_v_6gensim_6models_14word2vec_inner_dsdot((&__pyx_v_size), 
__pyx_v_x, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE), __pyx_v_y, (&__pyx_v_6gensim_6models_14word2vec_inner_ONE)); - /* "gensim/models/word2vec_inner.pyx":976 + /* "gensim/models/word2vec_inner.pyx":911 * # check whether sdot returns double or float * d_res = dsdot(&size, x, &ONE, y, &ONE) * p_res = &d_res # <<<<<<<<<<<<<< @@ -8595,7 +8339,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_p_res = ((float *)(&__pyx_v_d_res)); - /* "gensim/models/word2vec_inner.pyx":977 + /* "gensim/models/word2vec_inner.pyx":912 * d_res = dsdot(&size, x, &ONE, y, &ONE) * p_res = &d_res * if abs(d_res - expected) < 0.0001: # <<<<<<<<<<<<<< @@ -8605,7 +8349,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P __pyx_t_4 = ((fabs((__pyx_v_d_res - __pyx_v_expected)) < 0.0001) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":978 + /* "gensim/models/word2vec_inner.pyx":913 * p_res = &d_res * if abs(d_res - expected) < 0.0001: * our_dot = our_dot_double # <<<<<<<<<<<<<< @@ -8614,7 +8358,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_dot = __pyx_f_6gensim_6models_14word2vec_inner_our_dot_double; - /* "gensim/models/word2vec_inner.pyx":979 + /* "gensim/models/word2vec_inner.pyx":914 * if abs(d_res - expected) < 0.0001: * our_dot = our_dot_double * our_saxpy = saxpy # <<<<<<<<<<<<<< @@ -8623,7 +8367,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy = __pyx_v_6gensim_6models_14word2vec_inner_saxpy; - /* "gensim/models/word2vec_inner.pyx":980 + /* "gensim/models/word2vec_inner.pyx":915 * our_dot = our_dot_double * our_saxpy = saxpy * return 0 # double # <<<<<<<<<<<<<< @@ -8635,7 +8379,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P __pyx_r = __pyx_int_0; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":977 + /* "gensim/models/word2vec_inner.pyx":912 * d_res = dsdot(&size, x, &ONE, y, &ONE) * p_res = &d_res * if abs(d_res - expected) < 0.0001: # <<<<<<<<<<<<<< @@ -8644,7 +8388,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ } - /* "gensim/models/word2vec_inner.pyx":981 + /* "gensim/models/word2vec_inner.pyx":916 * our_saxpy = saxpy * return 0 # double * elif abs(p_res[0] - expected) < 0.0001: # <<<<<<<<<<<<<< @@ -8654,7 +8398,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P __pyx_t_4 = ((fabsf(((__pyx_v_p_res[0]) - __pyx_v_expected)) < 0.0001) != 0); if (__pyx_t_4) { - /* "gensim/models/word2vec_inner.pyx":982 + /* "gensim/models/word2vec_inner.pyx":917 * return 0 # double * elif abs(p_res[0] - expected) < 0.0001: * our_dot = our_dot_float # <<<<<<<<<<<<<< @@ -8663,7 +8407,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_dot = __pyx_f_6gensim_6models_14word2vec_inner_our_dot_float; - /* "gensim/models/word2vec_inner.pyx":983 + /* "gensim/models/word2vec_inner.pyx":918 * elif abs(p_res[0] - expected) < 0.0001: * our_dot = our_dot_float * our_saxpy = saxpy # <<<<<<<<<<<<<< @@ -8672,7 +8416,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy = __pyx_v_6gensim_6models_14word2vec_inner_saxpy; - /* "gensim/models/word2vec_inner.pyx":984 + /* 
"gensim/models/word2vec_inner.pyx":919 * our_dot = our_dot_float * our_saxpy = saxpy * return 1 # float # <<<<<<<<<<<<<< @@ -8684,7 +8428,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P __pyx_r = __pyx_int_1; goto __pyx_L0; - /* "gensim/models/word2vec_inner.pyx":981 + /* "gensim/models/word2vec_inner.pyx":916 * our_saxpy = saxpy * return 0 # double * elif abs(p_res[0] - expected) < 0.0001: # <<<<<<<<<<<<<< @@ -8693,7 +8437,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ } - /* "gensim/models/word2vec_inner.pyx":988 + /* "gensim/models/word2vec_inner.pyx":923 * # neither => use cython loops, no BLAS * # actually, the BLAS is so messed up we'll probably have segfaulted above and never even reach here * our_dot = our_dot_noblas # <<<<<<<<<<<<<< @@ -8703,7 +8447,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P /*else*/ { __pyx_v_6gensim_6models_14word2vec_inner_our_dot = __pyx_f_6gensim_6models_14word2vec_inner_our_dot_noblas; - /* "gensim/models/word2vec_inner.pyx":989 + /* "gensim/models/word2vec_inner.pyx":924 * # actually, the BLAS is so messed up we'll probably have segfaulted above and never even reach here * our_dot = our_dot_noblas * our_saxpy = our_saxpy_noblas # <<<<<<<<<<<<<< @@ -8712,7 +8456,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P */ __pyx_v_6gensim_6models_14word2vec_inner_our_saxpy = __pyx_f_6gensim_6models_14word2vec_inner_our_saxpy_noblas; - /* "gensim/models/word2vec_inner.pyx":990 + /* "gensim/models/word2vec_inner.pyx":925 * our_dot = our_dot_noblas * our_saxpy = our_saxpy_noblas * return 2 # <<<<<<<<<<<<<< @@ -8725,7 +8469,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P goto __pyx_L0; } - /* "gensim/models/word2vec_inner.pyx":945 + /* "gensim/models/word2vec_inner.pyx":880 * * * def init(): # <<<<<<<<<<<<<< @@ -8740,7 +8484,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. 
* def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -8788,7 +8532,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -8797,7 +8541,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -8806,7 +8550,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -8815,7 +8559,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8829,7 +8573,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -8840,7 +8584,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8849,20 +8593,20 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 229, 
__pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 229, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 229, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8871,7 +8615,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8885,7 +8629,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -8896,7 +8640,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8905,20 +8649,20 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< * * info.buf = PyArray_DATA(self) */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 233, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 233, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 233, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8927,7 +8671,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -8936,7 +8680,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim = ndim # <<<<<<<<<<<<<< @@ -8945,7 +8689,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8955,7 +8699,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -8964,7 +8708,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. 
* info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -8973,7 +8717,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -8985,7 +8729,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -8994,7 +8738,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -9004,7 +8748,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -9014,7 +8758,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -9024,7 +8768,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -9035,7 +8779,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -9044,7 +8788,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - 
/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -9053,7 +8797,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -9062,7 +8806,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -9071,7 +8815,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -9083,7 +8827,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -9096,7 +8840,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -9106,7 +8850,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -9116,7 +8860,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9136,7 +8880,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -9153,7 +8897,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9162,20 +8906,20 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 263, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 263, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 263, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9184,7 +8928,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -9196,7 +8940,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< @@ -9207,7 +8951,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -9218,7 +8962,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject 
*__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -9229,7 +8973,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -9240,7 +8984,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -9251,7 +8995,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -9262,7 +9006,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -9273,7 +9017,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -9284,7 +9028,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -9295,7 +9039,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -9306,7 +9050,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char 
*)"f"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< @@ -9317,7 +9061,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -9328,7 +9072,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -9339,7 +9083,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -9350,7 +9094,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -9361,7 +9105,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -9373,7 +9117,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -9394,7 +9138,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -9403,7 +9147,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -9413,7 +9157,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -9422,7 +9166,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -9432,7 +9176,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -9441,7 +9185,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -9450,7 +9194,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -9460,7 +9204,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -9470,7 +9214,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and 
__releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -9502,7 +9246,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -9526,7 +9270,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -9536,7 +9280,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -9545,7 +9289,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -9554,7 +9298,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -9564,7 +9308,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -9573,7 +9317,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->strides); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -9582,7 +9326,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -9594,7 +9338,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -9608,7 +9352,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -9622,7 +9366,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -9641,7 +9385,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -9655,7 +9399,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -9669,7 +9413,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -9688,7 +9432,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -9702,7 +9446,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -9716,7 +9460,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -9735,7 +9479,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -9749,7 +9493,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -9763,7 +9507,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -9782,7 +9526,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -9796,7 +9540,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -9810,7 +9554,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -9829,7 +9573,7 @@ static CYTHON_INLINE PyObject 
*__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -9843,7 +9587,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -9853,7 +9597,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -9865,7 +9609,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -9874,7 +9618,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -9888,7 +9632,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -9903,7 +9647,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -9932,7 +9676,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -9941,7 +9685,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ 
__pyx_v_endian_detector = 1; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -9950,7 +9694,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -9973,7 +9717,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -9990,7 +9734,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -10025,7 +9769,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -10042,20 +9786,20 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< * * if ((child.byteorder == c'>' and little_endian) or */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 810, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 810, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 810, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -10064,7 +9808,7 @@ static CYTHON_INLINE char 
*__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -10084,7 +9828,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -10101,7 +9845,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -10110,20 +9854,20 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< * # One could encode it in the format string and have Cython * # complain instead, BUT: < and > in format strings also imply */ - __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 814, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 814, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 814, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -10132,7 +9876,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -10148,7 +9892,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 
120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -10157,7 +9901,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -10166,7 +9910,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -10177,7 +9921,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -10187,7 +9931,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -10197,7 +9941,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -10209,7 +9953,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -10219,20 +9963,20 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< * * # Until ticket #99 is fixed, use integers to avoid warnings */ - __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 834, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 834, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 
0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(1, 834, __pyx_L1_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -10241,7 +9985,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -10259,7 +10003,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -10277,7 +10021,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -10295,7 +10039,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -10313,7 +10057,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -10331,7 +10075,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -10349,7 +10093,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -10367,7 +10111,7 @@ static CYTHON_INLINE char 
*__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -10385,7 +10129,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -10403,7 +10147,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -10421,7 +10165,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -10439,7 +10183,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -10457,7 +10201,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -10475,7 +10219,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< @@ -10495,7 +10239,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif 
t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -10515,7 +10259,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -10535,7 +10279,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -10553,7 +10297,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -10572,7 +10316,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -10581,7 +10325,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -10591,7 +10335,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -10604,7 +10348,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -10614,7 +10358,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = 
_util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -10624,7 +10368,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -10649,7 +10393,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -10664,7 +10408,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -10675,7 +10419,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -10684,7 +10428,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -10694,7 +10438,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -10704,7 +10448,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! * baseptr = base # <<<<<<<<<<<<<< @@ -10715,7 +10459,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -10724,7 +10468,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -10733,7 +10477,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -10745,7 +10489,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -10759,7 +10503,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -10769,7 +10513,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -10780,7 +10524,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -10789,7 +10533,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -10803,7 +10547,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -10818,7 +10562,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -10839,7 +10583,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10855,7 +10599,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -10864,7 +10608,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10878,7 +10622,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -10893,14 +10637,14 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< * * cdef inline int import_umath() except -1: */ - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1000, __pyx_L5_except_error) + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1000, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_Raise(__pyx_t_8, 0, 0, 0); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; @@ -10909,7 +10653,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. 
* cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10924,7 +10668,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -10947,7 +10691,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -10968,7 +10712,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -10984,7 +10728,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -10993,7 +10737,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -11007,7 +10751,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -11022,14 +10766,14 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< * * cdef inline int import_ufunc() except -1: */ - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1006, __pyx_L5_except_error) + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1006, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_Raise(__pyx_t_8, 0, 0, 0); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; @@ -11038,7 +10782,7 @@ static CYTHON_INLINE int 
__pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -11053,7 +10797,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -11076,7 +10820,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -11097,7 +10841,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -11113,7 +10857,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -11122,7 +10866,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -11136,7 +10880,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -11150,12 +10894,12 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< */ - __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__13, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1012, __pyx_L5_except_error) + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_8)) 
__PYX_ERR(1, 1012, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_Raise(__pyx_t_8, 0, 0, 0); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; @@ -11164,7 +10908,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -11179,7 +10923,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -11249,19 +10993,15 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_REAL, __pyx_k_REAL, sizeof(__pyx_k_REAL), 0, 0, 1, 1}, {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1}, - {&__pyx_n_s__14, __pyx_k__14, sizeof(__pyx_k__14), 0, 0, 1, 1}, + {&__pyx_n_s__12, __pyx_k__12, sizeof(__pyx_k__12), 0, 0, 1, 1}, {&__pyx_n_s_alpha, __pyx_k_alpha, sizeof(__pyx_k_alpha), 0, 0, 1, 1}, - {&__pyx_n_s_alpha_2, __pyx_k_alpha_2, sizeof(__pyx_k_alpha_2), 0, 0, 1, 1}, + {&__pyx_n_s_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 0, 1, 1}, {&__pyx_n_s_cbow_mean, __pyx_k_cbow_mean, sizeof(__pyx_k_cbow_mean), 0, 0, 1, 1}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 0, 1, 1}, - {&__pyx_n_s_codelens, __pyx_k_codelens, sizeof(__pyx_k_codelens), 0, 0, 1, 1}, - {&__pyx_n_s_codes, __pyx_k_codes, sizeof(__pyx_k_codes), 0, 0, 1, 1}, {&__pyx_n_s_compute_loss, __pyx_k_compute_loss, sizeof(__pyx_k_compute_loss), 0, 0, 1, 1}, - {&__pyx_n_s_compute_loss_2, __pyx_k_compute_loss_2, sizeof(__pyx_k_compute_loss_2), 0, 0, 1, 1}, {&__pyx_n_s_cpointer, __pyx_k_cpointer, sizeof(__pyx_k_cpointer), 0, 0, 1, 1}, {&__pyx_n_s_cum_table, __pyx_k_cum_table, sizeof(__pyx_k_cum_table), 0, 0, 1, 1}, - {&__pyx_n_s_cum_table_len, __pyx_k_cum_table_len, sizeof(__pyx_k_cum_table_len), 0, 0, 1, 1}, {&__pyx_n_s_d_res, __pyx_k_d_res, sizeof(__pyx_k_d_res), 0, 0, 1, 1}, {&__pyx_n_s_dsdot, __pyx_k_dsdot, sizeof(__pyx_k_dsdot), 0, 0, 1, 1}, {&__pyx_n_s_effective_sentences, __pyx_k_effective_sentences, sizeof(__pyx_k_effective_sentences), 0, 0, 1, 1}, @@ -11278,7 +11018,6 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_idx_start, __pyx_k_idx_start, sizeof(__pyx_k_idx_start), 0, 0, 1, 1}, {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1}, - {&__pyx_n_s_indexes, __pyx_k_indexes, sizeof(__pyx_k_indexes), 0, 0, 1, 1}, {&__pyx_n_s_init, __pyx_k_init, sizeof(__pyx_k_init), 0, 0, 1, 1}, {&__pyx_n_s_item, __pyx_k_item, sizeof(__pyx_k_item), 0, 0, 1, 1}, {&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1}, @@ -11289,8 +11028,6 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0}, {&__pyx_n_s_negative, 
__pyx_k_negative, sizeof(__pyx_k_negative), 0, 0, 1, 1}, {&__pyx_n_s_neu1, __pyx_k_neu1, sizeof(__pyx_k_neu1), 0, 0, 1, 1}, - {&__pyx_n_s_neu1_2, __pyx_k_neu1_2, sizeof(__pyx_k_neu1_2), 0, 0, 1, 1}, - {&__pyx_n_s_next_random, __pyx_k_next_random, sizeof(__pyx_k_next_random), 0, 0, 1, 1}, {&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1}, {&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1}, {&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0}, @@ -11299,15 +11036,12 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_our_saxpy, __pyx_k_our_saxpy, sizeof(__pyx_k_our_saxpy), 0, 0, 1, 1}, {&__pyx_n_s_p_res, __pyx_k_p_res, sizeof(__pyx_k_p_res), 0, 0, 1, 1}, {&__pyx_n_s_point, __pyx_k_point, sizeof(__pyx_k_point), 0, 0, 1, 1}, - {&__pyx_n_s_points, __pyx_k_points, sizeof(__pyx_k_points), 0, 0, 1, 1}, {&__pyx_n_s_pyx_capi, __pyx_k_pyx_capi, sizeof(__pyx_k_pyx_capi), 0, 0, 1, 1}, {&__pyx_n_s_randint, __pyx_k_randint, sizeof(__pyx_k_randint), 0, 0, 1, 1}, {&__pyx_n_s_random, __pyx_k_random, sizeof(__pyx_k_random), 0, 0, 1, 1}, {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, - {&__pyx_n_s_reduced_windows, __pyx_k_reduced_windows, sizeof(__pyx_k_reduced_windows), 0, 0, 1, 1}, {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, {&__pyx_n_s_running_training_loss, __pyx_k_running_training_loss, sizeof(__pyx_k_running_training_loss), 0, 0, 1, 1}, - {&__pyx_n_s_running_training_loss_2, __pyx_k_running_training_loss_2, sizeof(__pyx_k_running_training_loss_2), 0, 0, 1, 1}, {&__pyx_n_s_sample, __pyx_k_sample, sizeof(__pyx_k_sample), 0, 0, 1, 1}, {&__pyx_n_s_sample_int, __pyx_k_sample_int, sizeof(__pyx_k_sample_int), 0, 0, 1, 1}, {&__pyx_n_s_saxpy, __pyx_k_saxpy, sizeof(__pyx_k_saxpy), 0, 0, 1, 1}, @@ -11319,13 +11053,11 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_sent, __pyx_k_sent, sizeof(__pyx_k_sent), 0, 0, 1, 1}, {&__pyx_n_s_sent_idx, __pyx_k_sent_idx, sizeof(__pyx_k_sent_idx), 0, 0, 1, 1}, {&__pyx_n_s_sentence, __pyx_k_sentence, sizeof(__pyx_k_sentence), 0, 0, 1, 1}, - {&__pyx_n_s_sentence_idx, __pyx_k_sentence_idx, sizeof(__pyx_k_sentence_idx), 0, 0, 1, 1}, {&__pyx_n_s_sentence_len, __pyx_k_sentence_len, sizeof(__pyx_k_sentence_len), 0, 0, 1, 1}, {&__pyx_n_s_sentences, __pyx_k_sentences, sizeof(__pyx_k_sentences), 0, 0, 1, 1}, {&__pyx_n_s_size, __pyx_k_size, sizeof(__pyx_k_size), 0, 0, 1, 1}, {&__pyx_n_s_snrm2, __pyx_k_snrm2, sizeof(__pyx_k_snrm2), 0, 0, 1, 1}, {&__pyx_n_s_sscal, __pyx_k_sscal, sizeof(__pyx_k_sscal), 0, 0, 1, 1}, - {&__pyx_n_s_syn0, __pyx_k_syn0, sizeof(__pyx_k_syn0), 0, 0, 1, 1}, {&__pyx_n_s_syn1, __pyx_k_syn1, sizeof(__pyx_k_syn1), 0, 0, 1, 1}, {&__pyx_n_s_syn1neg, __pyx_k_syn1neg, sizeof(__pyx_k_syn1neg), 0, 0, 1, 1}, {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, @@ -11342,18 +11074,17 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_vocabulary, __pyx_k_vocabulary, sizeof(__pyx_k_vocabulary), 0, 0, 1, 1}, {&__pyx_n_s_window, __pyx_k_window, sizeof(__pyx_k_window), 0, 0, 1, 1}, {&__pyx_n_s_word, __pyx_k_word, sizeof(__pyx_k_word), 0, 0, 1, 1}, - {&__pyx_n_s_word_locks, __pyx_k_word_locks, sizeof(__pyx_k_word_locks), 0, 0, 1, 1}, {&__pyx_n_s_work, __pyx_k_work, sizeof(__pyx_k_work), 0, 0, 1, 1}, - {&__pyx_n_s_work_2, __pyx_k_work_2, sizeof(__pyx_k_work_2), 0, 0, 1, 1}, + {&__pyx_n_s_workers, __pyx_k_workers, sizeof(__pyx_k_workers), 0, 0, 1, 1}, 
{&__pyx_n_s_wv, __pyx_k_wv, sizeof(__pyx_k_wv), 0, 0, 1, 1}, {&__pyx_n_s_x, __pyx_k_x, sizeof(__pyx_k_x), 0, 0, 1, 1}, {&__pyx_n_s_y, __pyx_k_y, sizeof(__pyx_k_y), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; static int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(0, 24, __pyx_L1_error) - __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 117, __pyx_L1_error) - __pyx_builtin_enumerate = __Pyx_GetBuiltinName(__pyx_n_s_enumerate); if (!__pyx_builtin_enumerate) __PYX_ERR(0, 569, __pyx_L1_error) + __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(0, 25, __pyx_L1_error) + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 119, __pyx_L1_error) + __pyx_builtin_enumerate = __Pyx_GetBuiltinName(__pyx_n_s_enumerate); if (!__pyx_builtin_enumerate) __PYX_ERR(0, 564, __pyx_L1_error) __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(1, 229, __pyx_L1_error) __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(1, 810, __pyx_L1_error) return 0; @@ -11365,190 +11096,176 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - /* "gensim/models/word2vec_inner.pyx":533 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< + /* "gensim/models/word2vec_inner.pyx":491 + * c[0].cum_table_len = len(model.vocabulary.cum_table) + * if c[0].negative or c[0].sample: + * c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< * * # convert Python structures to primitive types, so we can release the GIL */ - __pyx_tuple_ = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_tuple_ = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_); - __pyx_tuple__2 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 533, __pyx_L1_error) + __pyx_tuple__2 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 491, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__2); __Pyx_GIVEREF(__pyx_tuple__2); - /* "gensim/models/word2vec_inner.pyx":666 - * cum_table_len = len(model.vocabulary.cum_table) - * if negative or sample: - * next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) # <<<<<<<<<<<<<< - * - * # convert Python structures to primitive types, so we can release the GIL - */ - __pyx_tuple__3 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__3); - __Pyx_GIVEREF(__pyx_tuple__3); - __pyx_tuple__4 = PyTuple_Pack(2, __pyx_int_0, __pyx_int_16777216); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 666, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__4); - __Pyx_GIVEREF(__pyx_tuple__4); - - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) */ - __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 229, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__5); - __Pyx_GIVEREF(__pyx_tuple__5); + __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 229, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< * * info.buf = PyArray_DATA(self) */ - __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 233, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__6); - __Pyx_GIVEREF(__pyx_tuple__6); + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" */ - __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 263, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__7); - __Pyx_GIVEREF(__pyx_tuple__7); + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< * * if ((child.byteorder == c'>' and little_endian) or */ - __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 810, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__8); - __Pyx_GIVEREF(__pyx_tuple__8); + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 810, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' 
and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< * # One could encode it in the format string and have Cython * # complain instead, BUT: < and > in format strings also imply */ - __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 814, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__9); - __Pyx_GIVEREF(__pyx_tuple__9); + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 814, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< * * # Until ticket #99 is fixed, use integers to avoid warnings */ - __pyx_tuple__10 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(1, 834, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__10); - __Pyx_GIVEREF(__pyx_tuple__10); + __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 834, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< * * cdef inline int import_umath() except -1: */ - __pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(1, 1000, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__11); - __Pyx_GIVEREF(__pyx_tuple__11); + __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 1000, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< * * cdef inline int import_ufunc() except -1: */ - __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(1, 1006, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__12); - __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_tuple__10 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(1, 1006, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< */ - __pyx_tuple__13 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(1, 1012, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__13); - __Pyx_GIVEREF(__pyx_tuple__13); + 
__pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(1, 1012, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); - /* "gensim/models/word2vec_inner.pyx":465 + /* "gensim/models/word2vec_inner.pyx":500 * * * def train_batch_sg(model, sentences, alpha, _work, compute_loss): # <<<<<<<<<<<<<< * """Update skip-gram model by training on a batch of sentences. * */ - __pyx_tuple__15 = PyTuple_Pack(40, __pyx_n_s_model, __pyx_n_s_sentences, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_compute_loss, __pyx_n_s_hs, __pyx_n_s_negative, __pyx_n_s_sample, __pyx_n_s_compute_loss_2, __pyx_n_s_running_training_loss_2, __pyx_n_s_syn0, __pyx_n_s_word_locks, __pyx_n_s_work_2, __pyx_n_s_alpha_2, __pyx_n_s_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_reduced_windows, __pyx_n_s_sentence_idx, __pyx_n_s_window, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_syn1neg, __pyx_n_s_cum_table, __pyx_n_s_cum_table_len, __pyx_n_s_next_random, __pyx_n_s_vlookup, __pyx_n_s_sent, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_item); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 465, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__15); - __Pyx_GIVEREF(__pyx_tuple__15); - __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(5, 0, 40, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_train_batch_sg, 465, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(0, 465, __pyx_L1_error) + __pyx_tuple__13 = PyTuple_Pack(19, __pyx_n_s_model, __pyx_n_s_sentences, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_compute_loss, __pyx_n_s_c, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_vlookup, __pyx_n_s_sent, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_item); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(0, 500, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__13); + __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(5, 0, 19, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_train_batch_sg, 500, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(0, 500, __pyx_L1_error) - /* "gensim/models/word2vec_inner.pyx":596 + /* "gensim/models/word2vec_inner.pyx":591 * * * def train_batch_cbow(model, sentences, alpha, _work, _neu1, compute_loss): # <<<<<<<<<<<<<< * """Update CBOW model by training on a batch of sentences. 
* */ - __pyx_tuple__17 = PyTuple_Pack(43, __pyx_n_s_model, __pyx_n_s_sentences, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_compute_loss, __pyx_n_s_hs, __pyx_n_s_negative, __pyx_n_s_sample, __pyx_n_s_cbow_mean, __pyx_n_s_compute_loss_2, __pyx_n_s_running_training_loss_2, __pyx_n_s_syn0, __pyx_n_s_word_locks, __pyx_n_s_work_2, __pyx_n_s_alpha_2, __pyx_n_s_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_reduced_windows, __pyx_n_s_sentence_idx, __pyx_n_s_window, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_syn1neg, __pyx_n_s_cum_table, __pyx_n_s_cum_table_len, __pyx_n_s_next_random, __pyx_n_s_neu1_2, __pyx_n_s_vlookup, __pyx_n_s_sent, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_item); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 596, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__17); - __Pyx_GIVEREF(__pyx_tuple__17); - __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(6, 0, 43, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_train_batch_cbow, 596, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 596, __pyx_L1_error) + __pyx_tuple__15 = PyTuple_Pack(20, __pyx_n_s_model, __pyx_n_s_sentences, __pyx_n_s_alpha, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_compute_loss, __pyx_n_s_c, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_effective_words, __pyx_n_s_effective_sentences, __pyx_n_s_sent_idx, __pyx_n_s_idx_start, __pyx_n_s_idx_end, __pyx_n_s_vlookup, __pyx_n_s_sent, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_item); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 591, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + __Pyx_GIVEREF(__pyx_tuple__15); + __pyx_codeobj__16 = (PyObject*)__Pyx_PyCode_New(6, 0, 20, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__15, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_train_batch_cbow, 591, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__16)) __PYX_ERR(0, 591, __pyx_L1_error) - /* "gensim/models/word2vec_inner.pyx":727 + /* "gensim/models/word2vec_inner.pyx":679 * * * def score_sentence_sg(model, sentence, _work): # <<<<<<<<<<<<<< * """Obtain likelihood score for a single sentence in a fitted skip-gram representation. 
* */ - __pyx_tuple__19 = PyTuple_Pack(20, __pyx_n_s_model, __pyx_n_s_sentence, __pyx_n_s_work, __pyx_n_s_syn0, __pyx_n_s_work_2, __pyx_n_s_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_sentence_len, __pyx_n_s_window, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_result, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_word); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 727, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__19); - __Pyx_GIVEREF(__pyx_tuple__19); - __pyx_codeobj__20 = (PyObject*)__Pyx_PyCode_New(3, 0, 20, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_score_sentence_sg, 727, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__20)) __PYX_ERR(0, 727, __pyx_L1_error) + __pyx_tuple__17 = PyTuple_Pack(12, __pyx_n_s_model, __pyx_n_s_sentence, __pyx_n_s_work, __pyx_n_s_c, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_result, __pyx_n_s_sentence_len, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_word); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 679, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__17); + __Pyx_GIVEREF(__pyx_tuple__17); + __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(3, 0, 12, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_score_sentence_sg, 679, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 679, __pyx_L1_error) - /* "gensim/models/word2vec_inner.pyx":827 + /* "gensim/models/word2vec_inner.pyx":772 * work[0] += f * * def score_sentence_cbow(model, sentence, _work, _neu1): # <<<<<<<<<<<<<< * """Obtain likelihood score for a single sentence in a fitted CBOW representation. 
* */ - __pyx_tuple__21 = PyTuple_Pack(23, __pyx_n_s_model, __pyx_n_s_sentence, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_cbow_mean, __pyx_n_s_syn0, __pyx_n_s_work_2, __pyx_n_s_neu1_2, __pyx_n_s_size, __pyx_n_s_codelens, __pyx_n_s_indexes, __pyx_n_s_sentence_len, __pyx_n_s_window, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_result, __pyx_n_s_syn1, __pyx_n_s_points, __pyx_n_s_codes, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_word); if (unlikely(!__pyx_tuple__21)) __PYX_ERR(0, 827, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__21); - __Pyx_GIVEREF(__pyx_tuple__21); - __pyx_codeobj__22 = (PyObject*)__Pyx_PyCode_New(4, 0, 23, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__21, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_score_sentence_cbow, 827, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__22)) __PYX_ERR(0, 827, __pyx_L1_error) + __pyx_tuple__19 = PyTuple_Pack(13, __pyx_n_s_model, __pyx_n_s_sentence, __pyx_n_s_work, __pyx_n_s_neu1, __pyx_n_s_c, __pyx_n_s_i, __pyx_n_s_j, __pyx_n_s_k, __pyx_n_s_result, __pyx_n_s_vlookup, __pyx_n_s_token, __pyx_n_s_word, __pyx_n_s_sentence_len); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 772, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__19); + __Pyx_GIVEREF(__pyx_tuple__19); + __pyx_codeobj__20 = (PyObject*)__Pyx_PyCode_New(4, 0, 13, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_score_sentence_cbow, 772, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__20)) __PYX_ERR(0, 772, __pyx_L1_error) - /* "gensim/models/word2vec_inner.pyx":945 + /* "gensim/models/word2vec_inner.pyx":880 * * * def init(): # <<<<<<<<<<<<<< * """Precompute function `sigmoid(x) = 1 / (1 + exp(-x))`, for x values discretized into table EXP_TABLE. * Also calculate log(sigmoid(x)) into LOG_TABLE. 
*/ - __pyx_tuple__23 = PyTuple_Pack(7, __pyx_n_s_i, __pyx_n_s_x, __pyx_n_s_y, __pyx_n_s_expected, __pyx_n_s_size, __pyx_n_s_d_res, __pyx_n_s_p_res); if (unlikely(!__pyx_tuple__23)) __PYX_ERR(0, 945, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__23); - __Pyx_GIVEREF(__pyx_tuple__23); - __pyx_codeobj__24 = (PyObject*)__Pyx_PyCode_New(0, 0, 7, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__23, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_init, 945, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__24)) __PYX_ERR(0, 945, __pyx_L1_error) + __pyx_tuple__21 = PyTuple_Pack(7, __pyx_n_s_i, __pyx_n_s_x, __pyx_n_s_y, __pyx_n_s_expected, __pyx_n_s_size, __pyx_n_s_d_res, __pyx_n_s_p_res); if (unlikely(!__pyx_tuple__21)) __PYX_ERR(0, 880, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__21); + __Pyx_GIVEREF(__pyx_tuple__21); + __pyx_codeobj__22 = (PyObject*)__Pyx_PyCode_New(0, 0, 7, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__21, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gensim_models_word2vec_inner_pyx, __pyx_n_s_init, 880, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__22)) __PYX_ERR(0, 880, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; @@ -11614,6 +11331,11 @@ static int __Pyx_modinit_function_export_code(void) { if (__Pyx_ExportFunction("our_saxpy_noblas", (void (*)(void))__pyx_f_6gensim_6models_14word2vec_inner_our_saxpy_noblas, "void (int const *, float const *, float const *, int const *, float *, int const *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("bisect_left", (void (*)(void))__pyx_f_6gensim_6models_14word2vec_inner_bisect_left, "unsigned PY_LONG_LONG (__pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG, unsigned PY_LONG_LONG)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (__Pyx_ExportFunction("random_int32", (void (*)(void))__pyx_f_6gensim_6models_14word2vec_inner_random_int32, "unsigned PY_LONG_LONG (unsigned PY_LONG_LONG *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("w2v_fast_sentence_sg_hs", (void (*)(void))__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("w2v_fast_sentence_sg_neg", (void (*)(void))__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_sg_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const , __pyx_t_5numpy_uint32_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("w2v_fast_sentence_cbow_hs", (void 
(*)(void))__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_hs, "void (__pyx_t_5numpy_uint32_t const *, __pyx_t_5numpy_uint8_t const *, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("w2v_fast_sentence_cbow_neg", (void (*)(void))__pyx_f_6gensim_6models_14word2vec_inner_w2v_fast_sentence_cbow_neg, "unsigned PY_LONG_LONG (int const , __pyx_t_5numpy_uint32_t *, unsigned PY_LONG_LONG, int *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_5numpy_uint32_t const *, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int, int, int, int, unsigned PY_LONG_LONG, __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *, int const , __pyx_t_6gensim_6models_14word2vec_inner_REAL_t *)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + if (__Pyx_ExportFunction("init_w2v_config", (void (*)(void))__pyx_f_6gensim_6models_14word2vec_inner_init_w2v_config, "PyObject *(struct __pyx_t_6gensim_6models_14word2vec_inner_Word2VecConfig *, PyObject *, PyObject *, PyObject *, PyObject *, struct __pyx_opt_args_6gensim_6models_14word2vec_inner_init_w2v_config *__pyx_optional_args)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; @@ -11685,7 +11407,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) +#elif defined(__GNUC__) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -11853,15 +11575,15 @@ if (!__Pyx_RefNanny) { * * import cython * import numpy as np # <<<<<<<<<<<<<< - * cimport numpy as np * + * cimport numpy as np */ __pyx_t_1 = __Pyx_Import(__pyx_n_s_numpy, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_np, __pyx_t_1) < 0) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":22 + /* "gensim/models/word2vec_inner.pyx":23 * * # scipy <= 0.15 * try: # <<<<<<<<<<<<<< @@ -11877,28 +11599,28 @@ if (!__Pyx_RefNanny) { __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { - /* "gensim/models/word2vec_inner.pyx":23 + /* "gensim/models/word2vec_inner.pyx":24 * # scipy <= 0.15 * try: * from scipy.linalg.blas import fblas # <<<<<<<<<<<<<< * except ImportError: * # in scipy > 0.15, fblas function has been removed */ - __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 23, __pyx_L2_error) + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 24, __pyx_L2_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_s_fblas); __Pyx_GIVEREF(__pyx_n_s_fblas); PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_fblas); - __pyx_t_5 = __Pyx_Import(__pyx_n_s_scipy_linalg_blas, __pyx_t_1, -1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 23, __pyx_L2_error) + __pyx_t_5 = 
__Pyx_Import(__pyx_n_s_scipy_linalg_blas, __pyx_t_1, -1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 24, __pyx_L2_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_5, __pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 23, __pyx_L2_error) + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_5, __pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 24, __pyx_L2_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_fblas, __pyx_t_1) < 0) __PYX_ERR(0, 23, __pyx_L2_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_fblas, __pyx_t_1) < 0) __PYX_ERR(0, 24, __pyx_L2_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - /* "gensim/models/word2vec_inner.pyx":22 + /* "gensim/models/word2vec_inner.pyx":23 * * # scipy <= 0.15 * try: # <<<<<<<<<<<<<< @@ -11914,7 +11636,7 @@ if (!__Pyx_RefNanny) { __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - /* "gensim/models/word2vec_inner.pyx":24 + /* "gensim/models/word2vec_inner.pyx":25 * try: * from scipy.linalg.blas import fblas * except ImportError: # <<<<<<<<<<<<<< @@ -11924,27 +11646,27 @@ if (!__Pyx_RefNanny) { __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_ImportError); if (__pyx_t_6) { __Pyx_AddTraceback("gensim.models.word2vec_inner", __pyx_clineno, __pyx_lineno, __pyx_filename); - if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_7) < 0) __PYX_ERR(0, 24, __pyx_L4_except_error) + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_7) < 0) __PYX_ERR(0, 25, __pyx_L4_except_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_GOTREF(__pyx_t_1); __Pyx_GOTREF(__pyx_t_7); - /* "gensim/models/word2vec_inner.pyx":26 + /* "gensim/models/word2vec_inner.pyx":27 * except ImportError: * # in scipy > 0.15, fblas function has been removed * import scipy.linalg.blas as fblas # <<<<<<<<<<<<<< * * REAL = np.float32 */ - __pyx_t_8 = PyList_New(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 26, __pyx_L4_except_error) + __pyx_t_8 = PyList_New(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 27, __pyx_L4_except_error) __Pyx_GOTREF(__pyx_t_8); - __Pyx_INCREF(__pyx_n_s__14); - __Pyx_GIVEREF(__pyx_n_s__14); - PyList_SET_ITEM(__pyx_t_8, 0, __pyx_n_s__14); - __pyx_t_9 = __Pyx_Import(__pyx_n_s_scipy_linalg_blas, __pyx_t_8, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 26, __pyx_L4_except_error) + __Pyx_INCREF(__pyx_n_s__12); + __Pyx_GIVEREF(__pyx_n_s__12); + PyList_SET_ITEM(__pyx_t_8, 0, __pyx_n_s__12); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_scipy_linalg_blas, __pyx_t_8, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 27, __pyx_L4_except_error) __Pyx_GOTREF(__pyx_t_9); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_fblas, __pyx_t_9) < 0) __PYX_ERR(0, 26, __pyx_L4_except_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_fblas, __pyx_t_9) < 0) __PYX_ERR(0, 27, __pyx_L4_except_error) __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; @@ -11954,7 +11676,7 @@ if (!__Pyx_RefNanny) { goto __pyx_L4_except_error; __pyx_L4_except_error:; - /* "gensim/models/word2vec_inner.pyx":22 + /* "gensim/models/word2vec_inner.pyx":23 * * # scipy <= 0.15 * try: # <<<<<<<<<<<<<< @@ -11974,130 +11696,130 @@ if (!__Pyx_RefNanny) { __pyx_L7_try_end:; } - /* "gensim/models/word2vec_inner.pyx":28 + /* "gensim/models/word2vec_inner.pyx":29 * import scipy.linalg.blas as fblas * * REAL = np.float32 # <<<<<<<<<<<<<< * * DEF MAX_SENTENCE_LEN = 10000 */ - __pyx_t_7 = 
__Pyx_GetModuleGlobalName(__pyx_n_s_np); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 28, __pyx_L1_error) + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_np); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 29, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_float32); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_float32); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 29, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_REAL, __pyx_t_1) < 0) __PYX_ERR(0, 28, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_REAL, __pyx_t_1) < 0) __PYX_ERR(0, 29, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":32 + /* "gensim/models/word2vec_inner.pyx":33 * DEF MAX_SENTENCE_LEN = 10000 * * cdef scopy_ptr scopy=PyCObject_AsVoidPtr(fblas.scopy._cpointer) # y = x # <<<<<<<<<<<<<< * cdef saxpy_ptr saxpy=PyCObject_AsVoidPtr(fblas.saxpy._cpointer) # y += alpha * x * cdef sdot_ptr sdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # float = dot(x, y) */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 32, __pyx_L1_error) + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_scopy); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 32, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_scopy); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 33, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 32, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_v_6gensim_6models_14word2vec_inner_scopy = ((__pyx_t_6gensim_6models_14word2vec_inner_scopy_ptr)PyCObject_AsVoidPtr(__pyx_t_1)); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":33 + /* "gensim/models/word2vec_inner.pyx":34 * * cdef scopy_ptr scopy=PyCObject_AsVoidPtr(fblas.scopy._cpointer) # y = x * cdef saxpy_ptr saxpy=PyCObject_AsVoidPtr(fblas.saxpy._cpointer) # y += alpha * x # <<<<<<<<<<<<<< * cdef sdot_ptr sdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # float = dot(x, y) * cdef dsdot_ptr dsdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # double = dot(x, y) */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error) + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 34, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_saxpy); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 33, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_saxpy); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 34, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 34, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_v_6gensim_6models_14word2vec_inner_saxpy = ((__pyx_t_6gensim_6models_14word2vec_inner_saxpy_ptr)PyCObject_AsVoidPtr(__pyx_t_1)); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":34 + /* "gensim/models/word2vec_inner.pyx":35 * cdef scopy_ptr scopy=PyCObject_AsVoidPtr(fblas.scopy._cpointer) # y = x * cdef saxpy_ptr saxpy=PyCObject_AsVoidPtr(fblas.saxpy._cpointer) # y += alpha * x * cdef sdot_ptr sdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # float = dot(x, y) # <<<<<<<<<<<<<< * cdef dsdot_ptr dsdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # double = dot(x, y) * cdef snrm2_ptr snrm2=PyCObject_AsVoidPtr(fblas.snrm2._cpointer) # sqrt(x^2) */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 34, __pyx_L1_error) + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sdot); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 34, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sdot); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 35, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 34, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_v_6gensim_6models_14word2vec_inner_sdot = ((__pyx_t_6gensim_6models_14word2vec_inner_sdot_ptr)PyCObject_AsVoidPtr(__pyx_t_1)); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":35 + /* "gensim/models/word2vec_inner.pyx":36 * cdef saxpy_ptr saxpy=PyCObject_AsVoidPtr(fblas.saxpy._cpointer) # y += alpha * x * cdef sdot_ptr sdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # float = dot(x, y) * cdef dsdot_ptr dsdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # double = dot(x, y) # <<<<<<<<<<<<<< * cdef snrm2_ptr snrm2=PyCObject_AsVoidPtr(fblas.snrm2._cpointer) # sqrt(x^2) * cdef sscal_ptr sscal=PyCObject_AsVoidPtr(fblas.sscal._cpointer) # x = alpha * x */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error) + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sdot); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 35, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sdot); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_v_6gensim_6models_14word2vec_inner_dsdot = ((__pyx_t_6gensim_6models_14word2vec_inner_dsdot_ptr)PyCObject_AsVoidPtr(__pyx_t_1)); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":36 + /* "gensim/models/word2vec_inner.pyx":37 * cdef sdot_ptr 
sdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # float = dot(x, y) * cdef dsdot_ptr dsdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # double = dot(x, y) * cdef snrm2_ptr snrm2=PyCObject_AsVoidPtr(fblas.snrm2._cpointer) # sqrt(x^2) # <<<<<<<<<<<<<< * cdef sscal_ptr sscal=PyCObject_AsVoidPtr(fblas.sscal._cpointer) # x = alpha * x * */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error) + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 37, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_snrm2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 36, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_snrm2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 37, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 37, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_v_6gensim_6models_14word2vec_inner_snrm2 = ((__pyx_t_6gensim_6models_14word2vec_inner_snrm2_ptr)PyCObject_AsVoidPtr(__pyx_t_1)); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":37 + /* "gensim/models/word2vec_inner.pyx":38 * cdef dsdot_ptr dsdot=PyCObject_AsVoidPtr(fblas.sdot._cpointer) # double = dot(x, y) * cdef snrm2_ptr snrm2=PyCObject_AsVoidPtr(fblas.snrm2._cpointer) # sqrt(x^2) * cdef sscal_ptr sscal=PyCObject_AsVoidPtr(fblas.sscal._cpointer) # x = alpha * x # <<<<<<<<<<<<<< * * DEF EXP_TABLE_SIZE = 1000 */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 37, __pyx_L1_error) + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_fblas); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 38, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sscal); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 37, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_sscal); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 38, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 37, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_cpointer); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 38, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_v_6gensim_6models_14word2vec_inner_sscal = ((__pyx_t_6gensim_6models_14word2vec_inner_sscal_ptr)PyCObject_AsVoidPtr(__pyx_t_1)); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":45 + /* "gensim/models/word2vec_inner.pyx":46 * cdef REAL_t[EXP_TABLE_SIZE] LOG_TABLE * * cdef int ONE = 1 # <<<<<<<<<<<<<< @@ -12106,95 +11828,95 @@ if (!__Pyx_RefNanny) { */ __pyx_v_6gensim_6models_14word2vec_inner_ONE = 1; - /* "gensim/models/word2vec_inner.pyx":46 + /* "gensim/models/word2vec_inner.pyx":47 * * cdef int ONE = 1 * cdef REAL_t ONEF = 1.0 # <<<<<<<<<<<<<< * - * # for when fblas.sdot returns a double + * */ __pyx_v_6gensim_6models_14word2vec_inner_ONEF = ((__pyx_t_6gensim_6models_14word2vec_inner_REAL_t)1.0); - /* "gensim/models/word2vec_inner.pyx":465 + /* "gensim/models/word2vec_inner.pyx":500 * * * def 
train_batch_sg(model, sentences, alpha, _work, compute_loss): # <<<<<<<<<<<<<< * """Update skip-gram model by training on a batch of sentences. * */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_1train_batch_sg, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 465, __pyx_L1_error) + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_1train_batch_sg, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 500, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_batch_sg, __pyx_t_1) < 0) __PYX_ERR(0, 465, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_batch_sg, __pyx_t_1) < 0) __PYX_ERR(0, 500, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":596 + /* "gensim/models/word2vec_inner.pyx":591 * * * def train_batch_cbow(model, sentences, alpha, _work, _neu1, compute_loss): # <<<<<<<<<<<<<< * """Update CBOW model by training on a batch of sentences. * */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_3train_batch_cbow, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 596, __pyx_L1_error) + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_3train_batch_cbow, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 591, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_batch_cbow, __pyx_t_1) < 0) __PYX_ERR(0, 596, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_train_batch_cbow, __pyx_t_1) < 0) __PYX_ERR(0, 591, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":727 + /* "gensim/models/word2vec_inner.pyx":679 * * * def score_sentence_sg(model, sentence, _work): # <<<<<<<<<<<<<< * """Obtain likelihood score for a single sentence in a fitted skip-gram representation. * */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_5score_sentence_sg, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 727, __pyx_L1_error) + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_5score_sentence_sg, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 679, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_score_sentence_sg, __pyx_t_1) < 0) __PYX_ERR(0, 727, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_score_sentence_sg, __pyx_t_1) < 0) __PYX_ERR(0, 679, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":827 + /* "gensim/models/word2vec_inner.pyx":772 * work[0] += f * * def score_sentence_cbow(model, sentence, _work, _neu1): # <<<<<<<<<<<<<< * """Obtain likelihood score for a single sentence in a fitted CBOW representation. 
* */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_7score_sentence_cbow, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 827, __pyx_L1_error) + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_7score_sentence_cbow, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 772, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_score_sentence_cbow, __pyx_t_1) < 0) __PYX_ERR(0, 827, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_score_sentence_cbow, __pyx_t_1) < 0) __PYX_ERR(0, 772, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":945 + /* "gensim/models/word2vec_inner.pyx":880 * * * def init(): # <<<<<<<<<<<<<< * """Precompute function `sigmoid(x) = 1 / (1 + exp(-x))`, for x values discretized into table EXP_TABLE. * Also calculate log(sigmoid(x)) into LOG_TABLE. */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_9init, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 945, __pyx_L1_error) + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gensim_6models_14word2vec_inner_9init, NULL, __pyx_n_s_gensim_models_word2vec_inner); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 880, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_init, __pyx_t_1) < 0) __PYX_ERR(0, 945, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_init, __pyx_t_1) < 0) __PYX_ERR(0, 880, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "gensim/models/word2vec_inner.pyx":992 + /* "gensim/models/word2vec_inner.pyx":927 * return 2 * * FAST_VERSION = init() # initialize the module # <<<<<<<<<<<<<< * MAX_WORDS_IN_BATCH = MAX_SENTENCE_LEN */ - __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_init); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 992, __pyx_L1_error) + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_init); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 927, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = __Pyx_PyObject_CallNoArg(__pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 992, __pyx_L1_error) + __pyx_t_7 = __Pyx_PyObject_CallNoArg(__pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 927, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (PyDict_SetItem(__pyx_d, __pyx_n_s_FAST_VERSION, __pyx_t_7) < 0) __PYX_ERR(0, 992, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_FAST_VERSION, __pyx_t_7) < 0) __PYX_ERR(0, 927, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "gensim/models/word2vec_inner.pyx":993 + /* "gensim/models/word2vec_inner.pyx":928 * * FAST_VERSION = init() # initialize the module * MAX_WORDS_IN_BATCH = MAX_SENTENCE_LEN # <<<<<<<<<<<<<< */ - if (PyDict_SetItem(__pyx_d, __pyx_n_s_MAX_WORDS_IN_BATCH, __pyx_int_10000) < 0) __PYX_ERR(0, 993, __pyx_L1_error) + if (PyDict_SetItem(__pyx_d, __pyx_n_s_MAX_WORDS_IN_BATCH, __pyx_int_10000) < 0) __PYX_ERR(0, 928, __pyx_L1_error) /* "gensim/models/word2vec_inner.pyx":1 * #!/usr/bin/env cython # <<<<<<<<<<<<<< @@ -12206,7 +11928,7 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_7) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed 
to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -12288,6 +12010,39 @@ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { return result; } +/* ExtTypeTest */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + /* RaiseArgTupleInvalid */ static void __Pyx_RaiseArgtupleInvalid( const char* func_name, @@ -12430,39 +12185,6 @@ static int __Pyx_ParseOptionalKeywords( return -1; } -/* ExtTypeTest */ -static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { - if (unlikely(!type)) { - PyErr_SetString(PyExc_SystemError, "Missing type object"); - return 0; - } - if (likely(__Pyx_TypeCheck(obj, type))) - return 1; - PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", - Py_TYPE(obj)->tp_name, type->tp_name); - return 0; -} - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - /* GetItemInt */ static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { PyObject *r; @@ -13285,9 +13007,6 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -15390,9 +15109,6 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_DECREF(x); return ival; } -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } diff --git a/gensim/models/word2vec_inner.pxd b/gensim/models/word2vec_inner.pxd index 04cca9e887..fabea96321 100644 --- a/gensim/models/word2vec_inner.pxd +++ b/gensim/models/word2vec_inner.pxd @@ -1,14 +1,21 @@ +# cython: boundscheck=False +# cython: wraparound=False +# cython: cdivision=True +# cython: embedsignature=True +# coding: utf-8 # # shared type definitions for word2vec_inner # used by both word2vec_inner.pyx (automatically) and doc2vec_inner.pyx (by explicit cimport) # # Copyright (C) 2013 Radim Rehurek -# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.htmlcimport numpy as np +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +cimport numpy as np + cdef extern from "voidptr.h": void* PyCObject_AsVoidPtr(object obj) -cimport numpy as np ctypedef np.float32_t REAL_t # BLAS routine signatures @@ -31,6 +38,8 @@ DEF EXP_TABLE_SIZE = 1000 DEF MAX_EXP = 6 cdef REAL_t[EXP_TABLE_SIZE] EXP_TABLE +DEF MAX_SENTENCE_LEN = 10000 + # function implementations swapped based on BLAS detected in word2vec_inner.pyx init() ctypedef REAL_t (*our_dot_ptr) (const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X, const int *incX, float *Y, const int *incY) nogil @@ -38,6 +47,34 @@ ctypedef void (*our_saxpy_ptr) (const int *N, const float *alpha, const float *X cdef our_dot_ptr our_dot cdef our_saxpy_ptr our_saxpy + +cdef struct Word2VecConfig: + int hs, negative, sample, compute_loss, size, window, cbow_mean, workers + REAL_t running_training_loss, alpha + + REAL_t *syn0 + REAL_t *word_locks + REAL_t *work + REAL_t *neu1 + + int codelens[MAX_SENTENCE_LEN] + np.uint32_t indexes[MAX_SENTENCE_LEN] + np.uint32_t reduced_windows[MAX_SENTENCE_LEN] + int sentence_idx[MAX_SENTENCE_LEN + 1] + + # For hierarchical softmax + REAL_t *syn1 + np.uint32_t *points[MAX_SENTENCE_LEN] + np.uint8_t *codes[MAX_SENTENCE_LEN] + + # For negative sampling + REAL_t *syn1neg + np.uint32_t *cum_table + unsigned long long cum_table_len + # for sampling (negative and frequent-word downsampling) + unsigned long long next_random + + # for when fblas.sdot returns a double cdef REAL_t our_dot_double(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil @@ -52,3 +89,37 @@ cdef void our_saxpy_noblas(const int *N, const float *alpha, const float *X, con cdef unsigned long long bisect_left(np.uint32_t *a, unsigned long long x, unsigned long long lo, unsigned long long hi) nogil cdef unsigned long long random_int32(unsigned long long *next_random) nogil + + +cdef void w2v_fast_sentence_sg_hs( + const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, + REAL_t *syn0, REAL_t *syn1, const int size, + const np.uint32_t word2_index, const REAL_t alpha, REAL_t *work, REAL_t *word_locks, + const int _compute_loss, REAL_t *_running_training_loss_param) nogil + + +cdef unsigned long long w2v_fast_sentence_sg_neg( + const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, + REAL_t *syn0, REAL_t *syn1neg, const int size, const np.uint32_t word_index, + const np.uint32_t word2_index, const REAL_t alpha, REAL_t *work, + unsigned long long next_random, REAL_t *word_locks, + const int _compute_loss, REAL_t *_running_training_loss_param) nogil + + +cdef void 
w2v_fast_sentence_cbow_hs( + const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], + REAL_t *neu1, REAL_t *syn0, REAL_t *syn1, const int size, + const np.uint32_t indexes[MAX_SENTENCE_LEN], const REAL_t alpha, REAL_t *work, + int i, int j, int k, int cbow_mean, REAL_t *word_locks, + const int _compute_loss, REAL_t *_running_training_loss_param) nogil + + +cdef unsigned long long w2v_fast_sentence_cbow_neg( + const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, int codelens[MAX_SENTENCE_LEN], + REAL_t *neu1, REAL_t *syn0, REAL_t *syn1neg, const int size, + const np.uint32_t indexes[MAX_SENTENCE_LEN], const REAL_t alpha, REAL_t *work, + int i, int j, int k, int cbow_mean, unsigned long long next_random, REAL_t *word_locks, + const int _compute_loss, REAL_t *_running_training_loss_param) nogil + + +cdef init_w2v_config(Word2VecConfig *c, model, alpha, compute_loss, _work, _neu1=*) diff --git a/gensim/models/word2vec_inner.pyx b/gensim/models/word2vec_inner.pyx index 14836179cc..0576773bd5 100755 --- a/gensim/models/word2vec_inner.pyx +++ b/gensim/models/word2vec_inner.pyx @@ -12,6 +12,7 @@ import cython import numpy as np + cimport numpy as np from libc.math cimport exp @@ -45,6 +46,7 @@ cdef REAL_t[EXP_TABLE_SIZE] LOG_TABLE cdef int ONE = 1 cdef REAL_t ONEF = 1.0 + # for when fblas.sdot returns a double cdef REAL_t our_dot_double(const int *N, const float *X, const int *incX, const float *Y, const int *incY) nogil: return dsdot(N, X, incX, Y, incY) @@ -69,7 +71,7 @@ cdef void our_saxpy_noblas(const int *N, const float *alpha, const float *X, con for i from 0 <= i < N[0] by 1: Y[i * (incY[0])] = (alpha[0]) * X[i * (incX[0])] + Y[i * (incY[0])] -cdef void fast_sentence_sg_hs( +cdef void w2v_fast_sentence_sg_hs( const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, REAL_t *syn0, REAL_t *syn1, const int size, const np.uint32_t word2_index, const REAL_t alpha, REAL_t *work, REAL_t *word_locks, @@ -154,7 +156,7 @@ cdef inline unsigned long long random_int32(unsigned long long *next_random) nog next_random[0] = (next_random[0] * 25214903917ULL + 11) & 281474976710655ULL return this_random -cdef unsigned long long fast_sentence_sg_neg( +cdef unsigned long long w2v_fast_sentence_sg_neg( const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, REAL_t *syn0, REAL_t *syn1neg, const int size, const np.uint32_t word_index, const np.uint32_t word2_index, const REAL_t alpha, REAL_t *work, @@ -245,7 +247,7 @@ cdef unsigned long long fast_sentence_sg_neg( return next_random -cdef void fast_sentence_cbow_hs( +cdef void w2v_fast_sentence_cbow_hs( const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], REAL_t *neu1, REAL_t *syn0, REAL_t *syn1, const int size, const np.uint32_t indexes[MAX_SENTENCE_LEN], const REAL_t alpha, REAL_t *work, @@ -343,7 +345,7 @@ cdef void fast_sentence_cbow_hs( our_saxpy(&size, &word_locks[indexes[m]], work, &ONE, &syn0[indexes[m] * size], &ONE) -cdef unsigned long long fast_sentence_cbow_neg( +cdef unsigned long long w2v_fast_sentence_cbow_neg( const int negative, np.uint32_t *cum_table, unsigned long long cum_table_len, int codelens[MAX_SENTENCE_LEN], REAL_t *neu1, REAL_t *syn0, REAL_t *syn1neg, const int size, const np.uint32_t indexes[MAX_SENTENCE_LEN], const REAL_t alpha, REAL_t *work, @@ -462,6 +464,39 @@ cdef unsigned long long fast_sentence_cbow_neg( return next_random +cdef init_w2v_config(Word2VecConfig *c, model, alpha, 
compute_loss, _work, _neu1=None): + c[0].hs = model.hs + c[0].negative = model.negative + c[0].sample = (model.vocabulary.sample != 0) + c[0].cbow_mean = model.cbow_mean + c[0].window = model.window + c[0].workers = model.workers + + c[0].compute_loss = (1 if compute_loss else 0) + c[0].running_training_loss = model.running_training_loss + + c[0].syn0 = (np.PyArray_DATA(model.wv.vectors)) + c[0].word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) + c[0].alpha = alpha + c[0].size = model.wv.vector_size + + if c[0].hs: + c[0].syn1 = (np.PyArray_DATA(model.trainables.syn1)) + + if c[0].negative: + c[0].syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) + c[0].cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) + c[0].cum_table_len = len(model.vocabulary.cum_table) + if c[0].negative or c[0].sample: + c[0].next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) + + # convert Python structures to primitive types, so we can release the GIL + c[0].work = np.PyArray_DATA(_work) + + if _neu1 is not None: + c[0].neu1 = np.PyArray_DATA(_neu1) + + def train_batch_sg(model, sentences, alpha, _work, compute_loss): """Update skip-gram model by training on a batch of sentences. @@ -487,57 +522,17 @@ def train_batch_sg(model, sentences, alpha, _work, compute_loss): and were not discarded by negative sampling). """ - cdef int hs = model.hs - cdef int negative = model.negative - cdef int sample = (model.vocabulary.sample != 0) - - cdef int _compute_loss = (1 if compute_loss else 0) - cdef REAL_t _running_training_loss = model.running_training_loss - - cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) - cdef REAL_t *word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) - cdef REAL_t *work - cdef REAL_t _alpha = alpha - cdef int size = model.wv.vector_size - - cdef int codelens[MAX_SENTENCE_LEN] - cdef np.uint32_t indexes[MAX_SENTENCE_LEN] - cdef np.uint32_t reduced_windows[MAX_SENTENCE_LEN] - cdef int sentence_idx[MAX_SENTENCE_LEN + 1] - cdef int window = model.window - + cdef Word2VecConfig c cdef int i, j, k cdef int effective_words = 0, effective_sentences = 0 cdef int sent_idx, idx_start, idx_end - # For hierarchical softmax - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_SENTENCE_LEN] - cdef np.uint8_t *codes[MAX_SENTENCE_LEN] - - # For negative sampling - cdef REAL_t *syn1neg - cdef np.uint32_t *cum_table - cdef unsigned long long cum_table_len - # for sampling (negative and frequent-word downsampling) - cdef unsigned long long next_random + init_w2v_config(&c, model, alpha, compute_loss, _work) - if hs: - syn1 = (np.PyArray_DATA(model.trainables.syn1)) - - if negative: - syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - cum_table_len = len(model.vocabulary.cum_table) - if negative or sample: - next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - - # convert Python structures to primitive types, so we can release the GIL - work = np.PyArray_DATA(_work) # prepare C structures so we can go "full C" and release the Python GIL vlookup = model.wv.vocab - sentence_idx[0] = 0 # indices of the first sentence always start at 0 + c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 for sent in sentences: if not sent: continue # ignore empty sentences; leave effective_sentences unchanged @@ -545,13 +540,13 @@ def train_batch_sg(model, sentences, alpha, _work, compute_loss): word = vlookup[token] if token in vlookup else None 
if word is None: continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - if sample and word.sample_int < random_int32(&next_random): + if c.sample and word.sample_int < random_int32(&c.next_random): continue - indexes[effective_words] = word.index - if hs: - codelens[effective_words] = len(word.code) - codes[effective_words] = np.PyArray_DATA(word.code) - points[effective_words] = np.PyArray_DATA(word.point) + c.indexes[effective_words] = word.index + if c.hs: + c.codelens[effective_words] = len(word.code) + c.codes[effective_words] = np.PyArray_DATA(word.code) + c.points[effective_words] = np.PyArray_DATA(word.point) effective_words += 1 if effective_words == MAX_SENTENCE_LEN: break # TODO: log warning, tally overflow? @@ -560,36 +555,36 @@ def train_batch_sg(model, sentences, alpha, _work, compute_loss): # across sentence boundaries. # indices of sentence number X are between idx_end: k = idx_end for j in range(j, k): if j == i: continue - if hs: - fast_sentence_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], _alpha, work, word_locks, _compute_loss, &_running_training_loss) - if negative: - next_random = fast_sentence_sg_neg(negative, cum_table, cum_table_len, syn0, syn1neg, size, indexes[i], indexes[j], _alpha, work, next_random, word_locks, _compute_loss, &_running_training_loss) + if c.hs: + w2v_fast_sentence_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.alpha, c.work, c.word_locks, c.compute_loss, &c.running_training_loss) + if c.negative: + c.next_random = w2v_fast_sentence_sg_neg(c.negative, c.cum_table, c.cum_table_len, c.syn0, c.syn1neg, c.size, c.indexes[i], c.indexes[j], c.alpha, c.work, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) - model.running_training_loss = _running_training_loss + model.running_training_loss = c.running_training_loss return effective_words @@ -619,59 +614,16 @@ def train_batch_cbow(model, sentences, alpha, _work, _neu1, compute_loss): Number of words in the vocabulary actually used for training (They already existed in the vocabulary and were not discarded by negative sampling). 
""" - cdef int hs = model.hs - cdef int negative = model.negative - cdef int sample = (model.vocabulary.sample != 0) - cdef int cbow_mean = model.cbow_mean - - cdef int _compute_loss = (1 if compute_loss == True else 0) - cdef REAL_t _running_training_loss = model.running_training_loss - - cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) - cdef REAL_t *word_locks = (np.PyArray_DATA(model.trainables.vectors_lockf)) - cdef REAL_t *work - cdef REAL_t _alpha = alpha - cdef int size = model.wv.vector_size - - cdef int codelens[MAX_SENTENCE_LEN] - cdef np.uint32_t indexes[MAX_SENTENCE_LEN] - cdef np.uint32_t reduced_windows[MAX_SENTENCE_LEN] - cdef int sentence_idx[MAX_SENTENCE_LEN + 1] - cdef int window = model.window - + cdef Word2VecConfig c cdef int i, j, k cdef int effective_words = 0, effective_sentences = 0 cdef int sent_idx, idx_start, idx_end - # For hierarchical softmax - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_SENTENCE_LEN] - cdef np.uint8_t *codes[MAX_SENTENCE_LEN] - - # For negative sampling - cdef REAL_t *syn1neg - cdef np.uint32_t *cum_table - cdef unsigned long long cum_table_len - # for sampling (negative and frequent-word downsampling) - cdef unsigned long long next_random - - if hs: - syn1 = (np.PyArray_DATA(model.trainables.syn1)) - - if negative: - syn1neg = (np.PyArray_DATA(model.trainables.syn1neg)) - cum_table = (np.PyArray_DATA(model.vocabulary.cum_table)) - cum_table_len = len(model.vocabulary.cum_table) - if negative or sample: - next_random = (2**24) * model.random.randint(0, 2**24) + model.random.randint(0, 2**24) - - # convert Python structures to primitive types, so we can release the GIL - work = np.PyArray_DATA(_work) - neu1 = np.PyArray_DATA(_neu1) + init_w2v_config(&c, model, alpha, compute_loss, _work, _neu1) # prepare C structures so we can go "full C" and release the Python GIL vlookup = model.wv.vocab - sentence_idx[0] = 0 # indices of the first sentence always start at 0 + c.sentence_idx[0] = 0 # indices of the first sentence always start at 0 for sent in sentences: if not sent: continue # ignore empty sentences; leave effective_sentences unchanged @@ -679,13 +631,13 @@ def train_batch_cbow(model, sentences, alpha, _work, _neu1, compute_loss): word = vlookup[token] if token in vlookup else None if word is None: continue # leaving `effective_words` unchanged = shortening the sentence = expanding the window - if sample and word.sample_int < random_int32(&next_random): + if c.sample and word.sample_int < random_int32(&c.next_random): continue - indexes[effective_words] = word.index - if hs: - codelens[effective_words] = len(word.code) - codes[effective_words] = np.PyArray_DATA(word.code) - points[effective_words] = np.PyArray_DATA(word.point) + c.indexes[effective_words] = word.index + if c.hs: + c.codelens[effective_words] = len(word.code) + c.codes[effective_words] = np.PyArray_DATA(word.code) + c.points[effective_words] = np.PyArray_DATA(word.point) effective_words += 1 if effective_words == MAX_SENTENCE_LEN: break # TODO: log warning, tally overflow? @@ -694,33 +646,33 @@ def train_batch_cbow(model, sentences, alpha, _work, _neu1, compute_loss): # across sentence boundaries. 
# indices of sentence number X are between idx_end: k = idx_end - if hs: - fast_sentence_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, _alpha, work, i, j, k, cbow_mean, word_locks, _compute_loss, &_running_training_loss) - if negative: - next_random = fast_sentence_cbow_neg(negative, cum_table, cum_table_len, codelens, neu1, syn0, syn1neg, size, indexes, _alpha, work, i, j, k, cbow_mean, next_random, word_locks, _compute_loss, &_running_training_loss) + if c.hs: + w2v_fast_sentence_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.word_locks, c.compute_loss, &c.running_training_loss) + if c.negative: + c.next_random = w2v_fast_sentence_cbow_neg(c.negative, c.cum_table, c.cum_table_len, c.codelens, c.neu1, c.syn0, c.syn1neg, c.size, c.indexes, c.alpha, c.work, i, j, k, c.cbow_mean, c.next_random, c.word_locks, c.compute_loss, &c.running_training_loss) - model.running_training_loss = _running_training_loss + model.running_training_loss = c.running_training_loss return effective_words @@ -747,27 +699,20 @@ def score_sentence_sg(model, sentence, _work): The probability assigned to this sentence by the Skip-Gram model. """ + cdef Word2VecConfig c + c.syn0 = (np.PyArray_DATA(model.wv.vectors)) + c.size = model.wv.vector_size - cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) - cdef REAL_t *work - cdef int size = model.wv.vector_size - - cdef int codelens[MAX_SENTENCE_LEN] - cdef np.uint32_t indexes[MAX_SENTENCE_LEN] - cdef int sentence_len - cdef int window = model.window + c.window = model.window cdef int i, j, k cdef long result = 0 + cdef int sentence_len - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_SENTENCE_LEN] - cdef np.uint8_t *codes[MAX_SENTENCE_LEN] - - syn1 = (np.PyArray_DATA(model.trainables.syn1)) + c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) # convert Python structures to primitive types, so we can release the GIL - work = np.PyArray_DATA(_work) + c.work = np.PyArray_DATA(_work) vlookup = model.wv.vocab i = 0 @@ -775,10 +720,10 @@ def score_sentence_sg(model, sentence, _work): word = vlookup[token] if token in vlookup else None if word is None: continue # should drop the - indexes[i] = word.index - codelens[i] = len(word.code) - codes[i] = np.PyArray_DATA(word.code) - points[i] = np.PyArray_DATA(word.point) + c.indexes[i] = word.index + c.codelens[i] = len(word.code) + c.codes[i] = np.PyArray_DATA(word.code) + c.points[i] = np.PyArray_DATA(word.point) result += 1 i += 1 if i == MAX_SENTENCE_LEN: @@ -786,24 +731,24 @@ def score_sentence_sg(model, sentence, _work): sentence_len = i # release GIL & train on the sentence - work[0] = 0.0 + c.work[0] = 0.0 with nogil: for i in range(sentence_len): - if codelens[i] == 0: + if c.codelens[i] == 0: continue - j = i - window + j = i - c.window if j < 0: j = 0 - k = i + window + 1 + k = i + c.window + 1 if k > sentence_len: k = sentence_len for j in range(j, k): - if j == i or codelens[j] == 0: + if j == i or c.codelens[j] == 0: continue - score_pair_sg_hs(points[i], codes[i], codelens[i], syn0, syn1, size, indexes[j], work) + score_pair_sg_hs(c.points[i], c.codes[i], c.codelens[i], c.syn0, c.syn1, c.size, c.indexes[j], c.work) - return work[0] + return c.work[0] cdef void score_pair_sg_hs( const np.uint32_t *word_point, const np.uint8_t *word_code, const int codelen, @@ -849,31 +794,21 @@ def score_sentence_cbow(model, sentence, _work, _neu1): The probability assigned to this sentence by the Skip-Gram model. 
""" - cdef int cbow_mean = model.cbow_mean - - cdef REAL_t *syn0 = (np.PyArray_DATA(model.wv.vectors)) - cdef REAL_t *work - cdef REAL_t *neu1 - cdef int size = model.wv.vector_size + cdef Word2VecConfig c - cdef int codelens[MAX_SENTENCE_LEN] - cdef np.uint32_t indexes[MAX_SENTENCE_LEN] - cdef int sentence_len - cdef int window = model.window + c.cbow_mean = model.cbow_mean + c.syn0 = (np.PyArray_DATA(model.wv.vectors)) + c.size = model.wv.vector_size + c.window = model.window cdef int i, j, k cdef long result = 0 - # For hierarchical softmax - cdef REAL_t *syn1 - cdef np.uint32_t *points[MAX_SENTENCE_LEN] - cdef np.uint8_t *codes[MAX_SENTENCE_LEN] - - syn1 = (np.PyArray_DATA(model.trainables.syn1)) + c.syn1 = (np.PyArray_DATA(model.trainables.syn1)) # convert Python structures to primitive types, so we can release the GIL - work = np.PyArray_DATA(_work) - neu1 = np.PyArray_DATA(_neu1) + c.work = np.PyArray_DATA(_work) + c.neu1 = np.PyArray_DATA(_neu1) vlookup = model.wv.vocab i = 0 @@ -881,10 +816,10 @@ def score_sentence_cbow(model, sentence, _work, _neu1): word = vlookup[token] if token in vlookup else None if word is None: continue # for score, should this be a default negative value? - indexes[i] = word.index - codelens[i] = len(word.code) - codes[i] = np.PyArray_DATA(word.code) - points[i] = np.PyArray_DATA(word.point) + c.indexes[i] = word.index + c.codelens[i] = len(word.code) + c.codes[i] = np.PyArray_DATA(word.code) + c.points[i] = np.PyArray_DATA(word.point) result += 1 i += 1 if i == MAX_SENTENCE_LEN: @@ -892,20 +827,20 @@ def score_sentence_cbow(model, sentence, _work, _neu1): sentence_len = i # release GIL & train on the sentence - work[0] = 0.0 + c.work[0] = 0.0 with nogil: for i in range(sentence_len): - if codelens[i] == 0: + if c.codelens[i] == 0: continue - j = i - window + j = i - c.window if j < 0: j = 0 - k = i + window + 1 + k = i + c.window + 1 if k > sentence_len: k = sentence_len - score_pair_cbow_hs(points[i], codes[i], codelens, neu1, syn0, syn1, size, indexes, work, i, j, k, cbow_mean) + score_pair_cbow_hs(c.points[i], c.codes[i], c.codelens, c.neu1, c.syn0, c.syn1, c.size, c.indexes, c.work, i, j, k, c.cbow_mean) - return work[0] + return c.work[0] cdef void score_pair_cbow_hs( const np.uint32_t *word_point, const np.uint8_t *word_code, int codelens[MAX_SENTENCE_LEN], diff --git a/gensim/test/test_doc2vec.py b/gensim/test/test_doc2vec.py index 921881a660..de23529f14 100644 --- a/gensim/test/test_doc2vec.py +++ b/gensim/test/test_doc2vec.py @@ -14,6 +14,7 @@ import logging import unittest import os +import six from six.moves import zip as izip from collections import namedtuple @@ -23,7 +24,7 @@ from gensim import utils from gensim.models import doc2vec, keyedvectors -from gensim.test.utils import datapath, get_tmpfile, common_texts as raw_sentences +from gensim.test.utils import datapath, get_tmpfile, temporary_file, common_texts as raw_sentences class DocsLeeCorpus(object): @@ -59,6 +60,10 @@ def load_on_instance(): return model.load(tmpf) +def save_lee_corpus_as_line_sentence(corpus_file): + utils.save_as_line_sentence((doc.words for doc in DocsLeeCorpus()), corpus_file) + + class TestDoc2VecModel(unittest.TestCase): def test_persistence(self): """Test storing/loading the entire model.""" @@ -67,6 +72,17 @@ def test_persistence(self): model.save(tmpf) self.models_equal(model, doc2vec.Doc2Vec.load(tmpf)) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_persistence_fromfile(self): + """Test 
storing/loading the entire model.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + + tmpf = get_tmpfile('gensim_doc2vec.tst') + model = doc2vec.Doc2Vec(corpus_file=corpus_file, min_count=1) + model.save(tmpf) + self.models_equal(model, doc2vec.Doc2Vec.load(tmpf)) + def testPersistenceWord2VecFormat(self): """Test storing the entire model in word2vec format.""" model = doc2vec.Doc2Vec(DocsLeeCorpus(), min_count=1) @@ -94,6 +110,7 @@ def testLoadOldModel(self): self.assertTrue(model.wv.vectors.shape == (3955, 100)) self.assertTrue(len(model.wv.vocab) == 3955) self.assertTrue(len(model.wv.index2word) == 3955) + self.assertIsNone(model.corpus_total_words) self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (3955, )) self.assertTrue(model.vocabulary.cum_table.shape == (3955, )) @@ -111,6 +128,7 @@ def testLoadOldModel(self): self.assertTrue(model.wv.vectors.shape == (3955, 100)) self.assertTrue(len(model.wv.vocab) == 3955) self.assertTrue(len(model.wv.index2word) == 3955) + self.assertIsNone(model.corpus_total_words) self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (3955, )) self.assertTrue(model.vocabulary.cum_table.shape == (3955, )) @@ -139,6 +157,7 @@ def testLoadOldModel(self): for old_version in old_versions: model = doc2vec.Doc2Vec.load(saved_models_dir.format(old_version)) self.assertTrue(len(model.wv.vocab) == 3) + self.assertIsNone(model.corpus_total_words) self.assertTrue(model.wv.vectors.shape == (3, 4)) self.assertTrue(model.docvecs.vectors_docs.shape == (2, 4)) self.assertTrue(model.docvecs.count == 2) @@ -154,6 +173,92 @@ def testLoadOldModel(self): sims_to_infer = loaded_model.docvecs.most_similar([doc0_inferred], topn=len(loaded_model.docvecs)) self.assertTrue(sims_to_infer) + @unittest.skipIf(os.name == 'nt', "See another test for Windows below") + def test_get_offsets_and_start_doctags(self): + # Each line takes 6 bytes (including '\n' character) + lines = ['line1\n', 'line2\n', 'line3\n', 'line4\n', 'line5\n'] + tmpf = get_tmpfile('gensim_doc2vec.tst') + + with utils.smart_open(tmpf, 'wb', encoding='utf8') as fout: + for line in lines: + fout.write(utils.any2unicode(line)) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 1) + self.assertEqual(offsets, [0]) + self.assertEqual(start_doctags, [0]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 2) + self.assertEqual(offsets, [0, 12]) + self.assertEqual(start_doctags, [0, 2]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 3) + self.assertEqual(offsets, [0, 6, 18]) + self.assertEqual(start_doctags, [0, 1, 3]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 4) + self.assertEqual(offsets, [0, 6, 12, 18]) + self.assertEqual(start_doctags, [0, 1, 2, 3]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 5) + self.assertEqual(offsets, [0, 6, 12, 18, 24]) + self.assertEqual(start_doctags, [0, 1, 2, 3, 4]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 6) + self.assertEqual(offsets, [0, 0, 6, 12, 18, 24]) + self.assertEqual(start_doctags, [0, 0, 1, 2, 3, 4]) + + @unittest.skipIf(os.name != 'nt', "See 
another test for posix above") + def test_get_offsets_and_start_doctags_win(self): + # Each line takes 7 bytes (including '\n' character which is actually '\r\n' on Windows) + lines = ['line1\n', 'line2\n', 'line3\n', 'line4\n', 'line5\n'] + tmpf = get_tmpfile('gensim_doc2vec.tst') + + with utils.smart_open(tmpf, 'wb', encoding='utf8') as fout: + for line in lines: + fout.write(utils.any2unicode(line)) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 1) + self.assertEqual(offsets, [0]) + self.assertEqual(start_doctags, [0]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 2) + self.assertEqual(offsets, [0, 14]) + self.assertEqual(start_doctags, [0, 2]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 3) + self.assertEqual(offsets, [0, 7, 21]) + self.assertEqual(start_doctags, [0, 1, 3]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 4) + self.assertEqual(offsets, [0, 7, 14, 21]) + self.assertEqual(start_doctags, [0, 1, 2, 3]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 5) + self.assertEqual(offsets, [0, 7, 14, 21, 28]) + self.assertEqual(start_doctags, [0, 1, 2, 3, 4]) + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 6) + self.assertEqual(offsets, [0, 0, 7, 14, 14, 21]) + self.assertEqual(start_doctags, [0, 0, 1, 2, 2, 3]) + + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def test_cython_linesentence_readline_after_getting_offsets(self): + lines = ['line1\n', 'line2\n', 'line3\n', 'line4\n', 'line5\n'] + tmpf = get_tmpfile('gensim_doc2vec.tst') + + with utils.smart_open(tmpf, 'wb', encoding='utf8') as fout: + for line in lines: + fout.write(utils.any2unicode(line)) + + from gensim.models.word2vec_corpusfile import CythonLineSentence + + offsets, start_doctags = doc2vec.Doc2Vec._get_offsets_and_start_doctags_for_corpusfile(tmpf, 5) + for offset, line in zip(offsets, lines): + ls = CythonLineSentence(tmpf, offset) + sentence = ls.read_sentence() + self.assertEqual(len(sentence), 1) + self.assertEqual(sentence[0], utils.any2utf8(line.strip())) + def test_unicode_in_doctag(self): """Test storing document vectors of a model with unicode titles.""" model = doc2vec.Doc2Vec(DocsLeeCorpus(unicode_tags=True), min_count=1) @@ -298,42 +403,35 @@ def test_training(self): model2 = doc2vec.Doc2Vec(corpus, size=100, min_count=2, iter=20, workers=1) self.models_equal(model, model2) - def test_multistream_training(self): - """Test doc2vec multistream training.""" - input_streams = [list_corpus[:len(list_corpus) // 2], list_corpus[len(list_corpus) // 2:]] - - model = doc2vec.Doc2Vec(inpsize=100, min_count=2, iter=20, workers=1, seed=42) - model.build_vocab(input_streams=input_streams, workers=1) - self.assertEqual(model.docvecs.doctag_syn0.shape, (300, 100)) - model.train(input_streams=input_streams, total_examples=model.corpus_count, epochs=model.iter) - self.model_sanity(model) - - # build vocab and train in one step; must be the same as above - model2 = doc2vec.Doc2Vec(input_streams=input_streams, size=100, min_count=2, iter=20, workers=1, seed=42) - - # check resulted vectors; note that order of words may be different - for word in model.wv.index2word: - self.assertEqual(model.wv.most_similar(word, topn=5), model2.wv.most_similar(word, topn=5)) + 
@unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_training_fromfile(self): + """Test doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) - def test_multistream_build_vocab(self): - # Expected vocab - model = doc2vec.Doc2Vec(min_count=0) - model.build_vocab(list_corpus) - singlestream_vocab = model.vocabulary.raw_vocab + model = doc2vec.Doc2Vec(size=100, min_count=2, iter=20, workers=1) + model.build_vocab(corpus_file=corpus_file) + self.assertEqual(model.docvecs.doctag_syn0.shape, (300, 100)) + model.train(corpus_file=corpus_file, total_words=model.corpus_total_words, epochs=model.iter) - # Multistream vocab - model2 = doc2vec.Doc2Vec(min_count=0) - input_streams = [list_corpus[:len(list_corpus) // 2], list_corpus[len(list_corpus) // 2:]] - model2.build_vocab(input_streams=input_streams, workers=2) - multistream_vocab = model2.vocabulary.raw_vocab + self.model_sanity(model) - self.assertEqual(singlestream_vocab, multistream_vocab) + model = doc2vec.Doc2Vec(corpus_file=corpus_file, size=100, min_count=2, iter=20, workers=1) + self.model_sanity(model) def test_dbow_hs(self): """Test DBOW doc2vec training.""" model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=1, negative=0, min_count=2, iter=20) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_dbow_hs_fromfile(self): + """Test DBOW doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + model = doc2vec.Doc2Vec(corpus_file=corpus_file, dm=0, hs=1, negative=0, min_count=2, iter=20) + self.model_sanity(model) + def test_dmm_hs(self): """Test DM/mean doc2vec training.""" model = doc2vec.Doc2Vec( @@ -342,6 +440,17 @@ def test_dmm_hs(self): ) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_dmm_hs_fromfile(self): + """Test DBOW doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + model = doc2vec.Doc2Vec( + list_corpus, dm=1, dm_mean=1, size=24, window=4, + hs=1, negative=0, alpha=0.05, min_count=2, iter=20 + ) + self.model_sanity(model) + def test_dms_hs(self): """Test DM/sum doc2vec training.""" model = doc2vec.Doc2Vec( @@ -350,6 +459,17 @@ def test_dms_hs(self): ) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_dms_hs_fromfile(self): + """Test DBOW doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + model = doc2vec.Doc2Vec( + list_corpus, dm=1, dm_mean=0, size=24, window=4, hs=1, + negative=0, alpha=0.05, min_count=2, iter=20 + ) + self.model_sanity(model) + def test_dmc_hs(self): """Test DM/concatenate doc2vec training.""" model = doc2vec.Doc2Vec( @@ -358,11 +478,30 @@ def test_dmc_hs(self): ) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_dmc_hs_fromfile(self): + """Test DBOW doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + model = doc2vec.Doc2Vec( + 
list_corpus, dm=1, dm_concat=1, size=24, window=4, + hs=1, negative=0, alpha=0.05, min_count=2, iter=20 + ) + self.model_sanity(model) + def test_dbow_neg(self): """Test DBOW doc2vec training.""" model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=0, negative=10, min_count=2, iter=20) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_dbow_neg_fromfile(self): + """Test DBOW doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=0, negative=10, min_count=2, iter=20) + self.model_sanity(model) + def test_dmm_neg(self): """Test DM/mean doc2vec training.""" model = doc2vec.Doc2Vec( @@ -371,6 +510,17 @@ def test_dmm_neg(self): ) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_dmm_neg_fromfile(self): + """Test DBOW doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + model = doc2vec.Doc2Vec( + list_corpus, dm=1, dm_mean=1, size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, iter=20 + ) + self.model_sanity(model) + def test_dms_neg(self): """Test DM/sum doc2vec training.""" model = doc2vec.Doc2Vec( @@ -379,6 +529,17 @@ def test_dms_neg(self): ) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_dms_neg_fromfile(self): + """Test DBOW doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + model = doc2vec.Doc2Vec( + list_corpus, dm=1, dm_mean=0, size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, iter=20 + ) + self.model_sanity(model) + def test_dmc_neg(self): """Test DM/concatenate doc2vec training.""" model = doc2vec.Doc2Vec( @@ -387,6 +548,17 @@ def test_dmc_neg(self): ) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_dmc_neg_fromfile(self): + """Test DBOW doc2vec training.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + save_lee_corpus_as_line_sentence(corpus_file) + model = doc2vec.Doc2Vec( + list_corpus, dm=1, dm_concat=1, size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, iter=20 + ) + self.model_sanity(model) + def test_parallel(self): """Test doc2vec parallel training.""" if doc2vec.FAST_VERSION < 0: # don't test the plain NumPy version for parallelism (too slow) diff --git a/gensim/test/test_fasttext.py b/gensim/test/test_fasttext.py index 545d75b1e9..4060f0b4f8 100644 --- a/gensim/test/test_fasttext.py +++ b/gensim/test/test_fasttext.py @@ -6,6 +6,7 @@ import unittest import os import struct +import six import numpy as np @@ -15,7 +16,7 @@ from gensim.models.wrappers.fasttext import FastTextKeyedVectors from gensim.models.wrappers.fasttext import FastText as FT_wrapper from gensim.models.keyedvectors import Word2VecKeyedVectors -from gensim.test.utils import datapath, get_tmpfile, common_texts as sentences +from gensim.test.utils import datapath, get_tmpfile, temporary_file, common_texts as sentences logger = logging.getLogger(__name__) @@ -81,51 +82,36 @@ def test_training(self): oov_vec = model['minor'] # oov word self.assertEqual(len(oov_vec), 
10) - def test_multistream_training(self): - input_streams = [sentences[:len(sentences) // 2], sentences[len(sentences) // 2:]] - model = FT_gensim(size=5, min_count=1, hs=1, negative=0, seed=42, workers=1) - model.build_vocab(input_streams=input_streams, workers=2) - self.model_sanity(model) - - model.train(input_streams=input_streams, total_examples=model.corpus_count, epochs=model.iter) - sims = model.most_similar('graph', topn=10) - - self.assertEqual(model.wv.syn0.shape, (12, 5)) - self.assertEqual(len(model.wv.vocab), 12) - self.assertEqual(model.wv.syn0_vocab.shape[1], 5) - self.assertEqual(model.wv.syn0_ngrams.shape[1], 5) - self.model_sanity(model) - - # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) - sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself - self.assertEqual(sims, sims2) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_training_fromfile(self): + with temporary_file(get_tmpfile('gensim_fasttext.tst')) as corpus_file: + utils.save_as_line_sentence(sentences, corpus_file) - # build vocab and train in one step; must be the same as above - model2 = FT_gensim(input_streams=input_streams, size=5, min_count=1, hs=1, negative=0, seed=42, workers=1) - self.models_equal(model, model2) + model = FT_gensim(size=10, min_count=1, hs=1, negative=0, seed=42, workers=1) + model.build_vocab(corpus_file=corpus_file) + self.model_sanity(model) - # verify oov-word vector retrieval - invocab_vec = model['minors'] # invocab word - self.assertEqual(len(invocab_vec), 5) + model.train(corpus_file=corpus_file, total_words=model.corpus_total_words, epochs=model.iter) + sims = model.most_similar('graph', topn=10) - oov_vec = model['minor'] # oov word - self.assertEqual(len(oov_vec), 5) + self.assertEqual(model.wv.syn0.shape, (12, 10)) + self.assertEqual(len(model.wv.vocab), 12) + self.assertEqual(model.wv.syn0_vocab.shape[1], 10) + self.assertEqual(model.wv.syn0_ngrams.shape[1], 10) + self.model_sanity(model) - def test_multistream_build_vocab(self): - # Expected vocab - model = FT_gensim(size=5, min_count=1, hs=1, negative=0, seed=42) - model.build_vocab(list_corpus) - singlestream_vocab = model.vocabulary.raw_vocab + # test querying for "most similar" by vector + graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] + sims2 = model.most_similar(positive=[graph_vector], topn=11) + sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself + self.assertEqual(sims, sims2) - # Multistream vocab - model2 = FT_gensim(size=5, min_count=1, hs=1, negative=0, seed=42) - input_streams = [list_corpus[:len(list_corpus) // 2], list_corpus[len(list_corpus) // 2:]] - model2.build_vocab(input_streams=input_streams, workers=2) - multistream_vocab = model2.vocabulary.raw_vocab + # verify oov-word vector retrieval + invocab_vec = model['minors'] # invocab word + self.assertEqual(len(invocab_vec), 10) - self.assertEqual(singlestream_vocab, multistream_vocab) + oov_vec = model['minor'] # oov word + self.assertEqual(len(oov_vec), 10) def models_equal(self, model, model2): self.assertEqual(len(model.wv.vocab), len(model2.wv.vocab)) @@ -153,6 +139,23 @@ def test_persistence(self): self.assertTrue(np.allclose(wv.syn0_ngrams, loaded_wv.syn0_ngrams)) self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) + @unittest.skipIf(os.name == 'nt', "corpus_file is not supported for 
Windows + Py2" + "and avoid memory error with Appveyor x32") + def test_persistence_fromfile(self): + with temporary_file(get_tmpfile('gensim_fasttext1.tst')) as corpus_file: + utils.save_as_line_sentence(sentences, corpus_file) + + tmpf = get_tmpfile('gensim_fasttext.tst') + model = FT_gensim(corpus_file=corpus_file, min_count=1) + model.save(tmpf) + self.models_equal(model, FT_gensim.load(tmpf)) + # test persistence of the KeyedVectors of a model + wv = model.wv + wv.save(tmpf) + loaded_wv = FastTextKeyedVectors.load(tmpf) + self.assertTrue(np.allclose(wv.syn0_ngrams, loaded_wv.syn0_ngrams)) + self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) + @unittest.skipIf(IS_WIN32, "avoid memory error with Appveyor x32") def test_norm_vectors_not_saved(self): tmpf = get_tmpfile('gensim_fasttext.tst') @@ -386,6 +389,40 @@ def test_cbow_hs_training(self): overlap_count = len(set(sims_gensim_words).intersection(expected_sims_words)) self.assertGreaterEqual(overlap_count, 2) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_cbow_hs_training_fromfile(self): + with temporary_file(get_tmpfile('gensim_fasttext.tst')) as corpus_file: + model_gensim = FT_gensim( + size=50, sg=0, cbow_mean=1, alpha=0.05, window=5, hs=1, negative=0, + min_count=5, iter=5, batch_words=1000, word_ngrams=1, sample=1e-3, min_n=3, max_n=6, + sorted_vocab=1, workers=1, min_alpha=0.0) + + lee_data = LineSentence(datapath('lee_background.cor')) + utils.save_as_line_sentence(lee_data, corpus_file) + + model_gensim.build_vocab(corpus_file=corpus_file) + orig0 = np.copy(model_gensim.wv.vectors[0]) + model_gensim.train(corpus_file=corpus_file, + total_words=model_gensim.corpus_total_words, + epochs=model_gensim.epochs) + self.assertFalse((orig0 == model_gensim.wv.vectors[0]).all()) # vector should vary after training + + sims_gensim = model_gensim.wv.most_similar('night', topn=10) + sims_gensim_words = [word for (word, distance) in sims_gensim] # get similar words + expected_sims_words = [ + u'night,', + u'night.', + u'rights', + u'kilometres', + u'in', + u'eight', + u'according', + u'flights', + u'during', + u'comes'] + overlap_count = len(set(sims_gensim_words).intersection(expected_sims_words)) + self.assertGreaterEqual(overlap_count, 2) + def test_sg_hs_training(self): model_gensim = FT_gensim( @@ -415,6 +452,40 @@ def test_sg_hs_training(self): overlap_count = len(set(sims_gensim_words).intersection(expected_sims_words)) self.assertGreaterEqual(overlap_count, 2) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_sg_hs_training_fromfile(self): + with temporary_file(get_tmpfile('gensim_fasttext.tst')) as corpus_file: + model_gensim = FT_gensim( + size=50, sg=1, cbow_mean=1, alpha=0.025, window=5, hs=1, negative=0, + min_count=5, iter=5, batch_words=1000, word_ngrams=1, sample=1e-3, min_n=3, max_n=6, + sorted_vocab=1, workers=1, min_alpha=0.0) + + lee_data = LineSentence(datapath('lee_background.cor')) + utils.save_as_line_sentence(lee_data, corpus_file) + + model_gensim.build_vocab(corpus_file=corpus_file) + orig0 = np.copy(model_gensim.wv.vectors[0]) + model_gensim.train(corpus_file=corpus_file, + total_words=model_gensim.corpus_total_words, + epochs=model_gensim.epochs) + self.assertFalse((orig0 == model_gensim.wv.vectors[0]).all()) # vector should vary after training + + sims_gensim = model_gensim.wv.most_similar('night', topn=10) + sims_gensim_words = [word for (word, distance) in sims_gensim] # 
get similar words + expected_sims_words = [ + u'night,', + u'night.', + u'eight', + u'nine', + u'overnight', + u'crew', + u'overnight.', + u'manslaughter', + u'north', + u'flight'] + overlap_count = len(set(sims_gensim_words).intersection(expected_sims_words)) + self.assertGreaterEqual(overlap_count, 2) + def test_cbow_neg_training(self): model_gensim = FT_gensim( @@ -444,6 +515,40 @@ def test_cbow_neg_training(self): overlap_count = len(set(sims_gensim_words).intersection(expected_sims_words)) self.assertGreaterEqual(overlap_count, 2) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_cbow_neg_training_fromfile(self): + with temporary_file(get_tmpfile('gensim_fasttext.tst')) as corpus_file: + model_gensim = FT_gensim( + size=50, sg=0, cbow_mean=1, alpha=0.05, window=5, hs=0, negative=5, + min_count=5, iter=5, batch_words=1000, word_ngrams=1, sample=1e-3, min_n=3, max_n=6, + sorted_vocab=1, workers=1, min_alpha=0.0) + + lee_data = LineSentence(datapath('lee_background.cor')) + utils.save_as_line_sentence(lee_data, corpus_file) + + model_gensim.build_vocab(corpus_file=corpus_file) + orig0 = np.copy(model_gensim.wv.vectors[0]) + model_gensim.train(corpus_file=corpus_file, + total_words=model_gensim.corpus_total_words, + epochs=model_gensim.epochs) + self.assertFalse((orig0 == model_gensim.wv.vectors[0]).all()) # vector should vary after training + + sims_gensim = model_gensim.wv.most_similar('night', topn=10) + sims_gensim_words = [word for (word, distance) in sims_gensim] # get similar words + expected_sims_words = [ + u'night.', + u'night,', + u'eight', + u'fight', + u'month', + u'hearings', + u'Washington', + u'remains', + u'overnight', + u'running'] + overlap_count = len(set(sims_gensim_words).intersection(expected_sims_words)) + self.assertGreaterEqual(overlap_count, 2) + def test_sg_neg_training(self): model_gensim = FT_gensim( @@ -473,6 +578,40 @@ def test_sg_neg_training(self): overlap_count = len(set(sims_gensim_words).intersection(expected_sims_words)) self.assertGreaterEqual(overlap_count, 2) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_sg_neg_training_fromfile(self): + with temporary_file(get_tmpfile('gensim_fasttext.tst')) as corpus_file: + model_gensim = FT_gensim( + size=50, sg=1, cbow_mean=1, alpha=0.025, window=5, hs=0, negative=5, + min_count=5, iter=5, batch_words=1000, word_ngrams=1, sample=1e-3, min_n=3, max_n=6, + sorted_vocab=1, workers=1, min_alpha=0.0) + + lee_data = LineSentence(datapath('lee_background.cor')) + utils.save_as_line_sentence(lee_data, corpus_file) + + model_gensim.build_vocab(corpus_file=corpus_file) + orig0 = np.copy(model_gensim.wv.vectors[0]) + model_gensim.train(corpus_file=corpus_file, + total_words=model_gensim.corpus_total_words, + epochs=model_gensim.epochs) + self.assertFalse((orig0 == model_gensim.wv.vectors[0]).all()) # vector should vary after training + + sims_gensim = model_gensim.wv.most_similar('night', topn=10) + sims_gensim_words = [word for (word, distance) in sims_gensim] # get similar words + expected_sims_words = [ + u'night.', + u'night,', + u'eight', + u'overnight', + u'overnight.', + u'month', + u'land', + u'firm', + u'singles', + u'death'] + overlap_count = len(set(sims_gensim_words).intersection(expected_sims_words)) + self.assertGreaterEqual(overlap_count, 2) + def test_online_learning(self): model_hs = FT_gensim(sentences, size=10, min_count=1, seed=42, hs=1, negative=0) 
self.assertTrue(len(model_hs.wv.vocab), 12) @@ -482,6 +621,21 @@ def test_online_learning(self): self.assertTrue(model_hs.wv.vocab['graph'].count, 4) self.assertTrue(model_hs.wv.vocab['artificial'].count, 4) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_online_learning_fromfile(self): + with temporary_file(get_tmpfile('gensim_fasttext1.tst')) as corpus_file, \ + temporary_file(get_tmpfile('gensim_fasttext2.tst')) as new_corpus_file: + utils.save_as_line_sentence(sentences, corpus_file) + utils.save_as_line_sentence(new_sentences, new_corpus_file) + + model_hs = FT_gensim(corpus_file=corpus_file, size=10, min_count=1, seed=42, hs=1, negative=0) + self.assertTrue(len(model_hs.wv.vocab), 12) + self.assertTrue(model_hs.wv.vocab['graph'].count, 3) + model_hs.build_vocab(corpus_file=new_corpus_file, update=True) # update vocab + self.assertEqual(len(model_hs.wv.vocab), 14) + self.assertTrue(model_hs.wv.vocab['graph'].count, 4) + self.assertTrue(model_hs.wv.vocab['artificial'].count, 4) + def test_online_learning_after_save(self): tmpf = get_tmpfile('gensim_fasttext.tst') model_neg = FT_gensim(sentences, size=10, min_count=0, seed=42, hs=0, negative=5) @@ -492,6 +646,23 @@ def test_online_learning_after_save(self): model_neg.train(new_sentences, total_examples=model_neg.corpus_count, epochs=model_neg.iter) self.assertEqual(len(model_neg.wv.vocab), 14) + @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") + def test_online_learning_after_save_fromfile(self): + with temporary_file(get_tmpfile('gensim_fasttext1.tst')) as corpus_file, \ + temporary_file(get_tmpfile('gensim_fasttext2.tst')) as new_corpus_file: + utils.save_as_line_sentence(sentences, corpus_file) + utils.save_as_line_sentence(new_sentences, new_corpus_file) + + tmpf = get_tmpfile('gensim_fasttext.tst') + model_neg = FT_gensim(corpus_file=corpus_file, size=10, min_count=0, seed=42, hs=0, negative=5) + model_neg.save(tmpf) + model_neg = FT_gensim.load(tmpf) + self.assertTrue(len(model_neg.wv.vocab), 12) + model_neg.build_vocab(corpus_file=new_corpus_file, update=True) # update vocab + model_neg.train(corpus_file=new_corpus_file, total_words=model_neg.corpus_total_words, + epochs=model_neg.iter) + self.assertEqual(len(model_neg.wv.vocab), 14) + def online_sanity(self, model): terro, others = [], [] for l in list_corpus: @@ -582,6 +753,7 @@ def testLoadOldModel(self): self.assertTrue(model.wv.vectors.shape == (12, 100)) self.assertTrue(len(model.wv.vocab) == 12) self.assertTrue(len(model.wv.index2word) == 12) + self.assertIsNone(model.corpus_total_words) self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (12, )) self.assertTrue(model.vocabulary.cum_table.shape == (12, )) @@ -596,6 +768,7 @@ def testLoadOldModel(self): self.assertTrue(model.wv.vectors.shape == (12, 100)) self.assertTrue(len(model.wv.vocab) == 12) self.assertTrue(len(model.wv.index2word) == 12) + self.assertIsNone(model.corpus_total_words) self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (12, )) self.assertTrue(model.vocabulary.cum_table.shape == (12, )) diff --git a/gensim/test/test_utils.py b/gensim/test/test_utils.py index 5b265d0f77..49acfd892e 100644 --- a/gensim/test/test_utils.py +++ b/gensim/test/test_utils.py @@ -15,7 +15,7 @@ from six import iteritems from gensim import 
utils -from gensim.test.utils import datapath +from gensim.test.utils import datapath, get_tmpfile class TestIsCorpus(unittest.TestCase): @@ -238,6 +238,27 @@ def test_flatten_not_nested(self): self.assertEqual(utils.flatten(not_nested), expected) +class TestSaveAsLineSentence(unittest.TestCase): + def test_save_as_line_sentence_en(self): + corpus_file = get_tmpfile('gensim_utils.tst') + ref_sentences = [l.split() for l in utils.any2unicode('hello world\nhow are you').split('\n')] + + utils.save_as_line_sentence(ref_sentences, corpus_file) + + with utils.smart_open(corpus_file, encoding='utf8') as fin: + sentences = [line.strip().split() for line in fin.read().strip().split('\n')] + self.assertEqual(sentences, ref_sentences) + + def test_save_as_line_sentence_ru(self): + corpus_file = get_tmpfile('gensim_utils.tst') + ref_sentences = [l.split() for l in utils.any2unicode('привет мир\nкак ты поживаешь').split('\n')] + utils.save_as_line_sentence(ref_sentences, corpus_file) + + with utils.smart_open(corpus_file, encoding='utf8') as fin: + sentences = [line.strip().split() for line in fin.read().strip().split('\n')] + self.assertEqual(sentences, ref_sentences) + + if __name__ == '__main__': logging.root.setLevel(logging.WARNING) unittest.main() diff --git a/gensim/test/test_word2vec.py b/gensim/test/test_word2vec.py index c2ee97062d..570d13980c 100644 --- a/gensim/test/test_word2vec.py +++ b/gensim/test/test_word2vec.py @@ -14,12 +14,13 @@ import os import bz2 import sys +import six import numpy as np from gensim import utils from gensim.models import word2vec, keyedvectors -from gensim.test.utils import datapath, get_tmpfile, common_texts as sentences +from gensim.test.utils import datapath, get_tmpfile, temporary_file, common_texts as sentences from testfixtures import log_capture try: @@ -166,20 +167,6 @@ def testMaxFinalVocab(self): self.assertEqual(reported_values['num_retained_words'], 4) self.assertEqual(model.vocabulary.effective_min_count, 3) - def testMultiStreamBuildVocab(self): - # Expected vocab - model = word2vec.Word2Vec(min_count=0) - model.build_vocab(sentences) - singlestream_vocab = model.vocabulary.raw_vocab - - # Multistream vocab - model = word2vec.Word2Vec(min_count=0) - input_streams = [sentences[:len(sentences) // 2], sentences[len(sentences) // 2:]] - model.build_vocab(input_streams=input_streams, workers=2) - multistream_vocab = model.vocabulary.raw_vocab - - self.assertEqual(singlestream_vocab, multistream_vocab) - def testOnlineLearning(self): """Test that the algorithm is able to add new words to the vocabulary and to a trained model when using a sorted vocabulary""" @@ -206,6 +193,51 @@ def testOnlineLearningAfterSave(self): model_neg.train(new_sentences, total_examples=model_neg.corpus_count, epochs=model_neg.iter) self.assertEqual(len(model_neg.wv.vocab), 14) + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def testOnlineLearningFromFile(self): + """Test that the algorithm is able to add new words to the + vocabulary and to a trained model when using a sorted vocabulary""" + with temporary_file(get_tmpfile('gensim_word2vec1.tst')) as corpus_file,\ + temporary_file(get_tmpfile('gensim_word2vec2.tst')) as new_corpus_file: + utils.save_as_line_sentence(sentences, corpus_file) + utils.save_as_line_sentence(new_sentences, new_corpus_file) + + model_hs = word2vec.Word2Vec(corpus_file=corpus_file, size=10, min_count=0, seed=42, hs=1, negative=0) + model_neg = word2vec.Word2Vec(corpus_file=corpus_file, size=10, 
min_count=0, seed=42, hs=0, negative=5) + self.assertTrue(len(model_hs.wv.vocab), 12) + self.assertTrue(model_hs.wv.vocab['graph'].count, 3) + model_hs.build_vocab(corpus_file=new_corpus_file, update=True) + model_hs.train(corpus_file=new_corpus_file, total_words=model_hs.corpus_total_words, epochs=model_hs.iter) + + model_neg.build_vocab(corpus_file=new_corpus_file, update=True) + model_neg.train(corpus_file=new_corpus_file, total_words=model_hs.corpus_total_words, epochs=model_hs.iter) + self.assertTrue(model_hs.wv.vocab['graph'].count, 4) + self.assertTrue(model_hs.wv.vocab['artificial'].count, 4) + self.assertEqual(len(model_hs.wv.vocab), 14) + self.assertEqual(len(model_neg.wv.vocab), 14) + + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def testOnlineLearningAfterSaveFromFile(self): + """Test that the algorithm is able to add new words to the + vocabulary and to a trained model when using a sorted vocabulary""" + with temporary_file(get_tmpfile('gensim_word2vec1.tst')) as corpus_file,\ + temporary_file(get_tmpfile('gensim_word2vec2.tst')) as new_corpus_file: + utils.save_as_line_sentence(sentences, corpus_file) + utils.save_as_line_sentence(new_sentences, new_corpus_file) + + tmpf = get_tmpfile('gensim_word2vec.tst') + model_neg = word2vec.Word2Vec(corpus_file=corpus_file, size=10, min_count=0, seed=42, hs=0, negative=5) + model_neg.save(tmpf) + model_neg = word2vec.Word2Vec.load(tmpf) + self.assertTrue(len(model_neg.wv.vocab), 12) + # Check that training works on the same data after load without calling build_vocab + model_neg.train(corpus_file=corpus_file, total_words=model_neg.corpus_total_words, epochs=model_neg.iter) + # Train on new corpus file + model_neg.build_vocab(corpus_file=new_corpus_file, update=True) + model_neg.train(corpus_file=new_corpus_file, total_words=model_neg.corpus_total_words, + epochs=model_neg.iter) + self.assertEqual(len(model_neg.wv.vocab), 14) + def onlineSanity(self, model, trained_model=False): terro, others = [], [] for l in list_corpus: @@ -264,6 +296,23 @@ def testPersistence(self): self.assertTrue(np.allclose(wv.syn0, loaded_wv.syn0)) self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def testPersistenceFromFile(self): + """Test storing/loading the entire model trained with corpus_file argument.""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: + utils.save_as_line_sentence(sentences, corpus_file) + + tmpf = get_tmpfile('gensim_word2vec.tst') + model = word2vec.Word2Vec(corpus_file=corpus_file, min_count=1) + model.save(tmpf) + self.models_equal(model, word2vec.Word2Vec.load(tmpf)) + # test persistence of the KeyedVectors of a model + wv = model.wv + wv.save(tmpf) + loaded_wv = keyedvectors.KeyedVectors.load(tmpf) + self.assertTrue(np.allclose(wv.syn0, loaded_wv.syn0)) + self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) + def testPersistenceWithConstructorRule(self): """Test storing/loading the entire model with a vocab trimming rule passed in the constructor.""" tmpf = get_tmpfile('gensim_word2vec.tst') @@ -494,29 +543,28 @@ def testTraining(self): model2 = word2vec.Word2Vec(sentences, size=2, min_count=1, hs=1, negative=0) self.models_equal(model, model2) - def testMultistreamTraining(self): - """Test word2vec multistream training.""" + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def 
testTrainingFromFile(self): + """Test word2vec training with corpus_file argument.""" # build vocabulary, don't train yet - input_streams = [sentences[:len(sentences) // 2], sentences[len(sentences) // 2:]] - model = word2vec.Word2Vec(size=2, min_count=1, hs=1, negative=0, workers=1, seed=42) - model.build_vocab(input_streams=input_streams) + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as tf: + utils.save_as_line_sentence(sentences, tf) - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), 2)) - self.assertTrue(model.syn1.shape == (len(model.wv.vocab), 2)) + model = word2vec.Word2Vec(size=2, min_count=1, hs=1, negative=0) + model.build_vocab(corpus_file=tf) - model.train(input_streams=input_streams, total_examples=model.corpus_count, epochs=model.iter) - sims = model.most_similar('graph', topn=10) + self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.syn1.shape == (len(model.wv.vocab), 2)) - # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) - sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself - self.assertEqual(sims, sims2) + model.train(corpus_file=tf, total_words=model.corpus_total_words, epochs=model.iter) + sims = model.most_similar('graph', topn=10) + # self.assertTrue(sims[0][0] == 'trees', sims) # most similar - # build vocab and train in one step; must be the same as above - model2 = word2vec.Word2Vec(input_streams=input_streams, size=2, min_count=1, hs=1, negative=0, - workers=1, seed=42) - self.models_equal(model, model2) + # test querying for "most similar" by vector + graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] + sims2 = model.most_similar(positive=[graph_vector], topn=11) + sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself + self.assertEqual(sims, sims2) def testScoring(self): """Test word2vec scoring.""" @@ -563,13 +611,34 @@ def testEvaluateWordPairs(self): self.assertTrue(0.1 < spearman < 1.0) self.assertTrue(0.0 <= oov < 90.0) - def model_sanity(self, model, train=True): + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def testEvaluateWordPairsFromFile(self): + """Test Spearman and Pearson correlation coefficients give sane results on similarity datasets""" + with temporary_file(get_tmpfile('gensim_word2vec.tst')) as tf: + utils.save_as_line_sentence(word2vec.LineSentence(datapath('head500.noblanks.cor.bz2')), tf) + + model = word2vec.Word2Vec(corpus_file=tf, min_count=3, iter=10) + correlation = model.evaluate_word_pairs(datapath('wordsim353.tsv')) + pearson = correlation[0][0] + spearman = correlation[1][0] + oov = correlation[2] + self.assertTrue(0.1 < pearson < 1.0) + self.assertTrue(0.1 < spearman < 1.0) + self.assertTrue(0.0 <= oov < 90.0) + + def model_sanity(self, model, train=True, with_corpus_file=False): """Even tiny models trained on LeeCorpus should pass these sanity checks""" # run extra before/after training tests if train=True if train: model.build_vocab(list_corpus) orig0 = np.copy(model.wv.syn0[0]) - model.train(list_corpus, total_examples=model.corpus_count, epochs=model.iter) + + if with_corpus_file: + tmpfile = get_tmpfile('gensim_word2vec.tst') + utils.save_as_line_sentence(list_corpus, tmpfile) + model.train(corpus_file=tmpfile, total_words=model.corpus_total_words, epochs=model.iter) + else: + model.train(list_corpus, 
total_examples=model.corpus_count, epochs=model.iter) self.assertFalse((orig0 == model.wv.syn0[1]).all()) # vector should vary after training sims = model.most_similar('war', topn=len(model.wv.index2word)) t_rank = [word for word, score in sims].index('terrorism') @@ -585,11 +654,21 @@ def test_sg_hs(self): model = word2vec.Word2Vec(sg=1, window=4, hs=1, negative=0, min_count=5, iter=10, workers=2) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def test_sg_hs_fromfile(self): + model = word2vec.Word2Vec(sg=1, window=4, hs=1, negative=0, min_count=5, iter=10, workers=2) + self.model_sanity(model, with_corpus_file=True) + def test_sg_neg(self): """Test skipgram w/ negative sampling""" model = word2vec.Word2Vec(sg=1, window=4, hs=0, negative=15, min_count=5, iter=10, workers=2) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def test_sg_neg_fromfile(self): + model = word2vec.Word2Vec(sg=1, window=4, hs=0, negative=15, min_count=5, iter=10, workers=2) + self.model_sanity(model, with_corpus_file=True) + def test_cbow_hs(self): """Test CBOW w/ hierarchical softmax""" model = word2vec.Word2Vec( @@ -598,6 +677,14 @@ def test_cbow_hs(self): ) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def test_cbow_hs_fromfile(self): + model = word2vec.Word2Vec( + sg=0, cbow_mean=1, alpha=0.05, window=8, hs=1, negative=0, + min_count=5, iter=10, workers=2, batch_words=1000 + ) + self.model_sanity(model, with_corpus_file=True) + def test_cbow_neg(self): """Test CBOW w/ negative sampling""" model = word2vec.Word2Vec( @@ -606,6 +693,14 @@ def test_cbow_neg(self): ) self.model_sanity(model) + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def test_cbow_neg_fromfile(self): + model = word2vec.Word2Vec( + sg=0, cbow_mean=1, alpha=0.05, window=5, hs=0, negative=15, + min_count=5, iter=10, workers=2, sample=0 + ) + self.model_sanity(model, with_corpus_file=True) + def test_cosmul(self): model = word2vec.Word2Vec(sentences, size=2, min_count=1, hs=1, negative=0) sims = model.most_similar_cosmul('graph', topn=10) @@ -838,6 +933,7 @@ def testLoadOldModel(self): saved_models_dir = datapath('old_w2v_models/w2v_{}.mdl') for old_version in old_versions: model = word2vec.Word2Vec.load(saved_models_dir.format(old_version)) + self.assertIsNone(model.corpus_total_words) self.assertTrue(len(model.wv.vocab) == 3) self.assertTrue(model.wv.vectors.shape == (3, 4)) # check if similarity search and online training works. 
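The new `corpus_file` tests above all exercise the same save-then-train pattern introduced by this patch series. As a minimal illustrative sketch (not part of the patch itself; the temporary file name and toy corpus are assumptions, and it presumes the `word2vec_corpusfile` extension is built, i.e. not Windows + Py27), the workflow looks roughly like this:

```python
# Minimal sketch of the corpus_file training path that the tests above exercise.
from gensim import utils
from gensim.models import word2vec
from gensim.test.utils import get_tmpfile, common_texts

corpus_file = get_tmpfile('w2v_corpus_file_demo.txt')   # illustrative temp file name
# Write the corpus in LineSentence format: one sentence per line, space-delimited tokens.
utils.save_as_line_sentence(common_texts, corpus_file)

model = word2vec.Word2Vec(size=10, min_count=1, seed=42)
model.build_vocab(corpus_file=corpus_file)
# Note: corpus_file-based training takes total_words (not total_examples).
model.train(corpus_file=corpus_file, total_words=model.corpus_total_words, epochs=model.iter)

print(model.wv.most_similar('graph', topn=3))
```

The same pattern applies to the FastText and Doc2Vec variants shown elsewhere in this patch: build the vocabulary from `corpus_file`, then pass `total_words=model.corpus_total_words` to `train()`.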
@@ -967,6 +1063,15 @@ def testLineSentenceWorksWithFilename(self): for words in sentences: self.assertEqual(words, utils.to_unicode(orig.readline()).split()) + @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") + def testCythonLineSentenceWorksWithFilename(self): + """Does CythonLineSentence work with a filename argument?""" + from gensim.models import word2vec_corpusfile + with utils.smart_open(datapath('lee_background.cor')) as orig: + sentences = word2vec_corpusfile.CythonLineSentence(datapath('lee_background.cor')) + for words in sentences: + self.assertEqual(words, orig.readline().split()) + def testLineSentenceWorksWithCompressedFile(self): """Does LineSentence work with a compressed file object argument?""" with utils.smart_open(datapath('head500.noblanks.cor')) as orig: diff --git a/gensim/utils.py b/gensim/utils.py index 35abc203d8..28007853a2 100644 --- a/gensim/utils.py +++ b/gensim/utils.py @@ -2029,6 +2029,21 @@ def lazy_flatten(nested_list): yield el +def save_as_line_sentence(corpus, filename): + """Save the corpus in LineSentence format, i.e. each sentence on a separate line, + tokens are separated by space. + + Parameters + ---------- + corpus : iterable of iterables of strings + + """ + with smart_open(filename, mode='wb', encoding='utf8') as fout: + for sentence in corpus: + line = any2unicode(' '.join(sentence) + '\n') + fout.write(line) + + def effective_n_jobs(n_jobs): """Determines the number of jobs can run in parallel. diff --git a/setup.py b/setup.py index 555ccc44fd..cf362466b9 100644 --- a/setup.py +++ b/setup.py @@ -11,6 +11,7 @@ """ import os +import platform import sys import warnings import ez_setup @@ -242,30 +243,65 @@ def finalize_options(self): 'keras >= 2.0.4, <= 2.1.4', ] +ext_modules = [ + Extension('gensim.models.word2vec_inner', + sources=['./gensim/models/word2vec_inner.c'], + include_dirs=[model_dir]), + Extension('gensim.models.doc2vec_inner', + sources=['./gensim/models/doc2vec_inner.c'], + include_dirs=[model_dir]), + Extension('gensim.corpora._mmreader', + sources=['./gensim/corpora/_mmreader.c']), + Extension('gensim.models.fasttext_inner', + sources=['./gensim/models/fasttext_inner.c'], + include_dirs=[model_dir]), + Extension('gensim.models._utils_any2vec', + sources=['./gensim/models/_utils_any2vec.c'], + include_dirs=[model_dir]), + Extension('gensim._matutils', + sources=['./gensim/_matutils.c']), +] + +if not (os.name == 'nt' and sys.version_info[0] < 3): + extra_args = [] + system = platform.system() + + if system == 'Linux': + extra_args.append('-std=c++11') + elif system == 'Darwin': + extra_args.extend(['-stdlib=libc++', '-std=c++11']) + + ext_modules.append( + Extension('gensim.models.word2vec_corpusfile', + sources=['./gensim/models/word2vec_corpusfile.cpp'], + language='c++', + extra_compile_args=extra_args, + extra_link_args=extra_args) + ) + + ext_modules.append( + Extension('gensim.models.fasttext_corpusfile', + sources=['./gensim/models/fasttext_corpusfile.cpp'], + language='c++', + extra_compile_args=extra_args, + extra_link_args=extra_args) + ) + + ext_modules.append( + Extension('gensim.models.doc2vec_corpusfile', + sources=['./gensim/models/doc2vec_corpusfile.cpp'], + language='c++', + extra_compile_args=extra_args, + extra_link_args=extra_args) + ) + setup( name='gensim', version='3.5.0', description='Python framework for fast Vector Space Modelling', long_description=LONG_DESCRIPTION, - ext_modules=[ - Extension('gensim.models.word2vec_inner', - 
sources=['./gensim/models/word2vec_inner.c'], - include_dirs=[model_dir]), - Extension('gensim.models.doc2vec_inner', - sources=['./gensim/models/doc2vec_inner.c'], - include_dirs=[model_dir]), - Extension('gensim.corpora._mmreader', - sources=['./gensim/corpora/_mmreader.c']), - Extension('gensim.models.fasttext_inner', - sources=['./gensim/models/fasttext_inner.c'], - include_dirs=[model_dir]), - Extension('gensim.models._utils_any2vec', - sources=['./gensim/models/_utils_any2vec.c'], - include_dirs=[model_dir]), - Extension('gensim._matutils', - sources=['./gensim/_matutils.c']), - ], + ext_modules=ext_modules, cmdclass=cmdclass, packages=find_packages(), From 422487966bd94acf24ed48edbeef72f39b28a6e0 Mon Sep 17 00:00:00 2001 From: Laubeee Date: Tue, 18 Sep 2018 08:54:54 +0200 Subject: [PATCH 19/66] Fix formula in Mallet documentation (#2186) * Fix docstring its not displayed right in https://radimrehurek.com/gensim/models/wrappers/ldamallet.html * Update ldamallet.py * fix rendering --- gensim/models/wrappers/ldamallet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gensim/models/wrappers/ldamallet.py b/gensim/models/wrappers/ldamallet.py index fcd3b4aa6a..56509d3e4c 100644 --- a/gensim/models/wrappers/ldamallet.py +++ b/gensim/models/wrappers/ldamallet.py @@ -13,7 +13,7 @@ Notes ----- -MALLET's LDA training requires :math:O(#corpus_words) of memory, keeping the entire corpus in RAM. +MALLET's LDA training requires :math:`O(corpus\_words)` of memory, keeping the entire corpus in RAM. If you find yourself running out of memory, either decrease the `workers` constructor parameter, or use :class:`gensim.models.ldamodel.LdaModel` or :class:`gensim.models.ldamulticore.LdaMulticore` which needs only :math:`O(1)` memory. From 97783a40aa1d00ca7942b8ab483fd65a2075f8b6 Mon Sep 17 00:00:00 2001 From: "M.Cemil Guney" Date: Wed, 19 Sep 2018 05:55:38 +0300 Subject: [PATCH 20/66] Add scikit-learn wrapper for `FastText` (#2178) * Add scikit-learn wrapper for fasttext model. * Add sklearn fasttext wrapper test. * Fix docstring. * Add more examples. * Add tests for oov words. Fix some tests. * Pass all corpus on persistence test. * Remove numpy.random.seed calls. --- gensim/sklearn_api/__init__.py | 1 + gensim/sklearn_api/ftmodel.py | 224 ++++++++++++++++++++++++++++++++ gensim/test/test_sklearn_api.py | 105 +++++++++++++++ 3 files changed, 330 insertions(+) create mode 100644 gensim/sklearn_api/ftmodel.py diff --git a/gensim/sklearn_api/__init__.py b/gensim/sklearn_api/__init__.py index d5d80398dc..2f21fc3864 100644 --- a/gensim/sklearn_api/__init__.py +++ b/gensim/sklearn_api/__init__.py @@ -20,4 +20,5 @@ from .text2bow import Text2BowTransformer # noqa: F401 from .tfidf import TfIdfTransformer # noqa: F401 from .hdp import HdpTransformer # noqa: F401 +from .ftmodel import FTTransformer # noqa: F401 from .phrases import PhrasesTransformer # noqa: F401 diff --git a/gensim/sklearn_api/ftmodel.py b/gensim/sklearn_api/ftmodel.py new file mode 100644 index 0000000000..f4e542471a --- /dev/null +++ b/gensim/sklearn_api/ftmodel.py @@ -0,0 +1,224 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Authors: M.Cemil Guney +# Copyright (C) 2018 RaRe Technologies s.r.o. +# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html + +"""Scikit-learn interface for :class:`~gensim.models.fasttext.FastText`. + +Follows scikit-learn API conventions to facilitate using gensim along with scikit-learn. 
+ + +Examples +-------- +>>> from gensim.test.utils import common_texts +>>> from gensim.sklearn_api import FTTransformer +>>> +>>> # Create a model to represent each word by a 10 dimensional vector. +>>> model = FTTransformer(size=10, min_count=1, seed=1) +>>> +>>> # What is the vector representations of the word 'graph' and 'system'? +>>> wordvecs = model.fit(common_texts).transform(['graph', 'system']) +>>> assert wordvecs.shape == (2, 10) + +Retrieve word-vector for vocab and out-of-vocab word: + +>>> existent_word = "system" +>>> existent_word in model.gensim_model.wv.vocab +True +>>> existent_word_vec = model.transform(existent_word) # numpy vector of a word +>>> assert existent_word_vec.shape == (1, 10) +>>> +>>> oov_word = "sys" +>>> oov_word in model.gensim_model.wv.vocab +False +>>> oov_word_vec = model.transform(oov_word) # numpy vector of a word +>>> assert oov_word_vec.shape == (1, 10) + +""" +import numpy as np +import six +from sklearn.base import TransformerMixin, BaseEstimator +from sklearn.exceptions import NotFittedError + +from gensim import models + + +class FTTransformer(TransformerMixin, BaseEstimator): + """Base FastText module, wraps :class:`~gensim.models.fasttext.FastText`. + + For more information please have a look to `Enriching Word Vectors with Subword + Information `_. + + """ + def __init__(self, sg=0, hs=0, size=100, alpha=0.025, window=5, min_count=5, + max_vocab_size=None, word_ngrams=1, sample=1e-3, seed=1, + workers=3, min_alpha=0.0001, negative=5, ns_exponent=0.75, + cbow_mean=1, hashfxn=hash, iter=5, null_word=0, min_n=3, + max_n=6, sorted_vocab=1, bucket=2000000, trim_rule=None, + batch_words=10000): + """ + + Parameters + ---------- + sg : {1, 0}, optional + Training algorithm: skip-gram if `sg=1`, otherwise CBOW. + hs : {1,0}, optional + If 1, hierarchical softmax will be used for model training. + If set to 0, and `negative` is non-zero, negative sampling will be used. + size : int, optional + Dimensionality of the word vectors. + alpha : float, optional + The initial learning rate. + window : int, optional + The maximum distance between the current and predicted word within a sentence. + min_count : int, optional + The model ignores all words with total frequency lower than this. + max_vocab_size : int, optional + Limits the RAM during vocabulary building; if there are more unique + words than this, then prune the infrequent ones. Every 10 million word types need about 1GB of RAM. + Set to `None` for no limit. + word_ngrams : {1,0}, optional + If 1, uses enriches word vectors with subword(n-grams) information. + If 0, this is equivalent to :class:`~gensim.models.word2vec.Word2Vec`. + sample : float, optional + The threshold for configuring which higher-frequency words are randomly downsampled, + useful range is (0, 1e-5). + seed : int, optional + Seed for the random number generator. Initial vectors for each word are seeded with a hash of + the concatenation of word + `str(seed)`. Note that for a fully deterministically-reproducible run, + you must also limit the model to a single worker thread (`workers=1`), to eliminate ordering jitter + from OS thread scheduling. (In Python 3, reproducibility between interpreter launches also requires + use of the `PYTHONHASHSEED` environment variable to control hash randomization). + workers : int, optional + Use these many worker threads to train the model (=faster training with multicore machines). + min_alpha : float, optional + Learning rate will linearly drop to `min_alpha` as training progresses. 
+ negative : int, optional + If > 0, negative sampling will be used, the int for negative specifies how many "noise words" + should be drawn (usually between 5-20). + If set to 0, no negative sampling is used. + ns_exponent : float, optional + The exponent used to shape the negative sampling distribution. A value of 1.0 samples exactly in proportion + to the frequencies, 0.0 samples all words equally, while a negative value samples low-frequency words more + than high-frequency words. The popular default value of 0.75 was chosen by the original Word2Vec paper. + More recently, in https://arxiv.org/abs/1804.04212, Caselles-Dupré, Lesaint, & Royo-Letelier suggest that + other values may perform better for recommendation applications. + cbow_mean : {1,0}, optional + If 0, use the sum of the context word vectors. If 1, use the mean, only applies when cbow is used. + hashfxn : function, optional + Hash function to use to randomly initialize weights, for increased training reproducibility. + iter : int, optional + Number of iterations (epochs) over the corpus. + min_n : int, optional + Minimum length of char n-grams to be used for training word representations. + max_n : int, optional + Max length of char ngrams to be used for training word representations. Set `max_n` to be + lesser than `min_n` to avoid char ngrams being used. + sorted_vocab : {1,0}, optional + If 1, sort the vocabulary by descending frequency before assigning word indices. + bucket : int, optional + Character ngrams are hashed into a fixed number of buckets, in order to limit the + memory usage of the model. This option specifies the number of buckets used by the model. + trim_rule : function, optional + Vocabulary trimming rule, specifies whether certain words should remain in the vocabulary, + be trimmed away, or handled using the default (discard if word count < min_count). + Can be None (min_count will be used, look to :func:`~gensim.utils.keep_vocab_item`), + or a callable that accepts parameters (word, count, min_count) and returns either + :attr:`gensim.utils.RULE_DISCARD`, :attr:`gensim.utils.RULE_KEEP` or :attr:`gensim.utils.RULE_DEFAULT`. + The rule, if given, is only used to prune vocabulary during + :meth:`~gensim.models.fasttext.FastText.build_vocab` and is not stored as part of themodel. + + The input parameters are of the following types: + * `word` (str) - the word we are examining + * `count` (int) - the word's frequency count in the corpus + * `min_count` (int) - the minimum count threshold. + + batch_words : int, optional + Target size (in words) for batches of examples passed to worker threads (and + thus cython routines).(Larger batches will be passed if individual + texts are longer than 10000 words, but the standard cython code truncates to that maximum.) + + """ + self.gensim_model = None + self.sg = sg + self.hs = hs + self.size = size + self.alpha = alpha + self.window = window + self.min_count = min_count + self.max_vocab_size = max_vocab_size + self.word_ngrams = word_ngrams + self.sample = sample + self.seed = seed + self.workers = workers + self.min_alpha = min_alpha + self.negative = negative + self.ns_exponent = ns_exponent + self.cbow_mean = cbow_mean + self.hashfxn = hashfxn + self.iter = iter + self.null_word = null_word + self.min_n = min_n + self.max_n = max_n + self.sorted_vocab = sorted_vocab + self.bucket = bucket + self.trim_rule = trim_rule + self.batch_words = batch_words + + def fit(self, X, y=None): + """Fit the model according to the given training data. 
+ + Parameters + ---------- + X : iterable of iterables of str + Can be simply a list of lists of tokens, but for larger corpora, + consider an iterable that streams the sentences directly from disk/network. + See :class:`~gensim.models.word2vec.BrownCorpus`, :class:`~gensim.models.word2vec.Text8Corpus` + or :class:`~gensim.models.word2vec.LineSentence` in :mod:`~gensim.models.word2vec` module for such examples. + + Returns + ------- + :class:`~gensim.sklearn_api.ftmodel.FTTransformer` + The trained model. + + """ + self.gensim_model = models.FastText( + sentences=X, sg=self.sg, hs=self.hs, size=self.size, + alpha=self.alpha, window=self.window, min_count=self.min_count, + max_vocab_size=self.max_vocab_size, word_ngrams=self.word_ngrams, + sample=self.sample, seed=self.seed, workers=self.workers, + min_alpha=self.min_alpha, negative=self.negative, + ns_exponent=self.ns_exponent, cbow_mean=self.cbow_mean, + hashfxn=self.hashfxn, iter=self.iter, null_word=self.null_word, + min_n=self.min_n, max_n=self.max_n, sorted_vocab=self.sorted_vocab, + bucket=self.bucket, trim_rule=self.trim_rule, + batch_words=self.batch_words + ) + return self + + def transform(self, words): + """Get the word vectors the input words. + + Parameters + ---------- + words : {iterable of str, str} + Word or a collection of words to be transformed. + + Returns + ------- + np.ndarray of shape [`len(words)`, `size`] + A 2D array where each row is the vector of one word. + + """ + if self.gensim_model is None: + raise NotFittedError( + "This model has not been fitted yet. Call 'fit' with appropriate arguments before using this method." + ) + + # The input as array of array + if isinstance(words, six.string_types): + words = [words] + vectors = [self.gensim_model[word] for word in words] + return np.reshape(np.array(vectors), (len(words), self.size)) diff --git a/gensim/test/test_sklearn_api.py b/gensim/test/test_sklearn_api.py index ed5516df37..2a27eb48db 100644 --- a/gensim/test/test_sklearn_api.py +++ b/gensim/test/test_sklearn_api.py @@ -11,6 +11,7 @@ except ImportError: raise unittest.SkipTest("Test requires scikit-learn to be installed, which is not available") +from gensim.sklearn_api.ftmodel import FTTransformer from gensim.sklearn_api.rpmodel import RpTransformer from gensim.sklearn_api.ldamodel import LdaTransformer from gensim.sklearn_api.lsimodel import LsiTransformer @@ -1213,5 +1214,109 @@ def testModelNotFitted(self): self.assertRaises(NotFittedError, phrases_transformer.transform, phrases_sentences[0]) +class TestFastTextWrapper(unittest.TestCase): + def setUp(self): + self.model = FTTransformer(size=10, min_count=0, seed=42) + self.model.fit(texts) + + def testTransform(self): + # tranform multiple words + words = [] + words = words + texts[0] + matrix = self.model.transform(words) + self.assertEqual(matrix.shape[0], 3) + self.assertEqual(matrix.shape[1], self.model.size) + + # tranform one word + word = texts[0][0] + matrix = self.model.transform(word) + self.assertEqual(matrix.shape[0], 1) + self.assertEqual(matrix.shape[1], self.model.size) + + # verify oov-word vector retrieval + invocab_vec = self.model.transform("computer") # invocab word + self.assertEqual(invocab_vec.shape[0], 1) + self.assertEqual(invocab_vec.shape[1], self.model.size) + + oov_vec = self.model.transform('compute') # oov word + self.assertEqual(oov_vec.shape[0], 1) + self.assertEqual(oov_vec.shape[1], self.model.size) + + def testConsistencyWithGensimModel(self): + # training a FTTransformer + self.model = FTTransformer(size=10, 
min_count=0, seed=42, workers=1) + self.model.fit(texts) + + # training a Gensim FastText model with the same params + gensim_ftmodel = models.FastText(texts, size=10, min_count=0, seed=42, + workers=1) + + # vectors returned by FTTransformer + vecs_transformer_api = self.model.transform( + [text for text_list in texts for text in text_list]) + # vectors returned by FastText + vecs_gensim_model = [gensim_ftmodel[text] for text_list in texts for text in text_list] + passed = numpy.allclose(vecs_transformer_api, vecs_gensim_model) + self.assertTrue(passed) + + # test for out of vocab words + oov_words = ["compute", "serve", "sys", "net"] + vecs_transformer_api = self.model.transform(oov_words) # vector returned by FTTransformer + vecs_gensim_model = [gensim_ftmodel[word] for word in oov_words] # vector returned by FastText + passed = numpy.allclose(vecs_transformer_api, vecs_gensim_model) + self.assertTrue(passed) + + def testPipeline(self): + model = FTTransformer(size=10, min_count=1) + model.fit(w2v_texts) + + class_dict = {'mathematics': 1, 'physics': 0} + train_data = [ + ('calculus', 'mathematics'), ('mathematical', 'mathematics'), + ('geometry', 'mathematics'), ('operations', 'mathematics'), + ('curves', 'mathematics'), ('natural', 'physics'), ('nuclear', 'physics'), + ('science', 'physics'), ('electromagnetism', 'physics'), ('natural', 'physics') + ] + train_input = [x[0] for x in train_data] + train_target = [class_dict[x[1]] for x in train_data] + + clf = linear_model.LogisticRegression(penalty='l2', C=0.1) + clf.fit(model.transform(train_input), train_target) + text_ft = Pipeline([('features', model,), ('classifier', clf)]) + score = text_ft.score(train_input, train_target) + self.assertGreater(score, 0.40) + + def testSetGetParams(self): + # updating only one param + self.model.set_params(negative=20) + model_params = self.model.get_params() + self.assertEqual(model_params["negative"], 20) + # verify that the attributes values are also changed for `gensim_model` after fitting + self.model.fit(texts) + self.assertEqual(getattr(self.model.gensim_model, 'negative'), 20) + + def testPersistence(self): + model_dump = pickle.dumps(self.model) + model_load = pickle.loads(model_dump) + + # pass all words in one list + words = [word for text_list in texts for word in text_list] + loaded_transformed_vecs = model_load.transform(words) + + # sanity check for transformation operation + self.assertEqual(loaded_transformed_vecs.shape[0], len(words)) + self.assertEqual(loaded_transformed_vecs.shape[1], model_load.size) + + # comparing the original and loaded models + original_transformed_vecs = self.model.transform(words) + passed = numpy.allclose(loaded_transformed_vecs, original_transformed_vecs, atol=1e-1) + self.assertTrue(passed) + + def testModelNotFitted(self): + ftmodel_wrapper = FTTransformer(size=10, min_count=0, seed=42) + word = texts[0][0] + self.assertRaises(NotFittedError, ftmodel_wrapper.transform, word) + + if __name__ == '__main__': unittest.main() From 5164f0f20910780b8cd7c97dd3d2560034ea2a9d Mon Sep 17 00:00:00 2001 From: ivan Date: Thu, 20 Sep 2018 17:01:05 +0500 Subject: [PATCH 21/66] bump version to 3.6.0 --- docs/src/conf.py | 4 ++-- gensim/__init__.py | 2 +- setup.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/src/conf.py b/docs/src/conf.py index d05558c540..3ba4ae06b2 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -55,9 +55,9 @@ # built documents. # # The short X.Y version. 
-version = '3.5' +version = '3.6' # The full version, including alpha/beta/rc tags. -release = '3.5.0' +release = '3.6.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/gensim/__init__.py b/gensim/__init__.py index f70ef0f412..4c55eada1e 100644 --- a/gensim/__init__.py +++ b/gensim/__init__.py @@ -5,7 +5,7 @@ from gensim import parsing, corpora, matutils, interfaces, models, similarities, summarization, utils # noqa:F401 import logging -__version__ = '3.5.0' +__version__ = '3.6.0' class NullHandler(logging.Handler): diff --git a/setup.py b/setup.py index cf362466b9..2ba7cf3b62 100644 --- a/setup.py +++ b/setup.py @@ -297,7 +297,7 @@ def finalize_options(self): setup( name='gensim', - version='3.5.0', + version='3.6.0', description='Python framework for fast Vector Space Modelling', long_description=LONG_DESCRIPTION, From e22419e9f86e671ea59e7fd54a4a5007429bae4a Mon Sep 17 00:00:00 2001 From: ivan Date: Thu, 20 Sep 2018 17:06:10 +0500 Subject: [PATCH 22/66] bump CHANGELOG to 3.6.0 --- CHANGELOG.md | 102 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7dd69eb6de..d54186837c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,107 @@ Changes =========== +## 3.6.0, 2018-09-20 + +### :star2: New features +* File-based training for `*2Vec` models (__[@persiyanov](https://github.com/persiyanov)__, [#2127](https://github.com/RaRe-Technologies/gensim/pull/2127) & [#2078](https://github.com/RaRe-Technologies/gensim/pull/2078) & [#2048](https://github.com/RaRe-Technologies/gensim/pull/2048)) + + New training mode for `*2Vec` models (word2vec, doc2vec, fasttext) that allows model training to scale linearly with the number of cores (full GIL elimination). The result of our Google Summer of Code 2018 project by Dmitry Persiyanov. + + **Benchmark** + - Dataset: full English Wikipedia + - Cloud: GCE + - CPU: Intel(R) Xeon(R) CPU @ 2.30GHz 32 cores + - BLAS: libblas3 (3.7.1-3ubuntu2) + + + | Model | Queue-based version [sec] | File-based version [sec] | speed up | Accuracy (queue-based) | Accuracy (file-based) | + |-------|------------|--------------------|----------|----------------|-----------------------| + | Word2Vec | 9230 | **2437** | **3.79x** | 0.754 (± 0.003) | 0.750 (± 0.001) | + | Doc2Vec | 18264 | **2889** | **6.32x** | 0.721 (± 0.002) | 0.683 (± 0.003) | + | FastText | 16361 | **10625** | **1.54x** | 0.642 (± 0.002) | 0.660 (± 0.001) | + + Usage: + + ```python + import gensim.downloader as api + from multiprocessing import cpu_count + from gensim.utils import save_as_line_sentence + from gensim.test.utils import get_tmpfile + from gensim.models import Word2Vec, Doc2Vec, FastText + + + # Convert any corpus to the needed format: 1 document per line, words delimited by " " + corpus = api.load("text8") + corpus_fname = get_tmpfile("text8-file-sentence.txt") + save_as_line_sentence(corpus, corpus_fname) + + # Choose num of cores that you want to use (let's use all, models scale linearly now!) 
+ num_cores = cpu_count() + + # Train models using all cores + w2v_model = Word2Vec(corpus_file=corpus_fname, workers=num_cores) + d2v_model = Doc2Vec(corpus_file=corpus_fname, workers=num_cores) + ft_model = FastText(corpus_file=corpus_fname, workers=num_cores) + + ``` + [Read notebook tutorial with full description.](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/Any2Vec_Filebased.ipynb) + + +### :+1: Improvements + +* Add scikit-learn wrapper for `FastText` (__[@mcemilg](https://github.com/mcemilg)__, [#2178](https://github.com/RaRe-Technologies/gensim/pull/2178)) +* Add multiprocessing support for `BM25` (__[@Shiki-H](https://github.com/Shiki-H)__, [#2146](https://github.com/RaRe-Technologies/gensim/pull/2146)) +* Add `name_only` option for downloader api (__[@aneesh-joshi](https://github.com/aneesh-joshi)__, [#2143](https://github.com/RaRe-Technologies/gensim/pull/2143)) +* Make `word2vec2tensor` script compatible with `python3` (__[@vsocrates](https://github.com/vsocrates)__, [#2147](https://github.com/RaRe-Technologies/gensim/pull/2147)) +* Add custom filter for `Wikicorpus` (__[@mattilyra](https://github.com/mattilyra)__, [#2089](https://github.com/RaRe-Technologies/gensim/pull/2089)) +* Make `similarity_matrix` support non-contiguous dictionaries (__[@Witiko](https://github.com/Witiko)__, [#2047](https://github.com/RaRe-Technologies/gensim/pull/2047)) + + +### :red_circle: Bug fixes + +* Fix memory consumption in `AuthorTopicModel` (__[@philipphager](https://github.com/philipphager)__, [#2122](https://github.com/RaRe-Technologies/gensim/pull/2122)) +* Correctly process empty documents in `AuthorTopicModel` (__[@probinso](https://github.com/probinso)__, [#2133](https://github.com/RaRe-Technologies/gensim/pull/2133)) +* Fix ZeroDivisionError `keywords` issue with short input (__[@LShostenko](https://github.com/LShostenko)__, [#2154](https://github.com/RaRe-Technologies/gensim/pull/2154)) +* Fix `min_count` handling in phrases detection using `npmi_scorer` (__[@lopusz](https://github.com/lopusz)__, [#2072](https://github.com/RaRe-Technologies/gensim/pull/2072)) +* Remove duplicate count from `Phraser` log message (__[@robguinness](https://github.com/robguinness)__, [#2151](https://github.com/RaRe-Technologies/gensim/pull/2151)) +* Replace `np.integer` -> `np.int` in `AuthorTopicModel` (__[@menshikh-iv](https://github.com/menshikh-iv)__, [#2145](https://github.com/RaRe-Technologies/gensim/pull/2145)) + + +### :books: Tutorial and doc improvements + +* Update docstring with new analogy evaluation method (__[@akutuzov](https://github.com/akutuzov)__, [#2130](https://github.com/RaRe-Technologies/gensim/pull/2130)) +* Improve `prune_at` parameter description for `gensim.corpora.Dictionary` (__[@yxonic](https://github.com/yxonic)__, [#2128](https://github.com/RaRe-Technologies/gensim/pull/2128)) +* Fix `default` -> `auto` prior parameter in documentation for lda-related models (__[@Laubeee](https://github.com/Laubeee)__, [#2156](https://github.com/RaRe-Technologies/gensim/pull/2156)) +* Use heading instead of bold style in `gensim.models.translation_matrix` (__[@nzw0301](https://github.com/nzw0301)__, [#2164](https://github.com/RaRe-Technologies/gensim/pull/2164)) +* Fix quote of vocabulary from `gensim.models.Word2Vec` (__[@nzw0301](https://github.com/nzw0301)__, [#2161](https://github.com/RaRe-Technologies/gensim/pull/2161)) +* Replace deprecated parameters with new in docstring of `gensim.models.Doc2Vec` (__[@xuhdev](https://github.com/xuhdev)__, 
[#2165](https://github.com/RaRe-Technologies/gensim/pull/2165)) +* Fix formula in Mallet documentation (__[@Laubeee](https://github.com/Laubeee)__, [#2186](https://github.com/RaRe-Technologies/gensim/pull/2186)) +* Fix minor semantic issue in docs for `Phrases` (__[@RunHorst](https://github.com/RunHorst)__, [#2148](https://github.com/RaRe-Technologies/gensim/pull/2148)) +* Fix typo in documentation (__[@KenjiOhtsuka](https://github.com/KenjiOhtsuka)__, [#2157](https://github.com/RaRe-Technologies/gensim/pull/2157)) +* Additional documentation fixes (__[@piskvorky](https://github.com/piskvorky)__, [#2121](https://github.com/RaRe-Technologies/gensim/pull/2121)) + +### :warning: Deprecations (will be removed in the next major release) + +* Remove + - `gensim.models.wrappers.fasttext` (obsoleted by the new native `gensim.models.fasttext` implementation) + - `gensim.examples` + - `gensim.nosy` + - `gensim.scripts.word2vec_standalone` + - `gensim.scripts.make_wiki_lemma` + - `gensim.scripts.make_wiki_online` + - `gensim.scripts.make_wiki_online_lemma` + - `gensim.scripts.make_wiki_online_nodebug` + - `gensim.scripts.make_wiki` (all of these obsoleted by the new native `gensim.scripts.segment_wiki` implementation) + - "deprecated" functions and attributes + +* Move + - `gensim.scripts.make_wikicorpus` ➡ `gensim.scripts.make_wiki.py` + - `gensim.summarization` ➡ `gensim.models.summarization` + - `gensim.topic_coherence` ➡ `gensim.models._coherence` + - `gensim.utils` ➡ `gensim.utils.utils` (old imports will continue to work) + - `gensim.parsing.*` ➡ `gensim.utils.text_utils` + + ## 3.5.0, 2018-07-06 This release comprises a glorious 38 pull requests from 28 contributors. Most of the effort went into improving the documentation—hence the release code name "Docs 💬"! From 35d1b5bc62e8bb3cd9d54159e0be2e561f60790e Mon Sep 17 00:00:00 2001 From: ivan Date: Thu, 20 Sep 2018 17:12:48 +0500 Subject: [PATCH 23/66] regenerated C files with Cython --- gensim/_matutils.c | 42 ++- gensim/corpora/_mmreader.c | 42 ++- gensim/models/_utils_any2vec.c | 34 +- gensim/models/doc2vec_corpusfile.cpp | 491 ++++++++++++++------------ gensim/models/doc2vec_inner.c | 485 +++++++++++++------------ gensim/models/fasttext_corpusfile.cpp | 491 ++++++++++++++------------ gensim/models/fasttext_inner.c | 485 +++++++++++++------------ gensim/models/word2vec_corpusfile.cpp | 485 +++++++++++++------------ gensim/models/word2vec_inner.c | 485 +++++++++++++------------ 9 files changed, 1667 insertions(+), 1373 deletions(-) diff --git a/gensim/_matutils.c b/gensim/_matutils.c index 90afbdcd72..384881f47f 100644 --- a/gensim/_matutils.c +++ b/gensim/_matutils.c @@ -1,4 +1,4 @@ -/* Generated by Cython 0.28.3 */ +/* Generated by Cython 0.28.4 */ #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -7,7 +7,7 @@ #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. 
#else -#define CYTHON_ABI "0_28_3" +#define CYTHON_ABI "0_28_4" #define CYTHON_FUTURE_DIVISION 1 #include #ifndef offsetof @@ -24968,7 +24968,7 @@ static CYTHON_INLINE int __Pyx_dict_iter_next( /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; @@ -24983,7 +24983,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject * } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -25067,7 +25067,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -25368,7 +25368,7 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -25519,14 +25519,42 @@ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i #ifndef offsetof @@ -7865,7 +7865,7 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; @@ -7880,7 +7880,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject * } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -8146,7 +8146,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -9524,14 +9524,42 @@ static 
CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; diff --git a/gensim/models/_utils_any2vec.c b/gensim/models/_utils_any2vec.c index 2fd18bbbc7..dfaf7e8125 100644 --- a/gensim/models/_utils_any2vec.c +++ b/gensim/models/_utils_any2vec.c @@ -1,4 +1,4 @@ -/* Generated by Cython 0.28.3 */ +/* Generated by Cython 0.28.4 */ #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -7,7 +7,7 @@ #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. #else -#define CYTHON_ABI "0_28_3" +#define CYTHON_ABI "0_28_4" #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof @@ -3405,14 +3405,42 @@ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i #ifndef offsetof @@ -467,6 +467,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -673,7 +674,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -781,7 +782,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -832,7 +833,7 @@ static const char *__pyx_f[] = { #endif -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. 
* * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -841,7 +842,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -850,7 +851,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -859,7 +860,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -868,7 +869,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -877,7 +878,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -886,7 +887,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -895,7 +896,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -904,7 +905,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -913,7 +914,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -922,7 
+923,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -931,7 +932,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -940,7 +941,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -949,7 +950,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -958,7 +959,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -967,7 +968,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -976,7 +977,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -985,7 +986,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -994,7 +995,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * 
ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -1003,7 +1004,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -1012,7 +1013,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -1067,7 +1068,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1076,7 +1077,7 @@ struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab; */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1085,7 +1086,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1094,7 +1095,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -1634,6 +1635,9 @@ static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void); /* proto */ /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); +/* None.proto */ +#include + /* RealImag.proto */ #if CYTHON_CCOMPLEX #ifdef __cplusplus @@ -1735,9 +1739,6 @@ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value); -/* None.proto */ -#include - /* CIntFromPy.proto */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); @@ -5786,7 +5787,7 @@ static PyObject *__pyx_pf_6gensim_6models_18doc2vec_corpusfile_4d2v_train_epoch_ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and 
__releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -5834,7 +5835,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -5843,7 +5844,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -5852,7 +5853,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -5861,7 +5862,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -5875,7 +5876,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -5886,7 +5887,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -5895,7 +5896,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -5908,7 +5909,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; 
__PYX_ERR(1, 229, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -5917,7 +5918,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -5931,7 +5932,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -5942,7 +5943,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -5951,7 +5952,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -5964,7 +5965,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 233, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -5973,7 +5974,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -5982,7 +5983,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim 
= ndim # <<<<<<<<<<<<<< @@ -5991,7 +5992,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -6001,7 +6002,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -6010,7 +6011,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -6019,7 +6020,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -6031,7 +6032,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -6040,7 +6041,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -6050,7 +6051,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != 
sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -6060,7 +6061,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -6070,7 +6071,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -6081,7 +6082,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -6090,7 +6091,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -6099,7 +6100,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -6108,7 +6109,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -6117,7 +6118,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -6129,7 +6130,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -6142,7 +6143,7 @@ static int 
__pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -6152,7 +6153,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -6162,7 +6163,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -6182,7 +6183,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -6199,7 +6200,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -6208,7 +6209,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -6221,7 +6222,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 263, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -6230,7 +6231,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -6242,7 +6243,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< @@ -6253,7 +6254,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -6264,7 +6265,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -6275,7 +6276,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -6286,7 +6287,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -6297,7 +6298,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -6308,7 +6309,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -6319,7 +6320,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -6330,7 +6331,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -6341,7 +6342,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -6352,7 +6353,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"f"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< @@ -6363,7 +6364,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -6374,7 +6375,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -6385,7 +6386,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -6396,7 +6397,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -6407,7 +6408,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); 
break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -6419,7 +6420,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -6440,7 +6441,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -6449,7 +6450,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -6459,7 +6460,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -6468,7 +6469,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -6478,7 +6479,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -6487,7 +6488,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -6496,7 +6497,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -6506,7 +6507,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -6516,7 +6517,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -6548,7 +6549,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -6572,7 +6573,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -6582,7 +6583,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -6591,7 +6592,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -6600,7 +6601,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -6610,7 +6611,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -6619,7 +6620,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->strides); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -6628,7 +6629,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -6640,7 +6641,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -6654,7 +6655,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -6668,7 +6669,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -6687,7 +6688,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -6701,7 +6702,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -6715,7 +6716,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -6734,7 +6735,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -6748,7 +6749,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -6762,7 +6763,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -6781,7 +6782,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -6795,7 +6796,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -6809,7 +6810,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -6828,7 +6829,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return 
__pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -6842,7 +6843,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -6856,7 +6857,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -6875,7 +6876,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -6889,7 +6890,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -6899,7 +6900,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -6911,7 +6912,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -6920,7 +6921,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -6934,7 +6935,7 @@ static CYTHON_INLINE PyObject 
*__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -6949,7 +6950,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -6978,7 +6979,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -6987,7 +6988,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -6996,7 +6997,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -7019,7 +7020,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -7036,7 +7037,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -7071,7 +7072,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -7088,7 +7089,7 @@ static 
CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -7101,7 +7102,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 810, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -7110,7 +7111,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7130,7 +7131,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -7147,7 +7148,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7156,7 +7157,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -7169,7 +7170,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 814, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7178,7 +7179,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -7194,7 +7195,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -7203,7 +7204,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -7212,7 +7213,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -7223,7 +7224,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -7233,7 +7234,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -7243,7 +7244,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -7255,7 +7256,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -7265,7 +7266,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -7278,7 +7279,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(1, 834, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -7287,7 +7288,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -7305,7 +7306,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -7323,7 +7324,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -7341,7 +7342,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -7359,7 +7360,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -7377,7 +7378,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -7395,7 +7396,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -7413,7 +7414,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -7431,7 +7432,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -7449,7 +7450,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -7467,7 +7468,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -7485,7 +7486,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -7503,7 +7504,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -7521,7 +7522,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< @@ -7541,7 +7542,7 @@ static CYTHON_INLINE char 
*__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -7561,7 +7562,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -7581,7 +7582,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -7599,7 +7600,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -7618,7 +7619,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -7627,7 +7628,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -7637,7 +7638,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -7650,7 +7651,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -7660,7 
+7661,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = _util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -7670,7 +7671,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -7695,7 +7696,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -7710,7 +7711,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -7721,7 +7722,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -7730,7 +7731,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -7740,7 +7741,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -7750,7 +7751,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base # <<<<<<<<<<<<<< @@ -7761,7 +7762,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! * baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -7770,7 +7771,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -7779,7 +7780,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -7791,7 +7792,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -7805,7 +7806,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -7815,7 +7816,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -7826,7 +7827,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -7835,7 +7836,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -7849,7 +7850,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -7864,7 +7865,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -7885,7 +7886,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -7901,7 +7902,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -7910,7 +7911,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -7924,7 +7925,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -7939,7 +7940,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -7955,7 +7956,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. 
* cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -7970,7 +7971,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -7993,7 +7994,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -8014,7 +8015,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -8030,7 +8031,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -8039,7 +8040,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -8053,7 +8054,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -8068,7 +8069,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -8084,7 +8085,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -8099,7 +8100,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -8122,7 +8123,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -8143,7 +8144,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -8159,7 +8160,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -8168,7 +8169,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -8182,7 +8183,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -8196,7 +8197,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -8210,7 +8211,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -8225,7 +8226,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to 
import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -8378,7 +8379,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -8389,7 +8390,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -8400,7 +8401,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__2); __Pyx_GIVEREF(__pyx_tuple__2); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -8411,7 +8412,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -8422,7 +8423,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -8433,7 +8434,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -8444,7 +8445,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__6); __Pyx_GIVEREF(__pyx_tuple__6); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -8455,7 +8456,7 @@ static int __Pyx_InitCachedConstants(void) { 
__Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -8466,7 +8467,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -8666,7 +8667,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) +#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -9026,7 +9027,7 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_7) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -9709,7 +9710,7 @@ static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; @@ -9724,7 +9725,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject * } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -9808,7 +9809,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -9947,6 +9948,9 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -11013,14 +11017,42 @@ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + 
Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i #ifndef offsetof @@ -453,6 +453,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -650,7 +651,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -758,7 +759,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -808,7 +809,7 @@ static const char *__pyx_f[] = { #endif -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. * * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -817,7 +818,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -826,7 +827,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -835,7 +836,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -844,7 +845,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -853,7 +854,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -862,7 +863,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -871,7 +872,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -880,7 +881,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -889,7 +890,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -898,7 +899,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -907,7 +908,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -916,7 +917,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -925,7 +926,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -934,7 +935,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -943,7 +944,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -952,7 +953,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -961,7 +962,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -970,7 +971,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -979,7 +980,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -988,7 +989,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -1032,7 +1033,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do /*--- Type declarations ---*/ -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1041,7 +1042,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1050,7 +1051,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble 
__pyx_t_5numpy_cdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1059,7 +1060,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -7753,7 +7754,7 @@ static PyObject *__pyx_pf_6gensim_6models_13doc2vec_inner_4train_document_dm_con return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -7801,7 +7802,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -7810,7 +7811,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -7819,7 +7820,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -7828,7 +7829,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7842,7 +7843,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -7853,7 
+7854,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7862,7 +7863,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -7875,7 +7876,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 229, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7884,7 +7885,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7898,7 +7899,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -7909,7 +7910,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7918,7 +7919,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -7931,7 +7932,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 233, __pyx_L1_error) - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7940,7 +7941,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -7949,7 +7950,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim = ndim # <<<<<<<<<<<<<< @@ -7958,7 +7959,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -7968,7 +7969,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -7977,7 +7978,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. 
* info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -7986,7 +7987,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -7998,7 +7999,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -8007,7 +8008,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -8017,7 +8018,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8027,7 +8028,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -8037,7 +8038,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -8048,7 +8049,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -8057,7 +8058,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - 
/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -8066,7 +8067,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -8075,7 +8076,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -8084,7 +8085,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -8096,7 +8097,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -8109,7 +8110,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -8119,7 +8120,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -8129,7 +8130,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8149,7 +8150,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -8166,7 +8167,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8175,7 +8176,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -8188,7 +8189,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 263, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8197,7 +8198,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -8209,7 +8210,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< @@ -8220,7 +8221,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -8231,7 +8232,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif 
t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -8242,7 +8243,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -8253,7 +8254,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -8264,7 +8265,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -8275,7 +8276,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -8286,7 +8287,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -8297,7 +8298,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -8308,7 +8309,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -8319,7 +8320,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"f"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # 
<<<<<<<<<<<<<< @@ -8330,7 +8331,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -8341,7 +8342,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -8352,7 +8353,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -8363,7 +8364,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -8374,7 +8375,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -8386,7 +8387,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -8407,7 +8408,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -8416,7 +8417,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -8426,7 +8427,7 
@@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -8435,7 +8436,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -8445,7 +8446,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -8454,7 +8455,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -8463,7 +8464,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -8473,7 +8474,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -8483,7 +8484,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. 
* def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -8515,7 +8516,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -8539,7 +8540,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -8549,7 +8550,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -8558,7 +8559,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -8567,7 +8568,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8577,7 +8578,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -8586,7 +8587,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->strides); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8595,7 +8596,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -8607,7 +8608,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -8621,7 +8622,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -8635,7 +8636,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -8654,7 +8655,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -8668,7 +8669,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -8682,7 +8683,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -8701,7 +8702,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -8715,7 +8716,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -8729,7 +8730,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -8748,7 +8749,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -8762,7 +8763,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -8776,7 +8777,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -8795,7 +8796,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -8809,7 +8810,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -8823,7 +8824,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -8842,7 +8843,7 @@ static CYTHON_INLINE PyObject 
*__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -8856,7 +8857,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -8866,7 +8867,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -8878,7 +8879,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -8887,7 +8888,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -8901,7 +8902,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -8916,7 +8917,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -8945,7 +8946,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -8954,7 +8955,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ 
__pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -8963,7 +8964,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -8986,7 +8987,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -9003,7 +9004,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -9038,7 +9039,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -9055,7 +9056,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -9068,7 +9069,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 810, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -9077,7 +9078,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in 
numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9097,7 +9098,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -9114,7 +9115,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9123,7 +9124,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -9136,7 +9137,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 814, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9145,7 +9146,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -9161,7 +9162,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -9170,7 +9171,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -9179,7 +9180,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -9190,7 +9191,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -9200,7 +9201,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -9210,7 +9211,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -9222,7 +9223,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -9232,7 +9233,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -9245,7 +9246,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(1, 834, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -9254,7 +9255,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -9272,7 +9273,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + 
/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -9290,7 +9291,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -9308,7 +9309,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -9326,7 +9327,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -9344,7 +9345,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -9362,7 +9363,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -9380,7 +9381,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -9398,7 +9399,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -9416,7 +9417,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -9434,7 +9435,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -9452,7 +9453,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -9470,7 +9471,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -9488,7 +9489,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< @@ -9508,7 +9509,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -9528,7 +9529,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -9548,7 +9549,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == 
NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -9566,7 +9567,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -9585,7 +9586,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -9594,7 +9595,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -9604,7 +9605,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -9617,7 +9618,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -9627,7 +9628,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = _util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -9637,7 +9638,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -9662,7 +9663,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -9677,7 +9678,7 @@ static CYTHON_INLINE void 
__pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -9688,7 +9689,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -9697,7 +9698,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -9707,7 +9708,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -9717,7 +9718,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! * baseptr = base # <<<<<<<<<<<<<< @@ -9728,7 +9729,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -9737,7 +9738,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -9746,7 +9747,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -9758,7 +9759,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -9772,7 +9773,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -9782,7 +9783,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -9793,7 +9794,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -9802,7 +9803,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -9816,7 +9817,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -9831,7 +9832,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -9852,7 +9853,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -9868,7 +9869,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -9877,7 +9878,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -9891,7 +9892,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -9906,7 +9907,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -9922,7 +9923,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -9937,7 +9938,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. 
* cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -9960,7 +9961,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -9981,7 +9982,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -9997,7 +9998,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -10006,7 +10007,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -10020,7 +10021,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -10035,7 +10036,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -10051,7 +10052,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -10066,7 +10067,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -10089,7 +10090,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -10110,7 +10111,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -10126,7 +10127,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -10135,7 +10136,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -10149,7 +10150,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -10163,7 +10164,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -10177,7 +10178,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -10192,7 +10193,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -10369,7 +10370,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if 
((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -10380,7 +10381,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -10391,7 +10392,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -10402,7 +10403,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__6); __Pyx_GIVEREF(__pyx_tuple__6); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -10413,7 +10414,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -10424,7 +10425,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -10435,7 +10436,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__9); __Pyx_GIVEREF(__pyx_tuple__9); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -10446,7 +10447,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__10); __Pyx_GIVEREF(__pyx_tuple__10); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -10457,7 +10458,7 @@ static int 
__Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__11); __Pyx_GIVEREF(__pyx_tuple__11); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -10648,7 +10649,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) +#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -11029,7 +11030,7 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_7) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -11855,7 +11856,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObjec /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; @@ -11870,7 +11871,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject * } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -11954,7 +11955,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -12073,6 +12074,9 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -13724,14 +13728,42 @@ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i #ifndef offsetof @@ -467,6 +467,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact + #define 
PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -673,7 +674,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -781,7 +782,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -832,7 +833,7 @@ static const char *__pyx_f[] = { #endif -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. * * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -841,7 +842,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -850,7 +851,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -859,7 +860,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -868,7 +869,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -877,7 +878,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -886,7 +887,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -895,7 +896,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -904,7 +905,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -913,7 +914,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -922,7 +923,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -931,7 +932,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -940,7 +941,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -949,7 +950,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -958,7 +959,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -967,7 +968,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -976,7 +977,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -985,7 +986,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -994,7 +995,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -1003,7 +1004,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -1012,7 +1013,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -1076,7 +1077,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1085,7 +1086,7 @@ struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab; */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1094,7 +1095,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t * ctypedef 
npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1103,7 +1104,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -1609,6 +1610,9 @@ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); /* None.proto */ static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void); /* proto */ +/* None.proto */ +#include + /* RealImag.proto */ #if CYTHON_CCOMPLEX #ifdef __cplusplus @@ -1710,9 +1714,6 @@ static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void); /* proto */ /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value); -/* None.proto */ -#include - /* CIntFromPy.proto */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); @@ -3877,7 +3878,7 @@ static PyObject *__pyx_pf_6gensim_6models_19fasttext_corpusfile_2train_epoch_cbo return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -3925,7 +3926,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -3934,7 +3935,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -3943,7 +3944,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -3952,7 +3953,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -3966,7 +3967,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -3977,7 +3978,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -3986,7 +3987,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -3999,7 +4000,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 229, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -4008,7 +4009,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -4022,7 +4023,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -4033,7 +4034,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -4042,7 +4043,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == 
pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -4055,7 +4056,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 233, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -4064,7 +4065,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -4073,7 +4074,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim = ndim # <<<<<<<<<<<<<< @@ -4082,7 +4083,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -4092,7 +4093,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -4101,7 +4102,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. 
* info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -4110,7 +4111,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -4122,7 +4123,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -4131,7 +4132,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -4141,7 +4142,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -4151,7 +4152,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -4161,7 +4162,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -4172,7 +4173,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -4181,7 +4182,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - 
/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -4190,7 +4191,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -4199,7 +4200,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -4208,7 +4209,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -4220,7 +4221,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -4233,7 +4234,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -4243,7 +4244,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -4253,7 +4254,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -4273,7 +4274,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -4290,7 +4291,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -4299,7 +4300,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -4312,7 +4313,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 263, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -4321,7 +4322,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -4333,7 +4334,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< @@ -4344,7 +4345,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -4355,7 +4356,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif 
t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -4366,7 +4367,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -4377,7 +4378,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -4388,7 +4389,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -4399,7 +4400,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -4410,7 +4411,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -4421,7 +4422,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -4432,7 +4433,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -4443,7 +4444,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"f"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # 
<<<<<<<<<<<<<< @@ -4454,7 +4455,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -4465,7 +4466,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -4476,7 +4477,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -4487,7 +4488,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -4498,7 +4499,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -4510,7 +4511,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -4531,7 +4532,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -4540,7 +4541,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -4550,7 +4551,7 
@@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -4559,7 +4560,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -4569,7 +4570,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -4578,7 +4579,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -4587,7 +4588,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -4597,7 +4598,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -4607,7 +4608,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. 
* def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -4639,7 +4640,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -4663,7 +4664,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -4673,7 +4674,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -4682,7 +4683,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -4691,7 +4692,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -4701,7 +4702,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -4710,7 +4711,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->strides); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -4719,7 +4720,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -4731,7 +4732,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -4745,7 +4746,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -4759,7 +4760,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -4778,7 +4779,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -4792,7 +4793,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -4806,7 +4807,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -4825,7 +4826,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -4839,7 +4840,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -4853,7 +4854,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -4872,7 +4873,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -4886,7 +4887,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -4900,7 +4901,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -4919,7 +4920,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -4933,7 +4934,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -4947,7 +4948,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -4966,7 +4967,7 @@ static CYTHON_INLINE PyObject 
*__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -4980,7 +4981,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -4990,7 +4991,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -5002,7 +5003,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -5011,7 +5012,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -5025,7 +5026,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -5040,7 +5041,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -5069,7 +5070,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -5078,7 +5079,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ 
__pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -5087,7 +5088,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -5110,7 +5111,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -5127,7 +5128,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -5162,7 +5163,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -5179,7 +5180,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -5192,7 +5193,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 810, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -5201,7 +5202,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in 
numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -5221,7 +5222,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -5238,7 +5239,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -5247,7 +5248,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -5260,7 +5261,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 814, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -5269,7 +5270,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -5285,7 +5286,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -5294,7 +5295,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -5303,7 +5304,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -5314,7 +5315,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -5324,7 +5325,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -5334,7 +5335,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -5346,7 +5347,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -5356,7 +5357,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -5369,7 +5370,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(1, 834, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -5378,7 +5379,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -5396,7 +5397,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + 
/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -5414,7 +5415,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -5432,7 +5433,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -5450,7 +5451,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -5468,7 +5469,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -5486,7 +5487,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -5504,7 +5505,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -5522,7 +5523,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -5540,7 +5541,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -5558,7 +5559,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -5576,7 +5577,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -5594,7 +5595,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -5612,7 +5613,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< @@ -5632,7 +5633,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -5652,7 +5653,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -5672,7 +5673,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == 
NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -5690,7 +5691,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -5709,7 +5710,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -5718,7 +5719,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -5728,7 +5729,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -5741,7 +5742,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -5751,7 +5752,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = _util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -5761,7 +5762,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -5786,7 +5787,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -5801,7 +5802,7 @@ static CYTHON_INLINE void 
__pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -5812,7 +5813,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -5821,7 +5822,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -5831,7 +5832,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -5841,7 +5842,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! * baseptr = base # <<<<<<<<<<<<<< @@ -5852,7 +5853,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -5861,7 +5862,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -5870,7 +5871,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -5882,7 +5883,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -5896,7 +5897,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -5906,7 +5907,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -5917,7 +5918,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -5926,7 +5927,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -5940,7 +5941,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -5955,7 +5956,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -5976,7 +5977,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -5992,7 +5993,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -6001,7 +6002,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -6015,7 +6016,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -6030,7 +6031,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -6046,7 +6047,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -6061,7 +6062,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. 
* cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -6084,7 +6085,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -6105,7 +6106,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -6121,7 +6122,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -6130,7 +6131,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -6144,7 +6145,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -6159,7 +6160,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -6175,7 +6176,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -6190,7 +6191,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -6213,7 +6214,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -6234,7 +6235,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -6250,7 +6251,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -6259,7 +6260,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -6273,7 +6274,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -6287,7 +6288,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -6301,7 +6302,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -6316,7 +6317,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -6451,7 +6452,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 
* if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -6462,7 +6463,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -6473,7 +6474,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__2); __Pyx_GIVEREF(__pyx_tuple__2); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -6484,7 +6485,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -6495,7 +6496,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -6506,7 +6507,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -6517,7 +6518,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__6); __Pyx_GIVEREF(__pyx_tuple__6); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -6528,7 +6529,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -6539,7 +6540,7 @@ static int __Pyx_InitCachedConstants(void) { 
__Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -6725,7 +6726,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) +#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -6934,7 +6935,7 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -7613,7 +7614,7 @@ static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; @@ -7628,7 +7629,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject * } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -7712,7 +7713,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -7837,6 +7838,9 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -8903,14 +8907,42 @@ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i #ifndef offsetof @@ -453,6 +453,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 3 #define 
__Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -651,7 +652,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -759,7 +760,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -809,7 +810,7 @@ static const char *__pyx_f[] = { #endif -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. * * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -818,7 +819,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -827,7 +828,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -836,7 +837,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -845,7 +846,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -854,7 +855,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -863,7 +864,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -872,7 +873,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -881,7 +882,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -890,7 +891,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -899,7 +900,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -908,7 +909,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -917,7 +918,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -926,7 +927,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -935,7 +936,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -944,7 +945,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -953,7 +954,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -962,7 +963,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -971,7 +972,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -980,7 +981,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -989,7 +990,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -1033,7 +1034,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do /*--- Type declarations ---*/ -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1042,7 +1043,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1051,7 +1052,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1060,7 +1061,7 @@ typedef npy_cdouble 
__pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -6477,7 +6478,7 @@ static PyObject *__pyx_pf_6gensim_6models_14fasttext_inner_4init(CYTHON_UNUSED P return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -6525,7 +6526,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -6534,7 +6535,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -6543,7 +6544,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -6552,7 +6553,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6566,7 +6567,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -6577,7 +6578,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & 
pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6586,7 +6587,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -6599,7 +6600,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 229, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6608,7 +6609,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6622,7 +6623,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -6633,7 +6634,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6642,7 +6643,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -6655,7 +6656,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 233, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -6664,7 +6665,7 @@ static int 
__pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -6673,7 +6674,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim = ndim # <<<<<<<<<<<<<< @@ -6682,7 +6683,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -6692,7 +6693,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -6701,7 +6702,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. 
* info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -6710,7 +6711,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -6722,7 +6723,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -6731,7 +6732,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -6741,7 +6742,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -6751,7 +6752,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -6761,7 +6762,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -6772,7 +6773,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -6781,7 +6782,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - 
/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -6790,7 +6791,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -6799,7 +6800,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -6808,7 +6809,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -6820,7 +6821,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -6833,7 +6834,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -6843,7 +6844,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -6853,7 +6854,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -6873,7 +6874,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -6890,7 +6891,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -6899,7 +6900,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -6912,7 +6913,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 263, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -6921,7 +6922,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -6933,7 +6934,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< @@ -6944,7 +6945,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -6955,7 +6956,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif 
t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -6966,7 +6967,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -6977,7 +6978,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -6988,7 +6989,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -6999,7 +7000,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -7010,7 +7011,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -7021,7 +7022,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -7032,7 +7033,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -7043,7 +7044,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"f"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # 
<<<<<<<<<<<<<< @@ -7054,7 +7055,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -7065,7 +7066,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -7076,7 +7077,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -7087,7 +7088,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -7098,7 +7099,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -7110,7 +7111,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -7131,7 +7132,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -7140,7 +7141,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -7150,7 +7151,7 
@@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -7159,7 +7160,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -7169,7 +7170,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -7178,7 +7179,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -7187,7 +7188,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -7197,7 +7198,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -7207,7 +7208,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. 
* def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -7239,7 +7240,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -7263,7 +7264,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -7273,7 +7274,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -7282,7 +7283,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -7291,7 +7292,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -7301,7 +7302,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -7310,7 +7311,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->strides); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -7319,7 +7320,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -7331,7 +7332,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -7345,7 +7346,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -7359,7 +7360,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -7378,7 +7379,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -7392,7 +7393,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -7406,7 +7407,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -7425,7 +7426,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -7439,7 +7440,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -7453,7 +7454,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -7472,7 +7473,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -7486,7 +7487,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -7500,7 +7501,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -7519,7 +7520,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -7533,7 +7534,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -7547,7 +7548,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -7566,7 +7567,7 @@ static CYTHON_INLINE PyObject 
*__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -7580,7 +7581,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -7590,7 +7591,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -7602,7 +7603,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -7611,7 +7612,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -7625,7 +7626,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -7640,7 +7641,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -7669,7 +7670,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -7678,7 +7679,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ 
__pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -7687,7 +7688,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -7710,7 +7711,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -7727,7 +7728,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -7762,7 +7763,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -7779,7 +7780,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -7792,7 +7793,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 810, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -7801,7 +7802,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in 
numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7821,7 +7822,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -7838,7 +7839,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7847,7 +7848,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -7860,7 +7861,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 814, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7869,7 +7870,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -7885,7 +7886,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -7894,7 +7895,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -7903,7 +7904,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -7914,7 +7915,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -7924,7 +7925,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -7934,7 +7935,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -7946,7 +7947,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -7956,7 +7957,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -7969,7 +7970,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(1, 834, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -7978,7 +7979,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -7996,7 +7997,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + 
/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -8014,7 +8015,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -8032,7 +8033,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -8050,7 +8051,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -8068,7 +8069,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -8086,7 +8087,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -8104,7 +8105,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -8122,7 +8123,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -8140,7 +8141,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -8158,7 +8159,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -8176,7 +8177,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -8194,7 +8195,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -8212,7 +8213,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< @@ -8232,7 +8233,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -8252,7 +8253,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -8272,7 +8273,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == 
NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -8290,7 +8291,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -8309,7 +8310,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -8318,7 +8319,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -8328,7 +8329,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -8341,7 +8342,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -8351,7 +8352,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = _util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -8361,7 +8362,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -8386,7 +8387,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -8401,7 +8402,7 @@ static CYTHON_INLINE void 
__pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -8412,7 +8413,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -8421,7 +8422,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -8431,7 +8432,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -8441,7 +8442,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! * baseptr = base # <<<<<<<<<<<<<< @@ -8452,7 +8453,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -8461,7 +8462,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -8470,7 +8471,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -8482,7 +8483,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -8496,7 +8497,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -8506,7 +8507,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -8517,7 +8518,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -8526,7 +8527,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -8540,7 +8541,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -8555,7 +8556,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -8576,7 +8577,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -8592,7 +8593,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -8601,7 +8602,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -8615,7 +8616,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -8630,7 +8631,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -8646,7 +8647,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -8661,7 +8662,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. 
* cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -8684,7 +8685,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -8705,7 +8706,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -8721,7 +8722,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -8730,7 +8731,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -8744,7 +8745,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -8759,7 +8760,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -8775,7 +8776,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -8790,7 +8791,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -8813,7 +8814,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -8834,7 +8835,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -8850,7 +8851,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -8859,7 +8860,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -8873,7 +8874,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -8887,7 +8888,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -8901,7 +8902,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -8916,7 +8917,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -9103,7 +9104,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & 
pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -9114,7 +9115,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -9125,7 +9126,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__6); __Pyx_GIVEREF(__pyx_tuple__6); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -9136,7 +9137,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -9147,7 +9148,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -9158,7 +9159,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__9); __Pyx_GIVEREF(__pyx_tuple__9); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -9169,7 +9170,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__10); __Pyx_GIVEREF(__pyx_tuple__10); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -9180,7 +9181,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__11); __Pyx_GIVEREF(__pyx_tuple__11); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -9191,7 +9192,7 @@ static int __Pyx_InitCachedConstants(void) { 
__Pyx_GOTREF(__pyx_tuple__12); __Pyx_GIVEREF(__pyx_tuple__12); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -9387,7 +9388,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) +#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -9776,7 +9777,7 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_7) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -10575,7 +10576,7 @@ static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; @@ -10590,7 +10591,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject * } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -10674,7 +10675,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -10841,6 +10842,9 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -12523,14 +12527,42 @@ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i #ifndef offsetof @@ -467,6 +467,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 
3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -673,7 +674,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -781,7 +782,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -833,7 +834,7 @@ static const char *__pyx_f[] = { #define __Pyx_FastGilFuncInit() -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. * * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -842,7 +843,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -851,7 +852,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -860,7 +861,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -869,7 +870,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -878,7 +879,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -887,7 +888,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -896,7 +897,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -905,7 +906,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -914,7 +915,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -923,7 +924,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -932,7 +933,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -941,7 +942,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -950,7 +951,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -959,7 +960,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -968,7 +969,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -977,7 +978,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -986,7 +987,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -995,7 +996,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -1004,7 +1005,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -1013,7 +1014,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -1069,7 +1070,7 @@ struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonLineSentence; struct __pyx_obj_6gensim_6models_19word2vec_corpusfile_CythonVocab; struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____iter__; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1078,7 +1079,7 @@ struct __pyx_obj_6gensim_6models_19word2vec_corpusfile___pyx_scope_struct____ite */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1087,7 +1088,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t 
* ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1096,7 +1097,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -7184,7 +7185,7 @@ static PyObject *__pyx_pf_6gensim_6models_19word2vec_corpusfile_4train_epoch_cbo return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -7232,7 +7233,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -7241,7 +7242,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -7250,7 +7251,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -7259,7 +7260,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7273,7 +7274,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -7284,7 +7285,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7293,7 +7294,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -7306,7 +7307,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(3, 229, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7315,7 +7316,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7329,7 +7330,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -7340,7 +7341,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7349,7 +7350,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -7362,7 +7363,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(3, 233, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if 
((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -7371,7 +7372,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -7380,7 +7381,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim = ndim # <<<<<<<<<<<<<< @@ -7389,7 +7390,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -7399,7 +7400,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -7408,7 +7409,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. 
* info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -7417,7 +7418,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -7429,7 +7430,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -7438,7 +7439,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -7448,7 +7449,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -7458,7 +7459,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -7468,7 +7469,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -7479,7 +7480,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -7488,7 +7489,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - 
/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -7497,7 +7498,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -7506,7 +7507,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -7515,7 +7516,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -7527,7 +7528,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -7540,7 +7541,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -7550,7 +7551,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -7560,7 +7561,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7580,7 +7581,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -7597,7 +7598,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7606,7 +7607,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -7619,7 +7620,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(3, 263, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -7628,7 +7629,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -7640,7 +7641,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< @@ -7651,7 +7652,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -7662,7 +7663,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif 
t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -7673,7 +7674,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -7684,7 +7685,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -7695,7 +7696,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -7706,7 +7707,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -7717,7 +7718,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -7728,7 +7729,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -7739,7 +7740,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -7750,7 +7751,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"f"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # 
<<<<<<<<<<<<<< @@ -7761,7 +7762,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -7772,7 +7773,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -7783,7 +7784,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -7794,7 +7795,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -7805,7 +7806,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -7817,7 +7818,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -7838,7 +7839,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -7847,7 +7848,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -7857,7 +7858,7 
@@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -7866,7 +7867,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -7876,7 +7877,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -7885,7 +7886,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -7894,7 +7895,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -7904,7 +7905,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(3, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -7914,7 +7915,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. 
* def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -7946,7 +7947,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -7970,7 +7971,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -7980,7 +7981,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -7989,7 +7990,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -7998,7 +7999,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8008,7 +8009,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -8017,7 +8018,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->strides); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8026,7 +8027,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -8038,7 +8039,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -8052,7 +8053,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -8066,7 +8067,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -8085,7 +8086,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -8099,7 +8100,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -8113,7 +8114,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -8132,7 +8133,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -8146,7 +8147,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -8160,7 +8161,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -8179,7 +8180,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -8193,7 +8194,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -8207,7 +8208,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -8226,7 +8227,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -8240,7 +8241,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -8254,7 +8255,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -8273,7 +8274,7 @@ static CYTHON_INLINE PyObject 
*__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -8287,7 +8288,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -8297,7 +8298,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -8309,7 +8310,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -8318,7 +8319,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -8332,7 +8333,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -8347,7 +8348,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -8376,7 +8377,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -8385,7 +8386,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ 
__pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -8394,7 +8395,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -8417,7 +8418,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -8434,7 +8435,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -8469,7 +8470,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -8486,7 +8487,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -8499,7 +8500,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(3, 810, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -8508,7 +8509,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in 
numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8528,7 +8529,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -8545,7 +8546,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8554,7 +8555,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -8567,7 +8568,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(3, 814, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8576,7 +8577,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -8592,7 +8593,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -8601,7 +8602,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -8610,7 +8611,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -8621,7 +8622,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -8631,7 +8632,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -8641,7 +8642,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -8653,7 +8654,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -8663,7 +8664,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -8676,7 +8677,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(3, 834, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -8685,7 +8686,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -8703,7 +8704,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + 
/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -8721,7 +8722,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -8739,7 +8740,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -8757,7 +8758,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -8775,7 +8776,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -8793,7 +8794,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -8811,7 +8812,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -8829,7 +8830,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -8847,7 +8848,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -8865,7 +8866,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -8883,7 +8884,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -8901,7 +8902,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -8919,7 +8920,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< @@ -8939,7 +8940,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -8959,7 +8960,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -8979,7 +8980,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == 
NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -8997,7 +8998,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -9016,7 +9017,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -9025,7 +9026,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -9035,7 +9036,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -9048,7 +9049,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -9058,7 +9059,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = _util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -9068,7 +9069,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -9093,7 +9094,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -9108,7 +9109,7 @@ static CYTHON_INLINE void 
__pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -9119,7 +9120,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -9128,7 +9129,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -9138,7 +9139,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -9148,7 +9149,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! * baseptr = base # <<<<<<<<<<<<<< @@ -9159,7 +9160,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -9168,7 +9169,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -9177,7 +9178,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -9189,7 +9190,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -9203,7 +9204,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -9213,7 +9214,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -9224,7 +9225,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -9233,7 +9234,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -9247,7 +9248,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -9262,7 +9263,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -9283,7 +9284,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -9299,7 +9300,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -9308,7 +9309,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(3, 998, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -9322,7 +9323,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -9337,7 +9338,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -9353,7 +9354,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -9368,7 +9369,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. 
* cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -9391,7 +9392,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -9412,7 +9413,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -9428,7 +9429,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -9437,7 +9438,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(3, 1004, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -9451,7 +9452,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -9466,7 +9467,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -9482,7 +9483,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -9497,7 +9498,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -9520,7 +9521,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -9541,7 +9542,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -9557,7 +9558,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -9566,7 +9567,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(3, 1010, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -9580,7 +9581,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -9594,7 +9595,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -9608,7 +9609,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -9623,7 +9624,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -10714,7 +10715,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & 
pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -10725,7 +10726,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -10736,7 +10737,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -10747,7 +10748,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__6); __Pyx_GIVEREF(__pyx_tuple__6); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -10758,7 +10759,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -10769,7 +10770,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -10780,7 +10781,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__9); __Pyx_GIVEREF(__pyx_tuple__9); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -10791,7 +10792,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__10); __Pyx_GIVEREF(__pyx_tuple__10); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -10802,7 +10803,7 @@ static int __Pyx_InitCachedConstants(void) { 
__Pyx_GOTREF(__pyx_tuple__11); __Pyx_GIVEREF(__pyx_tuple__11); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -11028,7 +11029,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) +#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -12189,7 +12190,7 @@ static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; @@ -12204,7 +12205,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject * } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -12288,7 +12289,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -12541,6 +12542,9 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -14205,14 +14209,42 @@ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -15726,6 +15758,9 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_DECREF(x); return ival; } +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } diff --git a/gensim/models/word2vec_inner.c b/gensim/models/word2vec_inner.c index 79ec298beb..94c1acd950 100644 --- a/gensim/models/word2vec_inner.c +++ b/gensim/models/word2vec_inner.c @@ -1,4 +1,4 @@ -/* Generated by Cython 0.28.2 */ +/* Generated by Cython 0.28.4 */ #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -7,7 +7,7 @@ #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. #else -#define CYTHON_ABI "0_28_2" +#define CYTHON_ABI "0_28_4" #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof @@ -453,6 +453,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) @@ -651,7 +652,7 @@ static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ @@ -759,7 +760,7 @@ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; @@ -809,7 +810,7 @@ static const char *__pyx_f[] = { #endif -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":730 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":730 * # in Cython to enable them only on the right systems. 
* * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< @@ -818,7 +819,7 @@ static const char *__pyx_f[] = { */ typedef npy_int8 __pyx_t_5numpy_int8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":731 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":731 * * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< @@ -827,7 +828,7 @@ typedef npy_int8 __pyx_t_5numpy_int8_t; */ typedef npy_int16 __pyx_t_5numpy_int16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":732 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":732 * ctypedef npy_int8 int8_t * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< @@ -836,7 +837,7 @@ typedef npy_int16 __pyx_t_5numpy_int16_t; */ typedef npy_int32 __pyx_t_5numpy_int32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":733 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":733 * ctypedef npy_int16 int16_t * ctypedef npy_int32 int32_t * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< @@ -845,7 +846,7 @@ typedef npy_int32 __pyx_t_5numpy_int32_t; */ typedef npy_int64 __pyx_t_5numpy_int64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":737 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":737 * #ctypedef npy_int128 int128_t * * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< @@ -854,7 +855,7 @@ typedef npy_int64 __pyx_t_5numpy_int64_t; */ typedef npy_uint8 __pyx_t_5numpy_uint8_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":738 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":738 * * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< @@ -863,7 +864,7 @@ typedef npy_uint8 __pyx_t_5numpy_uint8_t; */ typedef npy_uint16 __pyx_t_5numpy_uint16_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":739 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":739 * ctypedef npy_uint8 uint8_t * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< @@ -872,7 +873,7 @@ typedef npy_uint16 __pyx_t_5numpy_uint16_t; */ typedef npy_uint32 __pyx_t_5numpy_uint32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":740 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":740 * ctypedef npy_uint16 uint16_t * ctypedef npy_uint32 uint32_t * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< @@ -881,7 +882,7 @@ typedef npy_uint32 __pyx_t_5numpy_uint32_t; */ typedef npy_uint64 __pyx_t_5numpy_uint64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":744 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":744 * #ctypedef npy_uint128 uint128_t * * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< @@ -890,7 +891,7 @@ typedef npy_uint64 __pyx_t_5numpy_uint64_t; */ typedef npy_float32 __pyx_t_5numpy_float32_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":745 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":745 * * ctypedef npy_float32 float32_t * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< @@ -899,7 
+900,7 @@ typedef npy_float32 __pyx_t_5numpy_float32_t; */ typedef npy_float64 __pyx_t_5numpy_float64_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":754 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":754 * # The int types are mapped a bit surprising -- * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t # <<<<<<<<<<<<<< @@ -908,7 +909,7 @@ typedef npy_float64 __pyx_t_5numpy_float64_t; */ typedef npy_long __pyx_t_5numpy_int_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":755 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":755 * # numpy.int corresponds to 'l' and numpy.long to 'q' * ctypedef npy_long int_t * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< @@ -917,7 +918,7 @@ typedef npy_long __pyx_t_5numpy_int_t; */ typedef npy_longlong __pyx_t_5numpy_long_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":756 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":756 * ctypedef npy_long int_t * ctypedef npy_longlong long_t * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< @@ -926,7 +927,7 @@ typedef npy_longlong __pyx_t_5numpy_long_t; */ typedef npy_longlong __pyx_t_5numpy_longlong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":758 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":758 * ctypedef npy_longlong longlong_t * * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< @@ -935,7 +936,7 @@ typedef npy_longlong __pyx_t_5numpy_longlong_t; */ typedef npy_ulong __pyx_t_5numpy_uint_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":759 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":759 * * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< @@ -944,7 +945,7 @@ typedef npy_ulong __pyx_t_5numpy_uint_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":760 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":760 * ctypedef npy_ulong uint_t * ctypedef npy_ulonglong ulong_t * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< @@ -953,7 +954,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulong_t; */ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":762 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":762 * ctypedef npy_ulonglong ulonglong_t * * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< @@ -962,7 +963,7 @@ typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; */ typedef npy_intp __pyx_t_5numpy_intp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":763 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":763 * * ctypedef npy_intp intp_t * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< @@ -971,7 +972,7 @@ typedef npy_intp __pyx_t_5numpy_intp_t; */ typedef npy_uintp __pyx_t_5numpy_uintp_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":765 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":765 * ctypedef npy_uintp uintp_t * * 
ctypedef npy_double float_t # <<<<<<<<<<<<<< @@ -980,7 +981,7 @@ typedef npy_uintp __pyx_t_5numpy_uintp_t; */ typedef npy_double __pyx_t_5numpy_float_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":766 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":766 * * ctypedef npy_double float_t * ctypedef npy_double double_t # <<<<<<<<<<<<<< @@ -989,7 +990,7 @@ typedef npy_double __pyx_t_5numpy_float_t; */ typedef npy_double __pyx_t_5numpy_double_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":767 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":767 * ctypedef npy_double float_t * ctypedef npy_double double_t * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< @@ -1033,7 +1034,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do /*--- Type declarations ---*/ -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":769 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":769 * ctypedef npy_longdouble longdouble_t * * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< @@ -1042,7 +1043,7 @@ static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(do */ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":770 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":770 * * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< @@ -1051,7 +1052,7 @@ typedef npy_cfloat __pyx_t_5numpy_cfloat_t; */ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":771 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":771 * ctypedef npy_cfloat cfloat_t * ctypedef npy_cdouble cdouble_t * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< @@ -1060,7 +1061,7 @@ typedef npy_cdouble __pyx_t_5numpy_cdouble_t; */ typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":773 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":773 * ctypedef npy_clongdouble clongdouble_t * * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< @@ -8484,7 +8485,7 @@ static PyObject *__pyx_pf_6gensim_6models_14word2vec_inner_8init(CYTHON_UNUSED P return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. 
* def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -8532,7 +8533,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_info->obj); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":222 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":222 * * cdef int i, ndim * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -8541,7 +8542,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":223 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":223 * cdef int i, ndim * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -8550,7 +8551,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":225 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":225 * cdef bint little_endian = ((&endian_detector)[0] != 0) * * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< @@ -8559,7 +8560,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8573,7 +8574,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L4_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":228 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":228 * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -8584,7 +8585,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8593,7 +8594,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -8606,7 +8607,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 229, __pyx_L1_error) - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":227 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":227 * ndim = PyArray_NDIM(self) * * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8615,7 +8616,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8629,7 +8630,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L7_bool_binop_done; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":232 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":232 * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< @@ -8640,7 +8641,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8649,7 +8650,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -8662,7 +8663,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 233, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":231 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":231 * raise ValueError(u"ndarray is not C contiguous") * * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< @@ -8671,7 +8672,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":235 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":235 * raise ValueError(u"ndarray is not Fortran contiguous") * * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< @@ -8680,7 +8681,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":236 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":236 * * info.buf = PyArray_DATA(self) * info.ndim = ndim # <<<<<<<<<<<<<< @@ -8689,7 
+8690,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->ndim = __pyx_v_ndim; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -8699,7 +8700,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":240 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":240 * # Allocate new buffer for strides and shape info. * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< @@ -8708,7 +8709,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":241 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":241 * # This is allocated as one block, strides first. * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim # <<<<<<<<<<<<<< @@ -8717,7 +8718,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":242 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":242 * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) * info.shape = info.strides + ndim * for i in range(ndim): # <<<<<<<<<<<<<< @@ -8729,7 +8730,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":243 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":243 * info.shape = info.strides + ndim * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< @@ -8738,7 +8739,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":244 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":244 * for i in range(ndim): * info.strides[i] = PyArray_STRIDES(self)[i] * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< @@ -8748,7 +8749,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":237 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":237 * info.buf = PyArray_DATA(self) * info.ndim = ndim * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< 
@@ -8758,7 +8759,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P goto __pyx_L9; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":246 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":246 * info.shape[i] = PyArray_DIMS(self)[i] * else: * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< @@ -8768,7 +8769,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":247 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":247 * else: * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< @@ -8779,7 +8780,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L9:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":248 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":248 * info.strides = PyArray_STRIDES(self) * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL # <<<<<<<<<<<<<< @@ -8788,7 +8789,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->suboffsets = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":249 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":249 * info.shape = PyArray_DIMS(self) * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< @@ -8797,7 +8798,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":250 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":250 * info.suboffsets = NULL * info.itemsize = PyArray_ITEMSIZE(self) * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< @@ -8806,7 +8807,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":253 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":253 * * cdef int t * cdef char* f = NULL # <<<<<<<<<<<<<< @@ -8815,7 +8816,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_f = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":254 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":254 * cdef int t * cdef char* f = NULL * cdef dtype descr = self.descr # <<<<<<<<<<<<<< @@ -8827,7 +8828,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":257 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":257 * cdef int offset * * info.obj = self # <<<<<<<<<<<<<< @@ -8840,7 +8841,7 @@ static int 
__pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = ((PyObject *)__pyx_v_self); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -8850,7 +8851,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":260 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":260 * * if not PyDataType_HASFIELDS(descr): * t = descr.type_num # <<<<<<<<<<<<<< @@ -8860,7 +8861,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_4 = __pyx_v_descr->type_num; __pyx_v_t = __pyx_t_4; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8880,7 +8881,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P } __pyx_L15_next_or:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":262 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":262 * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -8897,7 +8898,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_1 = __pyx_t_2; __pyx_L14_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8906,7 +8907,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ if (unlikely(__pyx_t_1)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -8919,7 +8920,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 263, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":261 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":261 * if not PyDataType_HASFIELDS(descr): * t = descr.type_num * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -8928,7 +8929,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":264 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":264 * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< @@ -8940,7 +8941,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"b"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":265 * raise ValueError(u"Non-native byte order not supported") * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< @@ -8951,7 +8952,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"B"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":266 * if t == NPY_BYTE: f = "b" * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< @@ -8962,7 +8963,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"h"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":267 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":267 * elif t == NPY_UBYTE: f = "B" * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< @@ -8973,7 +8974,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"H"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":268 * elif t == NPY_SHORT: f = "h" * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< @@ -8984,7 +8985,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"i"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":269 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":269 * elif t == NPY_USHORT: f = "H" * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< @@ -8995,7 +8996,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"I"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":270 * elif t == NPY_INT: f = "i" * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< @@ -9006,7 +9007,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"l"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":271 * elif t == NPY_UINT: f = "I" * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< @@ -9017,7 +9018,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"L"); break; - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":272 * elif t == NPY_LONG: f = "l" * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< @@ -9028,7 +9029,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":273 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":273 * elif t == NPY_ULONG: f = "L" * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< @@ -9039,7 +9040,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Q"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":274 * elif t == NPY_LONGLONG: f = "q" * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< @@ -9050,7 +9051,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"f"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":275 * elif t == NPY_ULONGLONG: f = "Q" * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< @@ -9061,7 +9062,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"d"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":276 * elif t == NPY_FLOAT: f = "f" * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< @@ -9072,7 +9073,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"g"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":277 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":277 * elif t == NPY_DOUBLE: f = "d" * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< @@ -9083,7 +9084,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zf"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":278 * elif t == NPY_LONGDOUBLE: f = "g" * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< @@ -9094,7 +9095,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zd"); break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":279 * elif t == NPY_CFLOAT: f = "Zf" * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< @@ -9105,7 +9106,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_v_f = ((char *)"Zg"); 
break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":280 * elif t == NPY_CDOUBLE: f = "Zd" * elif t == NPY_CLONGDOUBLE: f = "Zg" * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< @@ -9117,7 +9118,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; default: - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":282 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":282 * elif t == NPY_OBJECT: f = "O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -9138,7 +9139,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P break; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":283 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f # <<<<<<<<<<<<<< @@ -9147,7 +9148,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_info->format = __pyx_v_f; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":284 * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * info.format = f * return # <<<<<<<<<<<<<< @@ -9157,7 +9158,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_r = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":259 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":259 * info.obj = self * * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< @@ -9166,7 +9167,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":286 * return * else: * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< @@ -9176,7 +9177,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P /*else*/ { __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":287 * else: * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< @@ -9185,7 +9186,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ (__pyx_v_info->format[0]) = '^'; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":288 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":288 * info.format = PyObject_Malloc(_buffer_format_string_len) * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 # <<<<<<<<<<<<<< @@ -9194,7 +9195,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P */ __pyx_v_offset = 0; - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":289 * info.format[0] = c'^' # Native data types, manual alignment * offset = 0 * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< @@ -9204,7 +9205,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P __pyx_t_8 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((char *)NULL))) __PYX_ERR(1, 289, __pyx_L1_error) __pyx_v_f = __pyx_t_8; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":292 * info.format + _buffer_format_string_len, * &offset) * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< @@ -9214,7 +9215,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P (__pyx_v_f[0]) = '\x00'; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":215 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":215 * # experimental exception made for __getbuffer__ and __releasebuffer__ * # -- the details of this may change. * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< @@ -9246,7 +9247,7 @@ static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, P return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -9270,7 +9271,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s int __pyx_t_1; __Pyx_RefNannySetupContext("__releasebuffer__", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -9280,7 +9281,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":296 * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) # <<<<<<<<<<<<<< @@ -9289,7 +9290,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->format); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":295 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":295 * * def __releasebuffer__(ndarray self, Py_buffer* info): * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< @@ -9298,7 +9299,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* 
"../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -9308,7 +9309,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":298 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":298 * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): * PyObject_Free(info.strides) # <<<<<<<<<<<<<< @@ -9317,7 +9318,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ PyObject_Free(__pyx_v_info->strides); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":297 * if PyArray_HASFIELDS(self): * PyObject_Free(info.format) * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< @@ -9326,7 +9327,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":294 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":294 * f[0] = c'\0' # Terminate format string * * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< @@ -9338,7 +9339,7 @@ static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_s __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -9352,7 +9353,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":776 * * cdef inline object PyArray_MultiIterNew1(a): * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< @@ -9366,7 +9367,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":775 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":775 * ctypedef npy_cdouble complex_t * * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< @@ -9385,7 +9386,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -9399,7 +9400,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":779 * * cdef inline object PyArray_MultiIterNew2(a, b): * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< @@ -9413,7 +9414,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":778 * return PyArray_MultiIterNew(1, a) * * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< @@ -9432,7 +9433,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -9446,7 +9447,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":782 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":782 * * cdef inline object PyArray_MultiIterNew3(a, b, c): * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< @@ -9460,7 +9461,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":781 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":781 * return PyArray_MultiIterNew(2, a, b) * * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< @@ -9479,7 +9480,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -9493,7 +9494,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":785 * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< @@ -9507,7 +9508,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":784 * return PyArray_MultiIterNew(3, a, b, c) * * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< @@ -9526,7 +9527,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__ return 
__pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -9540,7 +9541,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":788 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":788 * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< @@ -9554,7 +9555,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ __pyx_t_1 = 0; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":787 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":787 * return PyArray_MultiIterNew(4, a, b, c, d) * * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< @@ -9573,7 +9574,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -9587,7 +9588,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ int __pyx_t_1; __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -9597,7 +9598,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":792 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":792 * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): * return d.subarray.shape # <<<<<<<<<<<<<< @@ -9609,7 +9610,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":791 * * cdef inline tuple PyDataType_SHAPE(dtype d): * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< @@ -9618,7 +9619,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":794 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":794 * return d.subarray.shape * else: * return () # <<<<<<<<<<<<<< @@ -9632,7 +9633,7 @@ static CYTHON_INLINE PyObject 
*__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ goto __pyx_L0; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":790 * return PyArray_MultiIterNew(5, a, b, c, d, e) * * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< @@ -9647,7 +9648,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__ return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -9676,7 +9677,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx char *__pyx_t_9; __Pyx_RefNannySetupContext("_util_dtypestring", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":801 * * cdef dtype child * cdef int endian_detector = 1 # <<<<<<<<<<<<<< @@ -9685,7 +9686,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_endian_detector = 1; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":802 * cdef dtype child * cdef int endian_detector = 1 * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< @@ -9694,7 +9695,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # <<<<<<<<<<<<<< @@ -9717,7 +9718,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":806 * * for childname in descr.names: * fields = descr.fields[childname] # <<<<<<<<<<<<<< @@ -9734,7 +9735,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":807 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":807 * for childname in descr.names: * fields = descr.fields[childname] * child, new_offset = fields # <<<<<<<<<<<<<< @@ -9769,7 +9770,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -9786,7 +9787,7 @@ static 
CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0); if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -9799,7 +9800,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 810, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":809 * child, new_offset = fields * * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< @@ -9808,7 +9809,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9828,7 +9829,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L8_next_or:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":813 * * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< @@ -9845,7 +9846,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = __pyx_t_7; __pyx_L7_bool_binop_done:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9854,7 +9855,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ if (unlikely(__pyx_t_6)) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -9867,7 +9868,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 814, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":812 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") * * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< @@ -9876,7 +9877,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":824 * * # Output padding bytes * while offset[0] < new_offset: # <<<<<<<<<<<<<< @@ -9892,7 +9893,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (!__pyx_t_6) break; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":825 * # Output padding bytes * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< @@ -9901,7 +9902,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ (__pyx_v_f[0]) = 0x78; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":826 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":826 * while offset[0] < new_offset: * f[0] = 120 # "x"; pad byte * f += 1 # <<<<<<<<<<<<<< @@ -9910,7 +9911,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":827 * f[0] = 120 # "x"; pad byte * f += 1 * offset[0] += 1 # <<<<<<<<<<<<<< @@ -9921,7 +9922,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":829 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":829 * offset[0] += 1 * * offset[0] += child.itemsize # <<<<<<<<<<<<<< @@ -9931,7 +9932,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_8 = 0; (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -9941,7 +9942,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); if (__pyx_t_6) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":832 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":832 * * if not PyDataType_HASFIELDS(child): * t = child.type_num # <<<<<<<<<<<<<< @@ -9953,7 +9954,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); __pyx_t_4 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -9963,7 +9964,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0); if (unlikely(__pyx_t_6)) { - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -9976,7 +9977,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(1, 834, __pyx_L1_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":833 * if not PyDataType_HASFIELDS(child): * t = child.type_num * if end - f < 5: # <<<<<<<<<<<<<< @@ -9985,7 +9986,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":837 * * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< @@ -10003,7 +10004,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":838 * # Until ticket #99 is fixed, use integers to avoid warnings * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< @@ -10021,7 +10022,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":839 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":839 * if t == NPY_BYTE: f[0] = 98 #"b" * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< @@ -10039,7 +10040,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":840 * elif t == NPY_UBYTE: f[0] = 66 #"B" * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< @@ -10057,7 +10058,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":841 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":841 * elif t == NPY_SHORT: f[0] = 104 #"h" * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< @@ -10075,7 +10076,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":842 * elif t == NPY_USHORT: f[0] = 72 #"H" * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< @@ -10093,7 +10094,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":843 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":843 * elif t == NPY_INT: f[0] = 105 #"i" * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< @@ -10111,7 +10112,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":844 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":844 * elif t == NPY_UINT: f[0] = 73 #"I" * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< @@ -10129,7 +10130,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":845 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":845 * elif t == NPY_LONG: f[0] = 108 #"l" * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< @@ -10147,7 +10148,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":846 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":846 * elif t == NPY_ULONG: f[0] = 76 #"L" * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< @@ -10165,7 +10166,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":847 * elif t == NPY_LONGLONG: f[0] = 113 #"q" * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< @@ -10183,7 +10184,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":848 * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< @@ -10201,7 +10202,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":849 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":849 * elif t == NPY_FLOAT: f[0] = 102 #"f" * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< @@ -10219,7 +10220,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":850 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":850 * elif t == NPY_DOUBLE: f[0] = 100 #"d" * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< @@ -10239,7 +10240,7 @@ static CYTHON_INLINE char 
*__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":851 * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< @@ -10259,7 +10260,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":852 * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< @@ -10279,7 +10280,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":853 * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< @@ -10297,7 +10298,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L15; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":855 * elif t == NPY_OBJECT: f[0] = 79 #"O" * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< @@ -10316,7 +10317,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L15:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":856 * else: * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) * f += 1 # <<<<<<<<<<<<<< @@ -10325,7 +10326,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx */ __pyx_v_f = (__pyx_v_f + 1); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":831 * offset[0] += child.itemsize * * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< @@ -10335,7 +10336,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx goto __pyx_L13; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":860 * # Cython ignores struct boundary information ("T{...}"), * # so don't output it * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< @@ -10348,7 +10349,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __pyx_L13:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":805 * cdef tuple fields * * for childname in descr.names: # 
<<<<<<<<<<<<<< @@ -10358,7 +10359,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":861 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":861 * # so don't output it * f = _util_dtypestring(child, f, end, offset) * return f # <<<<<<<<<<<<<< @@ -10368,7 +10369,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx __pyx_r = __pyx_v_f; goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":796 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":796 * return () * * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< @@ -10393,7 +10394,7 @@ static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -10408,7 +10409,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a int __pyx_t_2; __Pyx_RefNannySetupContext("set_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -10419,7 +10420,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":980 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":980 * cdef PyObject* baseptr * if base is None: * baseptr = NULL # <<<<<<<<<<<<<< @@ -10428,7 +10429,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_baseptr = NULL; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":979 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":979 * cdef inline void set_array_base(ndarray arr, object base): * cdef PyObject* baseptr * if base is None: # <<<<<<<<<<<<<< @@ -10438,7 +10439,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a goto __pyx_L3; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":982 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":982 * baseptr = NULL * else: * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< @@ -10448,7 +10449,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a /*else*/ { Py_INCREF(__pyx_v_base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":983 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":983 * else: * Py_INCREF(base) # important to do this before decref below! 
* baseptr = base # <<<<<<<<<<<<<< @@ -10459,7 +10460,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a } __pyx_L3:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":984 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":984 * Py_INCREF(base) # important to do this before decref below! * baseptr = base * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< @@ -10468,7 +10469,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ Py_XDECREF(__pyx_v_arr->base); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":985 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":985 * baseptr = base * Py_XDECREF(arr.base) * arr.base = baseptr # <<<<<<<<<<<<<< @@ -10477,7 +10478,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a */ __pyx_v_arr->base = __pyx_v_baseptr; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":977 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":977 * * * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< @@ -10489,7 +10490,7 @@ static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_a __Pyx_RefNannyFinishContext(); } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -10503,7 +10504,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py int __pyx_t_1; __Pyx_RefNannySetupContext("get_array_base", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -10513,7 +10514,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_t_1 = ((__pyx_v_arr->base == NULL) != 0); if (__pyx_t_1) { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":989 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":989 * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: * return None # <<<<<<<<<<<<<< @@ -10524,7 +10525,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":988 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":988 * * cdef inline object get_array_base(ndarray arr): * if arr.base is NULL: # <<<<<<<<<<<<<< @@ -10533,7 +10534,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py */ } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":991 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":991 * return None * else: * return arr.base # <<<<<<<<<<<<<< @@ -10547,7 +10548,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py goto __pyx_L0; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":987 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":987 * arr.base = baseptr * * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< @@ -10562,7 +10563,7 @@ static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__py return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -10583,7 +10584,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_array", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10599,7 +10600,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":998 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":998 * cdef inline int import_array() except -1: * try: * _import_array() # <<<<<<<<<<<<<< @@ -10608,7 +10609,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { */ __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 998, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. * cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10622,7 +10623,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":999 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":999 * try: * _import_array() * except Exception: # <<<<<<<<<<<<<< @@ -10637,7 +10638,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -10653,7 +10654,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":997 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":997 * # Cython code. 
* cdef inline int import_array() except -1: * try: # <<<<<<<<<<<<<< @@ -10668,7 +10669,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":996 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":996 * # Versions of the import_* functions which are more suitable for * # Cython code. * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< @@ -10691,7 +10692,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -10712,7 +10713,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_umath", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -10728,7 +10729,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1004 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1004 * cdef inline int import_umath() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -10737,7 +10738,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1004, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -10751,7 +10752,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1005 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1005 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -10766,7 +10767,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -10782,7 +10783,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1003 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1003 * * cdef inline int import_umath() except -1: * try: # <<<<<<<<<<<<<< @@ -10797,7 +10798,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { __pyx_L8_try_end:; } - /* 
"../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1002 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1002 * raise ImportError("numpy.core.multiarray failed to import") * * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< @@ -10820,7 +10821,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { return __pyx_r; } -/* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 +/* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -10841,7 +10842,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { PyObject *__pyx_t_8 = NULL; __Pyx_RefNannySetupContext("import_ufunc", 0); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -10857,7 +10858,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_XGOTREF(__pyx_t_3); /*try:*/ { - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1010 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1010 * cdef inline int import_ufunc() except -1: * try: * _import_umath() # <<<<<<<<<<<<<< @@ -10866,7 +10867,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { */ __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1010, __pyx_L3_error) - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -10880,7 +10881,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L8_try_end; __pyx_L3_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1011 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1011 * try: * _import_umath() * except Exception: # <<<<<<<<<<<<<< @@ -10894,7 +10895,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -10908,7 +10909,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { goto __pyx_L5_except_error; __pyx_L5_except_error:; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1009 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1009 * * cdef inline int import_ufunc() except -1: * try: # <<<<<<<<<<<<<< @@ -10923,7 +10924,7 @@ static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { __pyx_L8_try_end:; } - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise 
ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -11110,7 +11111,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__2); __Pyx_GIVEREF(__pyx_tuple__2); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":229 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":229 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< @@ -11121,7 +11122,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":233 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":233 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< @@ -11132,7 +11133,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":263 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":263 * if ((descr.byteorder == c'>' and little_endian) or * (descr.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -11143,7 +11144,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":810 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":810 * * if (end - f) - (new_offset - offset[0]) < 15: * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< @@ -11154,7 +11155,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__6); __Pyx_GIVEREF(__pyx_tuple__6); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":814 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":814 * if ((child.byteorder == c'>' and little_endian) or * (child.byteorder == c'<' and not little_endian)): * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< @@ -11165,7 +11166,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":834 * t = child.type_num * if end - f < 5: * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< @@ -11176,7 +11177,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1000 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1000 * _import_array() * except Exception: * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< @@ -11187,7 +11188,7 @@ static int 
__Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__9); __Pyx_GIVEREF(__pyx_tuple__9); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1006 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1006 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -11198,7 +11199,7 @@ static int __Pyx_InitCachedConstants(void) { __Pyx_GOTREF(__pyx_tuple__10); __Pyx_GIVEREF(__pyx_tuple__10); - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1012 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1012 * _import_umath() * except Exception: * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< @@ -11407,7 +11408,7 @@ static int __Pyx_modinit_function_import_code(void) { #ifndef CYTHON_SMALL_CODE #if defined(__clang__) #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) +#elif defined(__GNUC__) && (!(defined(__cplusplus)) || (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4))) #define CYTHON_SMALL_CODE __attribute__((optimize("Os"))) #else #define CYTHON_SMALL_CODE @@ -11928,7 +11929,7 @@ if (!__Pyx_RefNanny) { if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_7) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - /* "../anaconda3/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1008 + /* "../../.virtualenvs/math/local/lib/python2.7/site-packages/Cython/Includes/numpy/__init__.pxd":1008 * raise ImportError("numpy.core.umath failed to import") * * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< @@ -12741,7 +12742,7 @@ static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; @@ -12756,7 +12757,7 @@ static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject * } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -12840,7 +12841,7 @@ static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE - #if PY_VERSION_HEX >= 0x030700A2 + #if PY_VERSION_HEX >= 0x030700A3 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; @@ -13007,6 +13008,9 @@ static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_li #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); @@ -14685,14 +14689,42 @@ static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, return res; } #endif +static int 
__Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i Date: Sun, 23 Sep 2018 11:24:33 +0500 Subject: [PATCH 24/66] Get rid most of warnings in testing (#2191) * ignore new broken pattern, use old one * fix d2v warnings * fix dtm warnings * fix varembed warnings * fix translation_matrix warnings * fix similarities warnings * fix sklearn wrappers warnings * upldate blas info * fix fasttext warnings * fix load api warnings * fix similarities warning * fix word2vec deprecation warnings * ignore deprecationwarning in save/load process * more cleanup * fix flake8 * fix assertions --- CHANGELOG.md | 8 +- gensim/models/deprecated/doc2vec.py | 4 +- gensim/models/fasttext.py | 16 +- gensim/models/translation_matrix.py | 4 +- gensim/models/wrappers/dtmmodel.py | 6 +- gensim/models/wrappers/varembed.py | 8 +- gensim/similarities/docsim.py | 2 +- gensim/similarities/index.py | 2 +- gensim/sklearn_api/ftmodel.py | 2 +- gensim/sklearn_api/w2vmodel.py | 2 +- gensim/test/test_api.py | 2 +- gensim/test/test_doc2vec.py | 130 +++++++------- gensim/test/test_dtm.py | 4 +- gensim/test/test_fasttext.py | 214 +++++++++++------------ gensim/test/test_keras_integration.py | 12 +- gensim/test/test_keyedvectors.py | 8 +- gensim/test/test_similarities.py | 14 +- gensim/test/test_sklearn_api.py | 6 +- gensim/test/test_varembed_wrapper.py | 6 +- gensim/test/test_word2vec.py | 219 ++++++++++++------------ gensim/test/test_wordrank_wrapper.py | 2 +- gensim/topic_coherence/text_analysis.py | 2 +- gensim/utils.py | 34 ++-- setup.py | 2 +- 24 files changed, 364 insertions(+), 345 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d54186837c..59ebd74101 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,10 +8,10 @@ Changes New training mode for `*2Vec` models (word2vec, doc2vec, fasttext) that allows model training to scale linearly with the number of cores (full GIL elimination). The result of our Google Summer of Code 2018 project by Dmitry Persiyanov. 
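As a quick illustration of the file-based mode described above — a minimal sketch only, assuming gensim 3.6+ and a hypothetical `corpus.txt` already in `LineSentence` format (one whitespace-tokenised sentence per line); the path and parameter values are illustrative and not part of the benchmark below:

>>> from gensim.models.word2vec import Word2Vec
>>>
>>> # One-shot: vocabulary scan and training both stream straight from the file, so each
>>> # worker reads its own chunk of the corpus instead of waiting on a single sentence queue.
>>> model = Word2Vec(corpus_file='corpus.txt', size=100, workers=32, min_count=5)
>>>
>>> # Explicit two-step variant; with corpus_file, pass total_words rather than total_examples.
>>> model = Word2Vec(size=100, workers=32, min_count=5)
>>> model.build_vocab(corpus_file='corpus.txt')
>>> model.train(corpus_file='corpus.txt', total_words=model.corpus_total_words, epochs=model.epochs)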
**Benchmark** - - Dataset: full English Wikipedia - - Cloud: GCE - - CPU: Intel(R) Xeon(R) CPU @ 2.30GHz 32 cores - - BLAS: libblas3 (3.7.1-3ubuntu2) + - Dataset: `full English Wikipedia` + - Cloud: `GCE` + - CPU: `Intel(R) Xeon(R) CPU @ 2.30GHz 32 cores` + - BLAS: `MKL` | Model | Queue-based version [sec] | File-based version [sec] | speed up | Accuracy (queue-based) | Accuracy (file-based) | diff --git a/gensim/models/deprecated/doc2vec.py b/gensim/models/deprecated/doc2vec.py index 96ba7cd1fe..4b10224a87 100644 --- a/gensim/models/deprecated/doc2vec.py +++ b/gensim/models/deprecated/doc2vec.py @@ -91,7 +91,7 @@ def load_old_doc2vec(*args, **kwargs): 'dm_tag_count': old_model.dm_tag_count, 'docvecs_mapfile': old_model.__dict__.get('docvecs_mapfile', None), 'comment': old_model.__dict__.get('comment', None), - 'size': old_model.vector_size, + 'vector_size': old_model.vector_size, 'alpha': old_model.alpha, 'window': old_model.window, 'min_count': old_model.min_count, @@ -104,7 +104,7 @@ def load_old_doc2vec(*args, **kwargs): 'negative': old_model.negative, 'cbow_mean': old_model.cbow_mean, 'hashfxn': old_model.hashfxn, - 'iter': old_model.iter, + 'epochs': old_model.iter, 'sorted_vocab': old_model.__dict__.get('sorted_vocab', 1), 'batch_words': old_model.__dict__.get('batch_words', MAX_WORDS_IN_BATCH), 'compute_loss': old_model.__dict__.get('compute_loss', None) diff --git a/gensim/models/fasttext.py b/gensim/models/fasttext.py index 5bc1109cce..cbdcba3d7a 100644 --- a/gensim/models/fasttext.py +++ b/gensim/models/fasttext.py @@ -363,8 +363,8 @@ def __init__(self, sentences=None, corpus_file=None, sg=0, hs=0, size=100, alpha >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] >>> >>> model = FastText(sentences, min_count=1) - >>> say_vector = model['say'] # get vector for word - >>> of_vector = model['of'] # get vector for out-of-vocab word + >>> say_vector = model.wv['say'] # get vector for word + >>> of_vector = model.wv['of'] # get vector for out-of-vocab word """ self.load = call_on_class_only @@ -380,7 +380,7 @@ def __init__(self, sentences=None, corpus_file=None, sg=0, hs=0, size=100, alpha sorted_vocab=bool(sorted_vocab), null_word=null_word, ns_exponent=ns_exponent) self.trainables = FastTextTrainables( vector_size=size, seed=seed, bucket=bucket, hashfxn=hashfxn) - self.wv.bucket = self.bucket + self.wv.bucket = self.trainables.bucket super(FastText, self).__init__( sentences=sentences, corpus_file=corpus_file, workers=workers, vector_size=size, epochs=iter, @@ -487,10 +487,10 @@ def build_vocab(self, sentences=None, corpus_file=None, update=False, progress_p >>> >>> model = FastText(min_count=1) >>> model.build_vocab(sentences_1) - >>> model.train(sentences_1, total_examples=model.corpus_count, epochs=model.iter) + >>> model.train(sentences_1, total_examples=model.corpus_count, epochs=model.epochs) >>> >>> model.build_vocab(sentences_2, update=True) - >>> model.train(sentences_2, total_examples=model.corpus_count, epochs=model.iter) + >>> model.train(sentences_2, total_examples=model.corpus_count, epochs=model.epochs) """ if update: @@ -519,11 +519,11 @@ def _clear_post_train(self): def estimate_memory(self, vocab_size=None, report=None): vocab_size = vocab_size or len(self.wv.vocab) vec_size = self.vector_size * np.dtype(np.float32).itemsize - l1_size = self.layer1_size * np.dtype(np.float32).itemsize + l1_size = self.trainables.layer1_size * np.dtype(np.float32).itemsize report = report or {} report['vocab'] = len(self.wv.vocab) * (700 if self.hs else 500) 
report['syn0_vocab'] = len(self.wv.vocab) * vec_size - num_buckets = self.bucket + num_buckets = self.trainables.bucket if self.hs: report['syn1'] = len(self.wv.vocab) * l1_size if self.negative: @@ -657,7 +657,7 @@ def train(self, sentences=None, corpus_file=None, total_examples=None, total_wor >>> >>> model = FastText(min_count=1) >>> model.build_vocab(sentences) - >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) + >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) """ super(FastText, self).train( diff --git a/gensim/models/translation_matrix.py b/gensim/models/translation_matrix.py index 4bf638f3ce..7ee724c4e3 100644 --- a/gensim/models/translation_matrix.py +++ b/gensim/models/translation_matrix.py @@ -136,12 +136,12 @@ def build(cls, lang_vec, lexicon=None): # if the lexicon is not provided, using the all the Keyedvectors's words as default for item in lexicon: words.append(item) - mat.append(lang_vec.syn0[lang_vec.vocab[item].index]) + mat.append(lang_vec.vectors[lang_vec.vocab[item].index]) else: for item in lang_vec.vocab.keys(): words.append(item) - mat.append(lang_vec.syn0[lang_vec.vocab[item].index]) + mat.append(lang_vec.vectors[lang_vec.vocab[item].index]) return Space(mat, words) diff --git a/gensim/models/wrappers/dtmmodel.py b/gensim/models/wrappers/dtmmodel.py index 0817098d85..0df73e6be8 100644 --- a/gensim/models/wrappers/dtmmodel.py +++ b/gensim/models/wrappers/dtmmodel.py @@ -464,9 +464,9 @@ def show_topics(self, num_topics=10, times=5, num_words=10, log=False, formatted for time in chosen_times: for i in chosen_topics: if formatted: - topic = self.print_topic(i, time, num_words=num_words) + topic = self.print_topic(i, time, topn=num_words) else: - topic = self.show_topic(i, time, num_words=num_words) + topic = self.show_topic(i, time, topn=num_words) shown.append(topic) return shown @@ -529,7 +529,7 @@ def print_topic(self, topicid, time, topn=10, num_words=None): warnings.warn("The parameter `num_words` is deprecated, will be removed in 4.0.0, use `topn` instead.") topn = num_words - return ' + '.join(['%.3f*%s' % v for v in self.show_topic(topicid, time, topn)]) + return ' + '.join(['%.3f*%s' % v for v in self.show_topic(topicid, time, topn=topn)]) def dtm_vis(self, corpus, time): """Get data specified by pyLDAvis format. 
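The dtmmodel change above only renames the deprecated `num_words` keyword to `topn`. A minimal sketch of the call pattern after the rename, assuming an already-trained `DtmModel` (the wrapper still requires the external DTM binary, so this is illustrative only):

>>> topic = model.show_topic(topicid=1, time=1, topn=10)    # formerly num_words=10
>>> line = model.print_topic(topicid=1, time=1, topn=10)    # same deprecation path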
diff --git a/gensim/models/wrappers/varembed.py b/gensim/models/wrappers/varembed.py index f6fed35db4..ca8227ac01 100644 --- a/gensim/models/wrappers/varembed.py +++ b/gensim/models/wrappers/varembed.py @@ -95,14 +95,14 @@ def load_word_embeddings(self, word_embeddings, word_to_ix): counts[word] = counts.get(word, 0) + 1 self.vocab_size = len(counts) self.vector_size = word_embeddings.shape[1] - self.syn0 = np.zeros((self.vocab_size, self.vector_size)) + self.vectors = np.zeros((self.vocab_size, self.vector_size)) self.index2word = [None] * self.vocab_size logger.info("Corpus has %i words", len(self.vocab)) for word_id, word in enumerate(counts): self.vocab[word] = Vocab(index=word_id, count=counts[word]) - self.syn0[word_id] = word_embeddings[word_to_ix[word]] + self.vectors[word_id] = word_embeddings[word_to_ix[word]] self.index2word[word_id] = word - assert((len(self.vocab), self.vector_size) == self.syn0.shape) + assert((len(self.vocab), self.vector_size) == self.vectors.shape) logger.info("Loaded matrix of %d size and %d dimensions", self.vocab_size, self.vector_size) def add_morphemes_to_embeddings(self, morfessor_model, morpho_embeddings, morpho_to_ix): @@ -125,5 +125,5 @@ def add_morphemes_to_embeddings(self, morfessor_model, morpho_embeddings, morpho for m in morfessor_model.viterbi_segment(word)[0] ] ).sum(axis=0) - self.syn0[self.vocab[word].index] += morpheme_embedding + self.vectors[self.vocab[word].index] += morpheme_embedding logger.info("Added morphemes to word vectors") diff --git a/gensim/similarities/docsim.py b/gensim/similarities/docsim.py index 4f7782a402..8b59df04e3 100755 --- a/gensim/similarities/docsim.py +++ b/gensim/similarities/docsim.py @@ -1051,7 +1051,7 @@ def get_similarities(self, query): result = [] for qidx in range(n_queries): # Compute similarity for each query. - qresult = [self.w2v_model.wmdistance(document, query[qidx]) for document in self.corpus] + qresult = [self.w2v_model.wv.wmdistance(document, query[qidx]) for document in self.corpus] qresult = numpy.array(qresult) qresult = 1. / (1. + qresult) # Similarity is the negative of the distance. 
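The model-code edits above and the test updates below all follow one deprecation pattern: vector lookups, similarity queries and the raw weight matrices move off the model object onto its `model.wv` KeyedVectors (`syn0` becomes `vectors`, `syn0norm` becomes `vectors_norm`, and so on). A minimal sketch of the preferred access style, using a toy corpus purely for illustration:

>>> from gensim.models import Word2Vec
>>>
>>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]]
>>> model = Word2Vec(sentences, min_count=1)
>>>
>>> vec = model.wv['say']                        # instead of model['say']
>>> sims = model.wv.most_similar('say', topn=2)  # instead of model.most_similar(...)
>>> mat = model.wv.vectors                       # instead of model.wv.syn0
>>> dist = model.wv.wmdistance(['cat', 'say'], ['dog', 'say'])  # needs the optional pyemd package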
diff --git a/gensim/similarities/index.py b/gensim/similarities/index.py index f0b1ebc6de..2c1a5e66fa 100644 --- a/gensim/similarities/index.py +++ b/gensim/similarities/index.py @@ -174,7 +174,7 @@ def build_from_keyedvectors(self): """Build an Annoy index using word vectors from a KeyedVectors model.""" self.model.init_sims() - return self._build_from_model(self.model.syn0norm, self.model.index2word, self.model.vector_size) + return self._build_from_model(self.model.vectors_norm, self.model.index2word, self.model.vector_size) def _build_from_model(self, vectors, labels, num_features): index = AnnoyIndex(num_features) diff --git a/gensim/sklearn_api/ftmodel.py b/gensim/sklearn_api/ftmodel.py index f4e542471a..606a5f3d21 100644 --- a/gensim/sklearn_api/ftmodel.py +++ b/gensim/sklearn_api/ftmodel.py @@ -220,5 +220,5 @@ def transform(self, words): # The input as array of array if isinstance(words, six.string_types): words = [words] - vectors = [self.gensim_model[word] for word in words] + vectors = [self.gensim_model.wv[word] for word in words] return np.reshape(np.array(vectors), (len(words), self.size)) diff --git a/gensim/sklearn_api/w2vmodel.py b/gensim/sklearn_api/w2vmodel.py index 8ef2ef18d1..6c63dc6397 100644 --- a/gensim/sklearn_api/w2vmodel.py +++ b/gensim/sklearn_api/w2vmodel.py @@ -173,7 +173,7 @@ def transform(self, words): # The input as array of array if isinstance(words, six.string_types): words = [words] - vectors = [self.gensim_model[word] for word in words] + vectors = [self.gensim_model.wv[word] for word in words] return np.reshape(np.array(vectors), (len(words), self.size)) def partial_fit(self, X): diff --git a/gensim/test/test_api.py b/gensim/test/test_api.py index 13245b2205..624ed4a765 100644 --- a/gensim/test/test_api.py +++ b/gensim/test/test_api.py @@ -47,7 +47,7 @@ def test_load_model(self): base_dir, "__testing_word2vec-matrix-synopsis", "__testing_word2vec-matrix-synopsis.gz" ) model = api.load("__testing_word2vec-matrix-synopsis") - vector_dead_calc = model["dead"] + vector_dead_calc = model.wv["dead"] self.assertTrue(np.allclose(vector_dead, vector_dead_calc)) shutil.rmtree(base_dir) self.assertEqual(api.load("__testing_word2vec-matrix-synopsis", return_path=True), dataset_path) diff --git a/gensim/test/test_doc2vec.py b/gensim/test/test_doc2vec.py index de23529f14..d35b907800 100644 --- a/gensim/test/test_doc2vec.py +++ b/gensim/test/test_doc2vec.py @@ -111,7 +111,7 @@ def testLoadOldModel(self): self.assertTrue(len(model.wv.vocab) == 3955) self.assertTrue(len(model.wv.index2word) == 3955) self.assertIsNone(model.corpus_total_words) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (3955, )) self.assertTrue(model.vocabulary.cum_table.shape == (3955, )) @@ -129,7 +129,7 @@ def testLoadOldModel(self): self.assertTrue(len(model.wv.vocab) == 3955) self.assertTrue(len(model.wv.index2word) == 3955) self.assertIsNone(model.corpus_total_words) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (3955, )) self.assertTrue(model.vocabulary.cum_table.shape == (3955, )) @@ -286,7 +286,7 @@ def test_int_doctags(self): model = doc2vec.Doc2Vec(min_count=1) model.build_vocab(corpus) - 
self.assertEqual(len(model.docvecs.doctag_syn0), 300) + self.assertEqual(len(model.docvecs.vectors_docs), 300) self.assertEqual(model.docvecs[0].shape, (100,)) self.assertEqual(model.docvecs[np.int64(0)].shape, (100,)) self.assertRaises(KeyError, model.__getitem__, '_*0') @@ -310,7 +310,7 @@ def test_string_doctags(self): model = doc2vec.Doc2Vec(min_count=1) model.build_vocab(corpus) - self.assertEqual(len(model.docvecs.doctag_syn0), 300) + self.assertEqual(len(model.docvecs.vectors_docs), 300) self.assertEqual(model.docvecs[0].shape, (100,)) self.assertEqual(model.docvecs['_*0'].shape, (100,)) self.assertTrue(all(model.docvecs['_*0'] == model.docvecs[0])) @@ -319,7 +319,7 @@ def test_string_doctags(self): max( model.docvecs._int_index(str_key, model.docvecs.doctags, model.docvecs.max_rawint) for str_key in model.docvecs.doctags.keys()) - < len(model.docvecs.doctag_syn0) + < len(model.docvecs.vectors_docs) ) # verify docvecs.most_similar() returns string doctags rather than indexes self.assertEqual(model.docvecs.offset2doctag[0], model.docvecs.most_similar([model.docvecs[0]])[0][0]) @@ -387,20 +387,20 @@ def model_sanity(self, model, keep_training=True): tmpf = get_tmpfile('gensim_doc2vec.tst') model.save(tmpf) loaded = doc2vec.Doc2Vec.load(tmpf) - loaded.train(sentences, total_examples=loaded.corpus_count, epochs=loaded.iter) + loaded.train(sentences, total_examples=loaded.corpus_count, epochs=loaded.epochs) def test_training(self): """Test doc2vec training.""" corpus = DocsLeeCorpus() - model = doc2vec.Doc2Vec(size=100, min_count=2, iter=20, workers=1) + model = doc2vec.Doc2Vec(vector_size=100, min_count=2, epochs=20, workers=1) model.build_vocab(corpus) - self.assertEqual(model.docvecs.doctag_syn0.shape, (300, 100)) - model.train(corpus, total_examples=model.corpus_count, epochs=model.iter) + self.assertEqual(model.docvecs.vectors_docs.shape, (300, 100)) + model.train(corpus, total_examples=model.corpus_count, epochs=model.epochs) self.model_sanity(model) # build vocab and train in one step; must be the same as above - model2 = doc2vec.Doc2Vec(corpus, size=100, min_count=2, iter=20, workers=1) + model2 = doc2vec.Doc2Vec(corpus, vector_size=100, min_count=2, epochs=20, workers=1) self.models_equal(model, model2) @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") @@ -409,19 +409,19 @@ def test_training_fromfile(self): with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) - model = doc2vec.Doc2Vec(size=100, min_count=2, iter=20, workers=1) + model = doc2vec.Doc2Vec(vector_size=100, min_count=2, epochs=20, workers=1) model.build_vocab(corpus_file=corpus_file) - self.assertEqual(model.docvecs.doctag_syn0.shape, (300, 100)) - model.train(corpus_file=corpus_file, total_words=model.corpus_total_words, epochs=model.iter) + self.assertEqual(model.docvecs.vectors_docs.shape, (300, 100)) + model.train(corpus_file=corpus_file, total_words=model.corpus_total_words, epochs=model.epochs) self.model_sanity(model) - model = doc2vec.Doc2Vec(corpus_file=corpus_file, size=100, min_count=2, iter=20, workers=1) + model = doc2vec.Doc2Vec(corpus_file=corpus_file, vector_size=100, min_count=2, epochs=20, workers=1) self.model_sanity(model) def test_dbow_hs(self): """Test DBOW doc2vec training.""" - model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=1, negative=0, min_count=2, iter=20) + model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=1, negative=0, min_count=2, epochs=20) self.model_sanity(model) 
@unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") @@ -429,14 +429,14 @@ def test_dbow_hs_fromfile(self): """Test DBOW doc2vec training.""" with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) - model = doc2vec.Doc2Vec(corpus_file=corpus_file, dm=0, hs=1, negative=0, min_count=2, iter=20) + model = doc2vec.Doc2Vec(corpus_file=corpus_file, dm=0, hs=1, negative=0, min_count=2, epochs=20) self.model_sanity(model) def test_dmm_hs(self): """Test DM/mean doc2vec training.""" model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=1, size=24, window=4, - hs=1, negative=0, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=1, vector_size=24, window=4, + hs=1, negative=0, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) @@ -446,16 +446,16 @@ def test_dmm_hs_fromfile(self): with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=1, size=24, window=4, - hs=1, negative=0, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=1, vector_size=24, window=4, + hs=1, negative=0, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) def test_dms_hs(self): """Test DM/sum doc2vec training.""" model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=0, size=24, window=4, hs=1, - negative=0, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=0, vector_size=24, window=4, hs=1, + negative=0, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) @@ -465,16 +465,16 @@ def test_dms_hs_fromfile(self): with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=0, size=24, window=4, hs=1, - negative=0, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=0, vector_size=24, window=4, hs=1, + negative=0, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) def test_dmc_hs(self): """Test DM/concatenate doc2vec training.""" model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_concat=1, size=24, window=4, - hs=1, negative=0, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_concat=1, vector_size=24, window=4, + hs=1, negative=0, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) @@ -484,14 +484,14 @@ def test_dmc_hs_fromfile(self): with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_concat=1, size=24, window=4, - hs=1, negative=0, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_concat=1, vector_size=24, window=4, + hs=1, negative=0, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) def test_dbow_neg(self): """Test DBOW doc2vec training.""" - model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=0, negative=10, min_count=2, iter=20) + model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=0, negative=10, min_count=2, epochs=20) self.model_sanity(model) @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") @@ -499,14 +499,14 @@ def test_dbow_neg_fromfile(self): """Test DBOW doc2vec training.""" with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) - model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=0, negative=10, min_count=2, iter=20) + model = doc2vec.Doc2Vec(list_corpus, 
dm=0, hs=0, negative=10, min_count=2, epochs=20) self.model_sanity(model) def test_dmm_neg(self): """Test DM/mean doc2vec training.""" model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=1, size=24, window=4, hs=0, - negative=10, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=1, vector_size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) @@ -516,16 +516,16 @@ def test_dmm_neg_fromfile(self): with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=1, size=24, window=4, hs=0, - negative=10, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=1, vector_size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) def test_dms_neg(self): """Test DM/sum doc2vec training.""" model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=0, size=24, window=4, hs=0, - negative=10, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=0, vector_size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) @@ -535,16 +535,16 @@ def test_dms_neg_fromfile(self): with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=0, size=24, window=4, hs=0, - negative=10, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=0, vector_size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) def test_dmc_neg(self): """Test DM/concatenate doc2vec training.""" model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_concat=1, size=24, window=4, hs=0, - negative=10, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_concat=1, vector_size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) @@ -554,8 +554,8 @@ def test_dmc_neg_fromfile(self): with temporary_file(get_tmpfile('gensim_word2vec.tst')) as corpus_file: save_lee_corpus_as_line_sentence(corpus_file) model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_concat=1, size=24, window=4, hs=0, - negative=10, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_concat=1, vector_size=24, window=4, hs=0, + negative=10, alpha=0.05, min_count=2, epochs=20 ) self.model_sanity(model) @@ -588,31 +588,31 @@ def test_deterministic_dmc(self): """Test doc2vec results identical with identical RNG seed.""" # bigger, dmc model = doc2vec.Doc2Vec( - DocsLeeCorpus(), dm=1, dm_concat=1, size=24, + DocsLeeCorpus(), dm=1, dm_concat=1, vector_size=24, window=4, hs=1, negative=3, seed=42, workers=1 ) model2 = doc2vec.Doc2Vec( - DocsLeeCorpus(), dm=1, dm_concat=1, size=24, + DocsLeeCorpus(), dm=1, dm_concat=1, vector_size=24, window=4, hs=1, negative=3, seed=42, workers=1 ) self.models_equal(model, model2) def test_mixed_tag_types(self): - """Ensure alternating int/string tags don't share indexes in doctag_syn0""" + """Ensure alternating int/string tags don't share indexes in vectors_docs""" mixed_tag_corpus = [doc2vec.TaggedDocument(words, [i, words[0]]) for i, words in enumerate(raw_sentences)] model = doc2vec.Doc2Vec() model.build_vocab(mixed_tag_corpus) expected_length = len(sentences) + len(model.docvecs.doctags) # 9 sentences, 7 unique first tokens - self.assertEqual(len(model.docvecs.doctag_syn0), expected_length) + self.assertEqual(len(model.docvecs.vectors_docs), expected_length) def models_equal(self, model, 
model2): # check words/hidden-weights self.assertEqual(len(model.wv.vocab), len(model2.wv.vocab)) - self.assertTrue(np.allclose(model.wv.syn0, model2.wv.syn0)) + self.assertTrue(np.allclose(model.wv.vectors, model2.wv.vectors)) if model.hs: - self.assertTrue(np.allclose(model.syn1, model2.syn1)) + self.assertTrue(np.allclose(model.trainables.syn1, model2.trainables.syn1)) if model.negative: - self.assertTrue(np.allclose(model.syn1neg, model2.syn1neg)) + self.assertTrue(np.allclose(model.trainables.syn1neg, model2.trainables.syn1neg)) # check docvecs self.assertEqual(len(model.docvecs.doctags), len(model2.docvecs.doctags)) self.assertEqual(len(model.docvecs.offset2doctag), len(model2.docvecs.offset2doctag)) @@ -621,35 +621,35 @@ def test_delete_temporary_training_data(self): """Test doc2vec model after delete_temporary_training_data""" for i in [0, 1]: for j in [0, 1]: - model = doc2vec.Doc2Vec(sentences, size=5, min_count=1, window=4, hs=i, negative=j) + model = doc2vec.Doc2Vec(sentences, vector_size=5, min_count=1, window=4, hs=i, negative=j) if i: - self.assertTrue(hasattr(model, 'syn1')) + self.assertTrue(hasattr(model.trainables, 'syn1')) if j: - self.assertTrue(hasattr(model, 'syn1neg')) + self.assertTrue(hasattr(model.trainables, 'syn1neg')) self.assertTrue(hasattr(model, 'syn0_lockf')) model.delete_temporary_training_data(keep_doctags_vectors=False, keep_inference=False) self.assertTrue(len(model['human']), 10) self.assertTrue(model.wv.vocab['graph'].count, 5) - self.assertTrue(not hasattr(model, 'syn1')) - self.assertTrue(not hasattr(model, 'syn1neg')) - self.assertTrue(not hasattr(model, 'syn0_lockf')) - self.assertTrue(model.docvecs and not hasattr(model.docvecs, 'doctag_syn0')) + self.assertTrue(not hasattr(model.trainables, 'syn1')) + self.assertTrue(not hasattr(model.trainables, 'syn1neg')) + self.assertTrue(not hasattr(model.trainables, 'syn0_lockf')) + self.assertTrue(model.docvecs and not hasattr(model.docvecs, 'vectors_docs')) self.assertTrue(model.docvecs and not hasattr(model.docvecs, 'doctag_syn0_lockf')) model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=1, size=24, window=4, hs=1, - negative=0, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=1, vector_size=24, window=4, hs=1, + negative=0, alpha=0.05, min_count=2, epochs=20 ) model.delete_temporary_training_data(keep_doctags_vectors=True, keep_inference=True) - self.assertTrue(model.docvecs and hasattr(model.docvecs, 'doctag_syn0')) - self.assertTrue(hasattr(model, 'syn1')) + self.assertTrue(model.docvecs and hasattr(model.docvecs, 'vectors_docs')) + self.assertTrue(hasattr(model.trainables, 'syn1')) self.model_sanity(model, keep_training=False) model = doc2vec.Doc2Vec( - list_corpus, dm=1, dm_mean=1, size=24, window=4, hs=0, - negative=1, alpha=0.05, min_count=2, iter=20 + list_corpus, dm=1, dm_mean=1, vector_size=24, window=4, hs=0, + negative=1, alpha=0.05, min_count=2, epochs=20 ) model.delete_temporary_training_data(keep_doctags_vectors=True, keep_inference=True) self.model_sanity(model, keep_training=False) - self.assertTrue(hasattr(model, 'syn1neg')) + self.assertTrue(hasattr(model.trainables, 'syn1neg')) def test_word_vec_non_writeable(self): model = keyedvectors.KeyedVectors.load_word2vec_format(datapath('word2vec_pre_kv_c')) @@ -673,10 +673,10 @@ def testTrainWarning(self, l): raw_sentences = [['human'], ['graph', 'trees']] sentences = [doc2vec.TaggedDocument(words, [i]) for i, words in enumerate(raw_sentences)] - model = doc2vec.Doc2Vec(alpha=0.025, min_alpha=0.025, min_count=1, workers=8, 
size=5) + model = doc2vec.Doc2Vec(alpha=0.025, min_alpha=0.025, min_count=1, workers=8, vector_size=5) model.build_vocab(sentences) for epoch in range(10): - model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) + model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) model.alpha -= 0.002 model.min_alpha = model.alpha if epoch == 5: diff --git a/gensim/test/test_dtm.py b/gensim/test/test_dtm.py index efc84f4e75..6373ca542f 100644 --- a/gensim/test/test_dtm.py +++ b/gensim/test/test_dtm.py @@ -36,7 +36,7 @@ def testDtm(self): topics = model.show_topics(num_topics=2, times=2, num_words=10) self.assertEqual(len(topics), 4) - one_topic = model.show_topic(topicid=1, time=1, num_words=10) + one_topic = model.show_topic(topicid=1, time=1, topn=10) self.assertEqual(len(one_topic), 10) self.assertEqual(one_topic[0][1], u'idexx') @@ -50,7 +50,7 @@ def testDim(self): topics = model.show_topics(num_topics=2, times=2, num_words=10) self.assertEqual(len(topics), 4) - one_topic = model.show_topic(topicid=1, time=1, num_words=10) + one_topic = model.show_topic(topicid=1, time=1, topn=10) self.assertEqual(len(one_topic), 10) self.assertEqual(one_topic[0][1], u'skills') diff --git a/gensim/test/test_fasttext.py b/gensim/test/test_fasttext.py index 4060f0b4f8..5b1134a87b 100644 --- a/gensim/test/test_fasttext.py +++ b/gensim/test/test_fasttext.py @@ -56,18 +56,18 @@ def test_training(self): model.build_vocab(sentences) self.model_sanity(model) - model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) - sims = model.most_similar('graph', topn=10) + model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) + sims = model.wv.most_similar('graph', topn=10) - self.assertEqual(model.wv.syn0.shape, (12, 10)) + self.assertEqual(model.wv.vectors.shape, (12, 10)) self.assertEqual(len(model.wv.vocab), 12) - self.assertEqual(model.wv.syn0_vocab.shape[1], 10) - self.assertEqual(model.wv.syn0_ngrams.shape[1], 10) + self.assertEqual(model.wv.vectors_vocab.shape[1], 10) + self.assertEqual(model.wv.vectors_ngrams.shape[1], 10) self.model_sanity(model) # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar(positive=[graph_vector], topn=11) sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) @@ -76,10 +76,10 @@ def test_training(self): self.models_equal(model, model2) # verify oov-word vector retrieval - invocab_vec = model['minors'] # invocab word + invocab_vec = model.wv['minors'] # invocab word self.assertEqual(len(invocab_vec), 10) - oov_vec = model['minor'] # oov word + oov_vec = model.wv['minor'] # oov word self.assertEqual(len(oov_vec), 10) @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") @@ -91,40 +91,40 @@ def test_training_fromfile(self): model.build_vocab(corpus_file=corpus_file) self.model_sanity(model) - model.train(corpus_file=corpus_file, total_words=model.corpus_total_words, epochs=model.iter) - sims = model.most_similar('graph', topn=10) + model.train(corpus_file=corpus_file, total_words=model.corpus_total_words, epochs=model.epochs) + sims = model.wv.most_similar('graph', topn=10) - self.assertEqual(model.wv.syn0.shape, (12, 10)) + self.assertEqual(model.wv.vectors.shape, (12, 10)) 
self.assertEqual(len(model.wv.vocab), 12) - self.assertEqual(model.wv.syn0_vocab.shape[1], 10) - self.assertEqual(model.wv.syn0_ngrams.shape[1], 10) + self.assertEqual(model.wv.vectors_vocab.shape[1], 10) + self.assertEqual(model.wv.vectors_ngrams.shape[1], 10) self.model_sanity(model) # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar(positive=[graph_vector], topn=11) sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) # verify oov-word vector retrieval - invocab_vec = model['minors'] # invocab word + invocab_vec = model.wv['minors'] # invocab word self.assertEqual(len(invocab_vec), 10) - oov_vec = model['minor'] # oov word + oov_vec = model.wv['minor'] # oov word self.assertEqual(len(oov_vec), 10) def models_equal(self, model, model2): self.assertEqual(len(model.wv.vocab), len(model2.wv.vocab)) - self.assertEqual(model.num_ngram_vectors, model2.num_ngram_vectors) - self.assertTrue(np.allclose(model.wv.syn0_vocab, model2.wv.syn0_vocab)) - self.assertTrue(np.allclose(model.wv.syn0_ngrams, model2.wv.syn0_ngrams)) - self.assertTrue(np.allclose(model.wv.syn0, model2.wv.syn0)) + self.assertEqual(model.wv.num_ngram_vectors, model2.wv.num_ngram_vectors) + self.assertTrue(np.allclose(model.wv.vectors_vocab, model2.wv.vectors_vocab)) + self.assertTrue(np.allclose(model.wv.vectors_ngrams, model2.wv.vectors_ngrams)) + self.assertTrue(np.allclose(model.wv.vectors, model2.wv.vectors)) if model.hs: - self.assertTrue(np.allclose(model.syn1, model2.syn1)) + self.assertTrue(np.allclose(model.trainables.syn1, model2.trainables.syn1)) if model.negative: - self.assertTrue(np.allclose(model.syn1neg, model2.syn1neg)) + self.assertTrue(np.allclose(model.trainables.syn1neg, model2.trainables.syn1neg)) most_common_word = max(model.wv.vocab.items(), key=lambda item: item[1].count)[0] - self.assertTrue(np.allclose(model[most_common_word], model2[most_common_word])) + self.assertTrue(np.allclose(model.wv[most_common_word], model2.wv[most_common_word])) @unittest.skipIf(IS_WIN32, "avoid memory error with Appveyor x32") def test_persistence(self): @@ -136,7 +136,7 @@ def test_persistence(self): wv = model.wv wv.save(tmpf) loaded_wv = FastTextKeyedVectors.load(tmpf) - self.assertTrue(np.allclose(wv.syn0_ngrams, loaded_wv.syn0_ngrams)) + self.assertTrue(np.allclose(wv.vectors_ngrams, loaded_wv.vectors_ngrams)) self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) @unittest.skipIf(os.name == 'nt', "corpus_file is not supported for Windows + Py2" @@ -153,7 +153,7 @@ def test_persistence_fromfile(self): wv = model.wv wv.save(tmpf) loaded_wv = FastTextKeyedVectors.load(tmpf) - self.assertTrue(np.allclose(wv.syn0_ngrams, loaded_wv.syn0_ngrams)) + self.assertTrue(np.allclose(wv.vectors_ngrams, loaded_wv.vectors_ngrams)) self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) @unittest.skipIf(IS_WIN32, "avoid memory error with Appveyor x32") @@ -163,19 +163,19 @@ def test_norm_vectors_not_saved(self): model.init_sims() model.save(tmpf) loaded_model = FT_gensim.load(tmpf) - self.assertTrue(loaded_model.wv.syn0norm is None) - self.assertTrue(loaded_model.wv.syn0_ngrams_norm is None) + self.assertTrue(loaded_model.wv.vectors_norm is None) + self.assertTrue(loaded_model.wv.vectors_ngrams_norm is None) wv = model.wv wv.save(tmpf) loaded_kv = 
FastTextKeyedVectors.load(tmpf) - self.assertTrue(loaded_kv.syn0norm is None) - self.assertTrue(loaded_kv.syn0_ngrams_norm is None) + self.assertTrue(loaded_kv.vectors_norm is None) + self.assertTrue(loaded_kv.vectors_ngrams_norm is None) def model_sanity(self, model): - self.assertEqual(model.wv.syn0.shape, (len(model.wv.vocab), model.vector_size)) - self.assertEqual(model.wv.syn0_vocab.shape, (len(model.wv.vocab), model.vector_size)) - self.assertEqual(model.wv.syn0_ngrams.shape, (model.num_ngram_vectors, model.vector_size)) + self.assertEqual(model.wv.vectors.shape, (len(model.wv.vocab), model.vector_size)) + self.assertEqual(model.wv.vectors_vocab.shape, (len(model.wv.vocab), model.vector_size)) + self.assertEqual(model.wv.vectors_ngrams.shape, (model.wv.num_ngram_vectors, model.vector_size)) def test_load_fasttext_format(self): try: @@ -183,9 +183,9 @@ def test_load_fasttext_format(self): except Exception as exc: self.fail('Unable to load FastText model from file %s: %s' % (self.test_model_file, exc)) vocab_size, model_size = 1762, 10 - self.assertEqual(model.wv.syn0.shape, (vocab_size, model_size)) + self.assertEqual(model.wv.vectors.shape, (vocab_size, model_size)) self.assertEqual(len(model.wv.vocab), vocab_size, model_size) - self.assertEqual(model.wv.syn0_ngrams.shape, (model.num_ngram_vectors, model_size)) + self.assertEqual(model.wv.vectors_ngrams.shape, (model.wv.num_ngram_vectors, model_size)) expected_vec = [ -0.57144, @@ -199,7 +199,7 @@ def test_load_fasttext_format(self): 0.23418, 0.060007 ] # obtained using ./fasttext print-word-vectors lee_fasttext_new.bin - self.assertTrue(np.allclose(model["hundred"], expected_vec, atol=1e-4)) + self.assertTrue(np.allclose(model.wv["hundred"], expected_vec, atol=1e-4)) # vector for oov words are slightly different from original FastText due to discarding unused ngrams # obtained using a modified version of ./fasttext print-word-vectors lee_fasttext_new.bin @@ -215,18 +215,18 @@ def test_load_fasttext_format(self): 0.53203, 0.77568 ] - self.assertTrue(np.allclose(model["rejection"], expected_vec_oov, atol=1e-4)) + self.assertTrue(np.allclose(model.wv["rejection"], expected_vec_oov, atol=1e-4)) - self.assertEqual(model.min_count, 5) + self.assertEqual(model.vocabulary.min_count, 5) self.assertEqual(model.window, 5) - self.assertEqual(model.iter, 5) + self.assertEqual(model.epochs, 5) self.assertEqual(model.negative, 5) - self.assertEqual(model.sample, 0.0001) - self.assertEqual(model.bucket, 1000) + self.assertEqual(model.vocabulary.sample, 0.0001) + self.assertEqual(model.trainables.bucket, 1000) self.assertEqual(model.wv.max_n, 6) self.assertEqual(model.wv.min_n, 3) - self.assertEqual(model.wv.syn0.shape, (len(model.wv.vocab), model.vector_size)) - self.assertEqual(model.wv.syn0_ngrams.shape, (model.num_ngram_vectors, model.vector_size)) + self.assertEqual(model.wv.vectors.shape, (len(model.wv.vocab), model.vector_size)) + self.assertEqual(model.wv.vectors_ngrams.shape, (model.wv.num_ngram_vectors, model.vector_size)) def test_load_fasttext_new_format(self): try: @@ -234,9 +234,9 @@ def test_load_fasttext_new_format(self): except Exception as exc: self.fail('Unable to load FastText model from file %s: %s' % (self.test_new_model_file, exc)) vocab_size, model_size = 1763, 10 - self.assertEqual(new_model.wv.syn0.shape, (vocab_size, model_size)) + self.assertEqual(new_model.wv.vectors.shape, (vocab_size, model_size)) self.assertEqual(len(new_model.wv.vocab), vocab_size, model_size) - self.assertEqual(new_model.wv.syn0_ngrams.shape, 
(new_model.num_ngram_vectors, model_size)) + self.assertEqual(new_model.wv.vectors_ngrams.shape, (new_model.wv.num_ngram_vectors, model_size)) expected_vec = [ -0.025627, @@ -250,7 +250,7 @@ def test_load_fasttext_new_format(self): -0.19685, -0.13179 ] # obtained using ./fasttext print-word-vectors lee_fasttext_new.bin - self.assertTrue(np.allclose(new_model["hundred"], expected_vec, atol=1e-4)) + self.assertTrue(np.allclose(new_model.wv["hundred"], expected_vec, atol=1e-4)) # vector for oov words are slightly different from original FastText due to discarding unused ngrams # obtained using a modified version of ./fasttext print-word-vectors lee_fasttext_new.bin @@ -266,18 +266,18 @@ def test_load_fasttext_new_format(self): -0.17856, 0.19815 ] - self.assertTrue(np.allclose(new_model["rejection"], expected_vec_oov, atol=1e-4)) + self.assertTrue(np.allclose(new_model.wv["rejection"], expected_vec_oov, atol=1e-4)) - self.assertEqual(new_model.min_count, 5) + self.assertEqual(new_model.vocabulary.min_count, 5) self.assertEqual(new_model.window, 5) - self.assertEqual(new_model.iter, 5) + self.assertEqual(new_model.epochs, 5) self.assertEqual(new_model.negative, 5) - self.assertEqual(new_model.sample, 0.0001) - self.assertEqual(new_model.bucket, 1000) + self.assertEqual(new_model.vocabulary.sample, 0.0001) + self.assertEqual(new_model.trainables.bucket, 1000) self.assertEqual(new_model.wv.max_n, 6) self.assertEqual(new_model.wv.min_n, 3) - self.assertEqual(new_model.wv.syn0.shape, (len(new_model.wv.vocab), new_model.vector_size)) - self.assertEqual(new_model.wv.syn0_ngrams.shape, (new_model.num_ngram_vectors, new_model.vector_size)) + self.assertEqual(new_model.wv.vectors.shape, (len(new_model.wv.vocab), new_model.vector_size)) + self.assertEqual(new_model.wv.vectors_ngrams.shape, (new_model.wv.num_ngram_vectors, new_model.vector_size)) def test_load_model_supervised(self): with self.assertRaises(NotImplementedError): @@ -285,79 +285,83 @@ def test_load_model_supervised(self): def test_load_model_with_non_ascii_vocab(self): model = FT_gensim.load_fasttext_format(datapath('non_ascii_fasttext')) - self.assertTrue(u'který' in model) + self.assertTrue(u'který' in model.wv) try: - model[u'který'] + model.wv[u'který'] except UnicodeDecodeError: self.fail('Unable to access vector for utf8 encoded non-ascii word') def test_load_model_non_utf8_encoding(self): model = FT_gensim.load_fasttext_format(datapath('cp852_fasttext'), encoding='cp852') - self.assertTrue(u'který' in model) + self.assertTrue(u'který' in model.wv) try: - model[u'který'] + model.wv[u'který'] except KeyError: self.fail('Unable to access vector for cp-852 word') def test_n_similarity(self): # In vocab, sanity check - self.assertTrue(np.allclose(self.test_model.n_similarity(['the', 'and'], ['and', 'the']), 1.0)) - self.assertEqual(self.test_model.n_similarity(['the'], ['and']), self.test_model.n_similarity(['and'], ['the'])) + self.assertTrue(np.allclose(self.test_model.wv.n_similarity(['the', 'and'], ['and', 'the']), 1.0)) + self.assertEqual( + self.test_model.wv.n_similarity(['the'], ['and']), self.test_model.wv.n_similarity(['and'], ['the'])) # Out of vocab check - self.assertTrue(np.allclose(self.test_model.n_similarity(['night', 'nights'], ['nights', 'night']), 1.0)) + self.assertTrue(np.allclose(self.test_model.wv.n_similarity(['night', 'nights'], ['nights', 'night']), 1.0)) self.assertEqual( - self.test_model.n_similarity(['night'], ['nights']), self.test_model.n_similarity(['nights'], ['night']) + 
self.test_model.wv.n_similarity(['night'], ['nights']), + self.test_model.wv.n_similarity(['nights'], ['night']) ) def test_similarity(self): # In vocab, sanity check - self.assertTrue(np.allclose(self.test_model.similarity('the', 'the'), 1.0)) - self.assertEqual(self.test_model.similarity('the', 'and'), self.test_model.similarity('and', 'the')) + self.assertTrue(np.allclose(self.test_model.wv.similarity('the', 'the'), 1.0)) + self.assertEqual(self.test_model.wv.similarity('the', 'and'), self.test_model.wv.similarity('and', 'the')) # Out of vocab check - self.assertTrue(np.allclose(self.test_model.similarity('nights', 'nights'), 1.0)) - self.assertEqual(self.test_model.similarity('night', 'nights'), self.test_model.similarity('nights', 'night')) + self.assertTrue(np.allclose(self.test_model.wv.similarity('nights', 'nights'), 1.0)) + self.assertEqual( + self.test_model.wv.similarity('night', 'nights'), self.test_model.wv.similarity('nights', 'night')) def test_most_similar(self): # In vocab, sanity check - self.assertEqual(len(self.test_model.most_similar(positive=['the', 'and'], topn=5)), 5) - self.assertEqual(self.test_model.most_similar('the'), self.test_model.most_similar(positive=['the'])) + self.assertEqual(len(self.test_model.wv.most_similar(positive=['the', 'and'], topn=5)), 5) + self.assertEqual(self.test_model.wv.most_similar('the'), self.test_model.wv.most_similar(positive=['the'])) # Out of vocab check - self.assertEqual(len(self.test_model.most_similar(['night', 'nights'], topn=5)), 5) - self.assertEqual(self.test_model.most_similar('nights'), self.test_model.most_similar(positive=['nights'])) + self.assertEqual(len(self.test_model.wv.most_similar(['night', 'nights'], topn=5)), 5) + self.assertEqual( + self.test_model.wv.most_similar('nights'), self.test_model.wv.most_similar(positive=['nights'])) def test_most_similar_cosmul(self): # In vocab, sanity check - self.assertEqual(len(self.test_model.most_similar_cosmul(positive=['the', 'and'], topn=5)), 5) + self.assertEqual(len(self.test_model.wv.most_similar_cosmul(positive=['the', 'and'], topn=5)), 5) self.assertEqual( - self.test_model.most_similar_cosmul('the'), - self.test_model.most_similar_cosmul(positive=['the'])) + self.test_model.wv.most_similar_cosmul('the'), + self.test_model.wv.most_similar_cosmul(positive=['the'])) # Out of vocab check - self.assertEqual(len(self.test_model.most_similar_cosmul(['night', 'nights'], topn=5)), 5) + self.assertEqual(len(self.test_model.wv.most_similar_cosmul(['night', 'nights'], topn=5)), 5) self.assertEqual( - self.test_model.most_similar_cosmul('nights'), - self.test_model.most_similar_cosmul(positive=['nights'])) + self.test_model.wv.most_similar_cosmul('nights'), + self.test_model.wv.most_similar_cosmul(positive=['nights'])) def test_lookup(self): # In vocab, sanity check self.assertTrue('night' in self.test_model.wv.vocab) - self.assertTrue(np.allclose(self.test_model['night'], self.test_model[['night']])) + self.assertTrue(np.allclose(self.test_model.wv['night'], self.test_model.wv[['night']])) # Out of vocab check self.assertFalse('nights' in self.test_model.wv.vocab) - self.assertTrue(np.allclose(self.test_model['nights'], self.test_model[['nights']])) + self.assertTrue(np.allclose(self.test_model.wv['nights'], self.test_model.wv[['nights']])) def test_contains(self): # In vocab, sanity check self.assertTrue('night' in self.test_model.wv.vocab) - self.assertTrue('night' in self.test_model) + self.assertTrue('night' in self.test_model.wv) # Out of vocab check 
self.assertFalse('nights' in self.test_model.wv.vocab) - self.assertTrue('nights' in self.test_model) + self.assertTrue('nights' in self.test_model.wv) def test_wm_distance(self): doc = ['night', 'payment'] oov_doc = ['nights', 'forests', 'payments'] - dist = self.test_model.wmdistance(doc, oov_doc) + dist = self.test_model.wv.wmdistance(doc, oov_doc) self.assertNotEqual(float('inf'), dist) def test_cbow_hs_training(self): @@ -643,7 +647,7 @@ def test_online_learning_after_save(self): model_neg = FT_gensim.load(tmpf) self.assertTrue(len(model_neg.wv.vocab), 12) model_neg.build_vocab(new_sentences, update=True) # update vocab - model_neg.train(new_sentences, total_examples=model_neg.corpus_count, epochs=model_neg.iter) + model_neg.train(new_sentences, total_examples=model_neg.corpus_count, epochs=model_neg.epochs) self.assertEqual(len(model_neg.wv.vocab), 14) @unittest.skipIf(os.name == 'nt' and six.PY2, "corpus_file training is not supported on Windows + Py27") @@ -660,7 +664,7 @@ def test_online_learning_after_save_fromfile(self): self.assertTrue(len(model_neg.wv.vocab), 12) model_neg.build_vocab(corpus_file=new_corpus_file, update=True) # update vocab model_neg.train(corpus_file=new_corpus_file, total_words=model_neg.corpus_total_words, - epochs=model_neg.iter) + epochs=model_neg.epochs) self.assertEqual(len(model_neg.wv.vocab), 14) def online_sanity(self, model): @@ -672,17 +676,17 @@ def online_sanity(self, model): others.append(l) self.assertTrue(all(['terrorism' not in l for l in others])) model.build_vocab(others) - model.train(others, total_examples=model.corpus_count, epochs=model.iter) - # checks that `syn0` is different from `syn0_vocab` - self.assertFalse(np.all(np.equal(model.wv.syn0, model.wv.syn0_vocab))) + model.train(others, total_examples=model.corpus_count, epochs=model.epochs) + # checks that `vectors` is different from `vectors_vocab` + self.assertFalse(np.all(np.equal(model.wv.vectors, model.wv.vectors_vocab))) self.assertFalse('terrorism' in model.wv.vocab) model.build_vocab(terro, update=True) # update vocab - self.assertTrue(model.wv.syn0_ngrams.dtype == 'float32') + self.assertTrue(model.wv.vectors_ngrams.dtype == 'float32') self.assertTrue('terrorism' in model.wv.vocab) - orig0_all = np.copy(model.wv.syn0_ngrams) - model.train(terro, total_examples=len(terro), epochs=model.iter) - self.assertFalse(np.allclose(model.wv.syn0_ngrams, orig0_all)) - sim = model.n_similarity(['war'], ['terrorism']) + orig0_all = np.copy(model.wv.vectors_ngrams) + model.train(terro, total_examples=len(terro), epochs=model.epochs) + self.assertFalse(np.allclose(model.wv.vectors_ngrams, orig0_all)) + sim = model.wv.n_similarity(['war'], ['terrorism']) self.assertLess(0., sim) @unittest.skipIf(IS_WIN32, "avoid memory error with Appveyor x32") @@ -713,9 +717,9 @@ def test_cbow_neg_online(self): def test_get_vocab_word_vecs(self): model = FT_gensim(size=10, min_count=1, seed=42) model.build_vocab(sentences) - original_syn0_vocab = np.copy(model.wv.syn0_vocab) + original_syn0_vocab = np.copy(model.wv.vectors_vocab) model.trainables.get_vocab_word_vecs(model.wv) - self.assertTrue(np.all(np.equal(model.wv.syn0_vocab, original_syn0_vocab))) + self.assertTrue(np.all(np.equal(model.wv.vectors_vocab, original_syn0_vocab))) def test_persistence_word2vec_format(self): """Test storing/loading the model in word2vec format.""" @@ -724,14 +728,14 @@ def test_persistence_word2vec_format(self): model.wv.save_word2vec_format(tmpf, binary=True) loaded_model_kv = 
Word2VecKeyedVectors.load_word2vec_format(tmpf, binary=True) self.assertEqual(len(model.wv.vocab), len(loaded_model_kv.vocab)) - self.assertTrue(np.allclose(model['human'], loaded_model_kv['human'])) + self.assertTrue(np.allclose(model.wv['human'], loaded_model_kv['human'])) def test_bucket_ngrams(self): model = FT_gensim(size=10, min_count=1, bucket=20) model.build_vocab(sentences) - self.assertEqual(model.wv.syn0_ngrams.shape, (20, 10)) + self.assertEqual(model.wv.vectors_ngrams.shape, (20, 10)) model.build_vocab(new_sentences, update=True) - self.assertEqual(model.wv.syn0_ngrams.shape, (20, 10)) + self.assertEqual(model.wv.vectors_ngrams.shape, (20, 10)) def test_estimate_memory(self): model = FT_gensim(sg=1, hs=1, size=10, negative=5, min_count=3) @@ -754,7 +758,7 @@ def testLoadOldModel(self): self.assertTrue(len(model.wv.vocab) == 12) self.assertTrue(len(model.wv.index2word) == 12) self.assertIsNone(model.corpus_total_words) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (12, )) self.assertTrue(model.vocabulary.cum_table.shape == (12, )) @@ -769,7 +773,7 @@ def testLoadOldModel(self): self.assertTrue(len(model.wv.vocab) == 12) self.assertTrue(len(model.wv.index2word) == 12) self.assertIsNone(model.corpus_total_words) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (12, )) self.assertTrue(model.vocabulary.cum_table.shape == (12, )) @@ -779,7 +783,7 @@ def testLoadOldModel(self): def compare_with_wrapper(self, model_gensim, model_wrapper): # make sure we get >=2 overlapping words for top-10 similar words suggested for `night` - sims_gensim = model_gensim.most_similar('night', topn=10) + sims_gensim = model_gensim.wv.most_similar('night', topn=10) sims_gensim_words = (list(map(lambda x: x[0], sims_gensim))) # get similar words sims_wrapper = model_wrapper.most_similar('night', topn=10) @@ -809,9 +813,9 @@ def test_cbow_hs_against_wrapper(self): lee_data = LineSentence(datapath('lee_background.cor')) model_gensim.build_vocab(lee_data) - orig0 = np.copy(model_gensim.wv.syn0[0]) - model_gensim.train(lee_data, total_examples=model_gensim.corpus_count, epochs=model_gensim.iter) - self.assertFalse((orig0 == model_gensim.wv.syn0[0]).all()) # vector should vary after training + orig0 = np.copy(model_gensim.wv.vectors[0]) + model_gensim.train(lee_data, total_examples=model_gensim.corpus_count, epochs=model_gensim.epochs) + self.assertFalse((orig0 == model_gensim.wv.vectors[0]).all()) # vector should vary after training self.compare_with_wrapper(model_gensim, model_wrapper) def test_sg_hs_against_wrapper(self): @@ -832,9 +836,9 @@ def test_sg_hs_against_wrapper(self): lee_data = LineSentence(datapath('lee_background.cor')) model_gensim.build_vocab(lee_data) - orig0 = np.copy(model_gensim.wv.syn0[0]) - model_gensim.train(lee_data, total_examples=model_gensim.corpus_count, epochs=model_gensim.iter) - self.assertFalse((orig0 == model_gensim.wv.syn0[0]).all()) # vector should vary after training + orig0 = np.copy(model_gensim.wv.vectors[0]) + model_gensim.train(lee_data, total_examples=model_gensim.corpus_count, epochs=model_gensim.epochs) + self.assertFalse((orig0 == model_gensim.wv.vectors[0]).all()) # vector should vary after 
training self.compare_with_wrapper(model_gensim, model_wrapper) diff --git a/gensim/test/test_keras_integration.py b/gensim/test/test_keras_integration.py index 85c2c2c4bb..bad0bb8b95 100644 --- a/gensim/test/test_keras_integration.py +++ b/gensim/test/test_keras_integration.py @@ -33,14 +33,14 @@ def testWord2VecTraining(self): Test word2vec training. """ model = self.model_cos_sim - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), 100)) - self.assertTrue(model.syn1.shape == (len(model.wv.vocab), 100)) - sims = model.most_similar('graph', topn=10) + self.assertTrue(model.wv.vectors.shape == (len(model.wv.vocab), 100)) + self.assertTrue(model.trainables.syn1.shape == (len(model.wv.vocab), 100)) + sims = model.wv.most_similar('graph', topn=10) # self.assertTrue(sims[0][0] == 'trees', sims) # most similar # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar(positive=[graph_vector], topn=11) sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) @@ -119,7 +119,7 @@ def testEmbeddingLayer20NewsGroup(self): # prepare the embedding layer using the wrapper keras_w2v = self.model_twenty_ng keras_w2v.build_vocab(texts_w2v) - keras_w2v.train(texts, total_examples=keras_w2v.corpus_count, epochs=keras_w2v.iter) + keras_w2v.train(texts, total_examples=keras_w2v.corpus_count, epochs=keras_w2v.epochs) keras_w2v_wv = keras_w2v.wv embedding_layer = keras_w2v_wv.get_keras_embedding() diff --git a/gensim/test/test_keyedvectors.py b/gensim/test/test_keyedvectors.py index 26eb443cc1..927b896c64 100644 --- a/gensim/test/test_keyedvectors.py +++ b/gensim/test/test_keyedvectors.py @@ -43,7 +43,7 @@ def test_similarity_matrix(self): # checking that thresholding works as expected similarity_matrix = self.vectors.similarity_matrix(dictionary, threshold=0.45).todense() - self.assertEquals(18, np.sum(similarity_matrix == 0)) + self.assertEqual(18, np.sum(similarity_matrix == 0)) # checking that exponent works as expected similarity_matrix = self.vectors.similarity_matrix(dictionary, exponent=1.0).todense() @@ -51,10 +51,10 @@ def test_similarity_matrix(self): # checking that nonzero_limit works as expected similarity_matrix = self.vectors.similarity_matrix(dictionary, nonzero_limit=4).todense() - self.assertEquals(4, np.sum(similarity_matrix == 0)) + self.assertEqual(4, np.sum(similarity_matrix == 0)) similarity_matrix = self.vectors.similarity_matrix(dictionary, nonzero_limit=3).todense() - self.assertEquals(20, np.sum(similarity_matrix == 0)) + self.assertEqual(20, np.sum(similarity_matrix == 0)) # check that processing rows in the order given by IDF has desired effect @@ -182,7 +182,7 @@ def test_rank(self): def test_wv_property(self): """Test that the deprecated `wv` property returns `self`. 
To be removed in v4.0.0.""" - self.assertTrue(self.vectors is self.vectors.wv) + self.assertTrue(self.vectors is self.vectors) def test_add_single(self): """Test that adding entity in a manual way works correctly.""" diff --git a/gensim/test/test_similarities.py b/gensim/test/test_similarities.py index 20b50c2d94..4965d96d6f 100644 --- a/gensim/test/test_similarities.py +++ b/gensim/test/test_similarities.py @@ -450,9 +450,9 @@ def testChunking(self): sims = index[query] for i, chunk in enumerate(sims): expected = i - self.assertAlmostEquals(expected, chunk[0][0], places=2) + self.assertAlmostEqual(expected, chunk[0][0], places=2) expected = 1.0 - self.assertAlmostEquals(expected, chunk[0][1], places=2) + self.assertAlmostEqual(expected, chunk[0][1], places=2) def testIter(self): # Override testIter. @@ -592,7 +592,7 @@ def testLoadMissingRaisesError(self): self.assertRaises(IOError, test_index.load, fname='test-index') def assertVectorIsSimilarToItself(self, wv, index): - vector = wv.syn0norm[0] + vector = wv.vectors_norm[0] label = wv.index2word[0] approx_neighbors = index.most_similar(vector, 1) word, similarity = approx_neighbors[0] @@ -601,9 +601,9 @@ def assertVectorIsSimilarToItself(self, wv, index): self.assertAlmostEqual(similarity, 1.0, places=2) def assertApproxNeighborsMatchExact(self, model, wv, index): - vector = wv.syn0norm[0] - approx_neighbors = model.most_similar([vector], topn=5, indexer=index) - exact_neighbors = model.most_similar(positive=[vector], topn=5) + vector = wv.vectors_norm[0] + approx_neighbors = model.wv.most_similar([vector], topn=5, indexer=index) + exact_neighbors = model.wv.most_similar(positive=[vector], topn=5) approx_words = [neighbor[0] for neighbor in approx_neighbors] exact_words = [neighbor[0] for neighbor in exact_neighbors] @@ -644,7 +644,7 @@ def setUp(self): self.model = doc2vec.Doc2Vec(sentences, min_count=1) self.model.init_sims() self.index = AnnoyIndexer(self.model, 300) - self.vector = self.model.docvecs.doctag_syn0norm[0] + self.vector = self.model.docvecs.vectors_docs_norm[0] def testDocumentIsSimilarToItself(self): approx_neighbors = self.index.most_similar(self.vector, 1) diff --git a/gensim/test/test_sklearn_api.py b/gensim/test/test_sklearn_api.py index 2a27eb48db..55b094e6c7 100644 --- a/gensim/test/test_sklearn_api.py +++ b/gensim/test/test_sklearn_api.py @@ -674,7 +674,7 @@ def testConsistencyWithGensimModel(self): word = texts[0][0] vec_transformer_api = self.model.transform(word) # vector returned by W2VTransformer - vec_gensim_model = gensim_w2vmodel[word] # vector returned by Word2Vec + vec_gensim_model = gensim_w2vmodel.wv[word] # vector returned by Word2Vec passed = numpy.allclose(vec_transformer_api, vec_gensim_model, atol=1e-1) self.assertTrue(passed) @@ -1255,14 +1255,14 @@ def testConsistencyWithGensimModel(self): vecs_transformer_api = self.model.transform( [text for text_list in texts for text in text_list]) # vectors returned by FastText - vecs_gensim_model = [gensim_ftmodel[text] for text_list in texts for text in text_list] + vecs_gensim_model = [gensim_ftmodel.wv[text] for text_list in texts for text in text_list] passed = numpy.allclose(vecs_transformer_api, vecs_gensim_model) self.assertTrue(passed) # test for out of vocab words oov_words = ["compute", "serve", "sys", "net"] vecs_transformer_api = self.model.transform(oov_words) # vector returned by FTTransformer - vecs_gensim_model = [gensim_ftmodel[word] for word in oov_words] # vector returned by FastText + vecs_gensim_model = [gensim_ftmodel.wv[word] for 
word in oov_words] # vector returned by FastText passed = numpy.allclose(vecs_transformer_api, vecs_gensim_model) self.assertTrue(passed) diff --git a/gensim/test/test_varembed_wrapper.py b/gensim/test/test_varembed_wrapper.py index c94c2bbcdb..e95a48b6d0 100644 --- a/gensim/test/test_varembed_wrapper.py +++ b/gensim/test/test_varembed_wrapper.py @@ -43,8 +43,8 @@ def testSimilarity(self): def model_sanity(self, model): """Check vocabulary and vector size""" - self.assertEqual(model.syn0.shape, (model.vocab_size, model.vector_size)) - self.assertTrue(model.syn0.shape[0] == len(model.vocab)) + self.assertEqual(model.vectors.shape, (model.vocab_size, model.vector_size)) + self.assertTrue(model.vectors.shape[0] == len(model.vocab)) @unittest.skipIf(sys.version_info < (2, 7), 'Supported only on Python 2.7 and above') def testAddMorphemesToEmbeddings(self): @@ -56,7 +56,7 @@ def testAddMorphemesToEmbeddings(self): vectors=varembed_model_vector_file, morfessor_model=varembed_model_morfessor_file) self.model_sanity(model_with_morphemes) # Check syn0 is different for both models. - self.assertFalse(np.allclose(model.syn0, model_with_morphemes.syn0)) + self.assertFalse(np.allclose(model.vectors, model_with_morphemes.vectors)) def testLookup(self): """Test lookup of vector for a particular word and list""" diff --git a/gensim/test/test_word2vec.py b/gensim/test/test_word2vec.py index 570d13980c..9cce7e6fa9 100644 --- a/gensim/test/test_word2vec.py +++ b/gensim/test/test_word2vec.py @@ -190,7 +190,7 @@ def testOnlineLearningAfterSave(self): model_neg = word2vec.Word2Vec.load(tmpf) self.assertTrue(len(model_neg.wv.vocab), 12) model_neg.build_vocab(new_sentences, update=True) - model_neg.train(new_sentences, total_examples=model_neg.corpus_count, epochs=model_neg.iter) + model_neg.train(new_sentences, total_examples=model_neg.corpus_count, epochs=model_neg.epochs) self.assertEqual(len(model_neg.wv.vocab), 14) @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") @@ -207,10 +207,11 @@ def testOnlineLearningFromFile(self): self.assertTrue(len(model_hs.wv.vocab), 12) self.assertTrue(model_hs.wv.vocab['graph'].count, 3) model_hs.build_vocab(corpus_file=new_corpus_file, update=True) - model_hs.train(corpus_file=new_corpus_file, total_words=model_hs.corpus_total_words, epochs=model_hs.iter) + model_hs.train(corpus_file=new_corpus_file, total_words=model_hs.corpus_total_words, epochs=model_hs.epochs) model_neg.build_vocab(corpus_file=new_corpus_file, update=True) - model_neg.train(corpus_file=new_corpus_file, total_words=model_hs.corpus_total_words, epochs=model_hs.iter) + model_neg.train( + corpus_file=new_corpus_file, total_words=model_hs.corpus_total_words, epochs=model_hs.epochs) self.assertTrue(model_hs.wv.vocab['graph'].count, 4) self.assertTrue(model_hs.wv.vocab['artificial'].count, 4) self.assertEqual(len(model_hs.wv.vocab), 14) @@ -231,11 +232,11 @@ def testOnlineLearningAfterSaveFromFile(self): model_neg = word2vec.Word2Vec.load(tmpf) self.assertTrue(len(model_neg.wv.vocab), 12) # Check that training works on the same data after load without calling build_vocab - model_neg.train(corpus_file=corpus_file, total_words=model_neg.corpus_total_words, epochs=model_neg.iter) + model_neg.train(corpus_file=corpus_file, total_words=model_neg.corpus_total_words, epochs=model_neg.epochs) # Train on new corpus file model_neg.build_vocab(corpus_file=new_corpus_file, update=True) model_neg.train(corpus_file=new_corpus_file, total_words=model_neg.corpus_total_words, - 
epochs=model_neg.iter) + epochs=model_neg.epochs) self.assertEqual(len(model_neg.wv.vocab), 14) def onlineSanity(self, model, trained_model=False): @@ -247,14 +248,14 @@ def onlineSanity(self, model, trained_model=False): others.append(l) self.assertTrue(all(['terrorism' not in l for l in others])) model.build_vocab(others, update=trained_model) - model.train(others, total_examples=model.corpus_count, epochs=model.iter) + model.train(others, total_examples=model.corpus_count, epochs=model.epochs) self.assertFalse('terrorism' in model.wv.vocab) model.build_vocab(terro, update=True) self.assertTrue('terrorism' in model.wv.vocab) - orig0 = np.copy(model.wv.syn0) - model.train(terro, total_examples=len(terro), epochs=model.iter) - self.assertFalse(np.allclose(model.wv.syn0, orig0)) - sim = model.n_similarity(['war'], ['terrorism']) + orig0 = np.copy(model.wv.vectors) + model.train(terro, total_examples=len(terro), epochs=model.epochs) + self.assertFalse(np.allclose(model.wv.vectors, orig0)) + sim = model.wv.n_similarity(['war'], ['terrorism']) self.assertLess(0., sim) def test_sg_hs_online(self): @@ -293,7 +294,7 @@ def testPersistence(self): wv = model.wv wv.save(tmpf) loaded_wv = keyedvectors.KeyedVectors.load(tmpf) - self.assertTrue(np.allclose(wv.syn0, loaded_wv.syn0)) + self.assertTrue(np.allclose(wv.vectors, loaded_wv.vectors)) self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) @unittest.skipIf(os.name == 'nt' and six.PY2, "CythonLineSentence is not supported on Windows + Py27") @@ -310,7 +311,7 @@ def testPersistenceFromFile(self): wv = model.wv wv.save(tmpf) loaded_wv = keyedvectors.KeyedVectors.load(tmpf) - self.assertTrue(np.allclose(wv.syn0, loaded_wv.syn0)) + self.assertTrue(np.allclose(wv.vectors, loaded_wv.vectors)) self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) def testPersistenceWithConstructorRule(self): @@ -348,12 +349,12 @@ def testSyn0NormNotSaved(self): model.init_sims() model.save(tmpf) loaded_model = word2vec.Word2Vec.load(tmpf) - self.assertTrue(loaded_model.wv.syn0norm is None) + self.assertTrue(loaded_model.wv.vectors_norm is None) wv = model.wv wv.save(tmpf) loaded_kv = keyedvectors.KeyedVectors.load(tmpf) - self.assertTrue(loaded_kv.syn0norm is None) + self.assertTrue(loaded_kv.vectors_norm is None) def testLoadPreKeyedVectorModel(self): """Test loading pre-KeyedVectors word2vec model""" @@ -368,19 +369,19 @@ def testLoadPreKeyedVectorModel(self): # Model stored in one file model_file = 'word2vec_pre_kv%s' % model_file_suffix model = word2vec.Word2Vec.load(datapath(model_file)) - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), model.vector_size)) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) + self.assertTrue(model.wv.vectors.shape == (len(model.wv.vocab), model.vector_size)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) # Model stored in multiple files model_file = 'word2vec_pre_kv_sep%s' % model_file_suffix model = word2vec.Word2Vec.load(datapath(model_file)) - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), model.vector_size)) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) + self.assertTrue(model.wv.vectors.shape == (len(model.wv.vocab), model.vector_size)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), model.vector_size)) def testLoadPreKeyedVectorModelCFormat(self): """Test loading pre-KeyedVectors word2vec model saved in word2vec format""" model = 
keyedvectors.KeyedVectors.load_word2vec_format(datapath('word2vec_pre_kv_c')) - self.assertTrue(model.syn0.shape[0] == len(model.vocab)) + self.assertTrue(model.vectors.shape[0] == len(model.vocab)) def testPersistenceWord2VecFormat(self): """Test storing/loading the entire model in word2vec format.""" @@ -390,17 +391,17 @@ def testPersistenceWord2VecFormat(self): model.wv.save_word2vec_format(tmpf, binary=True) binary_model_kv = keyedvectors.KeyedVectors.load_word2vec_format(tmpf, binary=True) binary_model_kv.init_sims(replace=False) - self.assertTrue(np.allclose(model['human'], binary_model_kv['human'])) + self.assertTrue(np.allclose(model.wv['human'], binary_model_kv['human'])) norm_only_model = keyedvectors.KeyedVectors.load_word2vec_format(tmpf, binary=True) norm_only_model.init_sims(replace=True) - self.assertFalse(np.allclose(model['human'], norm_only_model['human'])) - self.assertTrue(np.allclose(model.wv.syn0norm[model.wv.vocab['human'].index], norm_only_model['human'])) + self.assertFalse(np.allclose(model.wv['human'], norm_only_model['human'])) + self.assertTrue(np.allclose(model.wv.vectors_norm[model.wv.vocab['human'].index], norm_only_model['human'])) limited_model_kv = keyedvectors.KeyedVectors.load_word2vec_format(tmpf, binary=True, limit=3) - self.assertEqual(len(limited_model_kv.syn0), 3) + self.assertEqual(len(limited_model_kv.vectors), 3) half_precision_model_kv = keyedvectors.KeyedVectors.load_word2vec_format( tmpf, binary=True, datatype=np.float16 ) - self.assertEqual(binary_model_kv.syn0.nbytes, half_precision_model_kv.syn0.nbytes * 2) + self.assertEqual(binary_model_kv.vectors.nbytes, half_precision_model_kv.vectors.nbytes * 2) def testNoTrainingCFormat(self): tmpf = get_tmpfile('gensim_word2vec.tst') @@ -440,12 +441,12 @@ def testPersistenceWord2VecFormatNonBinary(self): model.wv.save_word2vec_format(tmpf, binary=False) text_model = keyedvectors.KeyedVectors.load_word2vec_format(tmpf, binary=False) text_model.init_sims(False) - self.assertTrue(np.allclose(model['human'], text_model['human'], atol=1e-6)) + self.assertTrue(np.allclose(model.wv['human'], text_model['human'], atol=1e-6)) norm_only_model = keyedvectors.KeyedVectors.load_word2vec_format(tmpf, binary=False) norm_only_model.init_sims(True) - self.assertFalse(np.allclose(model['human'], norm_only_model['human'], atol=1e-6)) + self.assertFalse(np.allclose(model.wv['human'], norm_only_model['human'], atol=1e-6)) self.assertTrue(np.allclose( - model.wv.syn0norm[model.wv.vocab['human'].index], norm_only_model['human'], atol=1e-4 + model.wv.vectors_norm[model.wv.vocab['human'].index], norm_only_model['human'], atol=1e-4 )) def testPersistenceWord2VecFormatWithVocab(self): @@ -526,16 +527,16 @@ def testTraining(self): model = word2vec.Word2Vec(size=2, min_count=1, hs=1, negative=0) model.build_vocab(sentences) - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), 2)) - self.assertTrue(model.syn1.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.wv.vectors.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.trainables.syn1.shape == (len(model.wv.vocab), 2)) - model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) - sims = model.most_similar('graph', topn=10) + model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) + sims = model.wv.most_similar('graph', topn=10) # self.assertTrue(sims[0][0] == 'trees', sims) # most similar # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = 
model.most_similar(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar(positive=[graph_vector], topn=11) sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) @@ -553,16 +554,16 @@ def testTrainingFromFile(self): model = word2vec.Word2Vec(size=2, min_count=1, hs=1, negative=0) model.build_vocab(corpus_file=tf) - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), 2)) - self.assertTrue(model.syn1.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.wv.vectors.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.trainables.syn1.shape == (len(model.wv.vocab), 2)) - model.train(corpus_file=tf, total_words=model.corpus_total_words, epochs=model.iter) - sims = model.most_similar('graph', topn=10) + model.train(corpus_file=tf, total_words=model.corpus_total_words, epochs=model.epochs) + sims = model.wv.most_similar('graph', topn=10) # self.assertTrue(sims[0][0] == 'trees', sims) # most similar # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar(positive=[graph_vector], topn=11) sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) @@ -583,27 +584,27 @@ def testLocking(self): model.build_vocab(corpus) # remember two vectors - locked0 = np.copy(model.wv.syn0[0]) - unlocked1 = np.copy(model.wv.syn0[1]) + locked0 = np.copy(model.wv.vectors[0]) + unlocked1 = np.copy(model.wv.vectors[1]) # lock the vector in slot 0 against change - model.syn0_lockf[0] = 0.0 + model.trainables.vectors_lockf[0] = 0.0 - model.train(corpus, total_examples=model.corpus_count, epochs=model.iter) - self.assertFalse((unlocked1 == model.wv.syn0[1]).all()) # unlocked vector should vary - self.assertTrue((locked0 == model.wv.syn0[0]).all()) # locked vector should not vary + model.train(corpus, total_examples=model.corpus_count, epochs=model.epochs) + self.assertFalse((unlocked1 == model.wv.vectors[1]).all()) # unlocked vector should vary + self.assertTrue((locked0 == model.wv.vectors[0]).all()) # locked vector should not vary def testAccuracy(self): """Test Word2Vec accuracy and KeyedVectors accuracy give the same result""" model = word2vec.Word2Vec(LeeCorpus()) - w2v_accuracy = model.accuracy(datapath('questions-words.txt')) - kv_accuracy = model.wv.accuracy(datapath('questions-words.txt')) + w2v_accuracy = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) + kv_accuracy = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) self.assertEqual(w2v_accuracy, kv_accuracy) def testEvaluateWordPairs(self): """Test Spearman and Pearson correlation coefficients give sane results on similarity datasets""" corpus = word2vec.LineSentence(datapath('head500.noblanks.cor.bz2')) model = word2vec.Word2Vec(corpus, min_count=3, iter=10) - correlation = model.evaluate_word_pairs(datapath('wordsim353.tsv')) + correlation = model.wv.evaluate_word_pairs(datapath('wordsim353.tsv')) pearson = correlation[0][0] spearman = correlation[1][0] oov = correlation[2] @@ -618,7 +619,7 @@ def testEvaluateWordPairsFromFile(self): utils.save_as_line_sentence(word2vec.LineSentence(datapath('head500.noblanks.cor.bz2')), tf) model = word2vec.Word2Vec(corpus_file=tf, min_count=3, iter=10) - correlation = 
model.evaluate_word_pairs(datapath('wordsim353.tsv')) + correlation = model.wv.evaluate_word_pairs(datapath('wordsim353.tsv')) pearson = correlation[0][0] spearman = correlation[1][0] oov = correlation[2] @@ -631,21 +632,21 @@ def model_sanity(self, model, train=True, with_corpus_file=False): # run extra before/after training tests if train=True if train: model.build_vocab(list_corpus) - orig0 = np.copy(model.wv.syn0[0]) + orig0 = np.copy(model.wv.vectors[0]) if with_corpus_file: tmpfile = get_tmpfile('gensim_word2vec.tst') utils.save_as_line_sentence(list_corpus, tmpfile) - model.train(corpus_file=tmpfile, total_words=model.corpus_total_words, epochs=model.iter) + model.train(corpus_file=tmpfile, total_words=model.corpus_total_words, epochs=model.epochs) else: - model.train(list_corpus, total_examples=model.corpus_count, epochs=model.iter) - self.assertFalse((orig0 == model.wv.syn0[1]).all()) # vector should vary after training - sims = model.most_similar('war', topn=len(model.wv.index2word)) + model.train(list_corpus, total_examples=model.corpus_count, epochs=model.epochs) + self.assertFalse((orig0 == model.wv.vectors[1]).all()) # vector should vary after training + sims = model.wv.most_similar('war', topn=len(model.wv.index2word)) t_rank = [word for word, score in sims].index('terrorism') # in >200 calibration runs w/ calling parameters, 'terrorism' in 50-most_sim for 'war' self.assertLess(t_rank, 50) - war_vec = model['war'] - sims2 = model.most_similar([war_vec], topn=51) + war_vec = model.wv['war'] + sims2 = model.wv.most_similar([war_vec], topn=51) self.assertTrue('war' in [word for word, score in sims2]) self.assertTrue('terrorism' in [word for word, score in sims2]) @@ -703,12 +704,12 @@ def test_cbow_neg_fromfile(self): def test_cosmul(self): model = word2vec.Word2Vec(sentences, size=2, min_count=1, hs=1, negative=0) - sims = model.most_similar_cosmul('graph', topn=10) + sims = model.wv.most_similar_cosmul('graph', topn=10) # self.assertTrue(sims[0][0] == 'trees', sims) # most similar # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar_cosmul(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar_cosmul(positive=[graph_vector], topn=11) sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) @@ -718,16 +719,16 @@ def testTrainingCbow(self): # build vocabulary, don't train yet model = word2vec.Word2Vec(size=2, min_count=1, sg=0, hs=1, negative=0) model.build_vocab(sentences) - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), 2)) - self.assertTrue(model.syn1.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.wv.vectors.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.trainables.syn1.shape == (len(model.wv.vocab), 2)) - model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) - sims = model.most_similar('graph', topn=10) + model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) + sims = model.wv.most_similar('graph', topn=10) # self.assertTrue(sims[0][0] == 'trees', sims) # most similar # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar(positive=[graph_vector], topn=11) 
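The hunks in this region all apply the same mechanical migration: similarity queries are issued through the model's `wv` (KeyedVectors) attribute instead of the deprecated model-level aliases, and `syn0norm`/`iter` give way to `vectors_norm`/`epochs`. For orientation only, a minimal sketch of the new call style (not part of the patch; `sentences` stands for the small toy corpus used throughout these tests):

.. sourcecode:: pycon

    >>> from gensim.models import word2vec
    >>>
    >>> model = word2vec.Word2Vec(sentences, size=10, min_count=1)   # `sentences` is a placeholder corpus
    >>> model.wv.most_similar('graph', topn=5)                       # query the KeyedVectors, not the model
    >>> model.wv.most_similar_cosmul('graph', topn=5)                # multiplicative variant, also on `wv`
    >>> model.wv.most_similar(positive=[model.wv['graph']], topn=5)  # query by raw vector
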
sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) @@ -741,16 +742,16 @@ def testTrainingSgNegative(self): # build vocabulary, don't train yet model = word2vec.Word2Vec(size=2, min_count=1, sg=1, hs=0, negative=2) model.build_vocab(sentences) - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), 2)) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.wv.vectors.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), 2)) - model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) - sims = model.most_similar('graph', topn=10) + model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) + sims = model.wv.most_similar('graph', topn=10) # self.assertTrue(sims[0][0] == 'trees', sims) # most similar # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar(positive=[graph_vector], topn=11) sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) @@ -764,16 +765,16 @@ def testTrainingCbowNegative(self): # build vocabulary, don't train yet model = word2vec.Word2Vec(size=2, min_count=1, sg=0, hs=0, negative=2) model.build_vocab(sentences) - self.assertTrue(model.wv.syn0.shape == (len(model.wv.vocab), 2)) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.wv.vectors.shape == (len(model.wv.vocab), 2)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), 2)) - model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) - sims = model.most_similar('graph', topn=10) + model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) + sims = model.wv.most_similar('graph', topn=10) # self.assertTrue(sims[0][0] == 'trees', sims) # most similar # test querying for "most similar" by vector - graph_vector = model.wv.syn0norm[model.wv.vocab['graph'].index] - sims2 = model.most_similar(positive=[graph_vector], topn=11) + graph_vector = model.wv.vectors_norm[model.wv.vocab['graph'].index] + sims2 = model.wv.most_similar(positive=[graph_vector], topn=11) sims2 = [(w, sim) for w, sim in sims2 if w != 'graph'] # ignore 'graph' itself self.assertEqual(sims, sims2) @@ -786,21 +787,21 @@ def testSimilarities(self): # The model is trained using CBOW model = word2vec.Word2Vec(size=2, min_count=1, sg=0, hs=0, negative=2) model.build_vocab(sentences) - model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) + model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) - self.assertTrue(model.n_similarity(['graph', 'trees'], ['trees', 'graph'])) - self.assertTrue(model.n_similarity(['graph'], ['trees']) == model.similarity('graph', 'trees')) - self.assertRaises(ZeroDivisionError, model.n_similarity, ['graph', 'trees'], []) - self.assertRaises(ZeroDivisionError, model.n_similarity, [], ['graph', 'trees']) - self.assertRaises(ZeroDivisionError, model.n_similarity, [], []) + self.assertTrue(model.wv.n_similarity(['graph', 'trees'], ['trees', 'graph'])) + self.assertTrue(model.wv.n_similarity(['graph'], ['trees']) == model.wv.similarity('graph', 'trees')) + self.assertRaises(ZeroDivisionError, model.wv.n_similarity, ['graph', 'trees'], []) 
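The surrounding `testSimilarities` hunk moves `similarity` and `n_similarity` onto the `wv` attribute as well. A short sketch of those two calls, assuming `model` is any trained Word2Vec instance (illustrative only, not part of the patch):

.. sourcecode:: pycon

    >>> # illustrative only: `model` is assumed to be a trained Word2Vec model
    >>> model.wv.similarity('graph', 'trees')                   # cosine similarity of two words
    >>> model.wv.n_similarity(['graph', 'minors'], ['trees'])   # similarity between two sets of words
    >>> # an empty list on either side raises ZeroDivisionError, which the updated test asserts
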
+ self.assertRaises(ZeroDivisionError, model.wv.n_similarity, [], ['graph', 'trees']) + self.assertRaises(ZeroDivisionError, model.wv.n_similarity, [], []) def testSimilarBy(self): """Test word2vec similar_by_word and similar_by_vector.""" model = word2vec.Word2Vec(sentences, size=2, min_count=1, hs=1, negative=0) - wordsims = model.similar_by_word('graph', topn=10) - wordsims2 = model.most_similar(positive='graph', topn=10) - vectorsims = model.similar_by_vector(model['graph'], topn=10) - vectorsims2 = model.most_similar([model['graph']], topn=10) + wordsims = model.wv.similar_by_word('graph', topn=10) + wordsims2 = model.wv.most_similar(positive='graph', topn=10) + vectorsims = model.wv.similar_by_vector(model.wv['graph'], topn=10) + vectorsims2 = model.wv.most_similar([model.wv['graph']], topn=10) self.assertEqual(wordsims, wordsims2) self.assertEqual(vectorsims, vectorsims2) @@ -813,7 +814,7 @@ def testParallel(self): for workers in [2, 4]: model = word2vec.Word2Vec(corpus, workers=workers) - sims = model.most_similar('israeli') # noqa:F841 + sims = model.wv.most_similar('israeli') # noqa:F841 # the exact vectors and therefore similarities may differ, due to different thread collisions/randomization # so let's test only for top3 # TODO: commented out for now; find a more robust way to compare against "gold standard" @@ -827,13 +828,13 @@ def testRNG(self): def models_equal(self, model, model2): self.assertEqual(len(model.wv.vocab), len(model2.wv.vocab)) - self.assertTrue(np.allclose(model.wv.syn0, model2.wv.syn0)) + self.assertTrue(np.allclose(model.wv.vectors, model2.wv.vectors)) if model.hs: - self.assertTrue(np.allclose(model.syn1, model2.syn1)) + self.assertTrue(np.allclose(model.trainables.syn1, model2.trainables.syn1)) if model.negative: - self.assertTrue(np.allclose(model.syn1neg, model2.syn1neg)) + self.assertTrue(np.allclose(model.trainables.syn1neg, model2.trainables.syn1neg)) most_common_word = max(model.wv.vocab.items(), key=lambda item: item[1].count)[0] - self.assertTrue(np.allclose(model[most_common_word], model2[most_common_word])) + self.assertTrue(np.allclose(model.wv[most_common_word], model2.wv[most_common_word])) def testDeleteTemporaryTrainingData(self): """Test word2vec model after delete_temporary_training_data""" @@ -841,17 +842,17 @@ def testDeleteTemporaryTrainingData(self): for j in [0, 1]: model = word2vec.Word2Vec(sentences, size=10, min_count=0, seed=42, hs=i, negative=j) if i: - self.assertTrue(hasattr(model, 'syn1')) + self.assertTrue(hasattr(model.trainables, 'syn1')) if j: self.assertTrue(hasattr(model, 'syn1neg')) self.assertTrue(hasattr(model, 'syn0_lockf')) model.delete_temporary_training_data(replace_word_vectors_with_normalized=True) - self.assertTrue(len(model['human']), 10) + self.assertTrue(len(model.wv['human']), 10) self.assertTrue(len(model.wv.vocab), 12) self.assertTrue(model.wv.vocab['graph'].count, 3) - self.assertTrue(not hasattr(model, 'syn1')) - self.assertTrue(not hasattr(model, 'syn1neg')) - self.assertTrue(not hasattr(model, 'syn0_lockf')) + self.assertTrue(not hasattr(model.trainables, 'syn1')) + self.assertTrue(not hasattr(model.trainables, 'syn1neg')) + self.assertTrue(not hasattr(model.trainables, 'syn0_lockf')) def testNormalizeAfterTrainingData(self): tmpf = get_tmpfile('gensim_word2vec.tst') @@ -859,7 +860,7 @@ def testNormalizeAfterTrainingData(self): model.save(tmpf) norm_only_model = word2vec.Word2Vec.load(tmpf) norm_only_model.delete_temporary_training_data(replace_word_vectors_with_normalized=True) - 
self.assertFalse(np.allclose(model['human'], norm_only_model['human'])) + self.assertFalse(np.allclose(model.wv['human'], norm_only_model.wv['human'])) def testPredictOutputWord(self): '''Test word2vec predict_output_word method handling for negative sampling scheme''' @@ -893,7 +894,7 @@ def testLoadOldModel(self): self.assertTrue(model.wv.vectors.shape == (12, 100)) self.assertTrue(len(model.wv.vocab) == 12) self.assertTrue(len(model.wv.index2word) == 12) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.wv.vector_size)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), model.wv.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (12,)) self.assertTrue(model.vocabulary.cum_table.shape == (12,)) @@ -905,7 +906,7 @@ def testLoadOldModel(self): self.assertTrue(model.wv.vectors.shape == (12, 100)) self.assertTrue(len(model.wv.vocab) == 12) self.assertTrue(len(model.wv.index2word) == 12) - self.assertTrue(model.syn1neg.shape == (len(model.wv.vocab), model.wv.vector_size)) + self.assertTrue(model.trainables.syn1neg.shape == (len(model.wv.vocab), model.wv.vector_size)) self.assertTrue(model.trainables.vectors_lockf.shape == (12,)) self.assertTrue(model.vocabulary.cum_table.shape == (12,)) @@ -939,13 +940,13 @@ def testLoadOldModel(self): # check if similarity search and online training works. self.assertTrue(len(model.wv.most_similar('sentence')) == 2) model.build_vocab(list_corpus, update=True) - model.train(list_corpus, total_examples=model.corpus_count, epochs=model.iter) + model.train(list_corpus, total_examples=model.corpus_count, epochs=model.epochs) # check if similarity search and online training works after saving and loading back the model. tmpf = get_tmpfile('gensim_word2vec.tst') model.save(tmpf) loaded_model = word2vec.Word2Vec.load(tmpf) loaded_model.build_vocab(list_corpus, update=True) - loaded_model.train(list_corpus, total_examples=model.corpus_count, epochs=model.iter) + loaded_model.train(list_corpus, total_examples=model.corpus_count, epochs=model.epochs) @log_capture() def testBuildVocabWarning(self, l): @@ -966,7 +967,7 @@ def testTrainWarning(self, l): model = word2vec.Word2Vec(min_count=1) model.build_vocab(sentences) for epoch in range(10): - model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) + model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) model.alpha -= 0.002 model.min_alpha = model.alpha if epoch == 5: @@ -981,7 +982,7 @@ def test_train_with_explicit_param(self): model.train(sentences, total_examples=model.corpus_count) with self.assertRaises(ValueError): - model.train(sentences, epochs=model.iter) + model.train(sentences, epochs=model.epochs) with self.assertRaises(ValueError): model.train(sentences) @@ -1008,7 +1009,7 @@ def test_reset_from(self): def test_compute_training_loss(self): model = word2vec.Word2Vec(min_count=1, sg=1, negative=5, hs=1) model.build_vocab(sentences) - model.train(sentences, compute_loss=True, total_examples=model.corpus_count, epochs=model.iter) + model.train(sentences, compute_loss=True, total_examples=model.corpus_count, epochs=model.epochs) training_loss_val = model.get_latest_training_loss() self.assertTrue(training_loss_val > 0.0) @@ -1025,7 +1026,7 @@ def testNonzero(self): model = word2vec.Word2Vec(sentences, min_count=2, seed=42, workers=1) sentence1 = ['human', 'interface', 'computer'] sentence2 = ['survey', 'user', 'computer', 'system', 'response', 'time'] - distance = model.wmdistance(sentence1, sentence2) + 
distance = model.wv.wmdistance(sentence1, sentence2) # Check that distance is non-zero. self.assertFalse(distance == 0.0) @@ -1039,8 +1040,8 @@ def testSymmetry(self): model = word2vec.Word2Vec(sentences, min_count=2, seed=42, workers=1) sentence1 = ['human', 'interface', 'computer'] sentence2 = ['survey', 'user', 'computer', 'system', 'response', 'time'] - distance1 = model.wmdistance(sentence1, sentence2) - distance2 = model.wmdistance(sentence2, sentence1) + distance1 = model.wv.wmdistance(sentence1, sentence2) + distance2 = model.wv.wmdistance(sentence2, sentence1) self.assertTrue(np.allclose(distance1, distance2)) def testIdenticalSentences(self): @@ -1051,7 +1052,7 @@ def testIdenticalSentences(self): model = word2vec.Word2Vec(sentences, min_count=1) sentence = ['survey', 'user', 'computer', 'system', 'response', 'time'] - distance = model.wmdistance(sentence, sentence) + distance = model.wv.wmdistance(sentence, sentence) self.assertEqual(0.0, distance) diff --git a/gensim/test/test_wordrank_wrapper.py b/gensim/test/test_wordrank_wrapper.py index 10f335cae2..dc565001fa 100644 --- a/gensim/test/test_wordrank_wrapper.py +++ b/gensim/test/test_wordrank_wrapper.py @@ -37,7 +37,7 @@ def testLoadWordrankFormat(self): """Test model successfully loaded from Wordrank format file""" model = wordrank.Wordrank.load_wordrank_model(self.wr_file) vocab_size, dim = 76, 50 - self.assertEqual(model.syn0.shape, (vocab_size, dim)) + self.assertEqual(model.vectors.shape, (vocab_size, dim)) self.assertEqual(len(model.vocab), vocab_size) os.remove(self.wr_file + '.w2vformat') diff --git a/gensim/topic_coherence/text_analysis.py b/gensim/topic_coherence/text_analysis.py index b759e0a13a..3ec859b6fd 100644 --- a/gensim/topic_coherence/text_analysis.py +++ b/gensim/topic_coherence/text_analysis.py @@ -641,7 +641,7 @@ def accumulate(self, texts, window_size): self.model = Word2Vec(**kwargs) self.model.build_vocab(texts) - self.model.train(texts, total_examples=self.model.corpus_count, epochs=self.model.iter) + self.model.train(texts, total_examples=self.model.corpus_count, epochs=self.model.epochs) self.model = self.model.wv # retain KeyedVectors return self diff --git a/gensim/utils.py b/gensim/utils.py index 28007853a2..d9de018171 100644 --- a/gensim/utils.py +++ b/gensim/utils.py @@ -450,7 +450,8 @@ def mmap_error(obj, filename): for attrib in getattr(self, '__recursive_saveloads', []): cfname = '.'.join((fname, attrib)) logger.info("loading %s recursively from %s.* with mmap=%s", attrib, cfname, mmap) - getattr(self, attrib)._load_specials(cfname, mmap, compress, subname) + with ignore_deprecation_warning(): + getattr(self, attrib)._load_specials(cfname, mmap, compress, subname) for attrib in getattr(self, '__numpys', []): logger.info("loading %s from %s with mmap=%s", attrib, subname(fname, attrib), mmap) @@ -463,7 +464,8 @@ def mmap_error(obj, filename): else: val = np.load(subname(fname, attrib), mmap_mode=mmap) - setattr(self, attrib, val) + with ignore_deprecation_warning(): + setattr(self, attrib, val) for attrib in getattr(self, '__scipys', []): logger.info("loading %s from %s with mmap=%s", attrib, subname(fname, attrib), mmap) @@ -481,11 +483,13 @@ def mmap_error(obj, filename): sparse.indptr = np.load(subname(fname, attrib, 'indptr'), mmap_mode=mmap) sparse.indices = np.load(subname(fname, attrib, 'indices'), mmap_mode=mmap) - setattr(self, attrib, sparse) + with ignore_deprecation_warning(): + setattr(self, attrib, sparse) for attrib in getattr(self, '__ignoreds', []): logger.info("setting 
ignored attribute %s to None", attrib) - setattr(self, attrib, None) + with ignore_deprecation_warning(): + setattr(self, attrib, None) @staticmethod def _adapt_by_suffix(fname): @@ -543,7 +547,8 @@ def _smart_save(self, fname, separately=None, sep_limit=10 * 1024**2, ignore=fro # restore attribs handled specially for obj, asides in restores: for attrib, val in iteritems(asides): - setattr(obj, attrib, val) + with ignore_deprecation_warning(): + setattr(obj, attrib, val) logger.info("saved %s", fname) def _save_specials(self, fname, separately, sep_limit, ignore, pickle_protocol, compress, subname): @@ -584,11 +589,12 @@ def _save_specials(self, fname, separately, sep_limit, ignore, pickle_protocol, elif isinstance(val, sparse_matrices) and val.nnz >= sep_limit: separately.append(attrib) - # whatever's in `separately` or `ignore` at this point won't get pickled - for attrib in separately + list(ignore): - if hasattr(self, attrib): - asides[attrib] = getattr(self, attrib) - delattr(self, attrib) + with ignore_deprecation_warning(): + # whatever's in `separately` or `ignore` at this point won't get pickled + for attrib in separately + list(ignore): + if hasattr(self, attrib): + asides[attrib] = getattr(self, attrib) + delattr(self, attrib) recursive_saveloads = [] restores = [] @@ -1442,6 +1448,14 @@ def new_func2(*args, **kwargs): raise TypeError(repr(type(reason))) +@contextmanager +def ignore_deprecation_warning(): + """Contextmanager for ignoring DeprecationWarning.""" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + yield + + @deprecated("Function will be removed in 4.0.0") def toptexts(query, texts, index, n=10): """Debug fnc to help inspect the top `n` most similar documents (according to a similarity index `index`), diff --git a/setup.py b/setup.py index 2ba7cf3b62..ff40e5d7dc 100644 --- a/setup.py +++ b/setup.py @@ -351,7 +351,7 @@ def finalize_options(self): 'distributed': distributed_env, 'test-win': win_testenv, 'test': linux_testenv, - 'docs': linux_testenv + distributed_env + ['sphinx', 'sphinxcontrib-napoleon', 'plotly', 'pattern', 'sphinxcontrib.programoutput'], + 'docs': linux_testenv + distributed_env + ['sphinx', 'sphinxcontrib-napoleon', 'plotly', 'pattern <= 2.6', 'sphinxcontrib.programoutput'], }, include_package_data=True, From 62687e951dd0667f8257ace826a646529cbaff4c Mon Sep 17 00:00:00 2001 From: Ivan Menshikh Date: Sun, 23 Sep 2018 13:57:42 +0500 Subject: [PATCH 25/66] Add missing content to tarball (#2194) * add missing cython-related files to tarball distribution * fix skip-condition for fasttext test * disable test back (OOM on Appveyor) --- MANIFEST.in | 15 +++++++++++++++ gensim/test/test_fasttext.py | 4 ++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 9bfc31660f..da4b2ee47e 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,14 +4,29 @@ include CHANGELOG.md include COPYING include COPYING.LESSER include ez_setup.py + include gensim/models/voidptr.h +include gensim/models/fast_line_sentence.h + include gensim/models/word2vec_inner.c include gensim/models/word2vec_inner.pyx include gensim/models/word2vec_inner.pxd +include gensim/models/word2vec_corpusfile.cpp +include gensim/models/word2vec_corpusfile.pyx +include gensim/models/word2vec_corpusfile.pxd + include gensim/models/doc2vec_inner.c include gensim/models/doc2vec_inner.pyx +include gensim/models/doc2vec_inner.pxd +include gensim/models/doc2vec_corpusfile.cpp +include gensim/models/doc2vec_corpusfile.pyx + 
include gensim/models/fasttext_inner.c include gensim/models/fasttext_inner.pyx +include gensim/models/fasttext_inner.pxd +include gensim/models/fasttext_corpusfile.cpp +include gensim/models/fasttext_corpusfile.pyx + include gensim/models/_utils_any2vec.c include gensim/models/_utils_any2vec.pyx include gensim/corpora/_mmreader.c diff --git a/gensim/test/test_fasttext.py b/gensim/test/test_fasttext.py index 5b1134a87b..4cca6d5d1a 100644 --- a/gensim/test/test_fasttext.py +++ b/gensim/test/test_fasttext.py @@ -139,8 +139,8 @@ def test_persistence(self): self.assertTrue(np.allclose(wv.vectors_ngrams, loaded_wv.vectors_ngrams)) self.assertEqual(len(wv.vocab), len(loaded_wv.vocab)) - @unittest.skipIf(os.name == 'nt', "corpus_file is not supported for Windows + Py2" - "and avoid memory error with Appveyor x32") + @unittest.skipIf(os.name == 'nt', + "corpus_file is not supported for Windows + Py2 and avoid memory error with Appveyor x32") def test_persistence_fromfile(self): with temporary_file(get_tmpfile('gensim_fasttext1.tst')) as corpus_file: utils.save_as_line_sentence(sentences, corpus_file) From da5c603c58d3ffa00231f9ee1e1507efe4c99417 Mon Sep 17 00:00:00 2001 From: Ivan Menshikh Date: Mon, 24 Sep 2018 14:33:18 +0500 Subject: [PATCH 26/66] Fix non-deterministic test failures (pin `PYTHONHASHSEED`) (#2196) --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index df9575f78b..ed26490654 100644 --- a/tox.ini +++ b/tox.ini @@ -32,6 +32,7 @@ setenv = DTM_PATH={env:DTM_PATH:} MALLET_HOME={env:MALLET_HOME:} SKIP_NETWORK_TESTS={env:SKIP_NETWORK_TESTS:} + PYTHONHASHSEED=1 commands = python -c "from gensim.models.word2vec import FAST_VERSION; print(FAST_VERSION)" From ca0dcaa1eca8b1764f6456adac5719309e0d8e6d Mon Sep 17 00:00:00 2001 From: Ivan Menshikh Date: Mon, 24 Sep 2018 15:33:14 +0500 Subject: [PATCH 27/66] Fix for `sphinx==1.8.1` (last release) --- gensim/corpora/wikicorpus.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gensim/corpora/wikicorpus.py b/gensim/corpora/wikicorpus.py index f204a6d834..86545a11ab 100644 --- a/gensim/corpora/wikicorpus.py +++ b/gensim/corpora/wikicorpus.py @@ -90,7 +90,7 @@ 'MediaWiki', 'User', 'Help', 'Book', 'Draft', 'WikiProject', 'Special', 'Talk' ] -"""`MediaWiki namespaces `_ that ought to be ignored.""" +"""MediaWiki namespaces that ought to be ignored.""" def filter_example(elem, text, *args, **kwargs): From 28da936eb1d9df9a54422a88b3f48d68de075774 Mon Sep 17 00:00:00 2001 From: Rohit Kumar Date: Mon, 1 Oct 2018 15:15:28 +0530 Subject: [PATCH 28/66] Fix "aliasing chunkize to chunkize_serial" warning on Windows (#2202) --- gensim/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gensim/utils.py b/gensim/utils.py index d9de018171..1331c17c6d 100644 --- a/gensim/utils.py +++ b/gensim/utils.py @@ -1215,8 +1215,6 @@ def run(self): if os.name == 'nt': - warnings.warn("detected Windows; aliasing chunkize to chunkize_serial") - def chunkize(corpus, chunksize, maxsize=0, as_numpy=False): """Split `corpus` into fixed-sized chunks, using :func:`~gensim.utils.chunkize_serial`. @@ -1237,6 +1235,8 @@ def chunkize(corpus, chunksize, maxsize=0, as_numpy=False): "chunksize"-ed chunks of elements from `corpus`. 
""" + if maxsize > 0: + warnings.warn("detected Windows; aliasing chunkize to chunkize_serial") for chunk in chunkize_serial(corpus, chunksize, as_numpy=as_numpy): yield chunk else: From b3d7ba6bca94cf6e0728738f7a83efdda02ce246 Mon Sep 17 00:00:00 2001 From: Stig Johan Berggren Date: Wed, 3 Oct 2018 04:05:32 +0200 Subject: [PATCH 29/66] Update return sections for `KeyedVectors.evaluate_word_*` (#2205) --- gensim/models/keyedvectors.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index d105a2d924..cb318322b4 100644 --- a/gensim/models/keyedvectors.py +++ b/gensim/models/keyedvectors.py @@ -1067,8 +1067,12 @@ def evaluate_word_analogies(self, analogies, restrict_vocab=300000, case_insensi Returns ------- - (float, list of dict of (str, (str, str, str)) - Overall evaluation score and full lists of correct and incorrect predictions divided by sections. + score : float + The overall evaluation score on the entire evaluation set + sections : list of dict of {str : str or tuple of (str, str, str, str)} + Results broken down by each section of the evaluation set. Each dict contains the name of the section + under the key 'section', and lists of correctly and incorrectly predicted 4-tuples of words under the + keys 'correct' and 'incorrect'. """ ok_vocab = [(w, self.vocab[w]) for w in self.index2word[:restrict_vocab]] @@ -1281,9 +1285,13 @@ def evaluate_word_pairs(self, pairs, delimiter='\t', restrict_vocab=300000, Returns ------- - (float, float, float) - Pearson correlation coefficient, Spearman rank-order correlation coefficient between the similarities - from the dataset and the similarities produced by the model itself, ratio of pairs with unknown words. + pearson : tuple of (float, float) + Pearson correlation coefficient with 2-tailed p-value. + spearman : tuple of (float, float) + Spearman rank-order correlation coefficient between the similarities from the dataset and the + similarities produced by the model itself, with 2-tailed p-value. + oov_ratio : float + The ratio of pairs with unknown words. """ ok_vocab = [(w, self.vocab[w]) for w in self.index2word[:restrict_vocab]] From 8bf7396c8464feed06e8311fe08311d93eabfbc2 Mon Sep 17 00:00:00 2001 From: Stig Johan Berggren Date: Thu, 4 Oct 2018 04:01:05 +0200 Subject: [PATCH 30/66] Fix return type in `KeyedVector.evaluate_word_analogies` (#2207) --- gensim/models/keyedvectors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index cb318322b4..72a33c5bba 100644 --- a/gensim/models/keyedvectors.py +++ b/gensim/models/keyedvectors.py @@ -1069,7 +1069,7 @@ def evaluate_word_analogies(self, analogies, restrict_vocab=300000, case_insensi ------- score : float The overall evaluation score on the entire evaluation set - sections : list of dict of {str : str or tuple of (str, str, str, str)} + sections : list of dict of {str : str or list of tuple of (str, str, str, str)} Results broken down by each section of the evaluation set. Each dict contains the name of the section under the key 'section', and lists of correctly and incorrectly predicted 4-tuples of words under the keys 'correct' and 'incorrect'. 
From 485fa3473b336287e16328da572ec44472b78886 Mon Sep 17 00:00:00 2001 From: jeni Shah Date: Thu, 4 Oct 2018 11:19:02 +0530 Subject: [PATCH 31/66] Remove `__getitem__` code duplication in `gensim.models.phrases` (#2206) Remove `__getitem__` code duplication in `gensim.models.phrases` --- gensim/models/phrases.py | 93 +++++++++++++++++++++------------------- 1 file changed, 48 insertions(+), 45 deletions(-) diff --git a/gensim/models/phrases.py b/gensim/models/phrases.py index 09d709b193..1c0db3abe8 100644 --- a/gensim/models/phrases.py +++ b/gensim/models/phrases.py @@ -232,6 +232,52 @@ def load(cls, *args, **kwargs): return model +def _sentence2token(phrase_class, sentence): + """ Convert the input tokens `sentence` into tokens where detected bigrams are joined by a selected delimiter. + + This function is used by: meth:`~gensim.models.phrases.Phrases.__getitem__` and + meth:`~gensim.models.phrases.Phraser.__getitem__` + + Parameters + ---------- + phrase_class : + class:`~gensim.models.phrases.Phrases` or :class:`~gensim.models.phrases.Phraser` + sentence : {list of str, iterable of list of str} + Sentence or text corpus. + + Returns + ------- + {list of str, :class:`~gensim.interfaces.TransformedCorpus`} + `sentence` with detected phrase bigrams merged together, or a streamed corpus of such sentences + if the input was a corpus. + + """ + is_single, sentence = _is_single(sentence) + if not is_single: + # if the input is an entire corpus (rather than a single sentence), + # return an iterable stream. + return phrase_class._apply(sentence) + + delimiter = phrase_class.delimiter + if hasattr(phrase_class, 'vocab'): + scorer = ft.partial( + phrase_class.scoring, + len_vocab=float(len(phrase_class.vocab)), + min_count=float(phrase_class.min_count), + corpus_word_count=float(phrase_class.corpus_word_count)) + else: + scorer = None + bigrams = phrase_class.analyze_sentence(sentence, threshold=phrase_class.threshold, + common_terms=phrase_class.common_terms, scorer=scorer) + + new_s = [] + for words, score in bigrams: + if score is not None: + words = delimiter.join(words) + new_s.append(words) + return [utils.to_unicode(w) for w in new_s] + + class Phrases(SentenceAnalyzer, PhrasesTransformation): """Detect phrases based on collocation counts.""" @@ -597,33 +643,7 @@ def __getitem__(self, sentence): """ warnings.warn("For a faster implementation, use the gensim.models.phrases.Phraser class") - delimiter = self.delimiter # delimiter used for lookup - - is_single, sentence = _is_single(sentence) - if not is_single: - # if the input is an entire corpus (rather than a single sentence), - # return an iterable stream. - return self._apply(sentence) - - delimiter = self.delimiter - bigrams = self.analyze_sentence( - sentence, - threshold=self.threshold, - common_terms=self.common_terms, - scorer=ft.partial( - self.scoring, - len_vocab=float(len(self.vocab)), - min_count=float(self.min_count), - corpus_word_count=float(self.corpus_word_count), - ), - ) - new_s = [] - for words, score in bigrams: - if score is not None: - words = delimiter.join(words) - new_s.append(words) - - return [utils.to_unicode(w) for w in new_s] + return _sentence2token(self, sentence) def original_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count, corpus_word_count): @@ -855,24 +875,7 @@ def __getitem__(self, sentence): [u'graph_minors'] """ - is_single, sentence = _is_single(sentence) - if not is_single: - # if the input is an entire corpus (rather than a single sentence), - # return an iterable stream. 
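This refactor routes both `Phrases.__getitem__` and `Phraser.__getitem__` through the shared `_sentence2token` helper, so both accept either a single tokenised sentence or a whole corpus. A hedged usage sketch (illustrative, not part of the patch; `sentences` is a placeholder corpus of token lists):

.. sourcecode:: pycon

    >>> from gensim.models.phrases import Phrases, Phraser
    >>>
    >>> bigram = Phrases(sentences, min_count=1, threshold=2)   # learn bigram statistics
    >>> phraser = Phraser(bigram)                               # cheaper object, same lookup behaviour
    >>> phraser[['graph', 'minors', 'survey']]                  # one sentence in, tokens with joined bigrams out
    >>> transformed = phraser[sentences]                        # a corpus in, a streamed transformed corpus out
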
- return self._apply(sentence) - - delimiter = self.delimiter - bigrams = self.analyze_sentence( - sentence, - threshold=self.threshold, - common_terms=self.common_terms, - scorer=None) # we will use our score_item function redefinition - new_s = [] - for words, score in bigrams: - if score is not None: - words = delimiter.join(words) - new_s.append(words) - return [utils.to_unicode(w) for w in new_s] + return _sentence2token(self, sentence) if __name__ == '__main__': From 4543646d3fe3496e11bc935e72cbf9b18504442e Mon Sep 17 00:00:00 2001 From: Denis Kataev Date: Thu, 4 Oct 2018 14:56:23 +0500 Subject: [PATCH 32/66] Add `flake8-rst` for docstring code examples (#2192) * Add sourcecode:pycon * Unused imports * Indentation is not a multiple of four * Missing whitespace around operator * Add to codesource * Space before comment * Print as function * Correct indent * missing whitespace after ',' * Tox ini * Tox command * travis * Remove ignore * Fix flake8 line len * run flake8-rst on python3 * add proper PEP8 checking to travis (2.7 & 3.6), cleanup excludes * cleanup doc-examples * rename & add options * add todo to toxfile (remove ignore when all docstring examples fixed) * unify header & add it to missing rst files --- .travis.yml | 5 +- docs/src/changes_080.rst | 18 +- docs/src/dist_lda.rst | 10 +- docs/src/dist_lsi.rst | 44 ++- docs/src/simserver.rst | 182 +++++++----- docs/src/tut1.rst | 205 +++++++------ docs/src/tut2.rst | 134 +++++---- docs/src/tut3.rst | 79 +++-- docs/src/tutorial.rst | 50 ++-- docs/src/wiki.rst | 46 ++- gensim/corpora/dictionary.py | 223 +++++++------- gensim/corpora/hashdictionary.py | 65 +++-- gensim/corpora/indexedcorpus.py | 43 +-- gensim/corpora/lowcorpus.py | 49 ++-- gensim/corpora/malletcorpus.py | 63 ++-- gensim/corpora/mmcorpus.py | 32 ++- gensim/corpora/sharded_corpus.py | 46 +-- gensim/corpora/textcorpus.py | 43 +-- gensim/corpora/ucicorpus.py | 25 +- gensim/corpora/wikicorpus.py | 50 ++-- gensim/downloader.py | 83 +++--- gensim/interfaces.py | 80 +++--- gensim/models/__init__.py | 12 +- gensim/models/atmodel.py | 93 +++--- gensim/models/callbacks.py | 118 ++++---- gensim/models/coherencemodel.py | 62 ++-- gensim/models/deprecated/doc2vec.py | 26 +- gensim/models/deprecated/fasttext.py | 72 +++-- gensim/models/deprecated/fasttext_wrapper.py | 15 +- gensim/models/deprecated/keyedvectors.py | 183 +++++++----- gensim/models/deprecated/word2vec.py | 79 +++-- gensim/models/doc2vec.py | 46 +-- gensim/models/fasttext.py | 131 +++++---- gensim/models/hdpmodel.py | 24 +- gensim/models/keyedvectors.py | 140 ++++----- gensim/models/lda_dispatcher.py | 6 +- gensim/models/lda_worker.py | 6 +- gensim/models/ldamodel.py | 119 ++++---- gensim/models/ldamulticore.py | 51 ++-- gensim/models/ldaseqmodel.py | 34 ++- gensim/models/logentropy_model.py | 18 +- gensim/models/lsi_dispatcher.py | 12 +- gensim/models/lsi_worker.py | 12 +- gensim/models/lsimodel.py | 32 ++- gensim/models/phrases.py | 223 +++++++------- gensim/models/poincare.py | 235 ++++++++------- gensim/models/rpmodel.py | 52 ++-- gensim/models/tfidfmodel.py | 22 +- gensim/models/translation_matrix.py | 130 +++++---- gensim/models/word2vec.py | 109 ++++--- gensim/models/wrappers/dtmmodel.py | 19 +- gensim/models/wrappers/fasttext.py | 8 +- gensim/models/wrappers/ldamallet.py | 15 +- gensim/models/wrappers/ldavowpalwabbit.py | 47 +-- gensim/models/wrappers/wordrank.py | 15 +- gensim/parsing/porter.py | 189 ++++++------ gensim/parsing/preprocessing.py | 119 +++++--- gensim/scripts/glove2word2vec.py | 24 +- 
gensim/scripts/package_info.py | 8 +- gensim/scripts/segment_wiki.py | 28 +- gensim/similarities/docsim.py | 271 ++++++++++-------- gensim/similarities/index.py | 50 ++-- gensim/sklearn_api/atmodel.py | 32 ++- gensim/sklearn_api/d2vmodel.py | 12 +- gensim/sklearn_api/ftmodel.py | 44 +-- gensim/sklearn_api/hdp.py | 14 +- gensim/sklearn_api/ldamodel.py | 14 +- gensim/sklearn_api/ldaseqmodel.py | 20 +- gensim/sklearn_api/lsimodel.py | 32 ++- gensim/sklearn_api/phrases.py | 28 +- gensim/sklearn_api/rpmodel.py | 18 +- gensim/sklearn_api/text2bow.py | 24 +- gensim/sklearn_api/tfidf.py | 14 +- gensim/sklearn_api/w2vmodel.py | 20 +- gensim/summarization/bm25.py | 33 ++- gensim/summarization/commons.py | 19 +- gensim/summarization/graph.py | 32 ++- gensim/summarization/keywords.py | 18 +- gensim/summarization/pagerank_weighted.py | 28 +- gensim/summarization/summarizer.py | 56 ++-- gensim/summarization/textcleaner.py | 67 +++-- gensim/test/utils.py | 120 ++++---- gensim/topic_coherence/aggregation.py | 8 +- .../direct_confirmation_measure.py | 90 +++--- .../indirect_confirmation_measure.py | 112 ++++---- .../topic_coherence/probability_estimation.py | 204 +++++++------ gensim/topic_coherence/segmentation.py | 42 +-- gensim/topic_coherence/text_analysis.py | 70 +++-- gensim/utils.py | 147 ++++++---- tox.ini | 14 +- 90 files changed, 3289 insertions(+), 2473 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3cbccc0b0a..c4c8603f43 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,10 @@ language: python matrix: include: - python: '2.7' - env: TOXENV="flake8" + env: TOXENV="flake8,flake8-docs" + + - python: '3.6' + env: TOXENV="flake8,flake8-docs" - python: '2.7' env: TOXENV="py27-linux" diff --git a/docs/src/changes_080.rst b/docs/src/changes_080.rst index e3c36b9db3..2786d4b71a 100644 --- a/docs/src/changes_080.rst +++ b/docs/src/changes_080.rst @@ -23,10 +23,12 @@ That's not as tragic as it sounds, gensim was almost there anyway. The changes a If you stored a model that is affected by this to disk, you'll need to rename its attributes manually: ->>> lsa = gensim.models.LsiModel.load('/some/path') # load old <0.8.0 model ->>> lsa.num_terms, lsa.num_topics = lsa.numTerms, lsa.numTopics # rename attributes ->>> del lsa.numTerms, lsa.numTopics # clean up old attributes (optional) ->>> lsa.save('/some/path') # save again to disk, as 0.8.0 compatible +.. sourcecode:: pycon + + >>> lsa = gensim.models.LsiModel.load('/some/path') # load old <0.8.0 model + >>> lsa.num_terms, lsa.num_topics = lsa.numTerms, lsa.numTopics # rename attributes + >>> del lsa.numTerms, lsa.numTopics # clean up old attributes (optional) + >>> lsa.save('/some/path') # save again to disk, as 0.8.0 compatible Only attributes (variables) need to be renamed; method names (functions) are not affected, due to the way `pickle` works. @@ -41,9 +43,11 @@ and can be processed independently. In addition, documents can now be added to a There is also a new way to query the similarity indexes: ->>> index = MatrixSimilarity(corpus) # create an index ->>> sims = index[document] # get cosine similarity of query "document" against every document in the index ->>> sims = index[chunk_of_documents] # new syntax! +.. sourcecode:: pycon + + >>> index = MatrixSimilarity(corpus) # create an index + >>> sims = index[document] # get cosine similarity of query "document" against every document in the index + >>> sims = index[chunk_of_documents] # new syntax! 
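For readers updating old code, here is a slightly fuller sketch of the same querying pattern shown in the snippet above (illustrative only; `corpus`, `document` and `chunk_of_documents` are placeholders for your own data):

.. sourcecode:: pycon

    >>> from gensim.similarities import MatrixSimilarity
    >>>
    >>> index = MatrixSimilarity(corpus)        # build the index once
    >>> sims = index[document]                  # one query document: one row of cosine similarities
    >>> sims = index[chunk_of_documents]        # a batch of query documents: one row per query
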
Advantage of the last line (querying multiple documents at the same time) is faster execution. diff --git a/docs/src/dist_lda.rst b/docs/src/dist_lda.rst index baf2d28aba..a8d0cb9816 100644 --- a/docs/src/dist_lda.rst +++ b/docs/src/dist_lda.rst @@ -19,7 +19,9 @@ Running LDA ____________ Run LDA like you normally would, but turn on the `distributed=True` constructor -parameter:: +parameter + +.. sourcecode:: pycon >>> # extract 100 LDA topics, using default parameters >>> lda = LdaModel(corpus=mm, id2word=id2word, num_topics=100, distributed=True) @@ -34,7 +36,9 @@ In distributed mode with four workers (Linux, Xeons of 2Ghz, 4GB RAM with `ATLAS `_), the wallclock time taken drops to 3h20m. To run standard batch LDA (no online updates of mini-batches) instead, you would similarly -call:: +call + +.. sourcecode:: pycon >>> lda = LdaModel(corpus=mm, id2word=id2token, num_topics=100, update_every=0, passes=20, distributed=True) using distributed version with 4 workers @@ -50,7 +54,7 @@ and then, some two days later:: iteration 19, dispatching documents up to #3199665/3199665 reached the end of input; now waiting for all remaining jobs to finish -:: +.. sourcecode:: pycon >>> lda.print_topics(20) topic #0: 0.007*disease + 0.006*medical + 0.005*treatment + 0.005*cells + 0.005*cell + 0.005*cancer + 0.005*health + 0.005*blood + 0.004*patients + 0.004*drug diff --git a/docs/src/dist_lsi.rst b/docs/src/dist_lsi.rst index 15dfb41f9c..45c79cb222 100644 --- a/docs/src/dist_lsi.rst +++ b/docs/src/dist_lsi.rst @@ -58,16 +58,20 @@ ____________ So let's test our setup and run one computation of distributed LSA. Open a Python shell on one of the five machines (again, this can be done on any computer in the same `broadcast domain `_, -our choice is incidental) and try:: +our choice is incidental) and try: - >>> from gensim import corpora, models, utils +.. sourcecode:: pycon + + >>> from gensim import corpora, models >>> import logging + >>> >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) - - >>> corpus = corpora.MmCorpus('/tmp/deerwester.mm') # load a corpus of nine documents, from the Tutorials + >>> + >>> corpus = corpora.MmCorpus('/tmp/deerwester.mm') # load a corpus of nine documents, from the Tutorials >>> id2word = corpora.Dictionary.load('/tmp/deerwester.dict') - - >>> lsi = models.LsiModel(corpus, id2word=id2word, num_topics=200, chunksize=1, distributed=True) # run distributed LSA on nine documents + >>> + >>> # run distributed LSA on nine documents + >>> lsi = models.LsiModel(corpus, id2word=id2word, num_topics=200, chunksize=1, distributed=True) This uses the corpus and feature-token mapping created in the :doc:`tut1` tutorial. If you look at the log in your Python session, you should see a line similar to:: @@ -76,7 +80,9 @@ If you look at the log in your Python session, you should see a line similar to: which means all went well. You can also check the logs coming from your worker and dispatcher processes --- this is especially helpful in case of problems. -To check the LSA results, let's print the first two latent topics:: +To check the LSA results, let's print the first two latent topics: + +.. sourcecode:: pycon >>> lsi.print_topics(num_topics=2, num_words=5) topic #0(3.341): 0.644*"system" + 0.404*"user" + 0.301*"eps" + 0.265*"time" + 0.265*"response" @@ -86,13 +92,15 @@ Success! 
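As a small optional follow-up (not part of the patch), the model produced by the distributed run behaves like any other `LsiModel`, so it can be inspected or persisted locally once training finishes; the path below is just a placeholder:

.. sourcecode:: pycon

    >>> lsi.show_topic(0, topn=5)               # the first topic as (word, weight) pairs
    >>> lsi.save('/tmp/deerwester_lsi.model')   # persist locally; workers are not needed for later queries
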
But a corpus of nine documents is no challenge for our powerful cluster In fact, we had to lower the job size (`chunksize` parameter above) to a single document at a time, otherwise all documents would be processed by a single worker all at once. -So let's run LSA on **one million documents** instead:: +So let's run LSA on **one million documents** instead + +.. sourcecode:: pycon >>> # inflate the corpus to 1M documents, by repeating its documents over&over >>> corpus1m = utils.RepeatCorpus(corpus, 1000000) >>> # run distributed LSA on 1 million documents >>> lsi1m = models.LsiModel(corpus1m, id2word=id2word, num_topics=200, chunksize=10000, distributed=True) - + >>> >>> lsi1m.print_topics(num_topics=2, num_words=5) topic #0(1113.628): 0.644*"system" + 0.404*"user" + 0.301*"eps" + 0.265*"time" + 0.265*"response" topic #1(847.233): 0.623*"graph" + 0.490*"trees" + 0.451*"minors" + 0.274*"survey" + -0.167*"system" @@ -118,25 +126,31 @@ Distributed LSA on Wikipedia ++++++++++++++++++++++++++++++ First, download and prepare the Wikipedia corpus as per :doc:`wiki`, then load -the corpus iterator with:: +the corpus iterator with - >>> import logging, gensim - >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) +.. sourcecode:: pycon + >>> import logging + >>> import gensim + >>> + >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) + >>> >>> # load id->word mapping (the dictionary) >>> id2word = gensim.corpora.Dictionary.load_from_text('wiki_en_wordids.txt') >>> # load corpus iterator >>> mm = gensim.corpora.MmCorpus('wiki_en_tfidf.mm') >>> # mm = gensim.corpora.MmCorpus('wiki_en_tfidf.mm.bz2') # use this if you compressed the TFIDF output - + >>> >>> print(mm) MmCorpus(3199665 documents, 100000 features, 495547400 non-zero entries) -Now we're ready to run distributed LSA on the English Wikipedia:: +Now we're ready to run distributed LSA on the English Wikipedia: + +.. sourcecode:: pycon >>> # extract 400 LSI topics, using a cluster of nodes >>> lsi = gensim.models.lsimodel.LsiModel(corpus=mm, id2word=id2word, num_topics=400, chunksize=20000, distributed=True) - + >>> >>> # print the most contributing words (both positively and negatively) for each of the first ten topics >>> lsi.print_topics(10) 2010-11-03 16:08:27,602 : INFO : topic #0(200.990): -0.475*"delete" + -0.383*"deletion" + -0.275*"debate" + -0.223*"comments" + -0.220*"edits" + -0.213*"modify" + -0.208*"appropriate" + -0.194*"subsequent" + -0.155*"wp" + -0.117*"notability" diff --git a/docs/src/simserver.rst b/docs/src/simserver.rst index 1b0d2b4396..49b26ab5d4 100644 --- a/docs/src/simserver.rst +++ b/docs/src/simserver.rst @@ -20,20 +20,20 @@ Conceptually, a service that lets you : 2. index arbitrary documents using this semantic model 3. query the index for similar documents (the query can be either an id of a document already in the index, or an arbitrary text) - ->>> from simserver import SessionServer ->>> server = SessionServer('/tmp/my_server') # resume server (or create a new one) - ->>> server.train(training_corpus, method='lsi') # create a semantic model ->>> server.index(some_documents) # convert plain text to semantic representation and index it ->>> server.find_similar(query) # convert query to semantic representation and compare against index ->>> ... ->>> server.index(more_documents) # add to index: incremental indexing works ->>> server.find_similar(query) ->>> ... 
->>> server.delete(ids_to_delete) # incremental deleting also works ->>> server.find_similar(query) ->>> ... + .. sourcecode:: pycon + + >>> from simserver import SessionServer + >>> server = SessionServer('/tmp/my_server') # resume server (or create a new one) + >>> + >>> server.train(training_corpus, method='lsi') # create a semantic model + >>> server.index(some_documents) # convert plain text to semantic representation and index it + >>> server.find_similar(query) # convert query to semantic representation and compare against index + >>> + >>> server.index(more_documents) # add to index: incremental indexing works + >>> server.find_similar(query) + >>> + >>> server.delete(ids_to_delete) # incremental deleting also works + >>> server.find_similar(query) .. note:: "Semantic" here refers to semantics of the crude, statistical type -- @@ -89,19 +89,23 @@ version 4.8 as of this writing):: $ sudo easy_install Pyro4 .. note:: - Don't forget to initialize logging to see logging messages:: + Don't forget to initialize logging to see logging messages: + + .. sourcecode:: pycon - >>> import logging - >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) + >>> import logging + >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) What is a document? ------------------- -In case of text documents, the service expects:: +In case of text documents, the service expects: ->>> document = {'id': 'some_unique_string', ->>> 'tokens': ['content', 'of', 'the', 'document', '...'], ->>> 'other_fields_are_allowed_but_ignored': None} +.. sourcecode:: pycon + + >>> document = {'id': 'some_unique_string', + >>> 'tokens': ['content', 'of', 'the', 'document', '...'], + >>> 'other_fields_are_allowed_but_ignored': None} This format was chosen because it coincides with plain JSON and is therefore easy to serialize and send over the wire, in almost any language. All strings involved must be utf8-encoded. @@ -113,23 +117,29 @@ What is a corpus? A sequence of documents. Anything that supports the `for document in corpus: ...` iterator protocol. Generators are ok. Plain lists are also ok (but consume more memory). ->>> from gensim import utils ->>> texts = ["Human machine interface for lab abc computer applications", ->>> "A survey of user opinion of computer system response time", ->>> "The EPS user interface management system", ->>> "System and human system engineering testing of EPS", ->>> "Relation of user perceived response time to error measurement", ->>> "The generation of random binary unordered trees", ->>> "The intersection graph of paths in trees", ->>> "Graph minors IV Widths of trees and well quasi ordering", ->>> "Graph minors A survey"] ->>> corpus = [{'id': 'doc_%i' % num, 'tokens': utils.simple_preprocess(text)} ->>> for num, text in enumerate(texts)] +.. 
sourcecode:: pycon + + >>> from gensim import utils + >>> + >>> texts = ["Human machine interface for lab abc computer applications", + >>> "A survey of user opinion of computer system response time", + >>> "The EPS user interface management system", + >>> "System and human system engineering testing of EPS", + >>> "Relation of user perceived response time to error measurement", + >>> "The generation of random binary unordered trees", + >>> "The intersection graph of paths in trees", + >>> "Graph minors IV Widths of trees and well quasi ordering", + >>> "Graph minors A survey"] + >>> + >>> corpus = [{'id': 'doc_%i' % num, 'tokens': utils.simple_preprocess(text)} + >>> for num, text in enumerate(texts)] Since corpora are allowed to be arbitrarily large, it is recommended client splits them into smaller chunks before uploading them to the server: ->>> utils.upload_chunked(server, corpus, chunksize=1000) # send 1k docs at a time +.. sourcecode:: pycon + + >>> utils.upload_chunked(server, corpus, chunksize=1000) # send 1k docs at a time Wait, upload what, where? ------------------------- @@ -141,11 +151,13 @@ option, not a necessity. Document similarity can also act as a long-running service, a daemon process on a separate machine. In that case, I'll call the service object a *server*. -But let's start with a local object. Open your `favourite shell `_ and:: +But let's start with a local object. Open your `favourite shell `_ and ->>> from gensim import utils ->>> from simserver import SessionServer ->>> service = SessionServer('/tmp/my_server/') # or wherever +.. sourcecode:: pycon + + >>> from simserver import SessionServer + >>> + >>> service = SessionServer('/tmp/my_server/') # or wherever That initialized a new service, located in `/tmp/my_server` (you need write access rights to that directory). @@ -162,14 +174,18 @@ Model training We can start indexing right away: ->>> service.index(corpus) -AttributeError: must initialize model for /tmp/my_server/b before indexing documents +.. sourcecode:: pycon + + >>> service.index(corpus) + AttributeError: must initialize model for /tmp/my_server/b before indexing documents Oops, we can not. The service indexes documents in a semantic representation, which is different to the plain text we give it. We must teach the service how to convert -between plain text and semantics first:: +between plain text and semantics first: ->>> service.train(corpus, method='lsi') +.. sourcecode:: pycon + + >>> service.train(corpus, method='lsi') That was easy. The `method='lsi'` parameter meant that we trained a model for `Latent Semantic Indexing `_ @@ -188,19 +204,25 @@ on a corpus that is: Indexing documents ------------------ ->>> service.index(corpus) # index the same documents that we trained on... +.. sourcecode:: pycon + + >>> service.index(corpus) # index the same documents that we trained on... Indexing can happen over any documents, but I'm too lazy to create another example corpus, so we index the same 9 docs used for training. -Delete documents with:: +Delete documents with: - >>> service.delete(['doc_5', 'doc_8']) # supply a list of document ids to be removed from the index +.. sourcecode:: pycon + + >>> service.delete(['doc_5', 'doc_8']) # supply a list of document ids to be removed from the index When you pass documents that have the same id as some already indexed document, the indexed document is overwritten by the new input (=only the latest counts; -document ids are always unique per service):: +document ids are always unique per service): + +.. 
sourcecode:: pycon - >>> service.index(corpus[:3]) # overall index size unchanged (just 3 docs overwritten) + >>> service.index(corpus[:3]) # overall index size unchanged (just 3 docs overwritten) The index/delete/overwrite calls can be arbitrarily interspersed with queries. You don't have to index **all** documents first to start querying, indexing can be incremental. @@ -212,26 +234,26 @@ There are two types of queries: 1. by id: - .. code-block:: python - - >>> print(service.find_similar('doc_0')) - [('doc_0', 1.0, None), ('doc_2', 0.30426699, None), ('doc_1', 0.25648531, None), ('doc_3', 0.25480536, None)] + .. sourcecode:: pycon - >>> print(service.find_similar('doc_5')) # we deleted doc_5 and doc_8, remember? - ValueError: document 'doc_5' not in index + >>> print(service.find_similar('doc_0')) + [('doc_0', 1.0, None), ('doc_2', 0.30426699, None), ('doc_1', 0.25648531, None), ('doc_3', 0.25480536, None)] + >>> + >>> print(service.find_similar('doc_5')) # we deleted doc_5 and doc_8, remember? + ValueError: document 'doc_5' not in index - In the resulting 3-tuples, `doc_n` is the document id we supplied during indexing, - `0.30426699` is the similarity of `doc_n` to the query, but what's up with that `None`, you ask? - Well, you can associate each document with a "payload", during indexing. - This payload object (anything pickle-able) is later returned during querying. - If you don't specify `doc['payload']` during indexing, queries simply return `None` in the result tuple, as in our example here. + In the resulting 3-tuples, `doc_n` is the document id we supplied during indexing, + `0.30426699` is the similarity of `doc_n` to the query, but what's up with that `None`, you ask? + Well, you can associate each document with a "payload", during indexing. + This payload object (anything pickle-able) is later returned during querying. + If you don't specify `doc['payload']` during indexing, queries simply return `None` in the result tuple, as in our example here. 2. or by document (using `document['tokens']`; id is ignored in this case): - .. code-block:: python + .. sourcecode:: pycon - >>> doc = {'tokens': utils.simple_preprocess('Graph and minors and humans and trees.')} - >>> print(service.find_similar(doc, min_score=0.4, max_results=50)) + >>> doc = {'tokens': utils.simple_preprocess('Graph and minors and humans and trees.')} + >>> print(service.find_similar(doc, min_score=0.4, max_results=50)) [('doc_7', 0.93350589, None), ('doc_3', 0.42718196, None)] Remote access @@ -250,20 +272,23 @@ included with simserver, run it with:: You can just `ctrl+c` to terminate the server, but leave it running for now. -Now open your Python shell again, in another terminal window or possibly on another machine, and:: +Now open your Python shell again, in another terminal window or possibly on another machine, and ->>> import Pyro4 ->>> service = Pyro4.Proxy(Pyro4.locateNS().lookup('gensim.testserver')) +.. sourcecode:: pycon + + >>> import Pyro4 + >>> service = Pyro4.Proxy(Pyro4.locateNS().lookup('gensim.testserver')) Now `service` is only a proxy object: every call is physically executed wherever you ran the `run_server.py` script, which can be a totally different computer -(within a network broadcast domain), but you don't even know:: +(within a network broadcast domain), but you don't even know: ->>> print(service.status()) ->>> service.train(corpus) ->>> service.index(other_corpus) ->>> service.find_similar(query) ->>> ... +.. 
sourcecode:: pycon + + >>> print(service.status()) + >>> service.train(corpus) + >>> service.index(other_corpus) + >>> service.find_similar(query) It is worth mentioning that Irmen, the author of Pyro, also released `Pyrolite `_ recently. That is a package @@ -300,7 +325,9 @@ with how the session went), it can be rolled back. It also means other clients c continue querying the original index during index updates. The mechanism is hidden from users by default through auto-committing (it was already happening -in the examples above too), but auto-committing can be turned off explicitly:: +in the examples above too), but auto-committing can be turned off explicitly + +.. sourcecode:: pycon >>> service.set_autosession(False) >>> service.train(corpus) @@ -309,19 +336,22 @@ in the examples above too), but auto-committing can be turned off explicitly:: >>> service.train(corpus) >>> service.index(corpus) >>> service.delete(doc_ids) - >>> ... None of these changes are visible to other clients, yet. Also, other clients' calls to index/train/etc will block until this session is committed/rolled back---there cannot be two open sessions at the same time. -To end a session:: +To end a session + +.. sourcecode:: pycon + + >>> service.rollback() # discard all changes since open_session() - >>> service.rollback() # discard all changes since open_session() +or -or:: +.. sourcecode:: pycon - >>> service.commit() # make changes public; now other clients can see changes/acquire the modification lock + >>> service.commit() # make changes public; now other clients can see changes/acquire the modification lock Other stuff diff --git a/docs/src/tut1.rst b/docs/src/tut1.rst index 4d6f80b375..992858ffad 100644 --- a/docs/src/tut1.rst +++ b/docs/src/tut1.rst @@ -20,17 +20,17 @@ From Strings to Vectors This time, let's start from documents represented as strings: ->>> from gensim import corpora ->>> ->>> documents = ["Human machine interface for lab abc computer applications", ->>> "A survey of user opinion of computer system response time", ->>> "The EPS user interface management system", ->>> "System and human system engineering testing of EPS", ->>> "Relation of user perceived response time to error measurement", ->>> "The generation of random binary unordered trees", ->>> "The intersection graph of paths in trees", ->>> "Graph minors IV Widths of trees and well quasi ordering", ->>> "Graph minors A survey"] +.. sourcecode:: pycon + + >>> documents = ["Human machine interface for lab abc computer applications", + >>> "A survey of user opinion of computer system response time", + >>> "The EPS user interface management system", + >>> "System and human system engineering testing of EPS", + >>> "Relation of user perceived response time to error measurement", + >>> "The generation of random binary unordered trees", + >>> "The intersection graph of paths in trees", + >>> "Graph minors IV Widths of trees and well quasi ordering", + >>> "Graph minors A survey"] This is a tiny corpus of nine documents, each consisting of only a single sentence. 
@@ -38,32 +38,35 @@ This is a tiny corpus of nine documents, each consisting of only a single senten First, let's tokenize the documents, remove common words (using a toy stoplist) as well as words that only appear once in the corpus: ->>> # remove common words and tokenize ->>> stoplist = set('for a of the and to in'.split()) ->>> texts = [[word for word in document.lower().split() if word not in stoplist] ->>> for document in documents] ->>> ->>> # remove words that appear only once ->>> from collections import defaultdict ->>> frequency = defaultdict(int) ->>> for text in texts: ->>> for token in text: ->>> frequency[token] += 1 ->>> ->>> texts = [[token for token in text if frequency[token] > 1] ->>> for text in texts] ->>> ->>> from pprint import pprint # pretty-printer ->>> pprint(texts) -[['human', 'interface', 'computer'], - ['survey', 'user', 'computer', 'system', 'response', 'time'], - ['eps', 'user', 'interface', 'system'], - ['system', 'human', 'system', 'eps'], - ['user', 'response', 'time'], - ['trees'], - ['graph', 'trees'], - ['graph', 'minors', 'trees'], - ['graph', 'minors', 'survey']] +.. sourcecode:: pycon + + >>> from pprint import pprint # pretty-printer + >>> from collections import defaultdict + >>> + >>> # remove common words and tokenize + >>> stoplist = set('for a of the and to in'.split()) + >>> texts = [[word for word in document.lower().split() if word not in stoplist] + >>> for document in documents] + >>> + >>> # remove words that appear only once + >>> frequency = defaultdict(int) + >>> for text in texts: + >>> for token in text: + >>> frequency[token] += 1 + >>> + >>> texts = [[token for token in text if frequency[token] > 1] + >>> for text in texts] + >>> + >>> pprint(texts) + [['human', 'interface', 'computer'], + ['survey', 'user', 'computer', 'system', 'response', 'time'], + ['eps', 'user', 'interface', 'system'], + ['system', 'human', 'system', 'eps'], + ['user', 'response', 'time'], + ['trees'], + ['graph', 'trees'], + ['graph', 'minors', 'trees'], + ['graph', 'minors', 'survey']] Your way of processing the documents will likely vary; here, I only split on whitespace to tokenize, followed by lowercasing each word. In fact, I use this particular @@ -98,16 +101,20 @@ and relevant statistics. In the end, we see there are twelve distinct words in t processed corpus, which means each document will be represented by twelve numbers (ie., by a 12-D vector). To see the mapping between words and their ids: ->>> print(dictionary.token2id) -{'minors': 11, 'graph': 10, 'system': 5, 'trees': 9, 'eps': 8, 'computer': 0, -'survey': 4, 'user': 7, 'human': 1, 'time': 6, 'interface': 2, 'response': 3} +.. sourcecode:: pycon + + >>> print(dictionary.token2id) + {'minors': 11, 'graph': 10, 'system': 5, 'trees': 9, 'eps': 8, 'computer': 0, + 'survey': 4, 'user': 7, 'human': 1, 'time': 6, 'interface': 2, 'response': 3} To actually convert tokenized documents to vectors: ->>> new_doc = "Human computer interaction" ->>> new_vec = dictionary.doc2bow(new_doc.lower().split()) ->>> print(new_vec) # the word "interaction" does not appear in the dictionary and is ignored -[(0, 1), (1, 1)] +.. 
sourcecode:: pycon + + >>> new_doc = "Human computer interaction" + >>> new_vec = dictionary.doc2bow(new_doc.lower().split()) + >>> print(new_vec) # the word "interaction" does not appear in the dictionary and is ignored + [(0, 1), (1, 1)] The function :func:`doc2bow` simply counts the number of occurrences of each distinct word, converts the word to its integer word id @@ -115,6 +122,8 @@ and returns the result as a sparse vector. The sparse vector ``[(0, 1), (1, 1)]` therefore reads: in the document `"Human computer interaction"`, the words `computer` (id 0) and `human` (id 1) appear once; the other ten dictionary words appear (implicitly) zero times. +.. sourcecode:: pycon + >>> corpus = [dictionary.doc2bow(text) for text in texts] >>> corpora.MmCorpus.serialize('/tmp/deerwester.mm', corpus) # store to disk, for later use >>> print(corpus) @@ -140,13 +149,15 @@ Note that `corpus` above resides fully in memory, as a plain Python list. In this simple example, it doesn't matter much, but just to make things clear, let's assume there are millions of documents in the corpus. Storing all of them in RAM won't do. Instead, let's assume the documents are stored in a file on disk, one document per line. Gensim -only requires that a corpus must be able to return one document vector at a time:: +only requires that a corpus must be able to return one document vector at a time: + +.. sourcecode:: pycon ->>> class MyCorpus(object): ->>> def __iter__(self): ->>> for line in open('mycorpus.txt'): ->>> # assume there's one document per line, tokens separated by whitespace ->>> yield dictionary.doc2bow(line.lower().split()) + >>> class MyCorpus(object): + >>> def __iter__(self): + >>> for line in open('mycorpus.txt'): + >>> # assume there's one document per line, tokens separated by whitespace + >>> yield dictionary.doc2bow(line.lower().split()) Download the sample `mycorpus.txt file here <./mycorpus.txt>`_. The assumption that each document occupies one line in a single file is not important; you can mold @@ -155,13 +166,17 @@ Walking directories, parsing XML, accessing network... Just parse your input to retrieve a clean list of tokens in each document, then convert the tokens via a dictionary to their ids and yield the resulting sparse vector inside `__iter__`. ->>> corpus_memory_friendly = MyCorpus() # doesn't load the corpus into memory! ->>> print(corpus_memory_friendly) -<__main__.MyCorpus object at 0x10d5690> +.. sourcecode:: pycon + + >>> corpus_memory_friendly = MyCorpus() # doesn't load the corpus into memory! + >>> print(corpus_memory_friendly) + <__main__.MyCorpus object at 0x10d5690> Corpus is now an object. We didn't define any way to print it, so `print` just outputs address of the object in memory. Not very useful. To see the constituent vectors, let's -iterate over the corpus and print each document vector (one at a time):: +iterate over the corpus and print each document vector (one at a time): + +.. sourcecode:: pycon >>> for vector in corpus_memory_friendly: # load one vector into memory at a time ... print(vector) @@ -179,7 +194,9 @@ Although the output is the same as for the plain Python list, the corpus is now more memory friendly, because at most one vector resides in RAM at a time. Your corpus can now be as large as you want. -Similarly, to construct the dictionary without loading all texts into memory:: +Similarly, to construct the dictionary without loading all texts into memory: + +.. 
sourcecode:: pycon >>> from six import iteritems >>> # collect statistics about all tokens @@ -215,49 +232,63 @@ a time, without the whole corpus being read into main memory at once. One of the more notable file formats is the `Market Matrix format `_. To save a corpus in the Matrix Market format: ->>> # create a toy corpus of 2 documents, as a plain Python list ->>> corpus = [[(1, 0.5)], []] # make one document empty, for the heck of it ->>> ->>> corpora.MmCorpus.serialize('/tmp/corpus.mm', corpus) +.. sourcecode:: pycon + + >>> # create a toy corpus of 2 documents, as a plain Python list + >>> corpus = [[(1, 0.5)], []] # make one document empty, for the heck of it + >>> + >>> corpora.MmCorpus.serialize('/tmp/corpus.mm', corpus) Other formats include `Joachim's SVMlight format `_, `Blei's LDA-C format `_ and `GibbsLDA++ format `_. ->>> corpora.SvmLightCorpus.serialize('/tmp/corpus.svmlight', corpus) ->>> corpora.BleiCorpus.serialize('/tmp/corpus.lda-c', corpus) ->>> corpora.LowCorpus.serialize('/tmp/corpus.low', corpus) +.. sourcecode:: pycon + + >>> corpora.SvmLightCorpus.serialize('/tmp/corpus.svmlight', corpus) + >>> corpora.BleiCorpus.serialize('/tmp/corpus.lda-c', corpus) + >>> corpora.LowCorpus.serialize('/tmp/corpus.low', corpus) Conversely, to load a corpus iterator from a Matrix Market file: ->>> corpus = corpora.MmCorpus('/tmp/corpus.mm') +.. sourcecode:: pycon + + >>> corpus = corpora.MmCorpus('/tmp/corpus.mm') Corpus objects are streams, so typically you won't be able to print them directly: ->>> print(corpus) -MmCorpus(2 documents, 2 features, 1 non-zero entries) +.. sourcecode:: pycon + + >>> print(corpus) + MmCorpus(2 documents, 2 features, 1 non-zero entries) Instead, to view the contents of a corpus: ->>> # one way of printing a corpus: load it entirely into memory ->>> print(list(corpus)) # calling list() will convert any sequence to a plain Python list -[[(1, 0.5)], []] +.. sourcecode:: pycon + + >>> # one way of printing a corpus: load it entirely into memory + >>> print(list(corpus)) # calling list() will convert any sequence to a plain Python list + [[(1, 0.5)], []] or ->>> # another way of doing it: print one document at a time, making use of the streaming interface ->>> for doc in corpus: -... print(doc) -[(1, 0.5)] -[] +.. sourcecode:: pycon + + >>> # another way of doing it: print one document at a time, making use of the streaming interface + >>> for doc in corpus: + ... print(doc) + [(1, 0.5)] + [] The second way is obviously more memory-friendly, but for testing and development purposes, nothing beats the simplicity of calling ``list(corpus)``. To save the same Matrix Market document stream in Blei's LDA-C format, ->>> corpora.BleiCorpus.serialize('/tmp/corpus.lda-c', corpus) +.. sourcecode:: pycon + + >>> corpora.BleiCorpus.serialize('/tmp/corpus.lda-c', corpus) In this way, `gensim` can also be used as a memory-efficient **I/O format conversion tool**: just load a document stream using one format and immediately save it in another format. @@ -268,20 +299,24 @@ Compatibility with NumPy and SciPy ---------------------------------- Gensim also contains `efficient utility functions `_ -to help converting from/to numpy matrices:: +to help converting from/to numpy matrices + +.. 
sourcecode:: pycon + + >>> import gensim + >>> import numpy as np + >>> numpy_matrix = np.random.randint(10, size=[5, 2]) # random matrix as an example + >>> corpus = gensim.matutils.Dense2Corpus(numpy_matrix) + >>> numpy_matrix = gensim.matutils.corpus2dense(corpus, num_terms=number_of_corpus_features) ->>> import gensim ->>> import numpy as np ->>> numpy_matrix = np.random.randint(10, size=[5,2]) # random matrix as an example ->>> corpus = gensim.matutils.Dense2Corpus(numpy_matrix) ->>> numpy_matrix = gensim.matutils.corpus2dense(corpus, num_terms=number_of_corpus_features) +and from/to `scipy.sparse` matrices -and from/to `scipy.sparse` matrices:: +.. sourcecode:: pycon ->>> import scipy.sparse ->>> scipy_sparse_matrix = scipy.sparse.random(5,2) # random sparse matrix as example ->>> corpus = gensim.matutils.Sparse2Corpus(scipy_sparse_matrix) ->>> scipy_csc_matrix = gensim.matutils.corpus2csc(corpus) + >>> import scipy.sparse + >>> scipy_sparse_matrix = scipy.sparse.random(5, 2) # random sparse matrix as example + >>> corpus = gensim.matutils.Sparse2Corpus(scipy_sparse_matrix) + >>> scipy_csc_matrix = gensim.matutils.corpus2csc(corpus) ------------- diff --git a/docs/src/tut2.rst b/docs/src/tut2.rst index 130bba7375..24db8ae092 100644 --- a/docs/src/tut2.rst +++ b/docs/src/tut2.rst @@ -6,8 +6,10 @@ Topics and Transformations Don't forget to set ->>> import logging ->>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) +.. sourcecode:: pycon + + >>> import logging + >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) if you want to see logging events. @@ -17,15 +19,18 @@ Transformation interface In the previous tutorial on :doc:`tut1`, we created a corpus of documents represented as a stream of vectors. To continue, let's fire up gensim and use that corpus: ->>> from gensim import corpora, models, similarities ->>> if (os.path.exists("/tmp/deerwester.dict")): ->>> dictionary = corpora.Dictionary.load('/tmp/deerwester.dict') ->>> corpus = corpora.MmCorpus('/tmp/deerwester.mm') ->>> print("Used files generated from first tutorial") ->>> else: ->>> print("Please run first tutorial to generate data set") +.. sourcecode:: pycon + + >>> from gensim import corpora + >>> + >>> if (os.path.exists("/tmp/deerwester.dict")): + >>> dictionary = corpora.Dictionary.load('/tmp/deerwester.dict') + >>> corpus = corpora.MmCorpus('/tmp/deerwester.mm') + >>> print("Used files generated from first tutorial") + >>> else: + >>> print("Please run first tutorial to generate data set") -MmCorpus(9 documents, 12 features, 28 non-zero entries) + MmCorpus(9 documents, 12 features, 28 non-zero entries) In this tutorial, I will show how to transform documents from one vector representation into another. This process serves two goals: @@ -43,7 +48,9 @@ Creating a transformation The transformations are standard Python objects, typically initialized by means of a :dfn:`training corpus`: ->>> tfidf = models.TfidfModel(corpus) # step 1 -- initialize a model +.. sourcecode:: pycon + + >>> tfidf = models.TfidfModel(corpus) # step 1 -- initialize a model We used our old corpus from tutorial 1 to initialize (train) the transformation model. 
Different transformations may require different initialization parameters; in case of TfIdf, the @@ -69,24 +76,28 @@ From now on, ``tfidf`` is treated as a read-only object that can be used to conv any vector from the old representation (bag-of-words integer counts) to the new representation (TfIdf real-valued weights): ->>> doc_bow = [(0, 1), (1, 1)] ->>> print(tfidf[doc_bow]) # step 2 -- use the model to transform vectors -[(0, 0.70710678), (1, 0.70710678)] +.. sourcecode:: pycon + + >>> doc_bow = [(0, 1), (1, 1)] + >>> print(tfidf[doc_bow]) # step 2 -- use the model to transform vectors + [(0, 0.70710678), (1, 0.70710678)] Or to apply a transformation to a whole corpus: ->>> corpus_tfidf = tfidf[corpus] ->>> for doc in corpus_tfidf: -... print(doc) -[(0, 0.57735026918962573), (1, 0.57735026918962573), (2, 0.57735026918962573)] -[(0, 0.44424552527467476), (3, 0.44424552527467476), (4, 0.44424552527467476), (5, 0.32448702061385548), (6, 0.44424552527467476), (7, 0.32448702061385548)] -[(2, 0.5710059809418182), (5, 0.41707573620227772), (7, 0.41707573620227772), (8, 0.5710059809418182)] -[(1, 0.49182558987264147), (5, 0.71848116070837686), (8, 0.49182558987264147)] -[(3, 0.62825804686700459), (6, 0.62825804686700459), (7, 0.45889394536615247)] -[(9, 1.0)] -[(9, 0.70710678118654746), (10, 0.70710678118654746)] -[(9, 0.50804290089167492), (10, 0.50804290089167492), (11, 0.69554641952003704)] -[(4, 0.62825804686700459), (10, 0.45889394536615247), (11, 0.62825804686700459)] +.. sourcecode:: pycon + + >>> corpus_tfidf = tfidf[corpus] + >>> for doc in corpus_tfidf: + ... print(doc) + [(0, 0.57735026918962573), (1, 0.57735026918962573), (2, 0.57735026918962573)] + [(0, 0.44424552527467476), (3, 0.44424552527467476), (4, 0.44424552527467476), (5, 0.32448702061385548), (6, 0.44424552527467476), (7, 0.32448702061385548)] + [(2, 0.5710059809418182), (5, 0.41707573620227772), (7, 0.41707573620227772), (8, 0.5710059809418182)] + [(1, 0.49182558987264147), (5, 0.71848116070837686), (8, 0.49182558987264147)] + [(3, 0.62825804686700459), (6, 0.62825804686700459), (7, 0.45889394536615247)] + [(9, 1.0)] + [(9, 0.70710678118654746), (10, 0.70710678118654746)] + [(9, 0.50804290089167492), (10, 0.50804290089167492), (11, 0.69554641952003704)] + [(4, 0.62825804686700459), (10, 0.45889394536615247), (11, 0.62825804686700459)] In this particular case, we are transforming the same corpus that we used for training, but this is only incidental. Once the transformation model has been initialized, @@ -105,13 +116,17 @@ folding-in for LSA, by topic inference for LDA etc. Transformations can also be serialized, one on top of another, in a sort of chain: ->>> lsi = models.LsiModel(corpus_tfidf, id2word=dictionary, num_topics=2) # initialize an LSI transformation ->>> corpus_lsi = lsi[corpus_tfidf] # create a double wrapper over the original corpus: bow->tfidf->fold-in-lsi +.. sourcecode:: pycon + + >>> lsi = models.LsiModel(corpus_tfidf, id2word=dictionary, num_topics=2) # initialize an LSI transformation + >>> corpus_lsi = lsi[corpus_tfidf] # create a double wrapper over the original corpus: bow->tfidf->fold-in-lsi Here we transformed our Tf-Idf corpus via `Latent Semantic Indexing `_ into a latent 2-D space (2-D because we set ``num_topics=2``). Now you're probably wondering: what do these two latent dimensions stand for? Let's inspect with :func:`models.LsiModel.print_topics`: +.. 
sourcecode:: pycon + >>> lsi.print_topics(2) topic #0(1.594): -0.703*"trees" + -0.538*"graph" + -0.402*"minors" + -0.187*"survey" + -0.061*"system" + -0.060*"response" + -0.060*"time" + -0.058*"user" + -0.049*"computer" + -0.035*"interface" topic #1(1.476): -0.460*"system" + -0.373*"user" + -0.332*"eps" + -0.328*"interface" + -0.320*"response" + -0.320*"time" + -0.293*"computer" + -0.280*"human" + -0.171*"survey" + 0.161*"trees" @@ -125,23 +140,27 @@ second topic practically concerns itself with all the other words. As expected, the first five documents are more strongly related to the second topic while the remaining four documents to the first topic: ->>> for doc in corpus_lsi: # both bow->tfidf and tfidf->lsi transformations are actually executed here, on the fly -... print(doc) -[(0, -0.066), (1, 0.520)] # "Human machine interface for lab abc computer applications" -[(0, -0.197), (1, 0.761)] # "A survey of user opinion of computer system response time" -[(0, -0.090), (1, 0.724)] # "The EPS user interface management system" -[(0, -0.076), (1, 0.632)] # "System and human system engineering testing of EPS" -[(0, -0.102), (1, 0.574)] # "Relation of user perceived response time to error measurement" -[(0, -0.703), (1, -0.161)] # "The generation of random binary unordered trees" -[(0, -0.877), (1, -0.168)] # "The intersection graph of paths in trees" -[(0, -0.910), (1, -0.141)] # "Graph minors IV Widths of trees and well quasi ordering" -[(0, -0.617), (1, 0.054)] # "Graph minors A survey" +.. sourcecode:: pycon + + >>> for doc in corpus_lsi: # both bow->tfidf and tfidf->lsi transformations are actually executed here, on the fly + ... print(doc) + [(0, -0.066), (1, 0.520)] # "Human machine interface for lab abc computer applications" + [(0, -0.197), (1, 0.761)] # "A survey of user opinion of computer system response time" + [(0, -0.090), (1, 0.724)] # "The EPS user interface management system" + [(0, -0.076), (1, 0.632)] # "System and human system engineering testing of EPS" + [(0, -0.102), (1, 0.574)] # "Relation of user perceived response time to error measurement" + [(0, -0.703), (1, -0.161)] # "The generation of random binary unordered trees" + [(0, -0.877), (1, -0.168)] # "The intersection graph of paths in trees" + [(0, -0.910), (1, -0.141)] # "Graph minors IV Widths of trees and well quasi ordering" + [(0, -0.617), (1, 0.054)] # "Graph minors A survey" Model persistency is achieved with the :func:`save` and :func:`load` functions: ->>> lsi.save('/tmp/model.lsi') # same for tfidf, lda, ... ->>> lsi = models.LsiModel.load('/tmp/model.lsi') +.. sourcecode:: pycon + + >>> lsi.save('/tmp/model.lsi') # same for tfidf, lda, ... + >>> lsi = models.LsiModel.load('/tmp/model.lsi') The next question might be: just how exactly similar are those documents to each other? @@ -165,7 +184,9 @@ Gensim implements several popular Vector Space Model algorithms: the number of dimensions intact. It can also optionally normalize the resulting vectors to (Euclidean) unit length. - >>> model = models.TfidfModel(corpus, normalize=True) + .. sourcecode:: pycon + + >>> model = models.TfidfModel(corpus, normalize=True) * `Latent Semantic Indexing, LSI (or sometimes LSA) `_ transforms documents from either bag-of-words or (preferrably) TfIdf-weighted space into @@ -173,7 +194,9 @@ Gensim implements several popular Vector Space Model algorithms: 2 latent dimensions, but on real corpora, target dimensionality of 200--500 is recommended as a "golden standard" [1]_. 
- >>> model = models.LsiModel(tfidf_corpus, id2word=dictionary, num_topics=300) + .. sourcecode:: pycon + + >>> model = models.LsiModel(tfidf_corpus, id2word=dictionary, num_topics=300) LSI training is unique in that we can continue "training" at any point, simply by providing more training documents. This is done by incremental updates to @@ -181,12 +204,13 @@ Gensim implements several popular Vector Space Model algorithms: input document stream may even be infinite -- just keep feeding LSI new documents as they arrive, while using the computed transformation model as read-only in the meanwhile! - >>> model.add_documents(another_tfidf_corpus) # now LSI has been trained on tfidf_corpus + another_tfidf_corpus - >>> lsi_vec = model[tfidf_vec] # convert some new document into the LSI space, without affecting the model - >>> ... - >>> model.add_documents(more_documents) # tfidf_corpus + another_tfidf_corpus + more_documents - >>> lsi_vec = model[tfidf_vec] - >>> ... + .. sourcecode:: pycon + + >>> model.add_documents(another_tfidf_corpus) # now LSI has been trained on tfidf_corpus + another_tfidf_corpus + >>> lsi_vec = model[tfidf_vec] # convert some new document into the LSI space, without affecting the model + >>> + >>> model.add_documents(more_documents) # tfidf_corpus + another_tfidf_corpus + more_documents + >>> lsi_vec = model[tfidf_vec] See the :mod:`gensim.models.lsimodel` documentation for details on how to make LSI gradually "forget" old observations in infinite streams. If you want to get dirty, @@ -205,7 +229,9 @@ Gensim implements several popular Vector Space Model algorithms: CPU-friendly) approach to approximating TfIdf distances between documents, by throwing in a little randomness. Recommended target dimensionality is again in the hundreds/thousands, depending on your dataset. - >>> model = models.RpModel(tfidf_corpus, num_topics=500) + .. sourcecode:: pycon + + >>> model = models.RpModel(tfidf_corpus, num_topics=500) * `Latent Dirichlet Allocation, LDA `_ is yet another transformation from bag-of-words counts into a topic space of lower @@ -214,7 +240,9 @@ Gensim implements several popular Vector Space Model algorithms: just like with LSA, inferred automatically from a training corpus. Documents are in turn interpreted as a (soft) mixture of these topics (again, just like with LSA). - >>> model = models.LdaModel(corpus, id2word=dictionary, num_topics=100) + .. sourcecode:: pycon + + >>> model = models.LdaModel(corpus, id2word=dictionary, num_topics=100) `gensim` uses a fast implementation of online LDA parameter estimation based on [2]_, modified to run in :doc:`distributed mode ` on a cluster of computers. @@ -222,7 +250,9 @@ Gensim implements several popular Vector Space Model algorithms: * `Hierarchical Dirichlet Process, HDP `_ is a non-parametric bayesian method (note the missing number of requested topics): - >>> model = models.HdpModel(corpus, id2word=dictionary) + .. sourcecode:: pycon + + >>> model = models.HdpModel(corpus, id2word=dictionary) `gensim` uses a fast, online implementation based on [3]_. The HDP model is a new addition to `gensim`, and still rough around its academic edges -- use with care. diff --git a/docs/src/tut3.rst b/docs/src/tut3.rst index e2cf10a7b5..d9b28220cc 100644 --- a/docs/src/tut3.rst +++ b/docs/src/tut3.rst @@ -6,8 +6,10 @@ Similarity Queries Don't forget to set ->>> import logging ->>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) +.. 
sourcecode:: pycon + + >>> import logging + >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) if you want to see logging events. @@ -25,16 +27,21 @@ previous examples (which really originally comes from Deerwester et al.'s `"Indexing by Latent Semantic Analysis" `_ seminal 1990 article): ->>> from gensim import corpora, models, similarities ->>> dictionary = corpora.Dictionary.load('/tmp/deerwester.dict') ->>> corpus = corpora.MmCorpus('/tmp/deerwester.mm') # comes from the first tutorial, "From strings to vectors" ->>> print(corpus) -MmCorpus(9 documents, 12 features, 28 non-zero entries) +.. sourcecode:: pycon + + >>> from gensim import corpora + >>> dictionary = corpora.Dictionary.load('/tmp/deerwester.dict') + >>> corpus = corpora.MmCorpus('/tmp/deerwester.mm') # comes from the first tutorial, "From strings to vectors" + >>> print(corpus) + MmCorpus(9 documents, 12 features, 28 non-zero entries) To follow Deerwester's example, we first use this tiny corpus to define a 2-dimensional LSI space: ->>> lsi = models.LsiModel(corpus, id2word=dictionary, num_topics=2) +.. sourcecode:: pycon + + >>> from gensim import models + >>> lsi = models.LsiModel(corpus, id2word=dictionary, num_topics=2) Now suppose a user typed in the query `"Human computer interaction"`. We would like to sort our nine corpus documents in decreasing order of relevance to this query. @@ -42,11 +49,13 @@ Unlike modern search engines, here we only concentrate on a single aspect of pos similarities---on apparent semantic relatedness of their texts (words). No hyperlinks, no random-walk static ranks, just a semantic extension over the boolean keyword match: ->>> doc = "Human computer interaction" ->>> vec_bow = dictionary.doc2bow(doc.lower().split()) ->>> vec_lsi = lsi[vec_bow] # convert the query to LSI space ->>> print(vec_lsi) -[(0, -0.461821), (1, 0.070028)] +.. sourcecode:: pycon + + >>> doc = "Human computer interaction" + >>> vec_bow = dictionary.doc2bow(doc.lower().split()) + >>> vec_lsi = lsi[vec_bow] # convert the query to LSI space + >>> print(vec_lsi) + [(0, -0.461821), (1, 0.070028)] In addition, we will be considering `cosine similarity `_ to determine the similarity of two vectors. Cosine similarity is a standard measure @@ -62,7 +71,9 @@ to compare against subsequent queries. In our case, they are the same nine docum used for training LSI, converted to 2-D LSA space. But that's only incidental, we might also be indexing a different corpus altogether. ->>> index = similarities.MatrixSimilarity(lsi[corpus]) # transform corpus to LSI space and index it +.. sourcecode:: pycon + + >>> index = similarities.MatrixSimilarity(lsi[corpus]) # transform corpus to LSI space and index it .. warning:: The class :class:`similarities.MatrixSimilarity` is only appropriate when the whole @@ -76,8 +87,10 @@ might also be indexing a different corpus altogether. Index persistency is handled via the standard :func:`save` and :func:`load` functions: ->>> index.save('/tmp/deerwester.index') ->>> index = similarities.MatrixSimilarity.load('/tmp/deerwester.index') +.. sourcecode:: pycon + + >>> index.save('/tmp/deerwester.index') + >>> index = similarities.MatrixSimilarity.load('/tmp/deerwester.index') This is true for all similarity indexing classes (:class:`similarities.Similarity`, :class:`similarities.MatrixSimilarity` and :class:`similarities.SparseMatrixSimilarity`). 
@@ -90,10 +103,12 @@ Performing queries To obtain similarities of our query document against the nine indexed documents: ->>> sims = index[vec_lsi] # perform a similarity query against the corpus ->>> print(list(enumerate(sims))) # print (document_number, document_similarity) 2-tuples -[(0, 0.99809301), (1, 0.93748635), (2, 0.99844527), (3, 0.9865886), (4, 0.90755945), -(5, -0.12416792), (6, -0.1063926), (7, -0.098794639), (8, 0.05004178)] +.. sourcecode:: pycon + + >>> sims = index[vec_lsi] # perform a similarity query against the corpus + >>> print(list(enumerate(sims))) # print (document_number, document_similarity) 2-tuples + [(0, 0.99809301), (1, 0.93748635), (2, 0.99844527), (3, 0.9865886), (4, 0.90755945), + (5, -0.12416792), (6, -0.1063926), (7, -0.098794639), (8, 0.05004178)] Cosine measure returns similarities in the range `<-1, 1>` (the greater, the more similar), so that the first document has a score of 0.99809301 etc. @@ -101,17 +116,19 @@ so that the first document has a score of 0.99809301 etc. With some standard Python magic we sort these similarities into descending order, and obtain the final answer to the query `"Human computer interaction"`: ->>> sims = sorted(enumerate(sims), key=lambda item: -item[1]) ->>> print(sims) # print sorted (document number, similarity score) 2-tuples -[(2, 0.99844527), # The EPS user interface management system -(0, 0.99809301), # Human machine interface for lab abc computer applications -(3, 0.9865886), # System and human system engineering testing of EPS -(1, 0.93748635), # A survey of user opinion of computer system response time -(4, 0.90755945), # Relation of user perceived response time to error measurement -(8, 0.050041795), # Graph minors A survey -(7, -0.098794639), # Graph minors IV Widths of trees and well quasi ordering -(6, -0.1063926), # The intersection graph of paths in trees -(5, -0.12416792)] # The generation of random binary unordered trees +.. sourcecode:: pycon + + >>> sims = sorted(enumerate(sims), key=lambda item: -item[1]) + >>> print(sims) # print sorted (document number, similarity score) 2-tuples + [(2, 0.99844527), # The EPS user interface management system + (0, 0.99809301), # Human machine interface for lab abc computer applications + (3, 0.9865886), # System and human system engineering testing of EPS + (1, 0.93748635), # A survey of user opinion of computer system response time + (4, 0.90755945), # Relation of user perceived response time to error measurement + (8, 0.050041795), # Graph minors A survey + (7, -0.098794639), # Graph minors IV Widths of trees and well quasi ordering + (6, -0.1063926), # The intersection graph of paths in trees + (5, -0.12416792)] # The generation of random binary unordered trees (I added the original documents in their "string form" to the output comments, to improve clarity.) diff --git a/docs/src/tutorial.rst b/docs/src/tutorial.rst index e8ec9b8912..3ec9631153 100644 --- a/docs/src/tutorial.rst +++ b/docs/src/tutorial.rst @@ -38,17 +38,17 @@ Quick Example First, let's import gensim and create a small corpus of nine documents and twelve features [1]_: ->>> from gensim import corpora, models, similarities ->>> ->>> corpus = [[(0, 1.0), (1, 1.0), (2, 1.0)], ->>> [(2, 1.0), (3, 1.0), (4, 1.0), (5, 1.0), (6, 1.0), (8, 1.0)], ->>> [(1, 1.0), (3, 1.0), (4, 1.0), (7, 1.0)], ->>> [(0, 1.0), (4, 2.0), (7, 1.0)], ->>> [(3, 1.0), (5, 1.0), (6, 1.0)], ->>> [(9, 1.0)], ->>> [(9, 1.0), (10, 1.0)], ->>> [(9, 1.0), (10, 1.0), (11, 1.0)], ->>> [(8, 1.0), (10, 1.0), (11, 1.0)]] +.. 
sourcecode:: pycon + + >>> corpus = [[(0, 1.0), (1, 1.0), (2, 1.0)], + >>> [(2, 1.0), (3, 1.0), (4, 1.0), (5, 1.0), (6, 1.0), (8, 1.0)], + >>> [(1, 1.0), (3, 1.0), (4, 1.0), (7, 1.0)], + >>> [(0, 1.0), (4, 2.0), (7, 1.0)], + >>> [(3, 1.0), (5, 1.0), (6, 1.0)], + >>> [(9, 1.0)], + >>> [(9, 1.0), (10, 1.0)], + >>> [(9, 1.0), (10, 1.0), (11, 1.0)], + >>> [(8, 1.0), (10, 1.0), (11, 1.0)]] In `gensim` a :dfn:`corpus` is simply an object which, when iterated over, returns its documents represented as sparse vectors. In this case we're using a list of list of tuples. If you're not familiar with the `vector space model `_, we'll bridge the gap between **raw strings**, **corpora** and **sparse vectors** in the next tutorial on :doc:`tut1`. @@ -67,13 +67,19 @@ has major impact on the quality of any subsequent applications. Next, let's initialize a :dfn:`transformation`: ->>> tfidf = models.TfidfModel(corpus) +.. sourcecode:: pycon + + >>> from gensim import models + >>> + >>> tfidf = models.TfidfModel(corpus) A transformation is used to convert documents from one vector representation into another: ->>> vec = [(0, 1), (4, 1)] ->>> print(tfidf[vec]) -[(0, 0.8075244), (4, 0.5898342)] +.. sourcecode:: pycon + + >>> vec = [(0, 1), (4, 1)] + >>> print(tfidf[vec]) + [(0, 0.8075244), (4, 0.5898342)] Here, we used `Tf-Idf `_, a simple transformation which takes documents represented as bag-of-words counts and applies @@ -84,13 +90,19 @@ Transformations are covered in detail in the tutorial on :doc:`tut2`. To transform the whole corpus via TfIdf and index it, in preparation for similarity queries: ->>> index = similarities.SparseMatrixSimilarity(tfidf[corpus], num_features=12) +.. sourcecode:: pycon + + >>> from gensim import similarities + >>> + >>> index = similarities.SparseMatrixSimilarity(tfidf[corpus], num_features=12) and to query the similarity of our query vector ``vec`` against every document in the corpus: ->>> sims = index[tfidf[vec]] ->>> print(list(enumerate(sims))) -[(0, 0.4662244), (1, 0.19139354), (2, 0.24600551), (3, 0.82094586), (4, 0.0), (5, 0.0), (6, 0.0), (7, 0.0), (8, 0.0)] +.. sourcecode:: pycon + + >>> sims = index[tfidf[vec]] + >>> print(list(enumerate(sims))) + [(0, 0.4662244), (1, 0.19139354), (2, 0.24600551), (3, 0.82094586), (4, 0.0), (5, 0.0), (6, 0.0), (7, 0.0), (8, 0.0)] How to read this output? Document number zero (the first document) has a similarity score of 0.466=46.6\%, the second document has a similarity score of 19.1\% etc. diff --git a/docs/src/wiki.rst b/docs/src/wiki.rst index 2992cf8401..bc148729d4 100644 --- a/docs/src/wiki.rst +++ b/docs/src/wiki.rst @@ -36,17 +36,21 @@ Preparing the corpus Latent Semantic Analysis -------------------------- -First let's load the corpus iterator and dictionary, created in the second step above:: +First let's load the corpus iterator and dictionary, created in the second step above - >>> import logging, gensim - >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) +.. 
sourcecode:: pycon + >>> import logging + >>> import gensim + >>> + >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) + >>> >>> # load id->word mapping (the dictionary), one of the results of step 2 above >>> id2word = gensim.corpora.Dictionary.load_from_text('wiki_en_wordids.txt') >>> # load corpus iterator >>> mm = gensim.corpora.MmCorpus('wiki_en_tfidf.mm') >>> # mm = gensim.corpora.MmCorpus('wiki_en_tfidf.mm.bz2') # use this if you compressed the TFIDF output (recommended) - + >>> >>> print(mm) MmCorpus(3931787 documents, 100000 features, 756379027 non-zero entries) @@ -54,11 +58,13 @@ We see that our corpus contains 3.9M documents, 100K features (distinct tokens) and 0.76G non-zero entries in the sparse TF-IDF matrix. The Wikipedia corpus contains about 2.24 billion tokens in total. -Now we're ready to compute LSA of the English Wikipedia:: +Now we're ready to compute LSA of the English Wikipedia: + +.. sourcecode:: pycon >>> # extract 400 LSI topics; use the default one-pass algorithm >>> lsi = gensim.models.lsimodel.LsiModel(corpus=mm, id2word=id2word, num_topics=400) - + >>> >>> # print the most contributing words (both positively and negatively) for each of the first ten topics >>> lsi.print_topics(10) topic #0(332.762): 0.425*"utc" + 0.299*"talk" + 0.293*"page" + 0.226*"article" + 0.224*"delete" + 0.216*"discussion" + 0.205*"deletion" + 0.198*"should" + 0.146*"debate" + 0.132*"be" @@ -91,17 +97,21 @@ or where the cost of storing/iterating over the corpus multiple times is too hig Latent Dirichlet Allocation ---------------------------- -As with Latent Semantic Analysis above, first load the corpus iterator and dictionary:: +As with Latent Semantic Analysis above, first load the corpus iterator and dictionary - >>> import logging, gensim - >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) +.. sourcecode:: pycon + >>> import logging + >>> import gensim + >>> + >>> logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) + >>> >>> # load id->word mapping (the dictionary), one of the results of step 2 above >>> id2word = gensim.corpora.Dictionary.load_from_text('wiki_en_wordids.txt') >>> # load corpus iterator >>> mm = gensim.corpora.MmCorpus('wiki_en_tfidf.mm') >>> # mm = gensim.corpora.MmCorpus('wiki_en_tfidf.mm.bz2') # use this if you compressed the TFIDF output - + >>> >>> print(mm) MmCorpus(3931787 documents, 100000 features, 756379027 non-zero entries) @@ -114,15 +124,19 @@ over the smaller chunks (subcorpora) are pretty good in themselves, so that the model estimation converges faster. As a result, we will perhaps only need a single full pass over the corpus: if the corpus has 3 million articles, and we update once after every 10,000 articles, this means we will have done 300 updates in one pass, quite likely -enough to have a very accurate topics estimate:: +enough to have a very accurate topics estimate + +.. sourcecode:: pycon >>> # extract 100 LDA topics, using 1 pass and updating once every 1 chunk (10,000 documents) - >>> lda = gensim.models.ldamodel.LdaModel(corpus=mm, id2word=id2word, num_topics=100, update_every=1, chunksize=10000, passes=1) + >>> lda = gensim.models.ldamodel.LdaModel(corpus=mm, id2word=id2word, num_topics=100, update_every=1, passes=1) using serial LDA version on this node running online LDA training, 100 topics, 1 passes over the supplied corpus of 3931787 documents, updating model once every 10000 documents ... 
-Unlike LSA, the topics coming from LDA are easier to interpret:: +Unlike LSA, the topics coming from LDA are easier to interpret + +.. sourcecode:: pycon >>> # print the most contributing words for 20 randomly selected topics >>> lda.print_topics(20) @@ -164,7 +178,9 @@ In short, be careful if using LDA to incrementally add new documents to the mode over time. **Batch usage of LDA**, where the entire training corpus is either known beforehand or does not exhibit topic drift, **is ok and not affected**. -To run batch LDA (not online), train `LdaModel` with:: +To run batch LDA (not online), train `LdaModel` with: + +.. sourcecode:: pycon >>> # extract 100 LDA topics, using 20 full passes, no online updates >>> lda = gensim.models.ldamodel.LdaModel(corpus=mm, id2word=id2word, num_topics=100, update_every=0, passes=20) @@ -172,6 +188,8 @@ To run batch LDA (not online), train `LdaModel` with:: As usual, a trained model can used be to transform new, unseen documents (plain bag-of-words count vectors) into LDA topic distributions: +.. sourcecode:: pycon + >>> doc_lda = lda[doc_bow] -------------------- diff --git a/gensim/corpora/dictionary.py b/gensim/corpora/dictionary.py index 1e13692a2d..590fe02453 100644 --- a/gensim/corpora/dictionary.py +++ b/gensim/corpora/dictionary.py @@ -62,13 +62,15 @@ def __init__(self, documents=None, prune_at=2000000): Examples -------- - >>> from gensim.corpora import Dictionary - >>> - >>> texts = [['human', 'interface', 'computer']] - >>> dct = Dictionary(texts) # initialize a Dictionary - >>> dct.add_documents([["cat", "say", "meow"], ["dog"]]) # add more document (extend the vocabulary) - >>> dct.doc2bow(["dog", "computer", "non_existent_word"]) - [(0, 1), (6, 1)] + .. sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> + >>> texts = [['human', 'interface', 'computer']] + >>> dct = Dictionary(texts) # initialize a Dictionary + >>> dct.add_documents([["cat", "say", "meow"], ["dog"]]) # add more document (extend the vocabulary) + >>> dct.doc2bow(["dog", "computer", "non_existent_word"]) + [(0, 1), (6, 1)] """ self.token2id = {} @@ -180,15 +182,17 @@ def add_documents(self, documents, prune_at=2000000): Examples -------- - >>> from gensim.corpora import Dictionary - >>> - >>> corpus = ["máma mele maso".split(), "ema má máma".split()] - >>> dct = Dictionary(corpus) - >>> len(dct) - 5 - >>> dct.add_documents([["this", "is", "sparta"], ["just", "joking"]]) - >>> len(dct) - 10 + .. sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> + >>> corpus = ["máma mele maso".split(), "ema má máma".split()] + >>> dct = Dictionary(corpus) + >>> len(dct) + 5 + >>> dct.add_documents([["this", "is", "sparta"], ["just", "joking"]]) + >>> len(dct) + 10 """ for docno, document in enumerate(documents): @@ -228,12 +232,15 @@ def doc2bow(self, document, allow_update=False, return_missing=False): Examples -------- - >>> from gensim.corpora import Dictionary - >>> dct = Dictionary(["máma mele maso".split(), "ema má máma".split()]) - >>> dct.doc2bow(["this", "is", "máma"]) - [(2, 1)] - >>> dct.doc2bow(["this", "is", "máma"], return_missing=True) - ([(2, 1)], {u'this': 1, u'is': 1}) + + .. 
sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> dct = Dictionary(["máma mele maso".split(), "ema má máma".split()]) + >>> dct.doc2bow(["this", "is", "máma"]) + [(2, 1)] + >>> dct.doc2bow(["this", "is", "máma"], return_missing=True) + ([(2, 1)], {u'this': 1, u'is': 1}) """ if isinstance(document, string_types): @@ -288,12 +295,14 @@ def doc2idx(self, document, unknown_word_index=-1): Examples -------- - >>> from gensim.corpora import Dictionary - >>> - >>> corpus = [["a", "a", "b"], ["a", "c"]] - >>> dct = Dictionary(corpus) - >>> dct.doc2idx(["a", "a", "c", "not_in_dictionary", "c"]) - [0, 0, 2, -1, 2] + .. sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> + >>> corpus = [["a", "a", "b"], ["a", "c"]] + >>> dct = Dictionary(corpus) + >>> dct.doc2idx(["a", "a", "c", "not_in_dictionary", "c"]) + [0, 0, 2, -1, 2] """ if isinstance(document, string_types): @@ -331,15 +340,18 @@ def filter_extremes(self, no_below=5, no_above=0.5, keep_n=100000, keep_tokens=N Examples -------- - >>> from gensim.corpora import Dictionary - >>> - >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] - >>> dct = Dictionary(corpus) - >>> len(dct) - 5 - >>> dct.filter_extremes(no_below=1, no_above=0.5, keep_n=1) - >>> len(dct) - 1 + + .. sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> + >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] + >>> dct = Dictionary(corpus) + >>> len(dct) + 5 + >>> dct.filter_extremes(no_below=1, no_above=0.5, keep_n=1) + >>> len(dct) + 1 """ no_above_abs = int(no_above * self.num_docs) # convert fractional threshold to absolute threshold @@ -380,15 +392,18 @@ def filter_n_most_frequent(self, remove_n): Examples -------- - >>> from gensim.corpora import Dictionary - >>> - >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] - >>> dct = Dictionary(corpus) - >>> len(dct) - 5 - >>> dct.filter_n_most_frequent(2) - >>> len(dct) - 3 + + .. sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> + >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] + >>> dct = Dictionary(corpus) + >>> len(dct) + 5 + >>> dct.filter_n_most_frequent(2) + >>> len(dct) + 3 """ # determine which tokens to keep @@ -416,20 +431,23 @@ def filter_tokens(self, bad_ids=None, good_ids=None): Examples -------- - >>> from gensim.corpora import Dictionary - >>> - >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] - >>> dct = Dictionary(corpus) - >>> 'ema' in dct.token2id - True - >>> dct.filter_tokens(bad_ids=[dct.token2id['ema']]) - >>> 'ema' in dct.token2id - False - >>> len(dct) - 4 - >>> dct.filter_tokens(good_ids=[dct.token2id['maso']]) - >>> len(dct) - 1 + + .. 
sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> + >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] + >>> dct = Dictionary(corpus) + >>> 'ema' in dct.token2id + True + >>> dct.filter_tokens(bad_ids=[dct.token2id['ema']]) + >>> 'ema' in dct.token2id + False + >>> len(dct) + 4 + >>> dct.filter_tokens(good_ids=[dct.token2id['maso']]) + >>> len(dct) + 1 """ if bad_ids is not None: @@ -486,17 +504,19 @@ def save_as_text(self, fname, sort_by_word=True): Examples -------- - >>> from gensim.corpora import Dictionary - >>> from gensim.test.utils import get_tmpfile - >>> - >>> tmp_fname = get_tmpfile("dictionary") - >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] - >>> - >>> dct = Dictionary(corpus) - >>> dct.save_as_text(tmp_fname) - >>> - >>> loaded_dct = Dictionary.load_from_text(tmp_fname) - >>> assert dct.token2id == loaded_dct.token2id + .. sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> from gensim.test.utils import get_tmpfile + >>> + >>> tmp_fname = get_tmpfile("dictionary") + >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] + >>> + >>> dct = Dictionary(corpus) + >>> dct.save_as_text(tmp_fname) + >>> + >>> loaded_dct = Dictionary.load_from_text(tmp_fname) + >>> assert dct.token2id == loaded_dct.token2id """ logger.info("saving dictionary mapping to %s", fname) @@ -536,15 +556,18 @@ def merge_with(self, other): Examples -------- - >>> from gensim.corpora import Dictionary - >>> - >>> corpus_1, corpus_2 = [["a", "b", "c"]], [["a", "f", "f"]] - >>> dct_1, dct_2 = Dictionary(corpus_1), Dictionary(corpus_2) - >>> dct_1.doc2bow(corpus_2[0]) - [(0, 1)] - >>> transformer = dct_1.merge_with(dct_2) - >>> dct_1.doc2bow(corpus_2[0]) - [(0, 1), (3, 2)] + + .. sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> + >>> corpus_1, corpus_2 = [["a", "b", "c"]], [["a", "f", "f"]] + >>> dct_1, dct_2 = Dictionary(corpus_1), Dictionary(corpus_2) + >>> dct_1.doc2bow(corpus_2[0]) + [(0, 1)] + >>> transformer = dct_1.merge_with(dct_2) + >>> dct_1.doc2bow(corpus_2[0]) + [(0, 1), (3, 2)] """ old2new = {} @@ -589,17 +612,20 @@ def load_from_text(fname): Examples -------- - >>> from gensim.corpora import Dictionary - >>> from gensim.test.utils import get_tmpfile - >>> - >>> tmp_fname = get_tmpfile("dictionary") - >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] - >>> - >>> dct = Dictionary(corpus) - >>> dct.save_as_text(tmp_fname) - >>> - >>> loaded_dct = Dictionary.load_from_text(tmp_fname) - >>> assert dct.token2id == loaded_dct.token2id + + .. sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> from gensim.test.utils import get_tmpfile + >>> + >>> tmp_fname = get_tmpfile("dictionary") + >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] + >>> + >>> dct = Dictionary(corpus) + >>> dct.save_as_text(tmp_fname) + >>> + >>> loaded_dct = Dictionary.load_from_text(tmp_fname) + >>> assert dct.token2id == loaded_dct.token2id """ result = Dictionary() @@ -651,12 +677,15 @@ def from_corpus(corpus, id2word=None): Examples -------- - >>> from gensim.corpora import Dictionary - >>> - >>> corpus = [[(1, 1.0)], [], [(0, 5.0), (2, 1.0)], []] - >>> dct = Dictionary.from_corpus(corpus) - >>> len(dct) - 3 + + .. 
sourcecode:: pycon + + >>> from gensim.corpora import Dictionary + >>> + >>> corpus = [[(1, 1.0)], [], [(0, 5.0), (2, 1.0)], []] + >>> dct = Dictionary.from_corpus(corpus) + >>> len(dct) + 3 """ result = Dictionary() diff --git a/gensim/corpora/hashdictionary.py b/gensim/corpora/hashdictionary.py index 85922d16c7..141f384271 100644 --- a/gensim/corpora/hashdictionary.py +++ b/gensim/corpora/hashdictionary.py @@ -50,13 +50,15 @@ class HashDictionary(utils.SaveLoad, dict): Examples -------- - >>> from gensim.corpora import HashDictionary - >>> - >>> dct = HashDictionary(debug=False) # needs no training corpus! - >>> - >>> texts = [['human', 'interface', 'computer']] - >>> dct.doc2bow(texts[0]) - [(10608, 1), (12466, 1), (31002, 1)] + .. sourcecode:: pycon + + >>> from gensim.corpora import HashDictionary + >>> + >>> dct = HashDictionary(debug=False) # needs no training corpus! + >>> + >>> texts = [['human', 'interface', 'computer']] + >>> dct.doc2bow(texts[0]) + [(10608, 1), (12466, 1), (31002, 1)] """ def __init__(self, documents=None, id_range=32000, myhash=zlib.adler32, debug=True): @@ -172,16 +174,18 @@ def add_documents(self, documents): Examples -------- - >>> from gensim.corpora import HashDictionary - >>> - >>> dct = HashDictionary(debug=True) # needs no training corpus! - >>> - >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] - >>> "sparta" in dct.token2id - False - >>> dct.add_documents([["this", "is", "sparta"], ["just", "joking"]]) - >>> "sparta" in dct.token2id - True + .. sourcecode:: pycon + + >>> from gensim.corpora import HashDictionary + >>> + >>> dct = HashDictionary(debug=True) # needs no training corpus! + >>> + >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] + >>> "sparta" in dct.token2id + False + >>> dct.add_documents([["this", "is", "sparta"], ["just", "joking"]]) + >>> "sparta" in dct.token2id + True """ for docno, document in enumerate(documents): @@ -222,11 +226,13 @@ def doc2bow(self, document, allow_update=False, return_missing=False): Examples -------- - >>> from gensim.corpora import HashDictionary - >>> - >>> dct = HashDictionary() - >>> dct.doc2bow(["this", "is", "máma"]) - [(1721, 1), (5280, 1), (22493, 1)] + .. sourcecode:: pycon + + >>> from gensim.corpora import HashDictionary + >>> + >>> dct = HashDictionary() + >>> dct.doc2bow(["this", "is", "máma"]) + [(1721, 1), (5280, 1), (22493, 1)] """ result = {} @@ -325,12 +331,15 @@ def save_as_text(self, fname): Examples -------- - >>> from gensim.corpora import HashDictionary - >>> from gensim.test.utils import get_tmpfile - >>> - >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] - >>> data = HashDictionary(corpus) - >>> data.save_as_text(get_tmpfile("dictionary_in_text_format")) + + .. sourcecode:: pycon + + >>> from gensim.corpora import HashDictionary + >>> from gensim.test.utils import get_tmpfile + >>> + >>> corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]] + >>> data = HashDictionary(corpus) + >>> data.save_as_text(get_tmpfile("dictionary_in_text_format")) """ logger.info("saving %s mapping to %s" % (self, fname)) diff --git a/gensim/corpora/indexedcorpus.py b/gensim/corpora/indexedcorpus.py index c4e58cb95a..5b6f8a42f2 100644 --- a/gensim/corpora/indexedcorpus.py +++ b/gensim/corpora/indexedcorpus.py @@ -23,18 +23,22 @@ class IndexedCorpus(interfaces.CorpusABC): While the standard corpus interface in gensim allows iterating over corpus, we'll show it with :class:`~gensim.corpora.mmcorpus.MmCorpus`. 
- >>> from gensim.corpora import MmCorpus - >>> from gensim.test.utils import datapath - >>> - >>> corpus = MmCorpus(datapath('testcorpus.mm')) - >>> for doc in corpus: - ... pass + .. sourcecode:: pycon + + >>> from gensim.corpora import MmCorpus + >>> from gensim.test.utils import datapath + >>> + >>> corpus = MmCorpus(datapath('testcorpus.mm')) + >>> for doc in corpus: + ... pass :class:`~gensim.corpora.indexedcorpus.IndexedCorpus` allows accessing the documents with index in :math:`{O}(1)` look-up time. - >>> document_index = 3 - >>> doc = corpus[document_index] + .. sourcecode:: pycon + + >>> document_index = 3 + >>> doc = corpus[document_index] Notes ----- @@ -89,16 +93,19 @@ def serialize(serializer, fname, corpus, id2word=None, index_fname=None, Examples -------- - >>> from gensim.corpora import MmCorpus - >>> from gensim.test.utils import get_tmpfile - >>> - >>> corpus = [[(1, 0.3), (2, 0.1)], [(1, 0.1)], [(2, 0.3)]] - >>> output_fname = get_tmpfile("test.mm") - >>> - >>> MmCorpus.serialize(output_fname, corpus) - >>> mm = MmCorpus(output_fname) # `mm` document stream now has random access - >>> print(mm[1]) # retrieve document no. 42, etc. - [(1, 0.1)] + + .. sourcecode:: pycon + + >>> from gensim.corpora import MmCorpus + >>> from gensim.test.utils import get_tmpfile + >>> + >>> corpus = [[(1, 0.3), (2, 0.1)], [(1, 0.1)], [(2, 0.3)]] + >>> output_fname = get_tmpfile("test.mm") + >>> + >>> MmCorpus.serialize(output_fname, corpus) + >>> mm = MmCorpus(output_fname) # `mm` document stream now has random access + >>> print(mm[1]) # retrieve document no. 42, etc. + [(1, 0.1)] """ if getattr(corpus, 'fname', None) == fname: diff --git a/gensim/corpora/lowcorpus.py b/gensim/corpora/lowcorpus.py index 277df249e5..9986c780f3 100644 --- a/gensim/corpora/lowcorpus.py +++ b/gensim/corpora/lowcorpus.py @@ -60,20 +60,22 @@ class LowCorpus(IndexedCorpus): Examples -------- - >>> from gensim.test.utils import datapath, get_tmpfile, common_texts - >>> from gensim.corpora import LowCorpus - >>> from gensim.corpora import Dictionary - >>> - >>> # Prepare needed data - >>> dictionary = Dictionary(common_texts) - >>> corpus = [dictionary.doc2bow(doc) for doc in common_texts] - >>> - >>> # Write corpus in GibbsLda++ format to disk - >>> output_fname = get_tmpfile("corpus.low") - >>> LowCorpus.serialize(output_fname, corpus, dictionary) - >>> - >>> # Read corpus - >>> loaded_corpus = LowCorpus(output_fname) + .. sourcecode:: pycon + + >>> from gensim.test.utils import get_tmpfile, common_texts + >>> from gensim.corpora import LowCorpus + >>> from gensim.corpora import Dictionary + >>> + >>> # Prepare needed data + >>> dictionary = Dictionary(common_texts) + >>> corpus = [dictionary.doc2bow(doc) for doc in common_texts] + >>> + >>> # Write corpus in GibbsLda++ format to disk + >>> output_fname = get_tmpfile("corpus.low") + >>> LowCorpus.serialize(output_fname, corpus, dictionary) + >>> + >>> # Read corpus + >>> loaded_corpus = LowCorpus(output_fname) """ def __init__(self, fname, id2word=None, line2words=split_on_space): @@ -263,14 +265,17 @@ def docbyoffset(self, offset): Examples -------- - >>> from gensim.test.utils import datapath - >>> from gensim.corpora import LowCorpus - >>> - >>> data = LowCorpus(datapath("testcorpus.low")) - >>> data.docbyoffset(1) # end of first line - [] - >>> data.docbyoffset(2) # start of second line - [(0, 1), (3, 1), (4, 1)] + + .. 
sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.corpora import LowCorpus + >>> + >>> data = LowCorpus(datapath("testcorpus.low")) + >>> data.docbyoffset(1) # end of first line + [] + >>> data.docbyoffset(2) # start of second line + [(0, 1), (3, 1), (4, 1)] """ with utils.smart_open(self.fname) as f: diff --git a/gensim/corpora/malletcorpus.py b/gensim/corpora/malletcorpus.py index 37d7fc0d9d..db28b9e632 100644 --- a/gensim/corpora/malletcorpus.py +++ b/gensim/corpora/malletcorpus.py @@ -36,20 +36,22 @@ class MalletCorpus(LowCorpus): Examples -------- - >>> from gensim.test.utils import datapath, get_tmpfile, common_texts - >>> from gensim.corpora import MalletCorpus - >>> from gensim.corpora import Dictionary - >>> - >>> # Prepare needed data - >>> dictionary = Dictionary(common_texts) - >>> corpus = [dictionary.doc2bow(doc) for doc in common_texts] - >>> - >>> # Write corpus in Mallet format to disk - >>> output_fname = get_tmpfile("corpus.mallet") - >>> MalletCorpus.serialize(output_fname, corpus, dictionary) - >>> - >>> # Read corpus - >>> loaded_corpus = MalletCorpus(output_fname) + .. sourcecode:: pycon + + >>> from gensim.test.utils import get_tmpfile, common_texts + >>> from gensim.corpora import MalletCorpus + >>> from gensim.corpora import Dictionary + >>> + >>> # Prepare needed data + >>> dictionary = Dictionary(common_texts) + >>> corpus = [dictionary.doc2bow(doc) for doc in common_texts] + >>> + >>> # Write corpus in Mallet format to disk + >>> output_fname = get_tmpfile("corpus.mallet") + >>> MalletCorpus.serialize(output_fname, corpus, dictionary) + >>> + >>> # Read corpus + >>> loaded_corpus = MalletCorpus(output_fname) """ def __init__(self, fname, id2word=None, metadata=False): @@ -113,12 +115,15 @@ def line2doc(self, line): Examples -------- - >>> from gensim.test.utils import datapath - >>> from gensim.corpora import MalletCorpus - >>> - >>> corpus = MalletCorpus(datapath("testcorpus.mallet")) - >>> corpus.line2doc("en computer human interface") - [(3, 1), (4, 1)] + + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.corpora import MalletCorpus + >>> + >>> corpus = MalletCorpus(datapath("testcorpus.mallet")) + >>> corpus.line2doc("en computer human interface") + [(3, 1), (4, 1)] """ splited_line = [word for word in utils.to_unicode(line).strip().split(' ') if word] @@ -214,14 +219,16 @@ def docbyoffset(self, offset): Examples -------- - >>> from gensim.test.utils import datapath - >>> from gensim.corpora import MalletCorpus - >>> - >>> data = MalletCorpus(datapath("testcorpus.mallet")) - >>> data.docbyoffset(1) # end of first line - [(3, 1), (4, 1)] - >>> data.docbyoffset(4) # start of second line - [(4, 1)] + .. 
sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.corpora import MalletCorpus + >>> + >>> data = MalletCorpus(datapath("testcorpus.mallet")) + >>> data.docbyoffset(1) # end of first line + [(3, 1), (4, 1)] + >>> data.docbyoffset(4) # start of second line + [(4, 1)] """ with utils.smart_open(self.fname) as f: diff --git a/gensim/corpora/mmcorpus.py b/gensim/corpora/mmcorpus.py index 92048bb67d..3650f75e11 100644 --- a/gensim/corpora/mmcorpus.py +++ b/gensim/corpora/mmcorpus.py @@ -41,13 +41,14 @@ class MmCorpus(matutils.MmReader, IndexedCorpus): Example -------- - >>> from gensim.corpora.mmcorpus import MmCorpus - >>> from gensim.test.utils import datapath - >>> import gensim.downloader as api - >>> - >>> corpus = MmCorpus(datapath('test_mmcorpus_with_index.mm')) - >>> for document in corpus: - ... pass + .. sourcecode:: pycon + + >>> from gensim.corpora.mmcorpus import MmCorpus + >>> from gensim.test.utils import datapath + >>> + >>> corpus = MmCorpus(datapath('test_mmcorpus_with_index.mm')) + >>> for document in corpus: + ... pass """ def __init__(self, fname): @@ -107,14 +108,15 @@ def save_corpus(fname, corpus, id2word=None, progress_cnt=1000, metadata=False): Example ------- - >>> from gensim.corpora.mmcorpus import MmCorpus - >>> from gensim.test.utils import datapath - >>> import gensim.downloader as api - >>> - >>> corpus = MmCorpus(datapath('test_mmcorpus_with_index.mm')) - >>> - >>> MmCorpus.save_corpus("random", corpus) # Do not do it, use `serialize` instead. - [97, 121, 169, 201, 225, 249, 258, 276, 303] + .. sourcecode:: pycon + + >>> from gensim.corpora.mmcorpus import MmCorpus + >>> from gensim.test.utils import datapath + >>> + >>> corpus = MmCorpus(datapath('test_mmcorpus_with_index.mm')) + >>> + >>> MmCorpus.save_corpus("random", corpus) # Do not do it, use `serialize` instead. + [97, 121, 169, 201, 225, 249, 258, 276, 303] """ logger.info("storing corpus in Matrix Market format to %s", fname) diff --git a/gensim/corpora/sharded_corpus.py b/gensim/corpora/sharded_corpus.py index 049e22f226..9be2d02f8f 100644 --- a/gensim/corpora/sharded_corpus.py +++ b/gensim/corpora/sharded_corpus.py @@ -67,9 +67,11 @@ class ShardedCorpus(IndexedCorpus): supply the dimension of your data to the corpus. (The dimension of word frequency vectors will typically be the size of the vocabulary, etc.) - >>> corpus = gensim.utils.mock_data() - >>> output_prefix = 'mydata.shdat' - >>> ShardedCorpus.serialize(output_prefix, corpus, dim=1000) + .. sourcecode:: pycon + + >>> corpus = gensim.utils.mock_data() + >>> output_prefix = 'mydata.shdat' + >>> ShardedCorpus.serialize(output_prefix, corpus, dim=1000) The `output_prefix` tells the ShardedCorpus where to put the data. Shards are saved as `output_prefix.0`, `output_prefix.1`, etc. @@ -88,15 +90,19 @@ class ShardedCorpus(IndexedCorpus): To retrieve data, you can load the corpus and use it like a list: - >>> sh_corpus = ShardedCorpus.load(output_prefix) - >>> batch = sh_corpus[100:150] + .. sourcecode:: pycon + + >>> sh_corpus = ShardedCorpus.load(output_prefix) + >>> batch = sh_corpus[100:150] This will retrieve a numpy 2-dimensional array of 50 rows and 1000 columns (1000 was the dimension of the data we supplied to the corpus). To retrieve gensim-style sparse vectors, set the `gensim` property: - >>> sh_corpus.gensim = True - >>> batch = sh_corpus[100:150] + .. sourcecode:: pycon + + >>> sh_corpus.gensim = True + >>> batch = sh_corpus[100:150] The batch now will be a generator of gensim vectors. 
@@ -105,8 +111,10 @@ class ShardedCorpus(IndexedCorpus): `ShardedCorpus.serialize()`, you can just initialize and use the corpus right away: - >>> corpus = ShardedCorpus(output_prefix, corpus, dim=1000) - >>> batch = corpus[100:150] + .. sourcecode:: pycon + + >>> corpus = ShardedCorpus(output_prefix, corpus, dim=1000) + >>> batch = corpus[100:150] ShardedCorpus also supports working with scipy sparse matrices, both during retrieval and during serialization. If you want to serialize your @@ -117,15 +125,17 @@ class ShardedCorpus(IndexedCorpus): will retrieve numpy ndarrays even if it was serialized into sparse matrices. - >>> sparse_prefix = 'mydata.sparse.shdat' - >>> ShardedCorpus.serialize(sparse_prefix, corpus, dim=1000, sparse_serialization=True) - >>> sparse_corpus = ShardedCorpus.load(sparse_prefix) - >>> batch = sparse_corpus[100:150] - >>> type(batch) - - >>> sparse_corpus.sparse_retrieval = True - >>> batch = sparse_corpus[100:150] - + .. sourcecode:: pycon + + >>> sparse_prefix = 'mydata.sparse.shdat' + >>> ShardedCorpus.serialize(sparse_prefix, corpus, dim=1000, sparse_serialization=True) + >>> sparse_corpus = ShardedCorpus.load(sparse_prefix) + >>> batch = sparse_corpus[100:150] + >>> type(batch) + + >>> sparse_corpus.sparse_retrieval = True + >>> batch = sparse_corpus[100:150] + While you *can* touch the `sparse_retrieval` attribute during the life of a ShardedCorpus object, you should definitely not touch ` diff --git a/gensim/corpora/textcorpus.py b/gensim/corpora/textcorpus.py index cd3d0d26e4..e5616fe9d7 100644 --- a/gensim/corpora/textcorpus.py +++ b/gensim/corpora/textcorpus.py @@ -216,26 +216,29 @@ def __init__(self, input=None, dictionary=None, metadata=False, character_filter Examples -------- - >>> from gensim.corpora.textcorpus import TextCorpus - >>> from gensim.test.utils import datapath - >>> from gensim import utils - >>> - >>> - >>> class CorpusMiislita(TextCorpus): - ... stopwords = set('for a of the and to in on'.split()) - ... - ... def get_texts(self): - ... for doc in self.getstream(): - ... yield [word for word in utils.to_unicode(doc).lower().split() if word not in self.stopwords] - ... - ... def __len__(self): - ... self.length = sum(1 for _ in self.get_texts()) - ... return self.length - >>> - >>> corpus = CorpusMiislita(datapath('head500.noblanks.cor.bz2')) - >>> len(corpus) - 250 - >>> document = next(iter(corpus.get_texts())) + .. sourcecode:: pycon + + >>> from gensim.corpora.textcorpus import TextCorpus + >>> from gensim.test.utils import datapath + >>> from gensim import utils + >>> + >>> + >>> class CorpusMiislita(TextCorpus): + ... stopwords = set('for a of the and to in on'.split()) + ... + ... def get_texts(self): + ... for doc in self.getstream(): + ... yield [word for word in utils.to_unicode(doc).lower().split() if word not in self.stopwords] + ... + ... def __len__(self): + ... self.length = sum(1 for _ in self.get_texts()) + ... 
return self.length + >>> + >>> + >>> corpus = CorpusMiislita(datapath('head500.noblanks.cor.bz2')) + >>> len(corpus) + 250 + >>> document = next(iter(corpus.get_texts())) """ self.input = input diff --git a/gensim/corpora/ucicorpus.py b/gensim/corpora/ucicorpus.py index 5aa1c456ba..09451581e7 100644 --- a/gensim/corpora/ucicorpus.py +++ b/gensim/corpora/ucicorpus.py @@ -171,12 +171,14 @@ def __init__(self, fname, fname_vocab=None): Examples -------- - >>> from gensim.corpora import UciCorpus - >>> from gensim.test.utils import datapath - >>> - >>> corpus = UciCorpus(datapath('testcorpus.uci')) - >>> for document in corpus: - ... pass + .. sourcecode:: pycon + + >>> from gensim.corpora import UciCorpus + >>> from gensim.test.utils import datapath + >>> + >>> corpus = UciCorpus(datapath('testcorpus.uci')) + >>> for document in corpus: + ... pass """ IndexedCorpus.__init__(self, fname) @@ -214,10 +216,13 @@ def create_dictionary(self): Examples -------- - >>> from gensim.corpora.ucicorpus import UciCorpus - >>> from gensim.test.utils import datapath - >>> ucc = UciCorpus(datapath('testcorpus.uci')) - >>> dictionary = ucc.create_dictionary() + + .. sourcecode:: pycon + + >>> from gensim.corpora.ucicorpus import UciCorpus + >>> from gensim.test.utils import datapath + >>> ucc = UciCorpus(datapath('testcorpus.uci')) + >>> dictionary = ucc.create_dictionary() """ dictionary = Dictionary() diff --git a/gensim/corpora/wikicorpus.py b/gensim/corpora/wikicorpus.py index 86545a11ab..8cc5ea58a7 100644 --- a/gensim/corpora/wikicorpus.py +++ b/gensim/corpora/wikicorpus.py @@ -126,12 +126,16 @@ def filter_example(elem, text, *args, **kwargs): pageid_path : str XPath expression for page id. - Example: - ------ - >>> import gensim.corpora - >>> filter_func = gensim.corpora.wikicorpus.filter_example - >>> dewiki = gensim.corpora.WikiCorpus('./dewiki-20180520-pages-articles-multistream.xml.bz2', - filter_articles=filter_func) + Example + ------- + .. sourcecode:: pycon + + >>> import gensim.corpora + >>> filter_func = gensim.corpora.wikicorpus.filter_example + >>> dewiki = gensim.corpora.WikiCorpus( + ... './dewiki-20180520-pages-articles-multistream.xml.bz2', + ... filter_articles=filter_func) + """ # Filter German wikipedia dump for articles that are marked either as # Lesenswert (featured) or Exzellent (excellent) by wikipedia editors. @@ -557,14 +561,16 @@ class WikiCorpus(TextCorpus): Examples -------- - >>> from gensim.test.utils import datapath, get_tmpfile - >>> from gensim.corpora import WikiCorpus, MmCorpus - >>> - >>> path_to_wiki_dump = datapath("enwiki-latest-pages-articles1.xml-p000000010p000030302-shortened.bz2") - >>> corpus_path = get_tmpfile("wiki-corpus.mm") - >>> - >>> wiki = WikiCorpus(path_to_wiki_dump) # create word->word_id mapping, ~8h on full wiki - >>> MmCorpus.serialize(corpus_path, wiki) # another 8h, creates a file in MatrixMarket format and mapping + .. 
sourcecode:: pycon + + >>> from gensim.test.utils import datapath, get_tmpfile + >>> from gensim.corpora import WikiCorpus, MmCorpus + >>> + >>> path_to_wiki_dump = datapath("enwiki-latest-pages-articles1.xml-p000000010p000030302-shortened.bz2") + >>> corpus_path = get_tmpfile("wiki-corpus.mm") + >>> + >>> wiki = WikiCorpus(path_to_wiki_dump) # create word->word_id mapping, ~8h on full wiki + >>> MmCorpus.serialize(corpus_path, wiki) # another 8h, creates a file in MatrixMarket format and mapping """ def __init__(self, fname, processes=None, lemmatize=utils.has_pattern(), dictionary=None, @@ -643,13 +649,15 @@ def get_texts(self): Examples -------- - >>> from gensim.test.utils import datapath - >>> from gensim.corpora import WikiCorpus - >>> - >>> path_to_wiki_dump = datapath("enwiki-latest-pages-articles1.xml-p000000010p000030302-shortened.bz2") - >>> - >>> for vec in WikiCorpus(path_to_wiki_dump): - ... pass + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.corpora import WikiCorpus + >>> + >>> path_to_wiki_dump = datapath("enwiki-latest-pages-articles1.xml-p000000010p000030302-shortened.bz2") + >>> + >>> for vec in WikiCorpus(path_to_wiki_dump): + ... pass Yields ------ diff --git a/gensim/downloader.py b/gensim/downloader.py index 3b2cf34ffa..d8ec1ac80e 100644 --- a/gensim/downloader.py +++ b/gensim/downloader.py @@ -3,27 +3,31 @@ Give information about available models/datasets: ->>> import gensim.downloader as api ->>> ->>> api.info() # return dict with info about available models/datasets ->>> api.info("text8") # return dict with info about "text8" dataset +.. sourcecode:: pycon + + >>> import gensim.downloader as api + >>> + >>> api.info() # return dict with info about available models/datasets + >>> api.info("text8") # return dict with info about "text8" dataset Model example: +.. sourcecode:: pycon ->>> import gensim.downloader as api ->>> ->>> model = api.load("glove-twitter-25") # load glove vectors ->>> model.most_similar("cat") # show words that similar to word 'cat' + >>> import gensim.downloader as api + >>> + >>> model = api.load("glove-twitter-25") # load glove vectors + >>> model.most_similar("cat") # show words that similar to word 'cat' Dataset example: +.. sourcecode:: pycon ->>> import gensim.downloader as api ->>> from gensim.models import Word2Vec ->>> ->>> dataset = api.load("text8") # load dataset as iterable ->>> model = Word2Vec(dataset) # train w2v model + >>> import gensim.downloader as api + >>> from gensim.models import Word2Vec + >>> + >>> dataset = api.load("text8") # load dataset as iterable + >>> model = Word2Vec(dataset) # train w2v model Also, this API available via CLI:: @@ -182,15 +186,17 @@ def info(name=None, show_only_latest=True, name_only=False): Examples -------- - >>> import gensim.downloader as api - >>> api.info("text8") # retrieve information about text8 dataset - {u'checksum': u'68799af40b6bda07dfa47a32612e5364', - u'description': u'Cleaned small sample from wikipedia', - u'file_name': u'text8.gz', - u'parts': 1, - u'source': u'http://mattmahoney.net/dc/text8.zip'} - >>> - >>> api.info() # retrieve information about all available datasets and models + .. 
sourcecode:: pycon + + >>> import gensim.downloader as api + >>> api.info("text8") # retrieve information about text8 dataset + {u'checksum': u'68799af40b6bda07dfa47a32612e5364', + u'description': u'Cleaned small sample from wikipedia', + u'file_name': u'text8.gz', + u'parts': 1, + u'source': u'http://mattmahoney.net/dc/text8.zip'} + >>> + >>> api.info() # retrieve information about all available datasets and models """ information = json.loads(urlopen(DATA_LIST_URL).read().decode("utf-8")) @@ -388,23 +394,30 @@ def load(name, return_path=False): -------- Model example: - >>> import gensim.downloader as api - >>> - >>> model = api.load("glove-twitter-25") # load glove vectors - >>> model.most_similar("cat") # show words that similar to word 'cat' + .. sourcecode:: pycon + + >>> import gensim.downloader as api + >>> + >>> model = api.load("glove-twitter-25") # load glove vectors + >>> model.most_similar("cat") # show words that similar to word 'cat' Dataset example: - >>> import gensim.downloader as api - >>> - >>> wiki = api.load("wiki-en") # load extracted Wikipedia dump, around 6 Gb - >>> for article in wiki: # iterate over all wiki script - >>> ... + .. sourcecode:: pycon - Download only example - >>> import gensim.downloader as api - >>> - >>> print(api.load("wiki-en", return_path=True)) # output: /home/user/gensim-data/wiki-en/wiki-en.gz + >>> import gensim.downloader as api + >>> + >>> wiki = api.load("wiki-en") # load extracted Wikipedia dump, around 6 Gb + >>> for article in wiki: # iterate over all wiki script + >>> pass + + Download only example: + + .. sourcecode:: pycon + + >>> import gensim.downloader as api + >>> + >>> print(api.load("wiki-en", return_path=True)) # output: /home/user/gensim-data/wiki-en/wiki-en.gz """ _create_base_dir() diff --git a/gensim/interfaces.py b/gensim/interfaces.py index 327dc9c960..56c71da747 100644 --- a/gensim/interfaces.py +++ b/gensim/interfaces.py @@ -30,38 +30,44 @@ class CorpusABC(utils.SaveLoad): Corpus is simply an iterable object, where each iteration step yields one document: - >>> from gensim.corpora import MmCorpus # this is inheritor of CorpusABC class - >>> from gensim.test.utils import datapath - >>> - >>> corpus = MmCorpus(datapath("testcorpus.mm")) - >>> for doc in corpus: - ... pass # do something with the doc... + .. sourcecode:: pycon + + >>> from gensim.corpora import MmCorpus # this is inheritor of CorpusABC class + >>> from gensim.test.utils import datapath + >>> + >>> corpus = MmCorpus(datapath("testcorpus.mm")) + >>> for doc in corpus: + ... pass # do something with the doc... A document represented in bag-of-word (BoW) format, i.e. list of (attr_id, attr_value), like ``[(1, 0.2), (4, 0.6), ...]``. - >>> from gensim.corpora import MmCorpus # this is inheritor of CorpusABC class - >>> from gensim.test.utils import datapath - >>> - >>> corpus = MmCorpus(datapath("testcorpus.mm")) - >>> doc = next(iter(corpus)) - >>> print(doc) - [(0, 1.0), (1, 1.0), (2, 1.0)] + .. 
sourcecode:: pycon + + >>> from gensim.corpora import MmCorpus # this is inheritor of CorpusABC class + >>> from gensim.test.utils import datapath + >>> + >>> corpus = MmCorpus(datapath("testcorpus.mm")) + >>> doc = next(iter(corpus)) + >>> print(doc) + [(0, 1.0), (1, 1.0), (2, 1.0)] Remember, that save/load methods save only corpus class (not corpus as data itself), for save/load functionality, please use this pattern : - >>> from gensim.corpora import MmCorpus # this is inheritor of CorpusABC class - >>> from gensim.test.utils import datapath, get_tmpfile - >>> - >>> corpus = MmCorpus(datapath("testcorpus.mm")) - >>> tmp_path = get_tmpfile("temp_corpus.mm") - >>> - >>> MmCorpus.serialize(tmp_path, corpus) # serialize corpus to disk in MmCorpus format - >>> # MmCorpus.save_corpus(tmp_path, corpus) # this variant also possible, but if serialize availbe - call it. - >>> loaded_corpus = MmCorpus(tmp_path) # load corpus through constructor - >>> for (doc_1, doc_2) in zip(corpus, loaded_corpus): - ... assert doc_1 == doc_2 # check that corpuses exactly same + .. sourcecode:: pycon + + >>> from gensim.corpora import MmCorpus # this is inheritor of CorpusABC class + >>> from gensim.test.utils import datapath, get_tmpfile + >>> + >>> corpus = MmCorpus(datapath("testcorpus.mm")) + >>> tmp_path = get_tmpfile("temp_corpus.mm") + >>> + >>> MmCorpus.serialize(tmp_path, corpus) # serialize corpus to disk in MmCorpus format + >>> # MmCorpus.save_corpus(tmp_path, corpus) # this variant also possible, but if serialize availbe - call it. + >>> loaded_corpus = MmCorpus(tmp_path) # load corpus through constructor + >>> for (doc_1, doc_2) in zip(corpus, loaded_corpus): + ... assert doc_1 == doc_2 # check that corpuses exactly same See Also @@ -209,12 +215,14 @@ class TransformationABC(utils.SaveLoad): A 'transformation' is any object which accepts document in BoW format via the `__getitem__` (notation `[]`) and returns another sparse document in its stead: - >>> from gensim.models import LsiModel - >>> from gensim.test.utils import common_dictionary, common_corpus - >>> - >>> model = LsiModel(common_corpus, id2word=common_dictionary) - >>> bow_vector = model[common_corpus[0]] # model applied through __getitem__ on one document from corpus. - >>> bow_corpus = model[common_corpus] # also, we can apply model on the full corpus + .. sourcecode:: pycon + + >>> from gensim.models import LsiModel + >>> from gensim.test.utils import common_dictionary, common_corpus + >>> + >>> model = LsiModel(common_corpus, id2word=common_dictionary) + >>> bow_vector = model[common_corpus[0]] # model applied through __getitem__ on one document from corpus. + >>> bow_corpus = model[common_corpus] # also, we can apply model on the full corpus """ def __getitem__(self, vec): @@ -256,11 +264,13 @@ class SimilarityABC(utils.SaveLoad): Examples -------- - >>> from gensim.similarities import MatrixSimilarity - >>> from gensim.test.utils import common_dictionary, common_corpus - >>> - >>> index = MatrixSimilarity(common_corpus) - >>> similarities = index.get_similarities(common_corpus[1]) # get similarities between query and corpus + .. 
sourcecode:: pycon + + >>> from gensim.similarities import MatrixSimilarity + >>> from gensim.test.utils import common_corpus + >>> + >>> index = MatrixSimilarity(common_corpus) + >>> similarities = index.get_similarities(common_corpus[1]) # get similarities between query and corpus Notes ----- diff --git a/gensim/models/__init__.py b/gensim/models/__init__.py index 4114724027..96ca698b27 100644 --- a/gensim/models/__init__.py +++ b/gensim/models/__init__.py @@ -37,12 +37,14 @@ class VocabTransform(interfaces.TransformationABC): `VocabTransform[corpus]` returns the same vectors but with the new ids. Old features that have no counterpart in the new ids are discarded. This - can be used to filter vocabulary of a corpus "online":: + can be used to filter vocabulary of a corpus "online": - >>> old2new = {oldid: newid for newid, oldid in enumerate(ids_you_want_to_keep)} - >>> vt = VocabTransform(old2new) - >>> for vec_with_new_ids in vt[corpus_with_old_ids]: - >>> ... + .. sourcecode:: pycon + + >>> old2new = {oldid: newid for newid, oldid in enumerate(ids_you_want_to_keep)} + >>> vt = VocabTransform(old2new) + >>> for vec_with_new_ids in vt[corpus_with_old_ids]: + >>> pass """ diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index 412f630099..a60e657788 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -25,28 +25,31 @@ Example ------- ->>> from gensim.models import AuthorTopicModel ->>> from gensim.corpora import mmcorpus ->>> from gensim.test.utils import common_dictionary, datapath, temporary_file - ->>> author2doc = { -... 'john': [0, 1, 2, 3, 4, 5, 6], -... 'jane': [2, 3, 4, 5, 6, 7, 8], -... 'jack': [0, 2, 4, 6, 8] -... } ->>> ->>> corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm')) ->>> ->>> with temporary_file("serialized") as s_path: -... model = AuthorTopicModel( -... corpus, author2doc=author2doc, id2word=common_dictionary, num_topics=4, -... serialized=True, serialization_path=s_path -... ) -... -... model.update(corpus, author2doc) # update the author-topic model with additional documents ->>> ->>> # construct vectors for authors ->>> author_vecs = [model.get_author_topics(author) for author in model.id2author.values()] + +.. sourcecode:: pycon + + >>> from gensim.models import AuthorTopicModel + >>> from gensim.corpora import mmcorpus + >>> from gensim.test.utils import common_dictionary, datapath, temporary_file + + >>> author2doc = { + ... 'john': [0, 1, 2, 3, 4, 5, 6], + ... 'jane': [2, 3, 4, 5, 6, 7, 8], + ... 'jack': [0, 2, 4, 6, 8] + ... } + >>> + >>> corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm')) + >>> + >>> with temporary_file("serialized") as s_path: + ... model = AuthorTopicModel( + ... corpus, author2doc=author2doc, id2word=common_dictionary, num_topics=4, + ... serialized=True, serialization_path=s_path + ... ) + ... + ... model.update(corpus, author2doc) # update the author-topic model with additional documents + >>> + >>> # construct vectors for authors + >>> author_vecs = [model.get_author_topics(author) for author in model.id2author.values()] """ # TODO: this class inherits LdaModel and overwrites some methods. There is some code @@ -1120,28 +1123,30 @@ def get_author_topics(self, author_name, minimum_probability=None): Example ------- - >>> from gensim.models import AuthorTopicModel - >>> from gensim.corpora import mmcorpus - >>> from gensim.test.utils import common_dictionary, datapath, temporary_file - - >>> author2doc = { - ... 'john': [0, 1, 2, 3, 4, 5, 6], - ... 'jane': [2, 3, 4, 5, 6, 7, 8], - ... 
'jack': [0, 2, 4, 6, 8] - ... } - >>> - >>> corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm')) - >>> - >>> with temporary_file("serialized") as s_path: - ... model = AuthorTopicModel( - ... corpus, author2doc=author2doc, id2word=common_dictionary, num_topics=4, - ... serialized=True, serialization_path=s_path - ... ) - ... - ... model.update(corpus, author2doc) # update the author-topic model with additional documents - >>> - >>> # construct vectors for authors - >>> author_vecs = [model.get_author_topics(author) for author in model.id2author.values()] + .. sourcecode:: pycon + + >>> from gensim.models import AuthorTopicModel + >>> from gensim.corpora import mmcorpus + >>> from gensim.test.utils import common_dictionary, datapath, temporary_file + + >>> author2doc = { + ... 'john': [0, 1, 2, 3, 4, 5, 6], + ... 'jane': [2, 3, 4, 5, 6, 7, 8], + ... 'jack': [0, 2, 4, 6, 8] + ... } + >>> + >>> corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm')) + >>> + >>> with temporary_file("serialized") as s_path: + ... model = AuthorTopicModel( + ... corpus, author2doc=author2doc, id2word=common_dictionary, num_topics=4, + ... serialized=True, serialization_path=s_path + ... ) + ... + ... model.update(corpus, author2doc) # update the author-topic model with additional documents + >>> + >>> # construct vectors for authors + >>> author_vecs = [model.get_author_topics(author) for author in model.id2author.values()] """ author_id = self.author2id[author_name] diff --git a/gensim/models/callbacks.py b/gensim/models/callbacks.py index 9935fdb3b4..c54efd88d8 100644 --- a/gensim/models/callbacks.py +++ b/gensim/models/callbacks.py @@ -22,63 +22,67 @@ To implement a Callback, inherit from this base class and override one or more of its methods. Create a callback to save the training model after each epoch - ->>> from gensim.test.utils import common_corpus, common_texts, get_tmpfile ->>> from gensim.models.callbacks import CallbackAny2Vec ->>> from gensim.models import Word2Vec ->>> ->>> class EpochSaver(CallbackAny2Vec): -... '''Callback to save model after each epoch.''' -... -... def __init__(self, path_prefix): -... self.path_prefix = path_prefix -... self.epoch = 0 -... -... def on_epoch_end(self, model): -... output_path = get_tmpfile('{}_epoch{}.model'.format(self.path_prefix, self.epoch)) -... model.save(output_path) -... self.epoch += 1 -... - -Create a callback to print progress information to the console - ->>> class EpochLogger(CallbackAny2Vec): -... '''Callback to log information about training''' -... -... def __init__(self): -... self.epoch = 0 -... -... def on_epoch_begin(self, model): -... print("Epoch #{} start".format(self.epoch)) -... -... def on_epoch_end(self, model): -... print("Epoch #{} end".format(self.epoch)) -... self.epoch += 1 -... ->>> ->>> epoch_logger = EpochLogger() ->>> ->>> w2v_model = Word2Vec(common_texts, iter=5, size=10, min_count=0, seed=42, callbacks=[epoch_logger]) -Epoch #0 start -Epoch #0 end -Epoch #1 start -Epoch #1 end -Epoch #2 start -Epoch #2 end -Epoch #3 start -Epoch #3 end -Epoch #4 start -Epoch #4 end - -Create and bind a callback to a topic model. This callback will log the perplexity metric in real time - ->>> from gensim.models.callbacks import PerplexityMetric ->>> from gensim.models.ldamodel import LdaModel ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> ->>> # Log the perplexity score at the end of each epoch. 
->>> perplexity_logger = PerplexityMetric(corpus=common_corpus, logger='shell') ->>> lda = LdaModel(common_corpus, id2word=common_dictionary, num_topics=5, callbacks=[perplexity_logger]) +.. sourcecode:: pycon + + >>> from gensim.test.utils import get_tmpfile + >>> from gensim.models.callbacks import CallbackAny2Vec + >>> + >>> + >>> class EpochSaver(CallbackAny2Vec): + ... '''Callback to save model after each epoch.''' + ... + ... def __init__(self, path_prefix): + ... self.path_prefix = path_prefix + ... self.epoch = 0 + ... + ... def on_epoch_end(self, model): + ... output_path = get_tmpfile('{}_epoch{}.model'.format(self.path_prefix, self.epoch)) + ... model.save(output_path) + ... self.epoch += 1 + ... + +Create a callback to print progress information to the console: + +.. sourcecode:: pycon + + >>> class EpochLogger(CallbackAny2Vec): + ... '''Callback to log information about training''' + ... + ... def __init__(self): + ... self.epoch = 0 + ... + ... def on_epoch_begin(self, model): + ... print("Epoch #{} start".format(self.epoch)) + ... + ... def on_epoch_end(self, model): + ... print("Epoch #{} end".format(self.epoch)) + ... self.epoch += 1 + ... + >>> + >>> epoch_logger = EpochLogger() + >>> w2v_model = Word2Vec(common_texts, iter=5, size=10, min_count=0, seed=42, callbacks=[epoch_logger]) + Epoch #0 start + Epoch #0 end + Epoch #1 start + Epoch #1 end + Epoch #2 start + Epoch #2 end + Epoch #3 start + Epoch #3 end + Epoch #4 start + Epoch #4 end + +Create and bind a callback to a topic model. This callback will log the perplexity metric in real time: + +.. sourcecode:: pycon + + >>> from gensim.models.callbacks import PerplexityMetric + >>> from gensim.models.ldamodel import LdaModel + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> + >>> # Log the perplexity score at the end of each epoch. + >>> perplexity_logger = PerplexityMetric(corpus=common_corpus, logger='shell') + >>> lda = LdaModel(common_corpus, id2word=common_dictionary, num_topics=5, callbacks=[perplexity_logger]) """ diff --git a/gensim/models/coherencemodel.py b/gensim/models/coherencemodel.py index b7e27fc474..fd42f53359 100644 --- a/gensim/models/coherencemodel.py +++ b/gensim/models/coherencemodel.py @@ -94,26 +94,30 @@ class CoherenceModel(interfaces.TransformationABC): One way of using this feature is through providing a trained topic model. A dictionary has to be explicitly provided if the model does not contain a dictionary already - >>> from gensim.test.utils import common_corpus, common_dictionary - >>> from gensim.models.ldamodel import LdaModel - >>> from gensim.models.coherencemodel import CoherenceModel - >>> - >>> model = LdaModel(common_corpus, 5, common_dictionary) - >>> - >>> cm = CoherenceModel(model=model, corpus=common_corpus, coherence='u_mass') - >>> coherence = cm.get_coherence() # get coherence value - - Another way of using this feature is through providing tokenized topics such as - - >>> from gensim.test.utils import common_corpus, common_dictionary - >>> from gensim.models.coherencemodel import CoherenceModel - >>> topics = [ - ... ['human', 'computer', 'system', 'interface'], - ... ['graph', 'minors', 'trees', 'eps'] - ... ] - >>> - >>> cm = CoherenceModel(topics=topics, corpus=common_corpus, dictionary=common_dictionary, coherence='u_mass') - >>> coherence = cm.get_coherence() # get coherence value + .. 
sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models.ldamodel import LdaModel + >>> from gensim.models.coherencemodel import CoherenceModel + >>> + >>> model = LdaModel(common_corpus, 5, common_dictionary) + >>> + >>> cm = CoherenceModel(model=model, corpus=common_corpus, coherence='u_mass') + >>> coherence = cm.get_coherence() # get coherence value + + Another way of using this feature is through providing tokenized topics such as: + + .. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models.coherencemodel import CoherenceModel + >>> topics = [ + ... ['human', 'computer', 'system', 'interface'], + ... ['graph', 'minors', 'trees', 'eps'] + ... ] + >>> + >>> cm = CoherenceModel(topics=topics, corpus=common_corpus, dictionary=common_dictionary, coherence='u_mass') + >>> coherence = cm.get_coherence() # get coherence value """ def __init__(self, model=None, topics=None, texts=None, corpus=None, dictionary=None, @@ -233,14 +237,16 @@ def for_models(cls, models, dictionary, topn=20, **kwargs): Example ------- - >>> from gensim.test.utils import common_corpus, common_dictionary - >>> from gensim.models.ldamodel import LdaModel - >>> from gensim.models.coherencemodel import CoherenceModel - >>> - >>> m1 = LdaModel(common_corpus, 3, common_dictionary) - >>> m2 = LdaModel(common_corpus, 5, common_dictionary) - >>> - >>> cm = CoherenceModel.for_models([m1, m2], common_dictionary, corpus=common_corpus, coherence='u_mass') + .. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models.ldamodel import LdaModel + >>> from gensim.models.coherencemodel import CoherenceModel + >>> + >>> m1 = LdaModel(common_corpus, 3, common_dictionary) + >>> m2 = LdaModel(common_corpus, 5, common_dictionary) + >>> + >>> cm = CoherenceModel.for_models([m1, m2], common_dictionary, corpus=common_corpus, coherence='u_mass') """ topics = [cls.top_topics_as_word_lists(model, dictionary, topn) for model in models] kwargs['dictionary'] = dictionary diff --git a/gensim/models/deprecated/doc2vec.py b/gensim/models/deprecated/doc2vec.py index 4b10224a87..8d8875affe 100644 --- a/gensim/models/deprecated/doc2vec.py +++ b/gensim/models/deprecated/doc2vec.py @@ -21,16 +21,22 @@ Initialize a model with e.g.:: ->>> model = Doc2Vec(documents, size=100, window=8, min_count=5, workers=4) +.. sourcecode:: pycon + + >>> model = Doc2Vec(documents, size=100, window=8, min_count=5, workers=4) Persist a model to disk with:: ->>> model.save(fname) ->>> model = Doc2Vec.load(fname) # you can continue training with the loaded model! +.. sourcecode:: pycon + + >>> model.save(fname) + >>> model = Doc2Vec.load(fname) # you can continue training with the loaded model! If you're finished training a model (=no more updates, only querying), you can do - >>> model.delete_temporary_training_data(keep_doctags_vectors=True, keep_inference=True): +.. sourcecode:: pycon + + >>> model.delete_temporary_training_data(keep_doctags_vectors=True, keep_inference=True): to trim unneeded model memory = use (much) less RAM. @@ -359,11 +365,13 @@ class DocvecsArray(SaveLoad): As the 'docvecs' property of a Doc2Vec model, allows access and comparison of document vectors. 
- >>> docvec = d2v_model.docvecs[99] - >>> docvec = d2v_model.docvecs['SENT_99'] # if string tag used in training - >>> sims = d2v_model.docvecs.most_similar(99) - >>> sims = d2v_model.docvecs.most_similar('SENT_99') - >>> sims = d2v_model.docvecs.most_similar(docvec) + .. sourcecode:: pycon + + >>> docvec = d2v_model.docvecs[99] + >>> docvec = d2v_model.docvecs['SENT_99'] # if string tag used in training + >>> sims = d2v_model.docvecs.most_similar(99) + >>> sims = d2v_model.docvecs.most_similar('SENT_99') + >>> sims = d2v_model.docvecs.most_similar(docvec) If only plain int tags are presented during training, the dict (of string tag -> index) and list (of index -> string tag) stay empty, diff --git a/gensim/models/deprecated/fasttext.py b/gensim/models/deprecated/fasttext.py index 1ba0d9b155..47e7f1a6a8 100644 --- a/gensim/models/deprecated/fasttext.py +++ b/gensim/models/deprecated/fasttext.py @@ -328,13 +328,14 @@ def __init__( -------- Initialize and train a `FastText` model - >>> from gensim.models import FastText - >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> - >>> model = FastText(sentences, min_count=1) - >>> say_vector = model['say'] # get vector for word - >>> of_vector = model['of'] # get vector for out-of-vocab word + .. sourcecode:: pycon + >>> from gensim.models import FastText + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> + >>> model = FastText(sentences, min_count=1) + >>> say_vector = model['say'] # get vector for word + >>> of_vector = model['of'] # get vector for out-of-vocab word """ # fastText specific params @@ -387,15 +388,17 @@ def build_vocab(self, sentences, keep_raw_vocab=False, trim_rule=None, progress_ ------- Train a model and update vocab for online training - >>> from gensim.models import FastText - >>> sentences_1 = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> sentences_2 = [["dude", "say", "wazzup!"]] - >>> - >>> model = FastText(min_count=1) - >>> model.build_vocab(sentences_1) - >>> model.train(sentences_1, total_examples=model.corpus_count, epochs=model.iter) - >>> model.build_vocab(sentences_2, update=True) - >>> model.train(sentences_2, total_examples=model.corpus_count, epochs=model.iter) + .. sourcecode:: pycon + + >>> from gensim.models import FastText + >>> sentences_1 = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> sentences_2 = [["dude", "say", "wazzup!"]] + >>> + >>> model = FastText(min_count=1) + >>> model.build_vocab(sentences_1) + >>> model.train(sentences_1, total_examples=model.corpus_count, epochs=model.iter) + >>> model.build_vocab(sentences_2, update=True) + >>> model.train(sentences_2, total_examples=model.corpus_count, epochs=model.iter) """ if update: @@ -585,12 +588,15 @@ def train(self, sentences, total_examples=None, total_words=None, Examples -------- - >>> from gensim.models import FastText - >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> - >>> model = FastText(min_count=1) - >>> model.build_vocab(sentences) - >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) + + .. 
sourcecode:: pycon + + >>> from gensim.models import FastText + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> + >>> model = FastText(min_count=1) + >>> model.build_vocab(sentences) + >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) """ self.neg_labels = [] @@ -624,11 +630,13 @@ def __getitem__(self, word): Example ------- - >>> from gensim.models import FastText - >>> from gensim.test.utils import datapath - >>> - >>> trained_model = FastText.load_fasttext_format(datapath('lee_fasttext')) - >>> meow_vector = trained_model['hello'] # get vector for word + .. sourcecode:: pycon + + >>> from gensim.models import FastText + >>> from gensim.test.utils import datapath + >>> + >>> trained_model = FastText.load_fasttext_format(datapath('lee_fasttext')) + >>> meow_vector = trained_model['hello'] # get vector for word """ return self.word_vec(word) @@ -666,11 +674,13 @@ def word_vec(self, word, use_norm=False): Example ------- - >>> from gensim.models import FastText - >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> - >>> model = FastText(sentences, min_count=1) - >>> meow_vector = model.word_vec('meow') # get vector for word + .. sourcecode:: pycon + + >>> from gensim.models import FastText + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> + >>> model = FastText(sentences, min_count=1) + >>> meow_vector = model.word_vec('meow') # get vector for word """ return FastTextKeyedVectors.word_vec(self.wv, word, use_norm=use_norm) diff --git a/gensim/models/deprecated/fasttext_wrapper.py b/gensim/models/deprecated/fasttext_wrapper.py index 930f2c1308..727db0e1e0 100644 --- a/gensim/models/deprecated/fasttext_wrapper.py +++ b/gensim/models/deprecated/fasttext_wrapper.py @@ -23,10 +23,11 @@ `Word2Vec` for that. Example: +.. sourcecode:: pycon ->>> from gensim.models.wrappers import FastText ->>> model = FastText.train('/Users/kofola/fastText/fasttext', corpus_file='text8') ->>> print model['forests'] # prints vector for given out-of-vocabulary word + >>> from gensim.models.wrappers import FastText + >>> model = FastText.train('/Users/kofola/fastText/fasttext', corpus_file='text8') + >>> print model['forests'] # prints vector for given out-of-vocabulary word .. [1] https://github.com/facebookresearch/fastText#enriching-word-vectors-with-subword-information @@ -89,10 +90,12 @@ def word_vec(self, word, use_norm=False): The word can be out-of-vocabulary as long as ngrams for the word are present. For words with all ngrams absent, a KeyError is raised. - Example:: + Example: - >>> trained_model['office'] - array([ -1.40128313e-02, ...]) + .. sourcecode:: pycon + + >>> trained_model['office'] + array([ -1.40128313e-02, ...]) """ if word in self.vocab: diff --git a/gensim/models/deprecated/keyedvectors.py b/gensim/models/deprecated/keyedvectors.py index d86b0f3837..4dbb3cd70c 100644 --- a/gensim/models/deprecated/keyedvectors.py +++ b/gensim/models/deprecated/keyedvectors.py @@ -16,46 +16,58 @@ The word vectors are considered read-only in this class. -Initialize the vectors by training e.g. Word2Vec:: +Initialize the vectors by training e.g. Word2Vec: ->>> model = Word2Vec(sentences, size=100, window=5, min_count=5, workers=4) ->>> word_vectors = model.wv +.. 
sourcecode:: pycon -Persist the word vectors to disk with:: + >>> model = Word2Vec(sentences, size=100, window=5, min_count=5, workers=4) + >>> word_vectors = model.wv ->>> word_vectors.save(fname) ->>> word_vectors = KeyedVectors.load(fname) +Persist the word vectors to disk with: + +.. sourcecode:: pycon + + >>> word_vectors.save(fname) + >>> word_vectors = KeyedVectors.load(fname) The vectors can also be instantiated from an existing file on disk -in the original Google's word2vec C format as a KeyedVectors instance:: +in the original Google's word2vec C format as a KeyedVectors instance: + +.. sourcecode:: pycon - >>> from gensim.models.keyedvectors import KeyedVectors - >>> word_vectors = KeyedVectors.load_word2vec_format('/tmp/vectors.txt', binary=False) # C text format - >>> word_vectors = KeyedVectors.load_word2vec_format('/tmp/vectors.bin', binary=True) # C binary format + >>> from gensim.models.keyedvectors import KeyedVectors + >>> word_vectors = KeyedVectors.load_word2vec_format('/tmp/vectors.txt', binary=False) # C text format + >>> word_vectors = KeyedVectors.load_word2vec_format('/tmp/vectors.bin', binary=True) # C binary format You can perform various syntactic/semantic NLP word tasks with the vectors. Some of them -are already built-in:: +are already built-in: + +.. sourcecode:: pycon + + >>> word_vectors.most_similar(positive=['woman', 'king'], negative=['man']) + [('queen', 0.50882536), ...] - >>> word_vectors.most_similar(positive=['woman', 'king'], negative=['man']) - [('queen', 0.50882536), ...] + >>> word_vectors.most_similar_cosmul(positive=['woman', 'king'], negative=['man']) + [('queen', 0.71382287), ...] - >>> word_vectors.most_similar_cosmul(positive=['woman', 'king'], negative=['man']) - [('queen', 0.71382287), ...] + >>> word_vectors.doesnt_match("breakfast cereal dinner lunch".split()) + 'cereal' - >>> word_vectors.doesnt_match("breakfast cereal dinner lunch".split()) - 'cereal' + >>> word_vectors.similarity('woman', 'man') + 0.73723527 - >>> word_vectors.similarity('woman', 'man') - 0.73723527 +Correlation with human opinion on word similarity: -Correlation with human opinion on word similarity:: +.. sourcecode:: pycon - >>> word_vectors.evaluate_word_pairs(os.path.join(module_path, 'test_data','wordsim353.tsv')) - 0.51, 0.62, 0.13 + >>> word_vectors.evaluate_word_pairs(os.path.join(module_path, 'test_data','wordsim353.tsv')) + 0.51, 0.62, 0.13 -And on analogies:: +And on analogies: - >>> word_vectors.accuracy(os.path.join(module_path, 'test_data', 'questions-words.txt')) +.. sourcecode:: pycon + + >>> word_vectors.accuracy(os.path.join(module_path, 'test_data', 'questions-words.txt')) and so on. @@ -292,10 +304,12 @@ def word_vec(self, word): Accept a single word as input. Returns the word's representations in vector space, as a 1D numpy array. - Example:: + Example: - >>> trained_model.word_vec('office') - array([ -1.40128313e-02, ...]) + .. sourcecode:: pycon + + >>> trained_model.word_vec('office') + array([ -1.40128313e-02, ...]) """ if word in self.vocab: @@ -316,15 +330,17 @@ def __getitem__(self, words): 2d numpy array: #words x #vector_size. Matrix rows are in the same order as in input. - Example:: + Example: + + .. sourcecode:: pycon - >>> trained_model['office'] - array([ -1.40128313e-02, ...]) + >>> trained_model['office'] + array([ -1.40128313e-02, ...]) - >>> trained_model[['office', 'products']] - array([ -1.40128313e-02, ...] - [ -1.70425311e-03, ...] - ...) + >>> trained_model[['office', 'products']] + array([ -1.40128313e-02, ...] 
+ [ -1.70425311e-03, ...] + ...) """ if isinstance(words, string_types): @@ -349,13 +365,15 @@ def most_similar_to_given(self, w1, word_list): Raises: KeyError: If w1 or any word in word_list is not in the vocabulary - Example:: + Example: + + .. sourcecode:: pycon - >>> trained_model.most_similar_to_given('music', ['water', 'sound', 'backpack', 'mouse']) - 'sound' + >>> trained_model.most_similar_to_given('music', ['water', 'sound', 'backpack', 'mouse']) + 'sound' - >>> trained_model.most_similar_to_given('snake', ['food', 'pencil', 'animal', 'phone']) - 'animal' + >>> trained_model.most_similar_to_given('snake', ['food', 'pencil', 'animal', 'phone']) + 'animal' """ return word_list[argmax([self.similarity(w1, word) for word in word_list])] @@ -379,8 +397,10 @@ def words_closer_than(self, w1, w2): Examples -------- - >>> model.words_closer_than('carnivore.n.01', 'mammal.n.01') - ['dog.n.01', 'canine.n.02'] + .. sourcecode:: pycon + + >>> model.words_closer_than('carnivore.n.01', 'mammal.n.01') + ['dog.n.01', 'canine.n.02'] """ all_distances = self.distances(w1) @@ -408,8 +428,10 @@ def rank(self, w1, w2): Examples -------- - >>> model.rank('mammal.n.01', 'carnivore.n.01') - 3 + .. sourcecode:: pycon + + >>> model.rank('mammal.n.01', 'carnivore.n.01') + 3 """ return len(self.words_closer_than(w1, w2)) + 1 @@ -441,10 +463,12 @@ def word_vec(self, word, use_norm=False): If `use_norm` is True, returns the normalized word vector. - Example:: + Example: + + .. sourcecode:: pycon - >>> trained_model['office'] - array([ -1.40128313e-02, ...]) + >>> trained_model['office'] + array([ -1.40128313e-02, ...]) """ if word in self.vocab: @@ -475,10 +499,12 @@ def most_similar(self, positive=None, negative=None, topn=10, restrict_vocab=Non only check the first 10000 word vectors in the vocabulary order. (This may be meaningful if you've sorted the vocabulary by descending frequency.) - Example:: + Example: + + .. sourcecode:: pycon - >>> trained_model.most_similar(positive=['woman', 'king'], negative=['man']) - [('queen', 0.50882536), ...] + >>> trained_model.most_similar(positive=['woman', 'king'], negative=['man']) + [('queen', 0.50882536), ...] """ if positive is None: @@ -538,10 +564,12 @@ def similar_by_word(self, word, topn=10, restrict_vocab=None): only check the first 10000 word vectors in the vocabulary order. (This may be meaningful if you've sorted the vocabulary by descending frequency.) - Example:: + Example: - >>> trained_model.similar_by_word('graph') - [('user', 0.9999163150787354), ...] + .. sourcecode:: pycon + + >>> trained_model.similar_by_word('graph') + [('user', 0.9999163150787354), ...] """ return self.most_similar(positive=[word], topn=topn, restrict_vocab=restrict_vocab) @@ -580,6 +608,9 @@ def wmdistance(self, document1, document2): This method only works if `pyemd` is installed (can be installed via pip, but requires a C compiler). Example: + + .. sourcecode:: pycon + >>> # Train word2vec model. >>> model = Word2Vec(sentences) @@ -671,10 +702,12 @@ def most_similar_cosmul(self, positive=None, negative=None, topn=10): respectively – a potentially sensible but untested extension of the method. (With a single positive example, rankings will be the same as in the default most_similar.) - Example:: + Example: - >>> trained_model.most_similar_cosmul(positive=['baghdad', 'england'], negative=['london']) - [(u'iraq', 0.8488819003105164), ...] + .. 
sourcecode:: pycon + + >>> trained_model.most_similar_cosmul(positive=['baghdad', 'england'], negative=['london']) + [(u'iraq', 0.8488819003105164), ...] .. [4] Omer Levy and Yoav Goldberg. Linguistic Regularities in Sparse and Explicit Word Representations, 2014. @@ -810,13 +843,15 @@ def distance(self, w1, w2): """ Compute cosine distance between two words. - Example:: + Example: - >>> trained_model.distance('woman', 'man') - 0.34 + .. sourcecode:: pycon - >>> trained_model.distance('woman', 'woman') - 0.0 + >>> trained_model.distance('woman', 'man') + 0.34 + + >>> trained_model.distance('woman', 'woman') + 0.0 """ return 1 - self.similarity(w1, w2) @@ -825,13 +860,15 @@ def similarity(self, w1, w2): """ Compute cosine similarity between two words. - Example:: + Example: + + .. sourcecode:: pycon - >>> trained_model.similarity('woman', 'man') - 0.73723527 + >>> trained_model.similarity('woman', 'man') + 0.73723527 - >>> trained_model.similarity('woman', 'woman') - 1.0 + >>> trained_model.similarity('woman', 'woman') + 1.0 """ return dot(matutils.unitvec(self[w1]), matutils.unitvec(self[w2])) @@ -840,16 +877,18 @@ def n_similarity(self, ws1, ws2): """ Compute cosine similarity between two sets of words. - Example:: + Example: + + .. sourcecode:: pycon - >>> trained_model.n_similarity(['sushi', 'shop'], ['japanese', 'restaurant']) - 0.61540466561049689 + >>> trained_model.n_similarity(['sushi', 'shop'], ['japanese', 'restaurant']) + 0.61540466561049689 - >>> trained_model.n_similarity(['restaurant', 'japanese'], ['japanese', 'restaurant']) - 1.0000000000000004 + >>> trained_model.n_similarity(['restaurant', 'japanese'], ['japanese', 'restaurant']) + 1.0000000000000004 - >>> trained_model.n_similarity(['sushi'], ['restaurant']) == trained_model.similarity('sushi', 'restaurant') - True + >>> trained_model.n_similarity(['sushi'], ['restaurant']) == trained_model.similarity('sushi', 'restaurant') + True """ if not(len(ws1) and len(ws2)): diff --git a/gensim/models/deprecated/word2vec.py b/gensim/models/deprecated/word2vec.py index 8a9dcd960c..885d77ba66 100644 --- a/gensim/models/deprecated/word2vec.py +++ b/gensim/models/deprecated/word2vec.py @@ -27,26 +27,34 @@ **Make sure you have a C compiler before installing gensim, to use optimized (compiled) word2vec training** (70x speedup compared to plain NumPy implementation [3]_). -Initialize a model with e.g.:: +Initialize a model with e.g.: + +.. sourcecode:: pycon >>> model = Word2Vec(sentences, size=100, window=5, min_count=5, workers=4) -Persist a model to disk with:: +Persist a model to disk with: + +.. sourcecode:: pycon >>> model.save(fname) >>> model = Word2Vec.load(fname) # you can continue training with the loaded model! The word vectors are stored in a KeyedVectors instance in model.wv. -This separates the read-only word vector lookup operations in KeyedVectors from the training code in Word2Vec:: +This separates the read-only word vector lookup operations in KeyedVectors from the training code in Word2Vec: + +.. 
sourcecode:: pycon - >>> model.wv['computer'] # numpy vector of a word - array([-0.00449447, -0.00310097, 0.02421786, ...], dtype=float32) + >>> model.wv['computer'] # numpy vector of a word + array([-0.00449447, -0.00310097, 0.02421786, ...], dtype=float32) The word vectors can also be instantiated from an existing file on disk in the word2vec C format as a KeyedVectors instance:: NOTE: It is impossible to continue training the vectors loaded from the C format because hidden weights, - vocabulary frequency and the binary tree is missing:: + vocabulary frequency and the binary tree is missing: + + .. sourcecode:: pycon >>> from gensim.models.keyedvectors import KeyedVectors >>> word_vectors = KeyedVectors.load_word2vec_format('/tmp/vectors.txt', binary=False) # C text format @@ -54,42 +62,51 @@ You can perform various NLP word tasks with the model. Some of them -are already built-in:: +are already built-in: + +.. sourcecode:: pycon + + >>> model.wv.most_similar(positive=['woman', 'king'], negative=['man']) + [('queen', 0.50882536), ...] - >>> model.wv.most_similar(positive=['woman', 'king'], negative=['man']) - [('queen', 0.50882536), ...] + >>> model.wv.most_similar_cosmul(positive=['woman', 'king'], negative=['man']) + [('queen', 0.71382287), ...] - >>> model.wv.most_similar_cosmul(positive=['woman', 'king'], negative=['man']) - [('queen', 0.71382287), ...] + >>> model.wv.doesnt_match("breakfast cereal dinner lunch".split()) + 'cereal' + >>> model.wv.similarity('woman', 'man') + 0.73723527 - >>> model.wv.doesnt_match("breakfast cereal dinner lunch".split()) - 'cereal' +Probability of a text under the model: - >>> model.wv.similarity('woman', 'man') - 0.73723527 +.. sourcecode:: pycon -Probability of a text under the model:: + >>> model.score(["The fox jumped over a lazy dog".split()]) + 0.2158356 - >>> model.score(["The fox jumped over a lazy dog".split()]) - 0.2158356 +Correlation with human opinion on word similarity: -Correlation with human opinion on word similarity:: +.. sourcecode:: pycon - >>> model.wv.evaluate_word_pairs(os.path.join(module_path, 'test_data','wordsim353.tsv')) - 0.51, 0.62, 0.13 + >>> model.wv.evaluate_word_pairs(os.path.join(module_path, 'test_data','wordsim353.tsv')) + 0.51, 0.62, 0.13 -And on analogies:: +And on analogies: - >>> model.wv.accuracy(os.path.join(module_path, 'test_data', 'questions-words.txt')) +.. sourcecode:: pycon + + >>> model.wv.accuracy(os.path.join(module_path, 'test_data', 'questions-words.txt')) and so on. If you're finished training a model (i.e. no more updates, only querying), then switch to the :mod:`gensim.models.KeyedVectors` instance in wv - >>> word_vectors = model.wv - >>> del model +.. sourcecode:: pycon + + >>> word_vectors = model.wv + >>> del model to trim unneeded model memory = use much less RAM. @@ -97,6 +114,8 @@ detect phrases longer than one word. Using phrases, you can learn a word2vec model where "words" are actually multiword expressions, such as `new_york_times` or `financial_crisis`: +.. sourcecode:: pycon + >>> bigram_transformer = gensim.models.Phrases(sentences) >>> model = Word2Vec(bigram_transformer[sentences], size=100, ...) @@ -719,9 +738,13 @@ def build_vocab_from_freq(self, word_freq, keep_raw_vocab=False, corpus_count=No Examples -------- - >>> from gensim.models.word2vec import Word2Vec - >>> model= Word2Vec() - >>> model.build_vocab_from_freq({"Word1": 15, "Word2": 20}) + + .. 
sourcecode:: pycon + + >>> from gensim.models.word2vec import Word2Vec + >>> model = Word2Vec() + >>> model.build_vocab_from_freq({"Word1": 15, "Word2": 20}) + """ logger.info("Processing provided word frequencies") # Instead of scanning text, this will assign provided word frequencies dictionary(word_freq) diff --git a/gensim/models/doc2vec.py b/gensim/models/doc2vec.py index 135aa4ac5b..6a6b3d3ae9 100644 --- a/gensim/models/doc2vec.py +++ b/gensim/models/doc2vec.py @@ -28,28 +28,36 @@ Initialize & train a model: ->>> from gensim.test.utils import common_texts ->>> from gensim.models.doc2vec import Doc2Vec, TaggedDocument ->>> ->>> documents = [TaggedDocument(doc, [i]) for i, doc in enumerate(common_texts)] ->>> model = Doc2Vec(documents, vector_size=5, window=2, min_count=1, workers=4) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.models.doc2vec import Doc2Vec, TaggedDocument + >>> + >>> documents = [TaggedDocument(doc, [i]) for i, doc in enumerate(common_texts)] + >>> model = Doc2Vec(documents, vector_size=5, window=2, min_count=1, workers=4) Persist a model to disk: ->>> from gensim.test.utils import get_tmpfile ->>> ->>> fname = get_tmpfile("my_doc2vec_model") ->>> ->>> model.save(fname) ->>> model = Doc2Vec.load(fname) # you can continue training with the loaded model! +.. sourcecode:: pycon + + >>> from gensim.test.utils import get_tmpfile + >>> + >>> fname = get_tmpfile("my_doc2vec_model") + >>> + >>> model.save(fname) + >>> model = Doc2Vec.load(fname) # you can continue training with the loaded model! If you're finished training a model (=no more updates, only querying, reduce memory usage), you can do: ->>> model.delete_temporary_training_data(keep_doctags_vectors=True, keep_inference=True) +.. sourcecode:: pycon + + >>> model.delete_temporary_training_data(keep_doctags_vectors=True, keep_inference=True) Infer vector for a new document: ->>> vector = model.infer_vector(["system", "response"]) +.. sourcecode:: pycon + + >>> vector = model.infer_vector(["system", "response"]) """ @@ -1511,11 +1519,13 @@ def __init__(self, source): Examples -------- - >>> from gensim.test.utils import datapath - >>> from gensim.models.doc2vec import TaggedLineDocument - >>> - >>> for document in TaggedLineDocument(datapath("head500.noblanks.cor")): - ... pass + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.models.doc2vec import TaggedLineDocument + >>> + >>> for document in TaggedLineDocument(datapath("head500.noblanks.cor")): + ... pass """ self.source = source diff --git a/gensim/models/fasttext.py b/gensim/models/fasttext.py index cbdcba3d7a..f7e9d65556 100644 --- a/gensim/models/fasttext.py +++ b/gensim/models/fasttext.py @@ -24,54 +24,65 @@ -------------- Initialize and train a model: +.. sourcecode:: pycon ->>> from gensim.test.utils import common_texts ->>> from gensim.models import FastText ->>> ->>> model = FastText(common_texts, size=4, window=3, min_count=1, iter=10) + >>> from gensim.test.utils import common_texts + >>> from gensim.models import FastText + >>> + >>> model = FastText(common_texts, size=4, window=3, min_count=1, iter=10) Persist a model to disk with: ->>> from gensim.test.utils import get_tmpfile ->>> ->>> fname = get_tmpfile("fasttext.model") ->>> ->>> model.save(fname) ->>> model = FastText.load(fname) # you can continue training with the loaded model! +.. 
sourcecode:: pycon + + >>> from gensim.test.utils import get_tmpfile + >>> + >>> fname = get_tmpfile("fasttext.model") + >>> + >>> model.save(fname) + >>> model = FastText.load(fname) # you can continue training with the loaded model! Retrieve word-vector for vocab and out-of-vocab word: ->>> existent_word = "computer" ->>> existent_word in model.wv.vocab -True ->>> computer_vec = model.wv[existent_word] # numpy vector of a word ->>> ->>> oov_word = "graph-out-of-vocab" ->>> oov_word in model.wv.vocab -False ->>> oov_vec = model.wv[oov_word] # numpy vector for OOV word +.. sourcecode:: pycon + + >>> existent_word = "computer" + >>> existent_word in model.wv.vocab + True + >>> computer_vec = model.wv[existent_word] # numpy vector of a word + >>> + >>> oov_word = "graph-out-of-vocab" + >>> oov_word in model.wv.vocab + False + >>> oov_vec = model.wv[oov_word] # numpy vector for OOV word You can perform various NLP word tasks with the model, some of them are already built-in: ->>> similarities = model.wv.most_similar(positive=['computer', 'human'], negative=['interface']) ->>> most_similar = similarities[0] ->>> ->>> similarities = model.wv.most_similar_cosmul(positive=['computer', 'human'], negative=['interface']) ->>> most_similar = similarities[0] ->>> ->>> not_matching = model.wv.doesnt_match("human computer interface tree".split()) ->>> ->>> sim_score = model.wv.similarity('computer', 'human') +.. sourcecode:: pycon + + >>> similarities = model.wv.most_similar(positive=['computer', 'human'], negative=['interface']) + >>> most_similar = similarities[0] + >>> + >>> similarities = model.wv.most_similar_cosmul(positive=['computer', 'human'], negative=['interface']) + >>> most_similar = similarities[0] + >>> + >>> not_matching = model.wv.doesnt_match("human computer interface tree".split()) + >>> + >>> sim_score = model.wv.similarity('computer', 'human') Correlation with human opinion on word similarity: ->>> from gensim.test.utils import datapath ->>> ->>> similarities = model.wv.evaluate_word_pairs(datapath('wordsim353.tsv')) +.. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> similarities = model.wv.evaluate_word_pairs(datapath('wordsim353.tsv')) And on word analogies: ->>> analogies_result = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) +.. sourcecode:: pycon + + >>> analogies_result = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) """ @@ -357,14 +368,16 @@ def __init__(self, sentences=None, corpus_file=None, sg=0, hs=0, size=100, alpha Examples -------- - Initialize and train a `FastText` model:: + Initialize and train a `FastText` model: + + .. 
sourcecode:: pycon - >>> from gensim.models import FastText - >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> - >>> model = FastText(sentences, min_count=1) - >>> say_vector = model.wv['say'] # get vector for word - >>> of_vector = model.wv['of'] # get vector for out-of-vocab word + >>> from gensim.models import FastText + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> + >>> model = FastText(sentences, min_count=1) + >>> say_vector = model.wv['say'] # get vector for word + >>> of_vector = model.wv['of'] # get vector for out-of-vocab word """ self.load = call_on_class_only @@ -479,18 +492,20 @@ def build_vocab(self, sentences=None, corpus_file=None, update=False, progress_p Examples -------- - Train a model and update vocab for online training - - >>> from gensim.models import FastText - >>> sentences_1 = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> sentences_2 = [["dude", "say", "wazzup!"]] - >>> - >>> model = FastText(min_count=1) - >>> model.build_vocab(sentences_1) - >>> model.train(sentences_1, total_examples=model.corpus_count, epochs=model.epochs) - >>> - >>> model.build_vocab(sentences_2, update=True) - >>> model.train(sentences_2, total_examples=model.corpus_count, epochs=model.epochs) + Train a model and update vocab for online training: + + .. sourcecode:: pycon + + >>> from gensim.models import FastText + >>> sentences_1 = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> sentences_2 = [["dude", "say", "wazzup!"]] + >>> + >>> model = FastText(min_count=1) + >>> model.build_vocab(sentences_1) + >>> model.train(sentences_1, total_examples=model.corpus_count, epochs=model.epochs) + >>> + >>> model.build_vocab(sentences_2, update=True) + >>> model.train(sentences_2, total_examples=model.corpus_count, epochs=model.epochs) """ if update: @@ -652,12 +667,14 @@ def train(self, sentences=None, corpus_file=None, total_examples=None, total_wor Examples -------- - >>> from gensim.models import FastText - >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> - >>> model = FastText(min_count=1) - >>> model.build_vocab(sentences) - >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) + .. sourcecode:: pycon + + >>> from gensim.models import FastText + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> + >>> model = FastText(min_count=1) + >>> model.build_vocab(sentences) + >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.epochs) """ super(FastText, self).train( diff --git a/gensim/models/hdpmodel.py b/gensim/models/hdpmodel.py index 6d0bfbce56..3ff2a508d2 100755 --- a/gensim/models/hdpmodel.py +++ b/gensim/models/hdpmodel.py @@ -22,23 +22,31 @@ Train :class:`~gensim.models.hdpmodel.HdpModel` ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> from gensim.models import HdpModel ->>> ->>> hdp = HdpModel(common_corpus, common_dictionary) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models import HdpModel + >>> + >>> hdp = HdpModel(common_corpus, common_dictionary) You can then infer topic distributions on new, unseen documents, with ->>> unseen_document = [(1, 3.), (2, 4)] ->>> doc_hdp = hdp[unseen_document] +.. sourcecode:: pycon + + >>> unseen_document = [(1, 3.), (2, 4)] + >>> doc_hdp = hdp[unseen_document] To print 20 topics with top 10 most probable words. ->>> topic_info = hdp.print_topics(num_topics=20, num_words=10) +.. 
sourcecode:: pycon + + >>> topic_info = hdp.print_topics(num_topics=20, num_words=10) The model can be updated (trained) with new documents via ->>> hdp.update([[(1, 2)], [(1, 1), (4, 5)]]) +.. sourcecode:: pycon + + >>> hdp.update([[(1, 2)], [(1, 1), (4, 5)]]) """ from __future__ import with_statement diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index 72a33c5bba..442e9ca07a 100644 --- a/gensim/models/keyedvectors.py +++ b/gensim/models/keyedvectors.py @@ -57,90 +57,100 @@ Train a full model, then access its `model.wv` property, which holds the standalone keyed vectors. For example, using the Word2Vec algorithm to train the vectors ->>> from gensim.test.utils import common_texts ->>> from gensim.models import Word2Vec ->>> ->>> model = Word2Vec(common_texts, size=100, window=5, min_count=1, workers=4) ->>> word_vectors = model.wv +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.models import Word2Vec + >>> + >>> model = Word2Vec(common_texts, size=100, window=5, min_count=1, workers=4) + >>> word_vectors = model.wv Persist the word vectors to disk with +.. sourcecode:: pycon ->>> from gensim.test.utils import get_tmpfile ->>> from gensim.models import KeyedVectors ->>> ->>> fname = get_tmpfile("vectors.kv") ->>> word_vectors.save(fname) ->>> word_vectors = KeyedVectors.load(fname, mmap='r') + >>> from gensim.test.utils import get_tmpfile + >>> from gensim.models import KeyedVectors + >>> + >>> fname = get_tmpfile("vectors.kv") + >>> word_vectors.save(fname) + >>> word_vectors = KeyedVectors.load(fname, mmap='r') The vectors can also be instantiated from an existing file on disk in the original Google's word2vec C format as a KeyedVectors instance ->>> from gensim.test.utils import datapath ->>> ->>> wv_from_text = KeyedVectors.load_word2vec_format(datapath('word2vec_pre_kv_c'), binary=False) # C text format ->>> wv_from_bin = KeyedVectors.load_word2vec_format(datapath("euclidean_vectors.bin"), binary=True) # C binary format +.. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> wv_from_text = KeyedVectors.load_word2vec_format(datapath('word2vec_pre_kv_c'), binary=False) # C text format + >>> wv_from_bin = KeyedVectors.load_word2vec_format(datapath("euclidean_vectors.bin"), binary=True) # C bin format What can I do with word vectors? ================================ You can perform various syntactic/semantic NLP word tasks with the trained vectors. 
Some of them are already built-in - ->>> import gensim.downloader as api ->>> ->>> word_vectors = api.load("glove-wiki-gigaword-100") # load pre-trained word-vectors from gensim-data ->>> ->>> result = word_vectors.most_similar(positive=['woman', 'king'], negative=['man']) ->>> print("{}: {:.4f}".format(*result[0])) -queen: 0.7699 ->>> ->>> result = word_vectors.most_similar_cosmul(positive=['woman', 'king'], negative=['man']) ->>> print("{}: {:.4f}".format(*result[0])) -queen: 0.8965 ->>> ->>> print(word_vectors.doesnt_match("breakfast cereal dinner lunch".split())) -cereal ->>> ->>> similarity = word_vectors.similarity('woman', 'man') ->>> similarity > 0.8 -True ->>> ->>> result = word_vectors.similar_by_word("cat") ->>> print("{}: {:.4f}".format(*result[0])) -dog: 0.8798 ->>> ->>> sentence_obama = 'Obama speaks to the media in Illinois'.lower().split() ->>> sentence_president = 'The president greets the press in Chicago'.lower().split() ->>> ->>> similarity = word_vectors.wmdistance(sentence_obama, sentence_president) ->>> print("{:.4f}".format(similarity)) -3.4893 ->>> ->>> distance = word_vectors.distance("media", "media") ->>> print("{:.1f}".format(distance)) -0.0 ->>> ->>> sim = word_vectors.n_similarity(['sushi', 'shop'], ['japanese', 'restaurant']) ->>> print("{:.4f}".format(sim)) -0.7067 ->>> ->>> vector = word_vectors['computer'] # numpy vector of a word ->>> vector.shape -(100,) ->>> ->>> vector = word_vectors.wv.word_vec('office', use_norm=True) ->>> vector.shape -(100,) +.. sourcecode:: pycon + + >>> import gensim.downloader as api + >>> + >>> word_vectors = api.load("glove-wiki-gigaword-100") # load pre-trained word-vectors from gensim-data + >>> + >>> result = word_vectors.most_similar(positive=['woman', 'king'], negative=['man']) + >>> print("{}: {:.4f}".format(*result[0])) + queen: 0.7699 + >>> + >>> result = word_vectors.most_similar_cosmul(positive=['woman', 'king'], negative=['man']) + >>> print("{}: {:.4f}".format(*result[0])) + queen: 0.8965 + >>> + >>> print(word_vectors.doesnt_match("breakfast cereal dinner lunch".split())) + cereal + >>> + >>> similarity = word_vectors.similarity('woman', 'man') + >>> similarity > 0.8 + True + >>> + >>> result = word_vectors.similar_by_word("cat") + >>> print("{}: {:.4f}".format(*result[0])) + dog: 0.8798 + >>> + >>> sentence_obama = 'Obama speaks to the media in Illinois'.lower().split() + >>> sentence_president = 'The president greets the press in Chicago'.lower().split() + >>> + >>> similarity = word_vectors.wmdistance(sentence_obama, sentence_president) + >>> print("{:.4f}".format(similarity)) + 3.4893 + >>> + >>> distance = word_vectors.distance("media", "media") + >>> print("{:.1f}".format(distance)) + 0.0 + >>> + >>> sim = word_vectors.n_similarity(['sushi', 'shop'], ['japanese', 'restaurant']) + >>> print("{:.4f}".format(sim)) + 0.7067 + >>> + >>> vector = word_vectors['computer'] # numpy vector of a word + >>> vector.shape + (100,) + >>> + >>> vector = word_vectors.wv.word_vec('office', use_norm=True) + >>> vector.shape + (100,) Correlation with human opinion on word similarity ->>> from gensim.test.utils import datapath ->>> ->>> similarities = model.wv.evaluate_word_pairs(datapath('wordsim353.tsv')) +.. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> similarities = model.wv.evaluate_word_pairs(datapath('wordsim353.tsv')) And on word analogies ->>> analogy_scores = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) +.. 
sourcecode:: pycon + + >>> analogy_scores = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) and so on. diff --git a/gensim/models/lda_dispatcher.py b/gensim/models/lda_dispatcher.py index 3e83bf7b3f..8ca05ba36d 100755 --- a/gensim/models/lda_dispatcher.py +++ b/gensim/models/lda_dispatcher.py @@ -38,9 +38,11 @@ python -m gensim.models.lda_dispatcher & -#. Run :class:`~gensim.models.ldamodel.LdaModel` in distributed mode :: +#. Run :class:`~gensim.models.ldamodel.LdaModel` in distributed mode : - >>> from gensim.test.utils import common_corpus,common_dictionary +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary >>> from gensim.models import LdaModel >>> >>> model = LdaModel(common_corpus, id2word=common_dictionary, distributed=True) diff --git a/gensim/models/lda_worker.py b/gensim/models/lda_worker.py index 83aa7ec462..cac24c2698 100755 --- a/gensim/models/lda_worker.py +++ b/gensim/models/lda_worker.py @@ -35,9 +35,11 @@ python -m gensim.models.lda_dispatcher & -#. Run :class:`~gensim.models.ldamodel.LdaModel` in distributed mode :: +#. Run :class:`~gensim.models.ldamodel.LdaModel` in distributed mode : - >>> from gensim.test.utils import common_corpus,common_dictionary +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary >>> from gensim.models import LdaModel >>> >>> model = LdaModel(common_corpus, id2word=common_dictionary, distributed=True) diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index 2f66f30c52..c62a5aa9d1 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -29,48 +29,58 @@ Train an LDA model using a Gensim corpus ->>> from gensim.test.utils import common_texts ->>> from gensim.corpora.dictionary import Dictionary ->>> ->>> # Create a corpus from a list of texts ->>> common_dictionary = Dictionary(common_texts) ->>> common_corpus = [common_dictionary.doc2bow(text) for text in common_texts] ->>> ->>> # Train the model on the corpus. ->>> lda = LdaModel(common_corpus, num_topics=10) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.corpora.dictionary import Dictionary + >>> + >>> # Create a corpus from a list of texts + >>> common_dictionary = Dictionary(common_texts) + >>> common_corpus = [common_dictionary.doc2bow(text) for text in common_texts] + >>> + >>> # Train the model on the corpus. + >>> lda = LdaModel(common_corpus, num_topics=10) Save a model to disk, or reload a pre-trained model ->>> from gensim.test.utils import datapath ->>> ->>> # Save model to disk. ->>> temp_file = datapath("model") ->>> lda.save(temp_file) ->>> ->>> # Load a potentially pretrained model from disk. ->>> lda = LdaModel.load(temp_file) +.. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Save model to disk. + >>> temp_file = datapath("model") + >>> lda.save(temp_file) + >>> + >>> # Load a potentially pretrained model from disk. + >>> lda = LdaModel.load(temp_file) Query, the model using new, unseen documents ->>> # Create a new corpus, made of previously unseen documents. ->>> other_texts = [ -... ['computer', 'time', 'graph'], -... ['survey', 'response', 'eps'], -... ['human', 'system', 'computer'] -... ] ->>> other_corpus = [common_dictionary.doc2bow(text) for text in other_texts] ->>> ->>> unseen_doc = other_corpus[0] ->>> vector = lda[unseen_doc] # get topic probability distribution for a document +.. 
sourcecode:: pycon + + >>> # Create a new corpus, made of previously unseen documents. + >>> other_texts = [ + ... ['computer', 'time', 'graph'], + ... ['survey', 'response', 'eps'], + ... ['human', 'system', 'computer'] + ... ] + >>> other_corpus = [common_dictionary.doc2bow(text) for text in other_texts] + >>> + >>> unseen_doc = other_corpus[0] + >>> vector = lda[unseen_doc] # get topic probability distribution for a document Update the model by incrementally training on the new corpus ->>> lda.update(other_corpus) ->>> vector = lda[unseen_doc] +.. sourcecode:: pycon + + >>> lda.update(other_corpus) + >>> vector = lda[unseen_doc] A lot of parameters can be tuned to optimize training for your specific case ->>> lda = LdaModel(common_corpus, num_topics=50, alpha='auto', eval_every=5) # learn asymmetric alpha from data +.. sourcecode:: pycon + + >>> lda = LdaModel(common_corpus, num_topics=50, alpha='auto', eval_every=5) # learn asymmetric alpha from data """ @@ -315,21 +325,27 @@ class LdaModel(interfaces.TransformationABC, basemodel.BaseTopicModel): ------- Initialize a model using a Gensim corpus - >>> from gensim.test.utils import common_corpus - >>> - >>> lda = LdaModel(common_corpus, num_topics=10) + .. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus + >>> + >>> lda = LdaModel(common_corpus, num_topics=10) You can then infer topic distributions on new, unseen documents. - >>> doc_bow = [(1, 0.3), (2, 0.1), (0, 0.09)] - >>> doc_lda = lda[doc_bow] + .. sourcecode:: pycon + + >>> doc_bow = [(1, 0.3), (2, 0.1), (0, 0.09)] + >>> doc_lda = lda[doc_bow] The model can be updated (trained) with new documents. - >>> # In practice (corpus =/= initial training corpus), but we use the same here for simplicity. - >>> other_corpus = common_corpus - >>> - >>> lda.update(other_corpus) + .. sourcecode:: pycon + + >>> # In practice (corpus =/= initial training corpus), but we use the same here for simplicity. + >>> other_corpus = common_corpus + >>> + >>> lda.update(other_corpus) Model persistency is achieved through :meth:`~gensim.models.ldamodel.LdaModel.load` and :meth:`~gensim.models.ldamodel.LdaModel.save` methods. @@ -1407,12 +1423,15 @@ def diff(self, other, distance="kullback_leibler", num_words=100, -------- Get the differences between each pair of topics inferred by two models - >>> from gensim.models.ldamulticore import LdaMulticore - >>> from gensim.test.utils import datapath - >>> - >>> m1, m2 = LdaMulticore.load(datapath("lda_3_0_1_model")), LdaMulticore.load(datapath("ldamodel_python_3_5")) - >>> mdiff, annotation = m1.diff(m2) - >>> topic_diff = mdiff # get matrix with difference for each topic pair from `m1` and `m2` + .. sourcecode:: pycon + + >>> from gensim.models.ldamulticore import LdaMulticore + >>> from gensim.test.utils import datapath + >>> + >>> m1 = LdaMulticore.load(datapath("lda_3_0_1_model")) + >>> m2 = LdaMulticore.load(datapath("ldamodel_python_3_5")) + >>> mdiff, annotation = m1.diff(m2) + >>> topic_diff = mdiff # get matrix with difference for each topic pair from `m1` and `m2` """ distances = { @@ -1604,10 +1623,12 @@ def load(cls, fname, *args, **kwargs): -------- Large arrays can be memmap'ed back as read-only (shared memory) by setting `mmap='r'`: - >>> from gensim.test.utils import datapath - >>> - >>> fname = datapath("lda_3_0_1_model") - >>> lda = LdaModel.load(fname, mmap='r') + .. 
sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> fname = datapath("lda_3_0_1_model") + >>> lda = LdaModel.load(fname, mmap='r') """ kwargs['mmap'] = kwargs.get('mmap', None) diff --git a/gensim/models/ldamulticore.py b/gensim/models/ldamulticore.py index 168a2752c0..ecf043ea29 100644 --- a/gensim/models/ldamulticore.py +++ b/gensim/models/ldamulticore.py @@ -44,36 +44,41 @@ Usage examples -------------- The constructor estimates Latent Dirichlet Allocation model parameters based on a training corpus +.. sourcecode:: pycon ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> ->>> lda = LdaMulticore(common_corpus, id2word=common_dictionary, num_topics=10) + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> + >>> lda = LdaMulticore(common_corpus, id2word=common_dictionary, num_topics=10) Save a model to disk, or reload a pre-trained model ->>> from gensim.test.utils import datapath ->>> ->>> # Save model to disk. ->>> temp_file = datapath("model") ->>> lda.save(temp_file) ->>> ->>> # Load a potentially pretrained model from disk. ->>> lda = LdaModel.load(temp_file) +.. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Save model to disk. + >>> temp_file = datapath("model") + >>> lda.save(temp_file) + >>> + >>> # Load a potentially pretrained model from disk. + >>> lda = LdaModel.load(temp_file) Query, or update the model using new, unseen documents ->>> other_texts = [ -... ['computer', 'time', 'graph'], -... ['survey', 'response', 'eps'], -... ['human', 'system', 'computer'] -... ] ->>> other_corpus = [common_dictionary.doc2bow(text) for text in other_texts] ->>> ->>> unseen_doc = other_corpus[0] ->>> vector = lda[unseen_doc] # get topic probability distribution for a document ->>> ->>> # Update the model by incrementally training on the new corpus. ->>> lda.update(other_corpus) # update the LDA model with additional documents +.. sourcecode:: pycon + + >>> other_texts = [ + ... ['computer', 'time', 'graph'], + ... ['survey', 'response', 'eps'], + ... ['human', 'system', 'computer'] + ... ] + >>> other_corpus = [common_dictionary.doc2bow(text) for text in other_texts] + >>> + >>> unseen_doc = other_corpus[0] + >>> vector = lda[unseen_doc] # get topic probability distribution for a document + >>> + >>> # Update the model by incrementally training on the new corpus. + >>> lda.update(other_corpus) # update the LDA model with additional documents """ diff --git a/gensim/models/ldaseqmodel.py b/gensim/models/ldaseqmodel.py index 3baae7aeca..35c1b64a15 100644 --- a/gensim/models/ldaseqmodel.py +++ b/gensim/models/ldaseqmodel.py @@ -23,26 +23,32 @@ Set up a model using have 30 documents, with 5 in the first time-slice, 10 in the second, and 15 in the third ->>> from gensim.test.utils import common_corpus ->>> from gensim.models import LdaSeqModel ->>> ->>> ldaseq = LdaSeqModel(corpus=common_corpus, time_slice=[2, 4, 3], num_topics=2, chunksize=1) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus + >>> from gensim.models import LdaSeqModel + >>> + >>> ldaseq = LdaSeqModel(corpus=common_corpus, time_slice=[2, 4, 3], num_topics=2, chunksize=1) Persist a model to disk and reload it later ->>> from gensim.test.utils import datapath ->>> ->>> temp_file = datapath("model") ->>> ldaseq.save(temp_file) ->>> ->>> # Load a potentially pre-trained model from disk. ->>> ldaseq = LdaSeqModel.load(temp_file) +.. 
sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> temp_file = datapath("model") + >>> ldaseq.save(temp_file) + >>> + >>> # Load a potentially pre-trained model from disk. + >>> ldaseq = LdaSeqModel.load(temp_file) Access the document embeddings generated from the DTM ->>> doc = common_corpus[1] ->>> ->>> embedding = ldaseq[doc] +.. sourcecode:: pycon + + >>> doc = common_corpus[1] + >>> + >>> embedding = ldaseq[doc] """ diff --git a/gensim/models/logentropy_model.py b/gensim/models/logentropy_model.py index 9421c57546..6429cbf9e0 100644 --- a/gensim/models/logentropy_model.py +++ b/gensim/models/logentropy_model.py @@ -43,14 +43,16 @@ class LogEntropyModel(interfaces.TransformationABC): Examples -------- - >>> from gensim.models import LogEntropyModel - >>> from gensim.test.utils import common_texts - >>> from gensim.corpora import Dictionary - >>> - >>> dct = Dictionary(common_texts) # fit dictionary - >>> corpus = [dct.doc2bow(row) for row in common_texts] # convert to BoW format - >>> model = LogEntropyModel(corpus) # fit model - >>> vector = model[corpus[1]] # apply model to document + .. sourcecode:: pycon + + >>> from gensim.models import LogEntropyModel + >>> from gensim.test.utils import common_texts + >>> from gensim.corpora import Dictionary + >>> + >>> dct = Dictionary(common_texts) # fit dictionary + >>> corpus = [dct.doc2bow(row) for row in common_texts] # convert to BoW format + >>> model = LogEntropyModel(corpus) # fit model + >>> vector = model[corpus[1]] # apply model to document """ diff --git a/gensim/models/lsi_dispatcher.py b/gensim/models/lsi_dispatcher.py index 0d033d580c..cb7fd4c053 100755 --- a/gensim/models/lsi_dispatcher.py +++ b/gensim/models/lsi_dispatcher.py @@ -37,12 +37,14 @@ python -m gensim.models.lsi_dispatcher & -#. Run :class:`~gensim.models.lsimodel.LsiModel` in distributed mode :: +#. Run :class:`~gensim.models.lsimodel.LsiModel` in distributed mode: - >>> from gensim.test.utils import common_corpus, common_dictionary - >>> from gensim.models import LsiModel - >>> - >>> model = LsiModel(common_corpus, id2word=common_dictionary, distributed=True) + .. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models import LsiModel + >>> + >>> model = LsiModel(common_corpus, id2word=common_dictionary, distributed=True) Command line arguments ---------------------- diff --git a/gensim/models/lsi_worker.py b/gensim/models/lsi_worker.py index fddcdf6bcb..4a38ba8e2d 100755 --- a/gensim/models/lsi_worker.py +++ b/gensim/models/lsi_worker.py @@ -35,12 +35,14 @@ python -m gensim.models.lsi_dispatcher & -#. Run :class:`~gensim.models.lsimodel.LsiModel` in distributed mode :: +#. Run :class:`~gensim.models.lsimodel.LsiModel` in distributed mode: - >>> from gensim.test.utils import common_corpus, common_dictionary - >>> from gensim.models import LsiModel - >>> - >>> model = LsiModel(common_corpus, id2word=common_dictionary, distributed=True) + .. 
sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models import LsiModel + >>> + >>> model = LsiModel(common_corpus, id2word=common_dictionary, distributed=True) Command line arguments diff --git a/gensim/models/lsimodel.py b/gensim/models/lsimodel.py index 054cb3aa5b..8547f0f153 100644 --- a/gensim/models/lsimodel.py +++ b/gensim/models/lsimodel.py @@ -43,11 +43,13 @@ Examples -------- ->>> from gensim.test.utils import common_dictionary, common_corpus ->>> from gensim.models import LsiModel ->>> ->>> model = LsiModel(common_corpus, id2word=common_dictionary) ->>> vectorized_corpus = model[common_corpus] # vectorize input copus in BoW format +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_dictionary, common_corpus + >>> from gensim.models import LsiModel + >>> + >>> model = LsiModel(common_corpus, id2word=common_dictionary) + >>> vectorized_corpus = model[common_corpus] # vectorize input copus in BoW format .. [1] The stochastic algo could be distributed too, but most time is already spent @@ -337,15 +339,17 @@ class LsiModel(interfaces.TransformationABC, basemodel.BaseTopicModel): Examples -------- - >>> from gensim.test.utils import common_corpus, common_dictionary, get_tmpfile - >>> from gensim.models import LsiModel - >>> - >>> model = LsiModel(common_corpus[:3], id2word=common_dictionary) # train model - >>> vector = model[common_corpus[4]] # apply model to BoW document - >>> model.add_documents(common_corpus[4:]) # update model with new documents - >>> tmp_fname = get_tmpfile("lsi.model") - >>> model.save(tmp_fname) # save model - >>> loaded_model = LsiModel.load(tmp_fname) # load model + .. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary, get_tmpfile + >>> from gensim.models import LsiModel + >>> + >>> model = LsiModel(common_corpus[:3], id2word=common_dictionary) # train model + >>> vector = model[common_corpus[4]] # apply model to BoW document + >>> model.add_documents(common_corpus[4:]) # update model with new documents + >>> tmp_fname = get_tmpfile("lsi.model") + >>> model.save(tmp_fname) # save model + >>> loaded_model = LsiModel.load(tmp_fname) # load model """ diff --git a/gensim/models/phrases.py b/gensim/models/phrases.py index 1c0db3abe8..9d8a5f5da6 100644 --- a/gensim/models/phrases.py +++ b/gensim/models/phrases.py @@ -14,23 +14,26 @@ Examples -------- ->>> from gensim.test.utils import datapath ->>> from gensim.models.word2vec import Text8Corpus ->>> from gensim.models.phrases import Phrases, Phraser ->>> ->>> sentences = Text8Corpus(datapath('testcorpus.txt')) ->>> phrases = Phrases(sentences, min_count=1, threshold=1) # train model ->>> phrases[[u'trees', u'graph', u'minors']] # apply model to sentence -[u'trees_graph', u'minors'] ->>> ->>> phrases.add_vocab([["hello", "world"], ["meow"]]) # update model with new sentences ->>> ->>> bigram = Phraser(phrases) # construct faster model (this is only an wrapper) ->>> bigram[[u'trees', u'graph', u'minors']] # apply model to sentence -[u'trees_graph', u'minors'] ->>> ->>> for sent in bigram[sentences]: # apply model to text corpus -... pass + +.. 
sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.models.word2vec import Text8Corpus + >>> from gensim.models.phrases import Phrases, Phraser + >>> + >>> sentences = Text8Corpus(datapath('testcorpus.txt')) + >>> phrases = Phrases(sentences, min_count=1, threshold=1) # train model + >>> phrases[[u'trees', u'graph', u'minors']] # apply model to sentence + [u'trees_graph', u'minors'] + >>> + >>> phrases.add_vocab([["hello", "world"], ["meow"]]) # update model with new sentences + >>> + >>> bigram = Phraser(phrases) # construct faster model (this is only an wrapper) + >>> bigram[[u'trees', u'graph', u'minors']] # apply model to sentence + [u'trees_graph', u'minors'] + >>> + >>> for sent in bigram[sentences]: # apply model to text corpus + ... pass """ @@ -446,18 +449,20 @@ def learn_vocab(sentences, max_vocab_size, delimiter=b'_', progress_per=10000, Example ---------- - >>> from gensim.test.utils import datapath - >>> from gensim.models.word2vec import Text8Corpus - >>> from gensim.models.phrases import Phrases - >>> - >>> sentences = Text8Corpus(datapath('testcorpus.txt')) - >>> pruned_words, counters, total_words = Phrases.learn_vocab(sentences, 100) - >>> (pruned_words, total_words) - (1, 29) - >>> counters['computer'] - 2 - >>> counters['response_time'] - 1 + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.models.word2vec import Text8Corpus + >>> from gensim.models.phrases import Phrases + >>> + >>> sentences = Text8Corpus(datapath('testcorpus.txt')) + >>> pruned_words, counters, total_words = Phrases.learn_vocab(sentences, 100) + >>> (pruned_words, total_words) + (1, 29) + >>> counters['computer'] + 2 + >>> counters['response_time'] + 1 """ sentence_no = -1 @@ -506,21 +511,23 @@ def add_vocab(self, sentences): Example ------- - >>> from gensim.test.utils import datapath - >>> from gensim.models.word2vec import Text8Corpus - >>> from gensim.models.phrases import Phrases - >>> #Create corpus and use it for phrase detector - >>> sentences = Text8Corpus(datapath('testcorpus.txt')) - >>> phrases = Phrases(sentences) # train model - >>> assert len(phrases.vocab) == 37 - >>> - >>> more_sentences = [ - ... [u'the', u'mayor', u'of', u'new', u'york', u'was', u'there'], - ... [u'machine', u'learning', u'can', u'be', u'new', u'york' , u'sometimes'] - ... ] - >>> - >>> phrases.add_vocab(more_sentences) # add new sentences to model - >>> assert len(phrases.vocab) == 60 + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.models.word2vec import Text8Corpus + >>> from gensim.models.phrases import Phrases + >>> # Create corpus and use it for phrase detector + >>> sentences = Text8Corpus(datapath('testcorpus.txt')) + >>> phrases = Phrases(sentences) # train model + >>> assert len(phrases.vocab) == 37 + >>> + >>> more_sentences = [ + ... [u'the', u'mayor', u'of', u'new', u'york', u'was', u'there'], + ... [u'machine', u'learning', u'can', u'be', u'new', u'york', u'sometimes'] + ... ] + >>> + >>> phrases.add_vocab(more_sentences) # add new sentences to model + >>> assert len(phrases.vocab) == 60 """ # uses a separate vocab to collect the token counts from `sentences`. 
@@ -565,15 +572,17 @@ def export_phrases(self, sentences, out_delimiter=b' ', as_tuples=False): Example ------- - >>> from gensim.test.utils import datapath - >>> from gensim.models.word2vec import Text8Corpus - >>> from gensim.models.phrases import Phrases - >>> - >>> sentences = Text8Corpus(datapath('testcorpus.txt')) - >>> phrases = Phrases(sentences, min_count=1, threshold=0.1) - >>> - >>> for phrase, score in phrases.export_phrases(sentences): - ... pass + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.models.word2vec import Text8Corpus + >>> from gensim.models.phrases import Phrases + >>> + >>> sentences = Text8Corpus(datapath('testcorpus.txt')) + >>> phrases = Phrases(sentences, min_count=1, threshold=0.1) + >>> + >>> for phrase, score in phrases.export_phrases(sentences): + ... pass """ analyze_sentence = ft.partial( @@ -617,28 +626,30 @@ def __getitem__(self, sentence): Examples ---------- - >>> from gensim.test.utils import datapath - >>> from gensim.models.word2vec import Text8Corpus - >>> from gensim.models.phrases import Phrases, Phraser - >>> - >>> #Create corpus - >>> sentences = Text8Corpus(datapath('testcorpus.txt')) - >>> - >>> #Train the detector with: - >>> phrases = Phrases(sentences, min_count=1, threshold=1) - >>> #Input is a list of unicode strings: - >>> sent = [u'trees', u'graph', u'minors'] - >>> #Both of these tokens appear in corpus at least twice, and phrase score is higher, than treshold = 1: - >>> print(phrases[sent]) - [u'trees_graph', u'minors'] - >>> - >>> sentences = Text8Corpus(datapath('testcorpus.txt')) - >>> phrases = Phrases(sentences, min_count=1, threshold=1) - >>> phraser = Phraser(phrases) # for speedup - >>> - >>> sent = [[u'trees', u'graph', u'minors'],[u'graph', u'minors']] - >>> for phrase in phraser[sent]: - ... pass + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.models.word2vec import Text8Corpus + >>> from gensim.models.phrases import Phrases, Phraser + >>> + >>> # Create corpus + >>> sentences = Text8Corpus(datapath('testcorpus.txt')) + >>> + >>> # Train the detector with: + >>> phrases = Phrases(sentences, min_count=1, threshold=1) + >>> # Input is a list of unicode strings: + >>> sent = [u'trees', u'graph', u'minors'] + >>> # Both of these tokens appear in corpus at least twice, and phrase score is higher, than treshold = 1: + >>> print(phrases[sent]) + [u'trees_graph', u'minors'] + >>> + >>> sentences = Text8Corpus(datapath('testcorpus.txt')) + >>> phrases = Phrases(sentences, min_count=1, threshold=1) + >>> phraser = Phraser(phrases) # for speedup + >>> + >>> sent = [[u'trees', u'graph', u'minors'], [u'graph', u'minors']] + >>> for phrase in phraser[sent]: + ... pass """ warnings.warn("For a faster implementation, use the gensim.models.phrases.Phraser class") @@ -767,17 +778,19 @@ def __init__(self, phrases_model): Examples -------- - >>> from gensim.test.utils import datapath - >>> from gensim.models.word2vec import Text8Corpus - >>> from gensim.models.phrases import Phrases, Phraser - >>> - >>> sentences = Text8Corpus(datapath('testcorpus.txt')) - >>> phrases = Phrases(sentences, min_count=1, threshold=1) - >>> - >>> bigram = Phraser(phrases) - >>> sent = [u'trees', u'graph', u'minors'] - >>> print(bigram[sent]) - [u'trees_graph', u'minors'] + .. 
sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.models.word2vec import Text8Corpus + >>> from gensim.models.phrases import Phrases, Phraser + >>> + >>> sentences = Text8Corpus(datapath('testcorpus.txt')) + >>> phrases = Phrases(sentences, min_count=1, threshold=1) + >>> + >>> bigram = Phraser(phrases) + >>> sent = [u'trees', u'graph', u'minors'] + >>> print(bigram[sent]) + [u'trees_graph', u'minors'] """ self.threshold = phrases_model.threshold @@ -855,24 +868,26 @@ def __getitem__(self, sentence): Examples ---------- - >>> from gensim.test.utils import datapath - >>> from gensim.models.word2vec import Text8Corpus - >>> from gensim.models.phrases import Phrases, Phraser - >>> - >>> sentences = Text8Corpus(datapath('testcorpus.txt')) # Read corpus - >>> - >>> phrases = Phrases(sentences, min_count=1, threshold=1) # Train model - >>> # Create a Phraser object to transform any sentence and turn 2 suitable tokens into 1 phrase - >>> phraser_model = Phraser(phrases) - >>> - >>> sent = [u'trees', u'graph', u'minors'] - >>> print(phraser_model[sent]) - [u'trees_graph', u'minors'] - >>> sent = [[u'trees', u'graph', u'minors'],[u'graph', u'minors']] - >>> for phrase in phraser_model[sent]: - ... print(phrase) - [u'trees_graph', u'minors'] - [u'graph_minors'] + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.models.word2vec import Text8Corpus + >>> from gensim.models.phrases import Phrases, Phraser + >>> + >>> sentences = Text8Corpus(datapath('testcorpus.txt')) # Read corpus + >>> + >>> phrases = Phrases(sentences, min_count=1, threshold=1) # Train model + >>> # Create a Phraser object to transform any sentence and turn 2 suitable tokens into 1 phrase + >>> phraser_model = Phraser(phrases) + >>> + >>> sent = [u'trees', u'graph', u'minors'] + >>> print(phraser_model[sent]) + [u'trees_graph', u'minors'] + >>> sent = [[u'trees', u'graph', u'minors'], [u'graph', u'minors']] + >>> for phrase in phraser_model[sent]: + ... print(phrase) + [u'trees_graph', u'minors'] + [u'graph_minors'] """ return _sentence2token(self, sentence) diff --git a/gensim/models/poincare.py b/gensim/models/poincare.py index b753ce944a..7a4a33d561 100644 --- a/gensim/models/poincare.py +++ b/gensim/models/poincare.py @@ -24,18 +24,21 @@ Initialize and train a model from a list ->>> from gensim.models.poincare import PoincareModel ->>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')] ->>> model = PoincareModel(relations, negative=2) ->>> model.train(epochs=50) +.. sourcecode:: pycon + + >>> from gensim.models.poincare import PoincareModel + >>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')] + >>> model = PoincareModel(relations, negative=2) + >>> model.train(epochs=50) Initialize and train a model from a file containing one relation per line +.. 
sourcecode:: pycon ->>> from gensim.models.poincare import PoincareModel, PoincareRelations ->>> from gensim.test.utils import datapath ->>> file_path = datapath('poincare_hypernyms.tsv') ->>> model = PoincareModel(PoincareRelations(file_path), negative=2) ->>> model.train(epochs=50) + >>> from gensim.models.poincare import PoincareModel, PoincareRelations + >>> from gensim.test.utils import datapath + >>> file_path = datapath('poincare_hypernyms.tsv') + >>> model = PoincareModel(PoincareRelations(file_path), negative=2) + >>> model.train(epochs=50) """ @@ -128,16 +131,20 @@ def __init__(self, train_data, size=50, alpha=0.1, negative=10, workers=1, epsil -------- Initialize a model from a list: - >>> from gensim.models.poincare import PoincareModel - >>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')] - >>> model = PoincareModel(relations, negative=2) + .. sourcecode:: pycon + + >>> from gensim.models.poincare import PoincareModel + >>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')] + >>> model = PoincareModel(relations, negative=2) Initialize a model from a file containing one relation per line: - >>> from gensim.models.poincare import PoincareModel, PoincareRelations - >>> from gensim.test.utils import datapath - >>> file_path = datapath('poincare_hypernyms.tsv') - >>> model = PoincareModel(PoincareRelations(file_path), negative=2) + .. sourcecode:: pycon + + >>> from gensim.models.poincare import PoincareModel, PoincareRelations + >>> from gensim.test.utils import datapath + >>> file_path = datapath('poincare_hypernyms.tsv') + >>> model = PoincareModel(PoincareRelations(file_path), negative=2) See :class:`~gensim.models.poincare.PoincareRelations` for more options. @@ -574,10 +581,12 @@ def train(self, epochs, batch_size=10, print_every=1000, check_gradients_every=N Examples -------- - >>> from gensim.models.poincare import PoincareModel - >>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')] - >>> model = PoincareModel(relations, negative=2) - >>> model.train(epochs=50) + .. sourcecode:: pycon + + >>> from gensim.models.poincare import PoincareModel + >>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')] + >>> model = PoincareModel(relations, negative=2) + >>> model.train(epochs=50) """ if self.workers > 1: @@ -842,15 +851,17 @@ def word_vec(self, word): Examples -------- - >>> from gensim.test.utils import datapath - >>> - >>> # Read the sample relations file and train the model - >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) - >>> model = PoincareModel(train_data=relations) - >>> model.train(epochs=50) - >>> - >>> # Query the trained model. - >>> wv = model.kv.word_vec('kangaroo.n.01') + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Read the sample relations file and train the model + >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) + >>> model = PoincareModel(train_data=relations) + >>> model.train(epochs=50) + >>> + >>> # Query the trained model. 
+ >>> wv = model.kv.word_vec('kangaroo.n.01') """ return super(PoincareKeyedVectors, self).get_vector(word) @@ -872,16 +883,18 @@ def words_closer_than(self, w1, w2): Examples -------- - >>> from gensim.test.utils import datapath - >>> - >>> # Read the sample relations file and train the model - >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) - >>> model = PoincareModel(train_data=relations) - >>> model.train(epochs=50) - >>> - >>> # Which term is closer to 'kangaroo' than 'metatherian' is to 'kangaroo'? - >>> model.kv.words_closer_than('kangaroo.n.01', 'metatherian.n.01') - [u'marsupial.n.01', u'phalanger.n.01'] + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Read the sample relations file and train the model + >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) + >>> model = PoincareModel(train_data=relations) + >>> model.train(epochs=50) + >>> + >>> # Which term is closer to 'kangaroo' than 'metatherian' is to 'kangaroo'? + >>> model.kv.words_closer_than('kangaroo.n.01', 'metatherian.n.01') + [u'marsupial.n.01', u'phalanger.n.01'] """ return super(PoincareKeyedVectors, self).closer_than(w1, w2) @@ -1107,16 +1120,18 @@ def distance(self, w1, w2): Examples -------- - >>> from gensim.test.utils import datapath - >>> - >>> # Read the sample relations file and train the model - >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) - >>> model = PoincareModel(train_data=relations) - >>> model.train(epochs=50) - >>> - >>> # What is the distance between the words 'mammal' and 'carnivore'? - >>> model.kv.distance('mammal.n.01', 'carnivore.n.01') - 2.9742298803339304 + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Read the sample relations file and train the model + >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) + >>> model = PoincareModel(train_data=relations) + >>> model.train(epochs=50) + >>> + >>> # What is the distance between the words 'mammal' and 'carnivore'? + >>> model.kv.distance('mammal.n.01', 'carnivore.n.01') + 2.9742298803339304 Raises ------ @@ -1145,16 +1160,18 @@ def similarity(self, w1, w2): Examples -------- - >>> from gensim.test.utils import datapath - >>> - >>> # Read the sample relations file and train the model - >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) - >>> model = PoincareModel(train_data=relations) - >>> model.train(epochs=50) - >>> - >>> # What is the similarity between the words 'mammal' and 'carnivore'? - >>> model.kv.similarity('mammal.n.01', 'carnivore.n.01') - 0.25162107631176484 + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Read the sample relations file and train the model + >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) + >>> model = PoincareModel(train_data=relations) + >>> model.train(epochs=50) + >>> + >>> # What is the similarity between the words 'mammal' and 'carnivore'? 
+ >>> model.kv.similarity('mammal.n.01', 'carnivore.n.01') + 0.25162107631176484 Raises ------ @@ -1185,16 +1202,18 @@ def most_similar(self, node_or_vector, topn=10, restrict_vocab=None): Examples -------- - >>> from gensim.test.utils import datapath - >>> - >>> # Read the sample relations file and train the model - >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) - >>> model = PoincareModel(train_data=relations) - >>> model.train(epochs=50) - >>> - >>> # Which words are most similar to 'kangaroo'? - >>> model.kv.most_similar('kangaroo.n.01', topn=2) - [(u'kangaroo.n.01', 0.0), (u'marsupial.n.01', 0.26524229460827725)] + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Read the sample relations file and train the model + >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) + >>> model = PoincareModel(train_data=relations) + >>> model.train(epochs=50) + >>> + >>> # Which words are most similar to 'kangaroo'? + >>> model.kv.most_similar('kangaroo.n.01', topn=2) + [(u'kangaroo.n.01', 0.0), (u'marsupial.n.01', 0.26524229460827725)] """ if not restrict_vocab: @@ -1239,19 +1258,21 @@ def distances(self, node_or_vector, other_nodes=()): Examples -------- - >>> from gensim.test.utils import datapath - >>> - >>> # Read the sample relations file and train the model - >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) - >>> model = PoincareModel(train_data=relations) - >>> model.train(epochs=50) - >>> - >>> # Check the distances between a word and a list of other words. - >>> model.kv.distances('mammal.n.01', ['carnivore.n.01', 'dog.n.01']) - array([2.97422988, 2.83007402]) - - >>> # Check the distances between a word and every other word in the vocab. - >>> all_distances = model.kv.distances('mammal.n.01') + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Read the sample relations file and train the model + >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) + >>> model = PoincareModel(train_data=relations) + >>> model.train(epochs=50) + >>> + >>> # Check the distances between a word and a list of other words. + >>> model.kv.distances('mammal.n.01', ['carnivore.n.01', 'dog.n.01']) + array([2.97422988, 2.83007402]) + + >>> # Check the distances between a word and every other word in the vocab. + >>> all_distances = model.kv.distances('mammal.n.01') Raises ------ @@ -1286,16 +1307,19 @@ def norm(self, node_or_vector): Examples -------- - >>> from gensim.test.utils import datapath - >>> - >>> # Read the sample relations file and train the model - >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) - >>> model = PoincareModel(train_data=relations) - >>> model.train(epochs=50) - >>> - >>> # Get the norm of the embedding of the word `mammal`. - >>> model.kv.norm('mammal.n.01') - 0.6423008703542398 + + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Read the sample relations file and train the model + >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) + >>> model = PoincareModel(train_data=relations) + >>> model.train(epochs=50) + >>> + >>> # Get the norm of the embedding of the word `mammal`. 
+ >>> model.kv.norm('mammal.n.01') + 0.6423008703542398 Notes ----- @@ -1326,18 +1350,21 @@ def difference_in_hierarchy(self, node_or_vector_1, node_or_vector_2): Examples -------- - >>> from gensim.test.utils import datapath - >>> - >>> # Read the sample relations file and train the model - >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) - >>> model = PoincareModel(train_data=relations) - >>> model.train(epochs=50) - >>> - >>> model.kv.difference_in_hierarchy('mammal.n.01', 'dog.n.01') - 0.05382517902410999 - - >>> model.kv.difference_in_hierarchy('dog.n.01', 'mammal.n.01') - -0.05382517902410999 + + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> # Read the sample relations file and train the model + >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv')) + >>> model = PoincareModel(train_data=relations) + >>> model.train(epochs=50) + >>> + >>> model.kv.difference_in_hierarchy('mammal.n.01', 'dog.n.01') + 0.05382517902410999 + + >>> model.kv.difference_in_hierarchy('dog.n.01', 'mammal.n.01') + -0.05382517902410999 Notes ----- diff --git a/gensim/models/rpmodel.py b/gensim/models/rpmodel.py index 0826a7c359..e946c4acb2 100644 --- a/gensim/models/rpmodel.py +++ b/gensim/models/rpmodel.py @@ -12,19 +12,22 @@ Examples -------- ->>> from gensim.models import RpModel ->>> from gensim.corpora import Dictionary ->>> from gensim.test.utils import common_texts, temporary_file ->>> ->>> dictionary = Dictionary(common_texts) # fit dictionary ->>> corpus = [dictionary.doc2bow(text) for text in common_texts] # convert texts to BoW format ->>> ->>> model = RpModel(corpus, id2word=dictionary) # fit model ->>> result = model[corpus[3]] # apply model to document, result is vector in BoW format ->>> ->>> with temporary_file("model_file") as fname: -... model.save(fname) # save model to file -... loaded_model = RpModel.load(fname) # load model + +.. sourcecode:: pycon + + >>> from gensim.models import RpModel + >>> from gensim.corpora import Dictionary + >>> from gensim.test.utils import common_texts, temporary_file + >>> + >>> dictionary = Dictionary(common_texts) # fit dictionary + >>> corpus = [dictionary.doc2bow(text) for text in common_texts] # convert texts to BoW format + >>> + >>> model = RpModel(corpus, id2word=dictionary) # fit model + >>> result = model[corpus[3]] # apply model to document, result is vector in BoW format + >>> + >>> with temporary_file("model_file") as fname: + ... model.save(fname) # save model to file + ... loaded_model = RpModel.load(fname) # load model References @@ -114,15 +117,20 @@ def __getitem__(self, bow): Examples ---------- - >>> from gensim.models import RpModel - >>> from gensim.corpora import Dictionary - >>> from gensim.test.utils import common_texts - >>> - >>> dictionary = Dictionary(common_texts) # fit dictionary - >>> corpus = [dictionary.doc2bow(text) for text in common_texts] # convert texts to BoW format - >>> - >>> model = RpModel(corpus, id2word=dictionary) # fit model - >>> result = model[corpus[0]] # apply model to document, result is vector in BoW format, i.e. [(1, 0.3), ... ] + + .. 
sourcecode:: pycon + + >>> from gensim.models import RpModel + >>> from gensim.corpora import Dictionary + >>> from gensim.test.utils import common_texts + >>> + >>> dictionary = Dictionary(common_texts) # fit dictionary + >>> corpus = [dictionary.doc2bow(text) for text in common_texts] # convert texts to BoW format + >>> + >>> model = RpModel(corpus, id2word=dictionary) # fit model + >>> + >>> # apply model to document, result is vector in BoW format, i.e. [(1, 0.3), ... ] + >>> result = model[corpus[0]] """ # if the input vector is in fact a corpus, return a transformed corpus as result diff --git a/gensim/models/tfidfmodel.py b/gensim/models/tfidfmodel.py index ae24556835..8f163b66c4 100644 --- a/gensim/models/tfidfmodel.py +++ b/gensim/models/tfidfmodel.py @@ -223,16 +223,18 @@ class TfidfModel(interfaces.TransformationABC): Examples -------- - >>> import gensim.downloader as api - >>> from gensim.models import TfidfModel - >>> from gensim.corpora import Dictionary - >>> - >>> dataset = api.load("text8") - >>> dct = Dictionary(dataset) # fit dictionary - >>> corpus = [dct.doc2bow(line) for line in dataset] # convert corpus to BoW format - >>> - >>> model = TfidfModel(corpus) # fit model - >>> vector = model[corpus[0]] # apply model to the first corpus document + .. sourcecode:: pycon + + >>> import gensim.downloader as api + >>> from gensim.models import TfidfModel + >>> from gensim.corpora import Dictionary + >>> + >>> dataset = api.load("text8") + >>> dct = Dictionary(dataset) # fit dictionary + >>> corpus = [dct.doc2bow(line) for line in dataset] # convert corpus to BoW format + >>> + >>> model = TfidfModel(corpus) # fit model + >>> vector = model[corpus[0]] # apply model to the first corpus document """ def __init__(self, corpus=None, id2word=None, dictionary=None, wlocal=utils.identity, diff --git a/gensim/models/translation_matrix.py b/gensim/models/translation_matrix.py index 7ee724c4e3..8969d02bc2 100644 --- a/gensim/models/translation_matrix.py +++ b/gensim/models/translation_matrix.py @@ -15,61 +15,71 @@ Initialize a word-vector models ->>> from gensim.models import KeyedVectors ->>> from gensim.test.utils import datapath, temporary_file ->>> from gensim.models import TranslationMatrix ->>> ->>> model_en = KeyedVectors.load_word2vec_format(datapath("EN.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt")) ->>> model_it = KeyedVectors.load_word2vec_format(datapath("IT.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt")) +.. sourcecode:: pycon + + >>> from gensim.models import KeyedVectors + >>> from gensim.test.utils import datapath + >>> + >>> model_en = KeyedVectors.load_word2vec_format(datapath("EN.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt")) + >>> model_it = KeyedVectors.load_word2vec_format(datapath("IT.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt")) Define word pairs (that will be used for construction of translation matrix +.. sourcecode:: pycon ->>> word_pairs = [ -... ("one", "uno"), ("two", "due"), ("three", "tre"), ("four", "quattro"), ("five", "cinque"), -... ("seven", "sette"), ("eight", "otto"), -... ("dog", "cane"), ("pig", "maiale"), ("fish", "cavallo"), ("birds", "uccelli"), -... ("apple", "mela"), ("orange", "arancione"), ("grape", "acino"), ("banana", "banana") -... ] + >>> word_pairs = [ + ... ("one", "uno"), ("two", "due"), ("three", "tre"), ("four", "quattro"), ("five", "cinque"), + ... ("seven", "sette"), ("eight", "otto"), + ... ("dog", "cane"), ("pig", "maiale"), ("fish", "cavallo"), ("birds", "uccelli"), + ... 
("apple", "mela"), ("orange", "arancione"), ("grape", "acino"), ("banana", "banana") + ... ] Fit :class:`~gensim.models.translation_matrix.TranslationMatrix` +.. sourcecode:: pycon ->>> trans_model = TranslationMatrix(model_en, model_it, word_pairs=word_pairs) + >>> trans_model = TranslationMatrix(model_en, model_it, word_pairs=word_pairs) Apply model (translate words "dog" and "one") +.. sourcecode:: pycon ->>> trans_model.translate(["dog", "one"], topn=3) -OrderedDict([('dog', [u'cane', u'gatto', u'cavallo']), ('one', [u'uno', u'due', u'tre'])]) + >>> trans_model.translate(["dog", "one"], topn=3) + OrderedDict([('dog', [u'cane', u'gatto', u'cavallo']), ('one', [u'uno', u'due', u'tre'])]) Save / load model +.. sourcecode:: pycon ->>> with temporary_file("model_file") as fname: -... trans_model.save(fname) # save model to file -... loaded_trans_model = TranslationMatrix.load(fname) # load model + >>> with temporary_file("model_file") as fname: + ... trans_model.save(fname) # save model to file + ... loaded_trans_model = TranslationMatrix.load(fname) # load model How to make translation between two :class:`~gensim.models.doc2vec.Doc2Vec` models ================================================================================== Prepare data and models +.. sourcecode:: pycon ->>> from gensim.test.utils import datapath ->>> from gensim.test.test_translation_matrix import read_sentiment_docs ->>> from gensim.models import Doc2Vec, BackMappingTranslationMatrix ->>> ->>> data = read_sentiment_docs(datapath("alldata-id-10.txt"))[:5] ->>> src_model = Doc2Vec.load(datapath("small_tag_doc_5_iter50")) ->>> dst_model = Doc2Vec.load(datapath("large_tag_doc_10_iter50")) + >>> from gensim.test.utils import datapath + >>> from gensim.test.test_translation_matrix import read_sentiment_docs + >>> from gensim.models import Doc2Vec + >>> + >>> data = read_sentiment_docs(datapath("alldata-id-10.txt"))[:5] + >>> src_model = Doc2Vec.load(datapath("small_tag_doc_5_iter50")) + >>> dst_model = Doc2Vec.load(datapath("large_tag_doc_10_iter50")) Train backward translation ->>> model_trans = BackMappingTranslationMatrix(data, src_model, dst_model) ->>> trans_matrix = model_trans.train(data) +.. sourcecode:: pycon + + >>> model_trans = BackMappingTranslationMatrix(data, src_model, dst_model) + >>> trans_matrix = model_trans.train(data) Apply model ->>> result = model_trans.infer_vector(dst_model.docvecs[data[3].tags]) +.. sourcecode:: pycon + + >>> result = model_trans.infer_vector(dst_model.docvecs[data[3].tags]) References @@ -161,23 +171,27 @@ class TranslationMatrix(utils.SaveLoad): Examples -------- - >>> from gensim.models import KeyedVectors - >>> from gensim.test.utils import datapath, temporary_file - >>> - >>> model_en = KeyedVectors.load_word2vec_format(datapath("EN.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt")) - >>> model_it = KeyedVectors.load_word2vec_format(datapath("IT.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt")) - >>> - >>> word_pairs = [ - ... ("one", "uno"), ("two", "due"), ("three", "tre"), ("four", "quattro"), ("five", "cinque"), - ... ("seven", "sette"), ("eight", "otto"), - ... ("dog", "cane"), ("pig", "maiale"), ("fish", "cavallo"), ("birds", "uccelli"), - ... ("apple", "mela"), ("orange", "arancione"), ("grape", "acino"), ("banana", "banana") - ... 
] - >>> - >>> trans_model = TranslationMatrix(model_en, model_it) - >>> trans_model.train(word_pairs) - >>> trans_model.translate(["dog", "one"], topn=3) - OrderedDict([('dog', [u'cane', u'gatto', u'cavallo']), ('one', [u'uno', u'due', u'tre'])]) + + .. sourcecode:: pycon + + >>> from gensim.models import KeyedVectors + >>> from gensim.test.utils import datapath + >>> en = datapath("EN.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt") + >>> it = datapath("IT.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt") + >>> model_en = KeyedVectors.load_word2vec_format(en) + >>> model_it = KeyedVectors.load_word2vec_format(it) + >>> + >>> word_pairs = [ + ... ("one", "uno"), ("two", "due"), ("three", "tre"), ("four", "quattro"), ("five", "cinque"), + ... ("seven", "sette"), ("eight", "otto"), + ... ("dog", "cane"), ("pig", "maiale"), ("fish", "cavallo"), ("birds", "uccelli"), + ... ("apple", "mela"), ("orange", "arancione"), ("grape", "acino"), ("banana", "banana") + ... ] + >>> + >>> trans_model = TranslationMatrix(model_en, model_it) + >>> trans_model.train(word_pairs) + >>> trans_model.translate(["dog", "one"], topn=3) + OrderedDict([('dog', [u'cane', u'gatto', u'cavallo']), ('one', [u'uno', u'due', u'tre'])]) References @@ -361,18 +375,20 @@ class BackMappingTranslationMatrix(utils.SaveLoad): Examples -------- - >>> from gensim.test.utils import datapath - >>> from gensim.test.test_translation_matrix import read_sentiment_docs - >>> from gensim.models import Doc2Vec, BackMappingTranslationMatrix - >>> - >>> data = read_sentiment_docs(datapath("alldata-id-10.txt"))[:5] - >>> src_model = Doc2Vec.load(datapath("small_tag_doc_5_iter50")) - >>> dst_model = Doc2Vec.load(datapath("large_tag_doc_10_iter50")) - >>> - >>> model_trans = BackMappingTranslationMatrix(src_model, dst_model) - >>> trans_matrix = model_trans.train(data) - >>> - >>> result = model_trans.infer_vector(dst_model.docvecs[data[3].tags]) + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> from gensim.test.test_translation_matrix import read_sentiment_docs + >>> from gensim.models import Doc2Vec, BackMappingTranslationMatrix + >>> + >>> data = read_sentiment_docs(datapath("alldata-id-10.txt"))[:5] + >>> src_model = Doc2Vec.load(datapath("small_tag_doc_5_iter50")) + >>> dst_model = Doc2Vec.load(datapath("large_tag_doc_10_iter50")) + >>> + >>> model_trans = BackMappingTranslationMatrix(src_model, dst_model) + >>> trans_matrix = model_trans.train(data) + >>> + >>> result = model_trans.infer_vector(dst_model.docvecs[data[3].tags]) """ def __init__(self, source_lang_vec, target_lang_vec, tagged_docs=None, random_state=None): diff --git a/gensim/models/word2vec.py b/gensim/models/word2vec.py index 2c736351a7..098905420b 100755 --- a/gensim/models/word2vec.py +++ b/gensim/models/word2vec.py @@ -34,47 +34,55 @@ Initialize a model with e.g.: ->>> from gensim.test.utils import common_texts, get_tmpfile ->>> from gensim.models import Word2Vec ->>> ->>> path = get_tmpfile("word2vec.model") ->>> ->>> model = Word2Vec(common_texts, size=100, window=5, min_count=1, workers=4) ->>> model.save("word2vec.model") +.. 
sourcecode:: pycon + + >>> from gensim.test.utils import common_texts, get_tmpfile + >>> from gensim.models import Word2Vec + >>> + >>> path = get_tmpfile("word2vec.model") + >>> + >>> model = Word2Vec(common_texts, size=100, window=5, min_count=1, workers=4) + >>> model.save("word2vec.model") The training is streamed, meaning `sentences` can be a generator, reading input data from disk on-the-fly, without loading the entire corpus into RAM. It also means you can continue training the model later: +.. sourcecode:: pycon ->>> model = Word2Vec.load("word2vec.model") ->>> model.train([["hello", "world"]], total_examples=1, epochs=1) -(0, 2) + >>> model = Word2Vec.load("word2vec.model") + >>> model.train([["hello", "world"]], total_examples=1, epochs=1) + (0, 2) The trained word vectors are stored in a :class:`~gensim.models.keyedvectors.KeyedVectors` instance in `model.wv`: +.. sourcecode:: pycon ->>> vector = model.wv['computer'] # numpy vector of a word + >>> vector = model.wv['computer'] # numpy vector of a word The reason for separating the trained vectors into `KeyedVectors` is that if you don't need the full model state any more (don't need to continue training), the state can discarded, resulting in a much smaller and faster object that can be mmapped for lightning -fast loading and sharing the vectors in RAM between processes:: +fast loading and sharing the vectors in RAM between processes: + +.. sourcecode:: pycon ->>> from gensim.models import KeyedVectors ->>> ->>> path = get_tmpfile("wordvectors.kv") ->>> ->>> model.wv.save(path) ->>> wv = KeyedVectors.load("model.wv", mmap='r') ->>> vector = wv['computer'] # numpy vector of a word + >>> from gensim.models import KeyedVectors + >>> + >>> path = get_tmpfile("wordvectors.kv") + >>> + >>> model.wv.save(path) + >>> wv = KeyedVectors.load("model.wv", mmap='r') + >>> vector = wv['computer'] # numpy vector of a word Gensim can also load word vectors in the "word2vec C format", as a -:class:`~gensim.models.keyedvectors.KeyedVectors` instance:: +:class:`~gensim.models.keyedvectors.KeyedVectors` instance: + +.. sourcecode:: pycon ->>> from gensim.test.utils import datapath ->>> ->>> wv_from_text = KeyedVectors.load_word2vec_format(datapath('word2vec_pre_kv_c'), binary=False) # C text format ->>> wv_from_bin = KeyedVectors.load_word2vec_format(datapath("euclidean_vectors.bin"), binary=True) # C binary format + >>> from gensim.test.utils import datapath + >>> + >>> wv_from_text = KeyedVectors.load_word2vec_format(datapath('word2vec_pre_kv_c'), binary=False) # C text format + >>> wv_from_bin = KeyedVectors.load_word2vec_format(datapath("euclidean_vectors.bin"), binary=True) # C bin format It is impossible to continue training the vectors loaded from the C format because the hidden weights, vocabulary frequencies and the binary tree are missing. To continue training, you'll need the @@ -87,8 +95,10 @@ If you're finished training a model (i.e. no more updates, only querying), you can switch to the :class:`~gensim.models.keyedvectors.KeyedVectors` instance: ->>> word_vectors = model.wv ->>> del model +.. sourcecode:: pycon + + >>> word_vectors = model.wv + >>> del model to trim unneeded model state = use much less RAM and allow fast loading and memory sharing (mmap). @@ -96,11 +106,13 @@ detect phrases longer than one word. 
Using phrases, you can learn a word2vec model where "words" are actually multiword expressions, such as `new_york_times` or `financial_crisis`: ->>> from gensim.test.utils import common_texts ->>> from gensim.models import Phrases ->>> ->>> bigram_transformer = Phrases(common_texts) ->>> model = Word2Vec(bigram_transformer[common_texts], min_count=1) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.models import Phrases + >>> + >>> bigram_transformer = Phrases(common_texts) + >>> model = Word2Vec(bigram_transformer[common_texts], min_count=1) """ @@ -744,9 +756,11 @@ def __init__(self, sentences=None, corpus_file=None, size=100, alpha=0.025, wind -------- Initialize and train a :class:`~gensim.models.word2vec.Word2Vec` model - >>> from gensim.models import Word2Vec - >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> model = Word2Vec(sentences, min_count=1) + .. sourcecode:: pycon + + >>> from gensim.models import Word2Vec + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> model = Word2Vec(sentences, min_count=1) """ self.max_final_vocab = max_final_vocab @@ -877,13 +891,15 @@ def train(self, sentences=None, corpus_file=None, total_examples=None, total_wor Examples -------- - >>> from gensim.models import Word2Vec - >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] - >>> - >>> model = Word2Vec(min_count=1) - >>> model.build_vocab(sentences) # prepare the model vocabulary - >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) # train word vectors - (1, 30) + .. sourcecode:: pycon + + >>> from gensim.models import Word2Vec + >>> sentences = [["cat", "say", "meow"], ["dog", "say", "woof"]] + >>> + >>> model = Word2Vec(min_count=1) + >>> model.build_vocab(sentences) # prepare the model vocabulary + >>> model.train(sentences, total_examples=model.corpus_count, epochs=model.iter) # train word vectors + (1, 30) """ return super(Word2Vec, self).train( @@ -1393,10 +1409,13 @@ def __init__(self, source, max_sentence_length=MAX_WORDS_IN_BATCH, limit=None): Examples -------- - >>> from gensim.test.utils import datapath - >>> sentences = LineSentence(datapath('lee_background.cor')) - >>> for sentence in sentences: - ... pass + + .. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> sentences = LineSentence(datapath('lee_background.cor')) + >>> for sentence in sentences: + ... pass """ self.source = source diff --git a/gensim/models/wrappers/dtmmodel.py b/gensim/models/wrappers/dtmmodel.py index 0df73e6be8..a88d7eb23e 100644 --- a/gensim/models/wrappers/dtmmodel.py +++ b/gensim/models/wrappers/dtmmodel.py @@ -25,14 +25,17 @@ Examples -------- ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> from gensim.models.wrappers import DtmModel ->>> ->>> path_to_dtm_binary = "/path/to/dtm/binary" ->>> model = DtmModel( -... path_to_dtm_binary, corpus=common_corpus, id2word=common_dictionary, -... time_slices=[1] * len(common_corpus) -... ) + +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models.wrappers import DtmModel + >>> + >>> path_to_dtm_binary = "/path/to/dtm/binary" + >>> model = DtmModel( + ... path_to_dtm_binary, corpus=common_corpus, id2word=common_dictionary, + ... time_slices=[1] * len(common_corpus) + ... 
) """ diff --git a/gensim/models/wrappers/fasttext.py b/gensim/models/wrappers/fasttext.py index 3f7b65c21b..bca36c7cb9 100644 --- a/gensim/models/wrappers/fasttext.py +++ b/gensim/models/wrappers/fasttext.py @@ -25,9 +25,11 @@ Example: ->>> from gensim.models.wrappers import FastText ->>> model = FastText.train('/Users/kofola/fastText/fasttext', corpus_file='text8') ->>> print model['forests'] # prints vector for given out-of-vocabulary word +.. sourcecode:: pycon + + >>> from gensim.models.wrappers import FastText + >>> model = FastText.train('/Users/kofola/fastText/fasttext', corpus_file='text8') + >>> print(model['forests']) # prints vector for given out-of-vocabulary word .. [1] https://github.com/facebookresearch/fastText#enriching-word-vectors-with-subword-information diff --git a/gensim/models/wrappers/ldamallet.py b/gensim/models/wrappers/ldamallet.py index 56509d3e4c..6c9487eb37 100644 --- a/gensim/models/wrappers/ldamallet.py +++ b/gensim/models/wrappers/ldamallet.py @@ -32,12 +32,15 @@ Examples -------- ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> from gensim.models.wrappers import LdaMallet ->>> ->>> path_to_mallet_binary = "/path/to/mallet/binary" ->>> model = LdaMallet(path_to_mallet_binary, corpus=common_corpus, num_topics=20, id2word=common_dictionary) ->>> vector = model[common_corpus[0]] # LDA topics of a documents + +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models.wrappers import LdaMallet + >>> + >>> path_to_mallet_binary = "/path/to/mallet/binary" + >>> model = LdaMallet(path_to_mallet_binary, corpus=common_corpus, num_topics=20, id2word=common_dictionary) + >>> vector = model[common_corpus[0]] # LDA topics of a documents """ diff --git a/gensim/models/wrappers/ldavowpalwabbit.py b/gensim/models/wrappers/ldavowpalwabbit.py index c8f65ceb92..d62a914d53 100644 --- a/gensim/models/wrappers/ldavowpalwabbit.py +++ b/gensim/models/wrappers/ldavowpalwabbit.py @@ -31,40 +31,51 @@ -------- Train model +.. sourcecode:: pycon ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> from gensim.models.wrappers import LdaVowpalWabbit ->>> ->>> path_to_wv_binary = "/path/to/vw/binary" ->>> model = LdaVowpalWabbit(path_to_wv_binary, corpus=common_corpus, num_topics=20, id2word=common_dictionary) + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.models.wrappers import LdaVowpalWabbit + >>> + >>> path_to_wv_binary = "/path/to/vw/binary" + >>> model = LdaVowpalWabbit(path_to_wv_binary, corpus=common_corpus, num_topics=20, id2word=common_dictionary) Update existing model ->>> another_corpus = [[(1, 1), (2, 1)], [(3, 5)]] ->>> model.update(another_corpus) +.. sourcecode:: pycon + + >>> another_corpus = [[(1, 1), (2, 1)], [(3, 5)]] + >>> model.update(another_corpus) Get topic probability distributions for a document ->>> document_bow = [(1, 1)] ->>> print(model[document_bow]) +.. sourcecode:: pycon + + >>> document_bow = [(1, 1)] + >>> print(model[document_bow]) Print topics ->>> print(model.print_topics()) +.. sourcecode:: pycon + + >>> print(model.print_topics()) Save/load the trained model ->>> from gensim.test.utils import get_tmpfile ->>> ->>> temp_path = get_tmpfile("vw_lda.model") ->>> model.save(temp_path) ->>> ->>> loaded_lda = LdaVowpalWabbit.load(temp_path) +.. 
sourcecode:: pycon
+
+    >>> from gensim.test.utils import get_tmpfile
+    >>>
+    >>> temp_path = get_tmpfile("vw_lda.model")
+    >>> model.save(temp_path)
+    >>>
+    >>> loaded_lda = LdaVowpalWabbit.load(temp_path)
 
 Calculate log-perplexity on given corpus
 
->>> another_corpus = [[(1, 1), (2, 1)], [(3, 5)]]
->>> print(model.log_perplexity(another_corpus))
+.. sourcecode:: pycon
+
+    >>> another_corpus = [[(1, 1), (2, 1)], [(3, 5)]]
+    >>> print(model.log_perplexity(another_corpus))
 
 Vowpal Wabbit works on files, so this wrapper maintains a temporary directory while it's around,
 reading/writing there as necessary.
diff --git a/gensim/models/wrappers/wordrank.py b/gensim/models/wrappers/wordrank.py
index f6dcc617ed..018fe1f9d6 100644
--- a/gensim/models/wrappers/wordrank.py
+++ b/gensim/models/wrappers/wordrank.py
@@ -33,13 +33,14 @@
 
 Examples
 --------
-
->>> from gensim.models.wrappers import Wordrank
->>>
->>> path_to_wordrank_binary = '/path/to/wordrank/binary'
->>> model = Wordrank.train(path_to_wordrank_binary, corpus_file='text8', out_name='wr_model')
->>>
->>> print model["hello"]  # prints vector for given words
+.. sourcecode:: pycon
+
+    >>> from gensim.models.wrappers import Wordrank
+    >>>
+    >>> path_to_wordrank_binary = '/path/to/wordrank/binary'
+    >>> model = Wordrank.train(path_to_wordrank_binary, corpus_file='text8', out_name='wr_model')
+    >>>
+    >>> print(model["hello"])  # prints vector for given words
 
 Warnings
 --------
diff --git a/gensim/parsing/porter.py b/gensim/parsing/porter.py
index 92c52e0c6d..7d298d7486 100644
--- a/gensim/parsing/porter.py
+++ b/gensim/parsing/porter.py
@@ -9,17 +9,19 @@
 Examples:
 ---------
 
->>> from gensim.parsing.porter import PorterStemmer
->>>
->>> p = PorterStemmer()
->>> p.stem("apple")
-'appl'
->>>
->>> p.stem_sentence("Cats and ponies have meeting")
-'cat and poni have meet'
->>>
->>> p.stem_documents(["Cats and ponies", "have meeting"])
-['cat and poni', 'have meet']
+.. sourcecode:: pycon
+
+    >>> from gensim.parsing.porter import PorterStemmer
+    >>>
+    >>> p = PorterStemmer()
+    >>> p.stem("apple")
+    'appl'
+    >>>
+    >>> p.stem_sentence("Cats and ponies have meeting")
+    'cat and poni have meet'
+    >>>
+    >>> p.stem_documents(["Cats and ponies", "have meeting"])
+    ['cat and poni', 'have meet']
 
 .. [1] Porter, 1980, An algorithm for suffix stripping, http://www.cs.odu.edu/~jbollen/IR04/readings/readings5.pdf
 .. [2] http://www.tartarus.org/~martin/PorterStemmer
@@ -62,14 +64,16 @@ def _cons(self, i):
 
         Examples
         --------
-        >>> from gensim.parsing.porter import PorterStemmer
-        >>> p = PorterStemmer()
-        >>> p.b = "hi"
-        >>> p._cons(1)
-        False
-        >>> p.b = "meow"
-        >>> p._cons(3)
-        True
+        .. sourcecode:: pycon
+
+            >>> from gensim.parsing.porter import PorterStemmer
+            >>> p = PorterStemmer()
+            >>> p.b = "hi"
+            >>> p._cons(1)
+            False
+            >>> p.b = "meow"
+            >>> p._cons(3)
+            True
 
         """
         ch = self.b[i]
@@ -97,12 +101,14 @@ def _m(self):
 
         Examples
         --------
-        >>> from gensim.parsing.porter import PorterStemmer
-        >>> p = PorterStemmer()
-        >>> p.b = "aobm"
-        >>> p.j = 11
-        >>> p._m()
-        2
+        .. 
sourcecode:: pycon + + >>> from gensim.parsing.porter import PorterStemmer + >>> p = PorterStemmer() + >>> p.b = "aobm" + >>> p.j = 11 + >>> p._m() + 2 """ i = 0 @@ -140,19 +146,18 @@ def _vowelinstem(self): Examples -------- - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.b = "gnsm" - >>> p.j = 3 - >>> p._vowelinstem() - False - - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.b = "gensim" - >>> p.j = 5 - >>> p._vowelinstem() - True + .. sourcecode:: pycon + + >>> from gensim.parsing.porter import PorterStemmer + >>> p = PorterStemmer() + >>> p.b = "gnsm" + >>> p.j = 3 + >>> p._vowelinstem() + False + >>> p.b = "gensim" + >>> p.j = 5 + >>> p._vowelinstem() + True """ return not all(self._cons(i) for i in xrange(self.j + 1)) @@ -171,19 +176,18 @@ def _doublec(self, j): Examples -------- - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.b = "real" - >>> p.j = 3 - >>> p._doublec(3) - False - - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.b = "really" - >>> p.j = 5 - >>> p._doublec(4) - True + .. sourcecode:: pycon + + >>> from gensim.parsing.porter import PorterStemmer + >>> p = PorterStemmer() + >>> p.b = "real" + >>> p.j = 3 + >>> p._doublec(3) + False + >>> p.b = "really" + >>> p.j = 5 + >>> p._doublec(4) + True """ return j > 0 and self.b[j] == self.b[j - 1] and self._cons(j) @@ -204,26 +208,22 @@ def _cvc(self, i): Examples -------- - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.b = "lib" - >>> p.j = 2 - >>> p._cvc(2) - True - - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.b = "dll" - >>> p.j = 2 - >>> p._cvc(2) - False - - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.b = "wow" - >>> p.j = 2 - >>> p._cvc(2) - False + .. sourcecode:: pycon + + >>> from gensim.parsing.porter import PorterStemmer + >>> p = PorterStemmer() + >>> p.b = "lib" + >>> p.j = 2 + >>> p._cvc(2) + True + >>> p.b = "dll" + >>> p.j = 2 + >>> p._cvc(2) + False + >>> p.b = "wow" + >>> p.j = 2 + >>> p._cvc(2) + False """ if i < 2 or not self._cons(i) or self._cons(i - 1) or not self._cons(i - 2): @@ -243,13 +243,15 @@ def _ends(self, s): Examples -------- - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.b = "cowboy" - >>> p.j = 5 - >>> p.k = 2 - >>> p._ends("cow") - True + .. sourcecode:: pycon + + >>> from gensim.parsing.porter import PorterStemmer + >>> p = PorterStemmer() + >>> p.b = "cowboy" + >>> p.j = 5 + >>> p.k = 2 + >>> p._ends("cow") + True """ if s[-1] != self.b[self.k]: # tiny speed-up @@ -493,10 +495,13 @@ def stem(self, w): Examples -------- - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.stem("ponies") - 'poni' + + .. sourcecode:: pycon + + >>> from gensim.parsing.porter import PorterStemmer + >>> p = PorterStemmer() + >>> p.stem("ponies") + 'poni' """ w = w.lower() @@ -535,10 +540,12 @@ def stem_sentence(self, txt): Examples -------- - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.stem_sentence("Wow very nice woman with apple") - 'wow veri nice woman with appl' + .. 
sourcecode:: pycon + + >>> from gensim.parsing.porter import PorterStemmer + >>> p = PorterStemmer() + >>> p.stem_sentence("Wow very nice woman with apple") + 'wow veri nice woman with appl' """ return " ".join(self.stem(x) for x in txt.split()) @@ -558,10 +565,12 @@ def stem_documents(self, docs): Examples -------- - >>> from gensim.parsing.porter import PorterStemmer - >>> p = PorterStemmer() - >>> p.stem_documents(["Have a very nice weekend", "Have a very nice weekend"]) - ['have a veri nice weekend', 'have a veri nice weekend'] + .. sourcecode:: pycon + + >>> from gensim.parsing.porter import PorterStemmer + >>> p = PorterStemmer() + >>> p.stem_documents(["Have a very nice weekend", "Have a very nice weekend"]) + ['have a veri nice weekend', 'have a veri nice weekend'] """ return [self.stem_sentence(x) for x in docs] diff --git a/gensim/parsing/preprocessing.py b/gensim/parsing/preprocessing.py index cc15b4665b..98133b3848 100644 --- a/gensim/parsing/preprocessing.py +++ b/gensim/parsing/preprocessing.py @@ -10,12 +10,14 @@ Examples: --------- ->>> from gensim.parsing.preprocessing import remove_stopwords ->>> remove_stopwords("Better late than never, but better never late.") -u'Better late never, better late.' ->>> ->>> preprocess_string("Hel 9lo Wo9 rld! Th3 weather_is really g00d today, isn't it?") -[u'hel', u'rld', u'weather', u'todai', u'isn'] +.. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import remove_stopwords + >>> remove_stopwords("Better late than never, but better never late.") + u'Better late never, better late.' + >>> + >>> preprocess_string("Hel 9lo Wo9 rld! Th3 weather_is really g00d today, isn't it?") + [u'hel', u'rld', u'weather', u'todai', u'isn'] Data: @@ -96,9 +98,11 @@ def remove_stopwords(s): Examples -------- - >>> from gensim.parsing.preprocessing import remove_stopwords - >>> remove_stopwords("Better late than never, but better never late.") - u'Better late never, better late.' + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import remove_stopwords + >>> remove_stopwords("Better late than never, but better never late.") + u'Better late never, better late.' """ s = utils.to_unicode(s) @@ -119,9 +123,11 @@ def strip_punctuation(s): Examples -------- - >>> from gensim.parsing.preprocessing import strip_punctuation - >>> strip_punctuation("A semicolon is a stronger break than a comma, but not as much as a full stop!") - u'A semicolon is a stronger break than a comma but not as much as a full stop ' + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import strip_punctuation + >>> strip_punctuation("A semicolon is a stronger break than a comma, but not as much as a full stop!") + u'A semicolon is a stronger break than a comma but not as much as a full stop ' """ s = utils.to_unicode(s) @@ -145,9 +151,11 @@ def strip_tags(s): Examples -------- - >>> from gensim.parsing.preprocessing import strip_tags - >>> strip_tags("Hello World!") - u'Hello World!' + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import strip_tags + >>> strip_tags("Hello World!") + u'Hello World!' """ s = utils.to_unicode(s) @@ -169,12 +177,14 @@ def strip_short(s, minsize=3): Examples -------- - >>> from gensim.parsing.preprocessing import strip_short - >>> strip_short("salut les amis du 59") - u'salut les amis' - >>> - >>> strip_short("one two three four five six seven eight nine ten", minsize=5) - u'three seven eight' + .. 
sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import strip_short + >>> strip_short("salut les amis du 59") + u'salut les amis' + >>> + >>> strip_short("one two three four five six seven eight nine ten", minsize=5) + u'three seven eight' """ s = utils.to_unicode(s) @@ -195,9 +205,11 @@ def strip_numeric(s): Examples -------- - >>> from gensim.parsing.preprocessing import strip_numeric - >>> strip_numeric("0text24gensim365test") - u'textgensimtest' + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import strip_numeric + >>> strip_numeric("0text24gensim365test") + u'textgensimtest' """ s = utils.to_unicode(s) @@ -222,9 +234,11 @@ def strip_non_alphanum(s): Examples -------- - >>> from gensim.parsing.preprocessing import strip_non_alphanum - >>> strip_non_alphanum("if-you#can%read$this&then@this#method^works") - u'if you can read this then this method works' + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import strip_non_alphanum + >>> strip_non_alphanum("if-you#can%read$this&then@this#method^works") + u'if you can read this then this method works' """ s = utils.to_unicode(s) @@ -246,9 +260,11 @@ def strip_multiple_whitespaces(s): Examples -------- - >>> from gensim.parsing.preprocessing import strip_multiple_whitespaces - >>> strip_multiple_whitespaces("salut" + '\r' + " les" + '\n' + " loulous!") - u'salut les loulous!' + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import strip_multiple_whitespaces + >>> strip_multiple_whitespaces("salut" + '\r' + " les" + '\n' + " loulous!") + u'salut les loulous!' """ s = utils.to_unicode(s) @@ -269,9 +285,11 @@ def split_alphanum(s): Examples -------- - >>> from gensim.parsing.preprocessing import split_alphanum - >>> split_alphanum("24.0hours7 days365 a1b2c3") - u'24.0 hours 7 days 365 a 1 b 2 c 3' + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import split_alphanum + >>> split_alphanum("24.0hours7 days365 a1b2c3") + u'24.0 hours 7 days 365 a 1 b 2 c 3' """ s = utils.to_unicode(s) @@ -293,9 +311,11 @@ def stem_text(text): Examples -------- - >>> from gensim.parsing.preprocessing import stem_text - >>> stem_text("While it is quite useful to be able to search a large collection of documents almost instantly.") - u'while it is quit us to be abl to search a larg collect of document almost instantly.' + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import stem_text + >>> stem_text("While it is quite useful to be able to search a large collection of documents almost instantly.") + u'while it is quit us to be abl to search a larg collect of document almost instantly.' """ text = utils.to_unicode(text) @@ -338,14 +358,16 @@ def preprocess_string(s, filters=DEFAULT_FILTERS): Examples -------- - >>> from gensim.parsing.preprocessing import preprocess_string - >>> preprocess_string("Hel 9lo Wo9 rld! Th3 weather_is really g00d today, isn't it?") - [u'hel', u'rld', u'weather', u'todai', u'isn'] - >>> - >>> s = "Hel 9lo Wo9 rld! Th3 weather_is really g00d today, isn't it?" - >>> CUSTOM_FILTERS = [lambda x: x.lower(), strip_tags, strip_punctuation] - >>> preprocess_string(s, CUSTOM_FILTERS) - [u'hel', u'9lo', u'wo9', u'rld', u'th3', u'weather', u'is', u'really', u'g00d', u'today', u'isn', u't', u'it'] + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import preprocess_string + >>> preprocess_string("Hel 9lo Wo9 rld! Th3 weather_is really g00d today, isn't it?") + [u'hel', u'rld', u'weather', u'todai', u'isn'] + >>> + >>> s = "Hel 9lo Wo9 rld! 
Th3 weather_is really g00d today, isn't it?" + >>> CUSTOM_FILTERS = [lambda x: x.lower(), strip_tags, strip_punctuation] + >>> preprocess_string(s, CUSTOM_FILTERS) + [u'hel', u'9lo', u'wo9', u'rld', u'th3', u'weather', u'is', u'really', u'g00d', u'today', u'isn', u't', u'it'] """ s = utils.to_unicode(s) @@ -368,9 +390,12 @@ def preprocess_documents(docs): Examples -------- - >>> from gensim.parsing.preprocessing import preprocess_documents - >>> preprocess_documents(["Hel 9lo Wo9 rld!", "Th3 weather_is really g00d today, isn't it?"]) - [[u'hel', u'rld'], [u'weather', u'todai', u'isn']] + + .. sourcecode:: pycon + + >>> from gensim.parsing.preprocessing import preprocess_documents + >>> preprocess_documents(["Hel 9lo Wo9 rld!", "Th3 weather_is really g00d today, isn't it?"]) + [[u'hel', u'rld'], [u'weather', u'todai', u'isn']] """ return [preprocess_string(d) for d in docs] diff --git a/gensim/scripts/glove2word2vec.py b/gensim/scripts/glove2word2vec.py index 8574d6ff77..39fb683f58 100644 --- a/gensim/scripts/glove2word2vec.py +++ b/gensim/scripts/glove2word2vec.py @@ -34,19 +34,19 @@ How to use ---------- ->>> from gensim.test.utils import datapath, get_tmpfile ->>> from gensim.models import KeyedVectors ->>> ->>> glove_file = datapath('test_glove.txt') ->>> tmp_file = get_tmpfile("test_word2vec.txt") ->>> ->>> # call glove2word2vec script ->>> # default way (through CLI): python -m gensim.scripts.glove2word2vec --input --output ->>> from gensim.scripts.glove2word2vec import glove2word2vec ->>> glove2word2vec(glove_file, tmp_file) ->>> ->>> model = KeyedVectors.load_word2vec_format(tmp_file) +.. sourcecode:: pycon + + >>> from gensim.test.utils import datapath, get_tmpfile + >>> from gensim.models import KeyedVectors + >>> from gensim.scripts.glove2word2vec import glove2word2vec + >>> + >>> glove_file = datapath('test_glove.txt') + >>> tmp_file = get_tmpfile("test_word2vec.txt") + >>> + >>> _ = glove2word2vec(glove_file, tmp_file) + >>> + >>> model = KeyedVectors.load_word2vec_format(tmp_file) Command line arguments ---------------------- diff --git a/gensim/scripts/package_info.py b/gensim/scripts/package_info.py index da607b78e9..eee2d082de 100644 --- a/gensim/scripts/package_info.py +++ b/gensim/scripts/package_info.py @@ -4,9 +4,11 @@ -------- You can use it through python ->>> from gensim.scripts.package_info import package_info ->>> ->>> info = package_info() +.. sourcecode:: pycon + + >>> from gensim.scripts.package_info import package_info + >>> + >>> info = package_info() or using CLI interface diff --git a/gensim/scripts/segment_wiki.py b/gensim/scripts/segment_wiki.py index 9a9c19b43f..e4b6bd9f8d 100644 --- a/gensim/scripts/segment_wiki.py +++ b/gensim/scripts/segment_wiki.py @@ -19,22 +19,24 @@ python -m gensim.scripts.segment_wiki -i -f enwiki-latest-pages-articles.xml.bz2 -o enwiki-latest.json.gz -#. Read output in simple way +#. Read output in simple way: + +.. sourcecode:: pycon >>> from smart_open import smart_open >>> import json >>> >>> # iterate over the plain text data we just created >>> for line in smart_open('enwiki-latest.json.gz'): - >>> # decode each JSON line into a Python dictionary object - >>> article = json.loads(line) + >>> # decode each JSON line into a Python dictionary object + >>> article = json.loads(line) >>> - >>> # each article has a "title", a mapping of interlinks and a list of "section_titles" and "section_texts". 
- >>> print("Article title: %s" % article['title']) - >>> print("Interlinks: %s" + article['interlinks']) - >>> for section_title, section_text in zip(article['section_titles'], article['section_texts']): - >>> print("Section title: %s" % section_title) - >>> print("Section text: %s" % section_text) + >>> # each article has a "title", a mapping of interlinks and a list of "section_titles" and "section_texts". + >>> print("Article title: %s" % article['title']) + >>> print("Interlinks: %s" + article['interlinks']) + >>> for section_title, section_text in zip(article['section_titles'], article['section_texts']): + >>> print("Section title: %s" % section_title) + >>> print("Section text: %s" % section_text) Notes @@ -300,10 +302,12 @@ def get_texts_with_sections(self): etc are ignored). Note that this iterates over the **texts**; if you want vectors, just use - the standard corpus interface instead of this function:: + the standard corpus interface instead of this function: + + .. sourcecode:: pycon - >>> for vec in wiki_corpus: - >>> print(vec) + >>> for vec in wiki_corpus: + >>> print(vec) Yields ------ diff --git a/gensim/similarities/docsim.py b/gensim/similarities/docsim.py index 8b59df04e3..d033eb4fc7 100755 --- a/gensim/similarities/docsim.py +++ b/gensim/similarities/docsim.py @@ -25,24 +25,28 @@ Once the index has been initialized, you can query for document similarity simply by ->>> from gensim.test.utils import common_corpus, common_dictionary, get_tmpfile ->>> ->>> index_tmpfile = get_tmpfile("index") ->>> query = [(1, 2), (6, 1), (7, 2)] ->>> ->>> index = Similarity(index_tmpfile, common_corpus, num_features=len(common_dictionary)) # build the index ->>> similarities = index[query] # get similarities between the query and all index documents +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary, get_tmpfile + >>> + >>> index_tmpfile = get_tmpfile("index") + >>> query = [(1, 2), (6, 1), (7, 2)] + >>> + >>> index = Similarity(index_tmpfile, common_corpus, num_features=len(common_dictionary)) # build the index + >>> similarities = index[query] # get similarities between the query and all index documents If you have more query documents, you can submit them all at once, in a batch +.. sourcecode:: pycon ->>> from gensim.test.utils import common_corpus, common_dictionary, get_tmpfile ->>> ->>> index_tmpfile = get_tmpfile("index") ->>> batch_of_documents = common_corpus[:] # only as example ->>> index = Similarity(index_tmpfile, common_corpus, num_features=len(common_dictionary)) # build the index ->>> ->>> for similarities in index[batch_of_documents]: # the batch is simply an iterable of documents, aka gensim corpus. -... pass + >>> from gensim.test.utils import common_corpus, common_dictionary, get_tmpfile + >>> + >>> index_tmpfile = get_tmpfile("index") + >>> batch_of_documents = common_corpus[:] # only as example + >>> index = Similarity(index_tmpfile, common_corpus, num_features=len(common_dictionary)) # build the index + >>> + >>> # the batch is simply an iterable of documents, aka gensim corpus: + >>> for similarities in index[batch_of_documents]: + ... pass The benefit of this batch (aka "chunked") querying is a much better performance. To see the speed-up on your machine, run ``python -m gensim.test.simspeed`` @@ -51,14 +55,15 @@ There is also a special syntax for when you need similarity of documents in the index to the index itself (i.e. queries = the indexed documents themselves). 
This special syntax uses the faster, batch queries internally and **is ideal for all-vs-all pairwise similarities**: +.. sourcecode:: pycon ->>> from gensim.test.utils import common_corpus, common_dictionary, get_tmpfile ->>> ->>> index_tmpfile = get_tmpfile("index") ->>> index = Similarity(index_tmpfile, common_corpus, num_features=len(common_dictionary)) # build the index ->>> ->>> for similarities in index: # yield similarities of the 1st indexed document, then 2nd... -... pass + >>> from gensim.test.utils import common_corpus, common_dictionary, get_tmpfile + >>> + >>> index_tmpfile = get_tmpfile("index") + >>> index = Similarity(index_tmpfile, common_corpus, num_features=len(common_dictionary)) # build the index + >>> + >>> for similarities in index: # yield similarities of the 1st indexed document, then 2nd... + ... pass """ @@ -238,25 +243,27 @@ class Similarity(interfaces.SimilarityABC): Examples -------- - >>> from gensim.corpora.textcorpus import TextCorpus - >>> from gensim.test.utils import datapath, get_tmpfile - >>> from gensim.similarities import Similarity - >>> - >>> corpus = TextCorpus(datapath('testcorpus.mm')) - >>> index_temp = get_tmpfile("index") - >>> index = Similarity(index_temp, corpus, num_features=400) # create index - >>> - >>> query = next(iter(corpus)) - >>> result = index[query] # search similar to `query` in index - >>> - >>> for sims in index[corpus]: # if you have more query documents, you can submit them all at once, in a batch - ... pass - >>> - >>> # There is also a special syntax for when you need similarity of documents in the index - >>> # to the index itself (i.e. queries=indexed documents themselves). This special syntax - >>> # uses the faster, batch queries internally and **is ideal for all-vs-all pairwise similarities**: - >>> for similarities in index: # yield similarities of the 1st indexed document, then 2nd... - ... pass + .. sourcecode:: pycon + + >>> from gensim.corpora.textcorpus import TextCorpus + >>> from gensim.test.utils import datapath, get_tmpfile + >>> from gensim.similarities import Similarity + >>> + >>> corpus = TextCorpus(datapath('testcorpus.mm')) + >>> index_temp = get_tmpfile("index") + >>> index = Similarity(index_temp, corpus, num_features=400) # create index + >>> + >>> query = next(iter(corpus)) + >>> result = index[query] # search similar to `query` in index + >>> + >>> for sims in index[corpus]: # if you have more query documents, you can submit them all at once, in a batch + ... pass + >>> + >>> # There is also a special syntax for when you need similarity of documents in the index + >>> # to the index itself (i.e. queries=indexed documents themselves). This special syntax + >>> # uses the faster, batch queries internally and **is ideal for all-vs-all pairwise similarities**: + >>> for similarities in index: # yield similarities of the 1st indexed document, then 2nd... + ... pass See Also -------- @@ -348,16 +355,18 @@ def add_documents(self, corpus): Examples -------- - >>> from gensim.corpora.textcorpus import TextCorpus - >>> from gensim.test.utils import datapath, get_tmpfile - >>> from gensim.similarities import Similarity - >>> - >>> corpus = TextCorpus(datapath('testcorpus.mm')) - >>> index_temp = get_tmpfile("index") - >>> index = Similarity(index_temp, corpus, num_features=400) # create index - >>> - >>> one_more_corpus = TextCorpus(datapath('testcorpus.txt')) - >>> index.add_documents(one_more_corpus) # add more documents in corpus + .. 
sourcecode:: pycon + + >>> from gensim.corpora.textcorpus import TextCorpus + >>> from gensim.test.utils import datapath, get_tmpfile + >>> from gensim.similarities import Similarity + >>> + >>> corpus = TextCorpus(datapath('testcorpus.mm')) + >>> index_temp = get_tmpfile("index") + >>> index = Similarity(index_temp, corpus, num_features=400) # create index + >>> + >>> one_more_corpus = TextCorpus(datapath('testcorpus.txt')) + >>> index.add_documents(one_more_corpus) # add more documents in corpus """ min_ratio = 1.0 # 0.5 to only reopen shards that are <50% complete @@ -490,14 +499,15 @@ def __getitem__(self, query): Examples -------- - >>> from gensim.corpora.textcorpus import TextCorpus - >>> from gensim.test.utils import datapath - >>> from gensim.similarities import Similarity - >>> import gensim.downloader as api - >>> - >>> corpus = TextCorpus(datapath('testcorpus.txt')) - >>> index = Similarity('temp', corpus, num_features=400) - >>> result = index[corpus] # pairwise similarities of each document against each document + .. sourcecode:: pycon + + >>> from gensim.corpora.textcorpus import TextCorpus + >>> from gensim.test.utils import datapath + >>> from gensim.similarities import Similarity + >>> + >>> corpus = TextCorpus(datapath('testcorpus.txt')) + >>> index = Similarity('temp', corpus, num_features=400) + >>> result = index[corpus] # pairwise similarities of each document against each document """ self.close_shard() # no-op if no documents added to index since last query @@ -562,15 +572,17 @@ def vector_by_id(self, docpos): Examples -------- - >>> from gensim.corpora.textcorpus import TextCorpus - >>> from gensim.test.utils import datapath - >>> from gensim.similarities import Similarity - >>> import gensim.downloader as api - >>> - >>> # Create index: - >>> corpus = TextCorpus(datapath('testcorpus.txt')) - >>> index = Similarity('temp', corpus, num_features=400) - >>> vector = index.vector_by_id(1) + + .. sourcecode:: pycon + + >>> from gensim.corpora.textcorpus import TextCorpus + >>> from gensim.test.utils import datapath + >>> from gensim.similarities import Similarity + >>> + >>> # Create index: + >>> corpus = TextCorpus(datapath('testcorpus.txt')) + >>> index = Similarity('temp', corpus, num_features=400) + >>> vector = index.vector_by_id(1) """ self.close_shard() # no-op if no documents added to index since last query @@ -599,13 +611,16 @@ def similarity_by_id(self, docpos): Examples -------- - >>> from gensim.corpora.textcorpus import TextCorpus - >>> from gensim.test.utils import datapath - >>> from gensim.similarities import Similarity - >>> - >>> corpus = TextCorpus(datapath('testcorpus.txt')) - >>> index = Similarity('temp', corpus, num_features=400) - >>> similarities = index.similarity_by_id(1) + + .. 
sourcecode:: pycon + + >>> from gensim.corpora.textcorpus import TextCorpus + >>> from gensim.test.utils import datapath + >>> from gensim.similarities import Similarity + >>> + >>> corpus = TextCorpus(datapath('testcorpus.txt')) + >>> index = Similarity('temp', corpus, num_features=400) + >>> similarities = index.similarity_by_id(1) """ query = self.vector_by_id(docpos) @@ -692,18 +707,21 @@ def save(self, fname=None, *args, **kwargs): Examples -------- - >>> from gensim.corpora.textcorpus import TextCorpus - >>> from gensim.test.utils import datapath, get_tmpfile - >>> from gensim.similarities import Similarity - >>> - >>> temp_fname = get_tmpfile("index") - >>> output_fname = get_tmpfile("saved_index") - >>> - >>> corpus = TextCorpus(datapath('testcorpus.txt')) - >>> index = Similarity(output_fname, corpus, num_features=400) - >>> - >>> index.save(output_fname) - >>> loaded_index = index.load(output_fname) + + .. sourcecode:: pycon + + >>> from gensim.corpora.textcorpus import TextCorpus + >>> from gensim.test.utils import datapath, get_tmpfile + >>> from gensim.similarities import Similarity + >>> + >>> temp_fname = get_tmpfile("index") + >>> output_fname = get_tmpfile("saved_index") + >>> + >>> corpus = TextCorpus(datapath('testcorpus.txt')) + >>> index = Similarity(output_fname, corpus, num_features=400) + >>> + >>> index.save(output_fname) + >>> loaded_index = index.load(output_fname) """ self.close_shard() @@ -726,12 +744,15 @@ class MatrixSimilarity(interfaces.SimilarityABC): Examples -------- - >>> from gensim.test.utils import common_corpus, common_dictionary - >>> from gensim.similarities import MatrixSimilarity - >>> - >>> query = [(1, 2), (5, 4)] - >>> index = MatrixSimilarity(common_corpus, num_features=len(common_dictionary)) - >>> sims = index[query] + + .. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.similarities import MatrixSimilarity + >>> + >>> query = [(1, 2), (5, 4)] + >>> index = MatrixSimilarity(common_corpus, num_features=len(common_dictionary)) + >>> sims = index[query] """ def __init__(self, corpus, num_best=None, dtype=numpy.float32, num_features=None, chunksize=256, corpus_len=None): @@ -844,22 +865,25 @@ class SoftCosineSimilarity(interfaces.SimilarityABC): Examples -------- - >>> from gensim.test.utils import common_texts - >>> from gensim.corpora import Dictionary - >>> from gensim.models import Word2Vec - >>> from gensim.similarities import SoftCosineSimilarity - >>> - >>> model = Word2Vec(common_texts, size=20, min_count=1) # train word-vectors - >>> dictionary = Dictionary(common_texts) - >>> bow_corpus = [dictionary.doc2bow(document) for document in common_texts] - >>> - >>> similarity_matrix = model.wv.similarity_matrix(dictionary) # construct similarity matrix - >>> index = SoftCosineSimilarity(bow_corpus, similarity_matrix, num_best=10) - >>> - >>> # Make a query. - >>> query = 'graph trees computer'.split() - >>> # calculate similarity between query and each doc from bow_corpus - >>> sims = index[dictionary.doc2bow(query)] + + .. 
sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.corpora import Dictionary + >>> from gensim.models import Word2Vec + >>> from gensim.similarities import SoftCosineSimilarity + >>> + >>> model = Word2Vec(common_texts, size=20, min_count=1) # train word-vectors + >>> dictionary = Dictionary(common_texts) + >>> bow_corpus = [dictionary.doc2bow(document) for document in common_texts] + >>> + >>> similarity_matrix = model.wv.similarity_matrix(dictionary) # construct similarity matrix + >>> index = SoftCosineSimilarity(bow_corpus, similarity_matrix, num_best=10) + >>> + >>> # Make a query. + >>> query = 'graph trees computer'.split() + >>> # calculate similarity between query and each doc from bow_corpus + >>> sims = index[dictionary.doc2bow(query)] Check out `Tutorial Notebook `_ @@ -971,19 +995,22 @@ class WmdSimilarity(interfaces.SimilarityABC): Example ------- - >>> from gensim.test.utils import common_texts - >>> from gensim.corpora import Dictionary - >>> from gensim.models import Word2Vec - >>> from gensim.similarities import WmdSimilarity - >>> - >>> model = Word2Vec(common_texts, size=20, min_count=1) # train word-vectors - >>> dictionary = Dictionary(common_texts) - >>> bow_corpus = [dictionary.doc2bow(document) for document in common_texts] - >>> - >>> index = WmdSimilarity(bow_corpus, model) - >>> # Make query. - >>> query = 'trees' - >>> sims = index[query] + + .. sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.corpora import Dictionary + >>> from gensim.models import Word2Vec + >>> from gensim.similarities import WmdSimilarity + >>> + >>> model = Word2Vec(common_texts, size=20, min_count=1) # train word-vectors + >>> dictionary = Dictionary(common_texts) + >>> bow_corpus = [dictionary.doc2bow(document) for document in common_texts] + >>> + >>> index = WmdSimilarity(bow_corpus, model) + >>> # Make query. + >>> query = 'trees' + >>> sims = index[query] """ def __init__(self, corpus, w2v_model, num_best=None, normalize_w2v_and_replace=True, chunksize=256): diff --git a/gensim/similarities/index.py b/gensim/similarities/index.py index 2c1a5e66fa..681fe58ef4 100644 --- a/gensim/similarities/index.py +++ b/gensim/similarities/index.py @@ -70,15 +70,17 @@ def __init__(self, model=None, num_trees=None): Examples -------- - >>> from gensim.similarities.index import AnnoyIndexer - >>> from gensim.models import Word2Vec - >>> - >>> sentences = [['cute', 'cat', 'say', 'meow'], ['cute', 'dog', 'say', 'woof']] - >>> model = Word2Vec(sentences, min_count=1, seed=1) - >>> - >>> indexer = AnnoyIndexer(model, 2) - >>> model.most_similar("cat", topn=2, indexer=indexer) - [('cat', 1.0), ('dog', 0.32011348009109497)] + .. 
sourcecode:: pycon + + >>> from gensim.similarities.index import AnnoyIndexer + >>> from gensim.models import Word2Vec + >>> + >>> sentences = [['cute', 'cat', 'say', 'meow'], ['cute', 'dog', 'say', 'woof']] + >>> model = Word2Vec(sentences, min_count=1, seed=1) + >>> + >>> indexer = AnnoyIndexer(model, 2) + >>> model.most_similar("cat", topn=2, indexer=indexer) + [('cat', 1.0), ('dog', 0.32011348009109497)] """ self.index = None @@ -127,20 +129,22 @@ def load(self, fname): Examples -------- - >>> from gensim.similarities.index import AnnoyIndexer - >>> from gensim.models import Word2Vec - >>> from tempfile import mkstemp - >>> - >>> sentences = [['cute', 'cat', 'say', 'meow'], ['cute', 'dog', 'say', 'woof']] - >>> model = Word2Vec(sentences, min_count=1, seed=1, iter=10) - >>> - >>> indexer = AnnoyIndexer(model, 2) - >>> _, temp_fn = mkstemp() - >>> indexer.save(temp_fn) - >>> - >>> new_indexer = AnnoyIndexer() - >>> new_indexer.load(temp_fn) - >>> new_indexer.model = model + .. sourcecode:: pycon + + >>> from gensim.similarities.index import AnnoyIndexer + >>> from gensim.models import Word2Vec + >>> from tempfile import mkstemp + >>> + >>> sentences = [['cute', 'cat', 'say', 'meow'], ['cute', 'dog', 'say', 'woof']] + >>> model = Word2Vec(sentences, min_count=1, seed=1, iter=10) + >>> + >>> indexer = AnnoyIndexer(model, 2) + >>> _, temp_fn = mkstemp() + >>> indexer.save(temp_fn) + >>> + >>> new_indexer = AnnoyIndexer() + >>> new_indexer.load(temp_fn) + >>> new_indexer.model = model """ fname_dict = fname + '.d' diff --git a/gensim/sklearn_api/atmodel.py b/gensim/sklearn_api/atmodel.py index 085ed9a745..1b07537c27 100644 --- a/gensim/sklearn_api/atmodel.py +++ b/gensim/sklearn_api/atmodel.py @@ -11,21 +11,23 @@ Examples -------- ->>> from gensim.test.utils import common_texts, common_dictionary, common_corpus ->>> from gensim.sklearn_api.atmodel import AuthorTopicTransformer ->>> ->>> # Pass a mapping from authors to the documents they contributed to. ->>> author2doc = { -... 'john': [0, 1, 2, 3, 4, 5, 6], -... 'jane': [2, 3, 4, 5, 6, 7, 8], -... 'jack': [0, 2, 4, 6, 8] -... } ->>> ->>> # Lets use the model to discover 2 different topics. ->>> model = AuthorTopicTransformer(id2word=common_dictionary, author2doc=author2doc, num_topics=2, passes=100) ->>> ->>> # In which of those 2 topics does jack mostly contribute to? ->>> topic_dist = model.fit(common_corpus).transform('jack') +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_dictionary, common_corpus + >>> from gensim.sklearn_api.atmodel import AuthorTopicTransformer + >>> + >>> # Pass a mapping from authors to the documents they contributed to. + >>> author2doc = { + ... 'john': [0, 1, 2, 3, 4, 5, 6], + ... 'jane': [2, 3, 4, 5, 6, 7, 8], + ... 'jack': [0, 2, 4, 6, 8] + ... } + >>> + >>> # Lets use the model to discover 2 different topics. + >>> model = AuthorTopicTransformer(id2word=common_dictionary, author2doc=author2doc, num_topics=2, passes=100) + >>> + >>> # In which of those 2 topics does jack mostly contribute to? 
+ >>> topic_dist = model.fit(common_corpus).transform('jack') """ import numpy as np diff --git a/gensim/sklearn_api/d2vmodel.py b/gensim/sklearn_api/d2vmodel.py index fbc8375e5a..e967c84eb1 100644 --- a/gensim/sklearn_api/d2vmodel.py +++ b/gensim/sklearn_api/d2vmodel.py @@ -10,11 +10,13 @@ Examples -------- ->>> from gensim.test.utils import common_texts ->>> from gensim.sklearn_api import D2VTransformer ->>> ->>> model = D2VTransformer(min_count=1, size=5) ->>> docvecs = model.fit_transform(common_texts) # represent `common_texts` as vectors +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.sklearn_api import D2VTransformer + >>> + >>> model = D2VTransformer(min_count=1, size=5) + >>> docvecs = model.fit_transform(common_texts) # represent `common_texts` as vectors """ import numpy as np diff --git a/gensim/sklearn_api/ftmodel.py b/gensim/sklearn_api/ftmodel.py index 606a5f3d21..a1edd6c338 100644 --- a/gensim/sklearn_api/ftmodel.py +++ b/gensim/sklearn_api/ftmodel.py @@ -12,29 +12,33 @@ Examples -------- ->>> from gensim.test.utils import common_texts ->>> from gensim.sklearn_api import FTTransformer ->>> ->>> # Create a model to represent each word by a 10 dimensional vector. ->>> model = FTTransformer(size=10, min_count=1, seed=1) ->>> ->>> # What is the vector representations of the word 'graph' and 'system'? ->>> wordvecs = model.fit(common_texts).transform(['graph', 'system']) ->>> assert wordvecs.shape == (2, 10) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.sklearn_api import FTTransformer + >>> + >>> # Create a model to represent each word by a 10 dimensional vector. + >>> model = FTTransformer(size=10, min_count=1, seed=1) + >>> + >>> # What is the vector representations of the word 'graph' and 'system'? + >>> wordvecs = model.fit(common_texts).transform(['graph', 'system']) + >>> assert wordvecs.shape == (2, 10) Retrieve word-vector for vocab and out-of-vocab word: ->>> existent_word = "system" ->>> existent_word in model.gensim_model.wv.vocab -True ->>> existent_word_vec = model.transform(existent_word) # numpy vector of a word ->>> assert existent_word_vec.shape == (1, 10) ->>> ->>> oov_word = "sys" ->>> oov_word in model.gensim_model.wv.vocab -False ->>> oov_word_vec = model.transform(oov_word) # numpy vector of a word ->>> assert oov_word_vec.shape == (1, 10) +.. sourcecode:: pycon + + >>> existent_word = "system" + >>> existent_word in model.gensim_model.wv.vocab + True + >>> existent_word_vec = model.transform(existent_word) # numpy vector of a word + >>> assert existent_word_vec.shape == (1, 10) + >>> + >>> oov_word = "sys" + >>> oov_word in model.gensim_model.wv.vocab + False + >>> oov_word_vec = model.transform(oov_word) # numpy vector of a word + >>> assert oov_word_vec.shape == (1, 10) """ import numpy as np diff --git a/gensim/sklearn_api/hdp.py b/gensim/sklearn_api/hdp.py index f62b46d9c5..e98c1916c8 100644 --- a/gensim/sklearn_api/hdp.py +++ b/gensim/sklearn_api/hdp.py @@ -11,12 +11,14 @@ Examples -------- ->>> from gensim.test.utils import common_dictionary, common_corpus ->>> from gensim.sklearn_api import HdpTransformer ->>> ->>> # Lets extract the distribution of each document in topics ->>> model = HdpTransformer(id2word=common_dictionary) ->>> distr = model.fit_transform(common_corpus) +.. 
sourcecode:: pycon + + >>> from gensim.test.utils import common_dictionary, common_corpus + >>> from gensim.sklearn_api import HdpTransformer + >>> + >>> # Lets extract the distribution of each document in topics + >>> model = HdpTransformer(id2word=common_dictionary) + >>> distr = model.fit_transform(common_corpus) """ import numpy as np diff --git a/gensim/sklearn_api/ldamodel.py b/gensim/sklearn_api/ldamodel.py index 33f2575acc..ed12b95369 100644 --- a/gensim/sklearn_api/ldamodel.py +++ b/gensim/sklearn_api/ldamodel.py @@ -11,12 +11,14 @@ Examples -------- ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> from gensim.sklearn_api import LdaTransformer ->>> ->>> # Reduce each document to 2 dimensions (topics) using the sklearn interface. ->>> model = LdaTransformer(num_topics=2, id2word=common_dictionary, iterations=20, random_state=1) ->>> docvecs = model.fit_transform(common_corpus) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.sklearn_api import LdaTransformer + >>> + >>> # Reduce each document to 2 dimensions (topics) using the sklearn interface. + >>> model = LdaTransformer(num_topics=2, id2word=common_dictionary, iterations=20, random_state=1) + >>> docvecs = model.fit_transform(common_corpus) """ import numpy as np diff --git a/gensim/sklearn_api/ldaseqmodel.py b/gensim/sklearn_api/ldaseqmodel.py index 277578e55a..cb70242129 100644 --- a/gensim/sklearn_api/ldaseqmodel.py +++ b/gensim/sklearn_api/ldaseqmodel.py @@ -11,15 +11,17 @@ Examples -------- ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> from gensim.sklearn_api.ldaseqmodel import LdaSeqTransformer ->>> ->>> # Create a sequential LDA transformer to extract 2 topics from the common corpus. ->>> # Divide the work into 3 unequal time slices. ->>> model = LdaSeqTransformer(id2word=common_dictionary, num_topics=2, time_slice=[3, 4, 2], initialize='gensim') ->>> ->>> # Each document almost entirely belongs to one of the two topics. ->>> transformed_corpus = model.fit_transform(common_corpus) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.sklearn_api.ldaseqmodel import LdaSeqTransformer + >>> + >>> # Create a sequential LDA transformer to extract 2 topics from the common corpus. + >>> # Divide the work into 3 unequal time slices. + >>> model = LdaSeqTransformer(id2word=common_dictionary, num_topics=2, time_slice=[3, 4, 2], initialize='gensim') + >>> + >>> # Each document almost entirely belongs to one of the two topics. + >>> transformed_corpus = model.fit_transform(common_corpus) """ import numpy as np diff --git a/gensim/sklearn_api/lsimodel.py b/gensim/sklearn_api/lsimodel.py index 709c911fa4..e078dac3d3 100644 --- a/gensim/sklearn_api/lsimodel.py +++ b/gensim/sklearn_api/lsimodel.py @@ -13,21 +13,23 @@ -------- Integrate with sklearn Pipelines: ->>> from sklearn.pipeline import Pipeline ->>> from sklearn import linear_model ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> from gensim.sklearn_api import LsiTransformer ->>> ->>> # Create stages for our pipeline (including gensim and sklearn models alike). ->>> model = LsiTransformer(num_topics=15, id2word=common_dictionary) ->>> clf = linear_model.LogisticRegression(penalty='l2', C=0.1) ->>> pipe = Pipeline([('features', model,), ('classifier', clf)]) ->>> ->>> # Create some random binary labels for our documents. 
->>> labels = np.random.choice([0, 1], len(common_corpus)) ->>> ->>> # How well does our pipeline perform on the training set? ->>> score = pipe.fit(common_corpus, labels).score(common_corpus, labels) +.. sourcecode:: pycon + + >>> from sklearn.pipeline import Pipeline + >>> from sklearn import linear_model + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.sklearn_api import LsiTransformer + >>> + >>> # Create stages for our pipeline (including gensim and sklearn models alike). + >>> model = LsiTransformer(num_topics=15, id2word=common_dictionary) + >>> clf = linear_model.LogisticRegression(penalty='l2', C=0.1) + >>> pipe = Pipeline([('features', model,), ('classifier', clf)]) + >>> + >>> # Create some random binary labels for our documents. + >>> labels = np.random.choice([0, 1], len(common_corpus)) + >>> + >>> # How well does our pipeline perform on the training set? + >>> score = pipe.fit(common_corpus, labels).score(common_corpus, labels) """ import numpy as np diff --git a/gensim/sklearn_api/phrases.py b/gensim/sklearn_api/phrases.py index 8c32ab4dd0..e4b2c6c642 100644 --- a/gensim/sklearn_api/phrases.py +++ b/gensim/sklearn_api/phrases.py @@ -10,19 +10,21 @@ Examples -------- ->>> from gensim.sklearn_api.phrases import PhrasesTransformer ->>> ->>> # Create the model. Make sure no term is ignored and combinations seen 3+ times are captured. ->>> m = PhrasesTransformer(min_count=1, threshold=3) ->>> texts = [ -... ['I', 'love', 'computer', 'science'], -... ['computer', 'science', 'is', 'my', 'passion'], -... ['I', 'studied', 'computer', 'science'] -... ] ->>> ->>> # Use sklearn fit_transform to see the transformation. ->>> # Since computer and science were seen together 3+ times they are considered a phrase. ->>> assert ['I', 'love', 'computer_science'] == m.fit_transform(texts)[0] +.. sourcecode:: pycon + + >>> from gensim.sklearn_api.phrases import PhrasesTransformer + >>> + >>> # Create the model. Make sure no term is ignored and combinations seen 3+ times are captured. + >>> m = PhrasesTransformer(min_count=1, threshold=3) + >>> texts = [ + ... ['I', 'love', 'computer', 'science'], + ... ['computer', 'science', 'is', 'my', 'passion'], + ... ['I', 'studied', 'computer', 'science'] + ... ] + >>> + >>> # Use sklearn fit_transform to see the transformation. + >>> # Since computer and science were seen together 3+ times they are considered a phrase. + >>> assert ['I', 'love', 'computer_science'] == m.fit_transform(texts)[0] """ from six import string_types diff --git a/gensim/sklearn_api/rpmodel.py b/gensim/sklearn_api/rpmodel.py index 323aee6a34..e676c1b3fa 100644 --- a/gensim/sklearn_api/rpmodel.py +++ b/gensim/sklearn_api/rpmodel.py @@ -11,14 +11,16 @@ Examples -------- ->>> from gensim.sklearn_api.rpmodel import RpTransformer ->>> from gensim.test.utils import common_dictionary, common_corpus ->>> ->>> # Initialize and fit the model. ->>> model = RpTransformer(id2word=common_dictionary).fit(common_corpus) ->>> ->>> # Use the trained model to transform a document. ->>> result = model.transform(common_corpus[3]) +.. sourcecode:: pycon + + >>> from gensim.sklearn_api.rpmodel import RpTransformer + >>> from gensim.test.utils import common_dictionary, common_corpus + >>> + >>> # Initialize and fit the model. + >>> model = RpTransformer(id2word=common_dictionary).fit(common_corpus) + >>> + >>> # Use the trained model to transform a document. 
+ >>> result = model.transform(common_corpus[3]) """ diff --git a/gensim/sklearn_api/text2bow.py b/gensim/sklearn_api/text2bow.py index dd6b555c7b..7cae8a609b 100644 --- a/gensim/sklearn_api/text2bow.py +++ b/gensim/sklearn_api/text2bow.py @@ -10,17 +10,19 @@ Examples -------- ->>> from gensim.sklearn_api import Text2BowTransformer ->>> ->>> # Get a corpus as an iterable of unicode strings. ->>> texts = [u'complier system computer', u'loading computer system'] ->>> ->>> # Create a transformer.. ->>> model = Text2BowTransformer() ->>> ->>> # Use sklearn-style `fit_transform` to get the BOW representation of each document. ->>> model.fit_transform(texts) -[[(0, 1), (1, 1), (2, 1)], [(1, 1), (2, 1), (3, 1)]] +.. sourcecode:: pycon + + >>> from gensim.sklearn_api import Text2BowTransformer + >>> + >>> # Get a corpus as an iterable of unicode strings. + >>> texts = [u'complier system computer', u'loading computer system'] + >>> + >>> # Create a transformer.. + >>> model = Text2BowTransformer() + >>> + >>> # Use sklearn-style `fit_transform` to get the BOW representation of each document. + >>> model.fit_transform(texts) + [[(0, 1), (1, 1), (2, 1)], [(1, 1), (2, 1), (3, 1)]] """ from six import string_types diff --git a/gensim/sklearn_api/tfidf.py b/gensim/sklearn_api/tfidf.py index f8d1615e8d..4484037572 100644 --- a/gensim/sklearn_api/tfidf.py +++ b/gensim/sklearn_api/tfidf.py @@ -10,12 +10,14 @@ Examples -------- ->>> from gensim.test.utils import common_corpus, common_dictionary ->>> from gensim.sklearn_api import TfIdfTransformer ->>> ->>> # Transform the word counts inversely to their global frequency using the sklearn interface. ->>> model = TfIdfTransformer(dictionary=common_dictionary) ->>> tfidf_corpus = model.fit_transform(common_corpus) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_corpus, common_dictionary + >>> from gensim.sklearn_api import TfIdfTransformer + >>> + >>> # Transform the word counts inversely to their global frequency using the sklearn interface. + >>> model = TfIdfTransformer(dictionary=common_dictionary) + >>> tfidf_corpus = model.fit_transform(common_corpus) """ from sklearn.base import TransformerMixin, BaseEstimator diff --git a/gensim/sklearn_api/w2vmodel.py b/gensim/sklearn_api/w2vmodel.py index 6c63dc6397..07091c2dde 100644 --- a/gensim/sklearn_api/w2vmodel.py +++ b/gensim/sklearn_api/w2vmodel.py @@ -12,15 +12,17 @@ Examples -------- ->>> from gensim.test.utils import common_texts ->>> from gensim.sklearn_api import W2VTransformer ->>> ->>> # Create a model to represent each word by a 10 dimensional vector. ->>> model = W2VTransformer(size=10, min_count=1, seed=1) ->>> ->>> # What is the vector representation of the word 'graph'? ->>> wordvecs = model.fit(common_texts).transform(['graph', 'system']) ->>> assert wordvecs.shape == (2, 10) +.. sourcecode:: pycon + + >>> from gensim.test.utils import common_texts + >>> from gensim.sklearn_api import W2VTransformer + >>> + >>> # Create a model to represent each word by a 10 dimensional vector. + >>> model = W2VTransformer(size=10, min_count=1, seed=1) + >>> + >>> # What is the vector representation of the word 'graph'? 
+ >>> wordvecs = model.fit(common_texts).transform(['graph', 'system']) + >>> assert wordvecs.shape == (2, 10) """ import numpy as np diff --git a/gensim/summarization/bm25.py b/gensim/summarization/bm25.py index 50019c32fe..e516d625f6 100644 --- a/gensim/summarization/bm25.py +++ b/gensim/summarization/bm25.py @@ -16,13 +16,16 @@ Examples -------- ->>> from gensim.summarization.bm25 import get_bm25_weights ->>> corpus = [ -... ["black", "cat", "white", "cat"], -... ["cat", "outer", "space"], -... ["wag", "dog"] -... ] ->>> result = get_bm25_weights(corpus, n_jobs=-1) + +.. sourcecode:: pycon + + >>> from gensim.summarization.bm25 import get_bm25_weights + >>> corpus = [ + ... ["black", "cat", "white", "cat"], + ... ["cat", "outer", "space"], + ... ["wag", "dog"] + ... ] + >>> result = get_bm25_weights(corpus, n_jobs=-1) Data: @@ -198,13 +201,15 @@ def get_bm25_weights(corpus, n_jobs=1): Examples -------- - >>> from gensim.summarization.bm25 import get_bm25_weights - >>> corpus = [ - ... ["black", "cat", "white", "cat"], - ... ["cat", "outer", "space"], - ... ["wag", "dog"] - ... ] - >>> result = get_bm25_weights(corpus, n_jobs=-1) + .. sourcecode:: pycon + + >>> from gensim.summarization.bm25 import get_bm25_weights + >>> corpus = [ + ... ["black", "cat", "white", "cat"], + ... ["cat", "outer", "space"], + ... ["wag", "dog"] + ... ] + >>> result = get_bm25_weights(corpus, n_jobs=-1) """ bm25 = BM25(corpus) diff --git a/gensim/summarization/commons.py b/gensim/summarization/commons.py index f1a2264e46..0735a357e7 100644 --- a/gensim/summarization/commons.py +++ b/gensim/summarization/commons.py @@ -10,18 +10,21 @@ -------- Create simple graph and add edges. Let's take a look at nodes. +.. sourcecode:: pycon ->>> gg = build_graph(['Felidae', 'Lion', 'Tiger', 'Wolf']) ->>> gg.add_edge(("Felidae", "Lion")) ->>> gg.add_edge(("Felidae", "Tiger")) ->>> sorted(gg.nodes()) -['Felidae', 'Lion', 'Tiger', 'Wolf'] + >>> gg = build_graph(['Felidae', 'Lion', 'Tiger', 'Wolf']) + >>> gg.add_edge(("Felidae", "Lion")) + >>> gg.add_edge(("Felidae", "Tiger")) + >>> sorted(gg.nodes()) + ['Felidae', 'Lion', 'Tiger', 'Wolf'] Remove nodes with no edges. ->>> remove_unreachable_nodes(gg) ->>> sorted(gg.nodes()) -['Felidae', 'Lion', 'Tiger'] +.. sourcecode:: pycon + + >>> remove_unreachable_nodes(gg) + >>> sorted(gg.nodes()) + ['Felidae', 'Lion', 'Tiger'] """ diff --git a/gensim/summarization/graph.py b/gensim/summarization/graph.py index 79cd1a160f..12a43d06ce 100644 --- a/gensim/summarization/graph.py +++ b/gensim/summarization/graph.py @@ -11,25 +11,31 @@ class Graph (based on IGraph) which implements undirected graph. Create simple graph with 4 nodes. ->>> g = Graph() ->>> g.add_node('Felidae') ->>> g.add_node('Lion') ->>> g.add_node('Tiger') ->>> g.add_node('Wolf') ->>> sorted(g.nodes()) -['Felidae', 'Lion', 'Tiger', 'Wolf'] +.. sourcecode:: pycon + + >>> g = Graph() + >>> g.add_node('Felidae') + >>> g.add_node('Lion') + >>> g.add_node('Tiger') + >>> g.add_node('Wolf') + >>> sorted(g.nodes()) + ['Felidae', 'Lion', 'Tiger', 'Wolf'] Add some edges and check neighbours. ->>> g.add_edge(("Felidae", "Lion")) ->>> g.add_edge(("Felidae", "Tiger")) ->>> g.neighbors("Felidae") -['Lion', 'Tiger'] +.. sourcecode:: pycon + + >>> g.add_edge(("Felidae", "Lion")) + >>> g.add_edge(("Felidae", "Tiger")) + >>> g.neighbors("Felidae") + ['Lion', 'Tiger'] One node has no neighbours. ->>> g.neighbors("Wolf") -[] +.. 
sourcecode:: pycon + + >>> g.neighbors("Wolf") + [] """ diff --git a/gensim/summarization/keywords.py b/gensim/summarization/keywords.py index 9f43158146..4d0523c9fb 100644 --- a/gensim/summarization/keywords.py +++ b/gensim/summarization/keywords.py @@ -9,14 +9,16 @@ -------- Extract keywords from text ->>> from gensim.summarization import keywords ->>> text='''Challenges in natural language processing frequently involve -... speech recognition, natural language understanding, natural language -... generation (frequently from formal, machine-readable logical forms), -... connecting language and machine perception, dialog systems, or some -... combination thereof.''' ->>> keywords(text).split('\\n') -[u'natural language', u'machine', u'frequently'] +.. sourcecode:: pycon + + >>> from gensim.summarization import keywords + >>> text = '''Challenges in natural language processing frequently involve + ... speech recognition, natural language understanding, natural language + ... generation (frequently from formal, machine-readable logical forms), + ... connecting language and machine perception, dialog systems, or some + ... combination thereof.''' + >>> keywords(text).split('\\n') + [u'natural language', u'machine', u'frequently'] Notes diff --git a/gensim/summarization/pagerank_weighted.py b/gensim/summarization/pagerank_weighted.py index df1352367c..e49d43fa6c 100644 --- a/gensim/summarization/pagerank_weighted.py +++ b/gensim/summarization/pagerank_weighted.py @@ -13,21 +13,25 @@ Calculate Pagerank for words ->>> from gensim.summarization.keywords import get_graph ->>> from gensim.summarization.pagerank_weighted import pagerank_weighted ->>> graph = get_graph("The road to hell is paved with good intentions.") ->>> # result will looks like {'good': 0.70432858653171504, 'hell': 0.051128871128006126, ...} ->>> result = pagerank_weighted(graph) +.. sourcecode:: pycon + + >>> from gensim.summarization.keywords import get_graph + >>> from gensim.summarization.pagerank_weighted import pagerank_weighted + >>> graph = get_graph("The road to hell is paved with good intentions.") + >>> # result will looks like {'good': 0.70432858653171504, 'hell': 0.051128871128006126, ...} + >>> result = pagerank_weighted(graph) Build matrix from graph ->>> from gensim.summarization.pagerank_weighted import build_adjacency_matrix ->>> build_adjacency_matrix(graph).todense() -matrix([[ 0., 0., 0., 0., 0.], - [ 0., 0., 1., 0., 0.], - [ 0., 1., 0., 0., 0.], - [ 0., 0., 0., 0., 0.], - [ 0., 0., 0., 0., 0.]]) +.. sourcecode:: pycon + + >>> from gensim.summarization.pagerank_weighted import build_adjacency_matrix + >>> build_adjacency_matrix(graph).todense() + matrix([[ 0., 0., 0., 0., 0.], + [ 0., 0., 1., 0., 0.], + [ 0., 1., 0., 0., 0.], + [ 0., 0., 0., 0., 0.], + [ 0., 0., 0., 0., 0.]]) """ diff --git a/gensim/summarization/summarizer.py b/gensim/summarization/summarizer.py index afeb359b15..50c77ab6c2 100644 --- a/gensim/summarization/summarizer.py +++ b/gensim/summarization/summarizer.py @@ -20,33 +20,35 @@ Example ------- ->>> from gensim.summarization.summarizer import summarize ->>> text = '''Rice Pudding - Poem by Alan Alexander Milne -... What is the matter with Mary Jane? -... She's crying with all her might and main, -... And she won't eat her dinner - rice pudding again - -... What is the matter with Mary Jane? -... What is the matter with Mary Jane? -... I've promised her dolls and a daisy-chain, -... And a book about animals - all in vain - -... What is the matter with Mary Jane? -... 
What is the matter with Mary Jane? -... She's perfectly well, and she hasn't a pain; -... But, look at her, now she's beginning again! - -... What is the matter with Mary Jane? -... What is the matter with Mary Jane? -... I've promised her sweets and a ride in the train, -... And I've begged her to stop for a bit and explain - -... What is the matter with Mary Jane? -... What is the matter with Mary Jane? -... She's perfectly well and she hasn't a pain, -... And it's lovely rice pudding for dinner again! -... What is the matter with Mary Jane?''' ->>> print(summarize(text)) -And she won't eat her dinner - rice pudding again - -I've promised her dolls and a daisy-chain, -I've promised her sweets and a ride in the train, -And it's lovely rice pudding for dinner again! +.. sourcecode:: pycon + + >>> from gensim.summarization.summarizer import summarize + >>> text = '''Rice Pudding - Poem by Alan Alexander Milne + ... What is the matter with Mary Jane? + ... She's crying with all her might and main, + ... And she won't eat her dinner - rice pudding again - + ... What is the matter with Mary Jane? + ... What is the matter with Mary Jane? + ... I've promised her dolls and a daisy-chain, + ... And a book about animals - all in vain - + ... What is the matter with Mary Jane? + ... What is the matter with Mary Jane? + ... She's perfectly well, and she hasn't a pain; + ... But, look at her, now she's beginning again! - + ... What is the matter with Mary Jane? + ... What is the matter with Mary Jane? + ... I've promised her sweets and a ride in the train, + ... And I've begged her to stop for a bit and explain - + ... What is the matter with Mary Jane? + ... What is the matter with Mary Jane? + ... She's perfectly well and she hasn't a pain, + ... And it's lovely rice pudding for dinner again! + ... What is the matter with Mary Jane?''' + >>> print(summarize(text)) + And she won't eat her dinner - rice pudding again - + I've promised her dolls and a daisy-chain, + I've promised her sweets and a ride in the train, + And it's lovely rice pudding for dinner again! """ diff --git a/gensim/summarization/textcleaner.py b/gensim/summarization/textcleaner.py index 5af6bef257..8bd0158a54 100644 --- a/gensim/summarization/textcleaner.py +++ b/gensim/summarization/textcleaner.py @@ -63,13 +63,16 @@ def split_sentences(text): Example ------- - >>> from gensim.summarization.textcleaner import split_sentences - >>> text = '''Beautiful is better than ugly. - ... Explicit is better than implicit. Simple is better than complex.''' - >>> split_sentences(text) - ['Beautiful is better than ugly.', - 'Explicit is better than implicit.', - 'Simple is better than complex.'] + + .. sourcecode:: pycon + + >>> from gensim.summarization.textcleaner import split_sentences + >>> text = '''Beautiful is better than ugly. + ... Explicit is better than implicit. Simple is better than complex.''' + >>> split_sentences(text) + ['Beautiful is better than ugly.', + 'Explicit is better than implicit.', + 'Simple is better than complex.'] """ processed = replace_abbreviations(text) @@ -91,8 +94,11 @@ def replace_abbreviations(text): Example ------- - >>> replace_abbreviations("God bless you, please, Mrs. Robinson") - God bless you, please, Mrs.@Robinson + + .. sourcecode:: pycon + + >>> replace_abbreviations("God bless you, please, Mrs. 
Robinson") + God bless you, please, Mrs.@Robinson """ return replace_with_separator(text, SEPARATOR, [AB_SENIOR, AB_ACRONYM]) @@ -161,11 +167,14 @@ def get_sentences(text): Example ------- - >>> text = "Does this text contains two sentences? Yes, it does." - >>> for sentence in get_sentences(text): - >>> print(sentence) - Does this text contains two sentences? - Yes, it does. + + .. sourcecode:: pycon + + >>> text = "Does this text contains two sentences? Yes, it does." + >>> for sentence in get_sentences(text): + >>> print(sentence) + Does this text contains two sentences? + Yes, it does. """ for match in RE_SENTENCE.finditer(text): @@ -263,11 +272,13 @@ def clean_text_by_word(text, deacc=True): Example ------- - >>> from gensim.summarization.textcleaner import clean_text_by_word - >>> clean_text_by_word("God helps those who help themselves") - {'god': Original unit: 'god' *-*-*-* Processed unit: 'god', - 'help': Original unit: 'help' *-*-*-* Processed unit: 'help', - 'helps': Original unit: 'helps' *-*-*-* Processed unit: 'help'} + .. sourcecode:: pycon + + >>> from gensim.summarization.textcleaner import clean_text_by_word + >>> clean_text_by_word("God helps those who help themselves") + {'god': Original unit: 'god' *-*-*-* Processed unit: 'god', + 'help': Original unit: 'help' *-*-*-* Processed unit: 'help', + 'helps': Original unit: 'helps' *-*-*-* Processed unit: 'help'} """ text_without_acronyms = replace_with_separator(text, "", [AB_ACRONYM_LETTERS]) @@ -297,14 +308,16 @@ def tokenize_by_word(text): Example ------- - >>> from gensim.summarization.textcleaner import tokenize_by_word - >>> g = tokenize_by_word('Veni. Vedi. Vici.') - >>> print(next(g)) - veni - >>> print(next(g)) - vedi - >>> print(next(g)) - vici + .. sourcecode:: pycon + + >>> from gensim.summarization.textcleaner import tokenize_by_word + >>> g = tokenize_by_word('Veni. Vedi. Vici.') + >>> print(next(g)) + veni + >>> print(next(g)) + vedi + >>> print(next(g)) + vici """ text_without_acronyms = replace_with_separator(text, "", [AB_ACRONYM_LETTERS]) diff --git a/gensim/test/utils.py b/gensim/test/utils.py index e33bdd1d9c..1802984e68 100644 --- a/gensim/test/utils.py +++ b/gensim/test/utils.py @@ -22,40 +22,48 @@ --------- It's easy to keep objects in temporary folder and reuse'em if needed: ->>> from gensim.models import word2vec ->>> from gensim.test.utils import get_tmpfile, common_texts ->>> ->>> model = word2vec.Word2Vec(common_texts, min_count=1) ->>> temp_path = get_tmpfile('toy_w2v') ->>> model.save(temp_path) ->>> ->>> new_model = word2vec.Word2Vec.load(temp_path) ->>> result = new_model.wv.most_similar("human", topn=1) +.. sourcecode:: pycon + + >>> from gensim.models import word2vec + >>> from gensim.test.utils import get_tmpfile, common_texts + >>> + >>> model = word2vec.Word2Vec(common_texts, min_count=1) + >>> temp_path = get_tmpfile('toy_w2v') + >>> model.save(temp_path) + >>> + >>> new_model = word2vec.Word2Vec.load(temp_path) + >>> result = new_model.wv.most_similar("human", topn=1) Let's print first document in toy dataset and then recreate it using its corpus and dictionary. ->>> from gensim.test.utils import common_texts, common_dictionary, common_corpus ->>> print(common_texts[0]) -['human', 'interface', 'computer'] ->>> assert common_dictionary.doc2bow(common_texts[0]) == common_corpus[0] +.. 
sourcecode:: pycon + + >>> from gensim.test.utils import common_texts, common_dictionary, common_corpus + >>> print(common_texts[0]) + ['human', 'interface', 'computer'] + >>> assert common_dictionary.doc2bow(common_texts[0]) == common_corpus[0] We can find our toy set in test data directory. ->>> from gensim.test.utils import datapath ->>> ->>> with open(datapath("testcorpus.txt")) as f: -... texts = [line.strip().split() for line in f] ->>> print(texts[0]) -['computer', 'human', 'interface'] +.. sourcecode:: pycon + + >>> from gensim.test.utils import datapath + >>> + >>> with open(datapath("testcorpus.txt")) as f: + ... texts = [line.strip().split() for line in f] + >>> print(texts[0]) + ['computer', 'human', 'interface'] If you don't need to keep temporary objects on disk use :func:`~gensim.test.utils.temporary_file`: ->>> from gensim.test.utils import temporary_file, common_corpus, common_dictionary ->>> from gensim.models import LdaModel ->>> ->>> with temporary_file("temp.txt") as tf: -... lda = LdaModel(common_corpus, id2word=common_dictionary, num_topics=3) -... lda.save(tf) +.. sourcecode:: pycon + + >>> from gensim.test.utils import temporary_file, common_corpus, common_dictionary + >>> from gensim.models import LdaModel + >>> + >>> with temporary_file("temp.txt") as tf: + ... lda = LdaModel(common_corpus, id2word=common_dictionary, num_topics=3) + ... lda.save(tf) """ @@ -87,12 +95,14 @@ def datapath(fname): ------- Let's get path of test GloVe data file and check if it exits. - >>> from gensim.corpora import MmCorpus - >>> from gensim.test.utils import datapath - >>> - >>> corpus = MmCorpus(datapath("testcorpus.mm")) - >>> for document in corpus: - ... pass + .. sourcecode:: pycon + + >>> from gensim.corpora import MmCorpus + >>> from gensim.test.utils import datapath + >>> + >>> corpus = MmCorpus(datapath("testcorpus.mm")) + >>> for document in corpus: + ... pass """ @@ -118,15 +128,17 @@ def get_tmpfile(suffix): -------- Using this function we may get path to temporary file and use it, for example, to store temporary model. - >>> from gensim.models import LsiModel - >>> from gensim.test.utils import get_tmpfile, common_dictionary, common_corpus - >>> - >>> tmp_f = get_tmpfile("toy_lsi_model") - >>> - >>> model = LsiModel(common_corpus, id2word=common_dictionary) - >>> model.save(tmp_f) - >>> - >>> loaded_model = LsiModel.load(tmp_f) + .. sourcecode:: pycon + + >>> from gensim.models import LsiModel + >>> from gensim.test.utils import get_tmpfile, common_dictionary, common_corpus + >>> + >>> tmp_f = get_tmpfile("toy_lsi_model") + >>> + >>> model = LsiModel(common_corpus, id2word=common_dictionary) + >>> model.save(tmp_f) + >>> + >>> loaded_model = LsiModel.load(tmp_f) """ return os.path.join(tempfile.gettempdir(), suffix) @@ -152,19 +164,21 @@ def temporary_file(name=""): This example demonstrates that created temporary directory (and included files) will deleted at the end of context. - >>> import os - >>> from gensim.test.utils import temporary_file - >>> with temporary_file("temp.txt") as tf, open(tf, 'w') as outfile: - ... outfile.write("my extremely useful information") - ... print("Is this file exists? {}".format(os.path.exists(tf))) - ... print("Is this folder exists? {}".format(os.path.exists(os.path.dirname(tf)))) - Is this file exists? True - Is this folder exists? True - >>> - >>> print("Is this file exists? {}".format(os.path.exists(tf))) - Is this file exists? False - >>> print("Is this folder exists? 
{}".format(os.path.exists(os.path.dirname(tf)))) - Is this folder exists? False + .. sourcecode:: pycon + + >>> import os + >>> from gensim.test.utils import temporary_file + >>> with temporary_file("temp.txt") as tf, open(tf, 'w') as outfile: + ... outfile.write("my extremely useful information") + ... print("Is this file exists? {}".format(os.path.exists(tf))) + ... print("Is this folder exists? {}".format(os.path.exists(os.path.dirname(tf)))) + Is this file exists? True + Is this folder exists? True + >>> + >>> print("Is this file exists? {}".format(os.path.exists(tf))) + Is this file exists? False + >>> print("Is this folder exists? {}".format(os.path.exists(os.path.dirname(tf)))) + Is this folder exists? False """ diff --git a/gensim/topic_coherence/aggregation.py b/gensim/topic_coherence/aggregation.py index aa27c833f7..79ecee793f 100644 --- a/gensim/topic_coherence/aggregation.py +++ b/gensim/topic_coherence/aggregation.py @@ -29,9 +29,11 @@ def arithmetic_mean(confirmed_measures): Examples -------- - >>> from gensim.topic_coherence.aggregation import arithmetic_mean - >>> arithmetic_mean([1.1, 2.2, 3.3, 4.4]) - 2.75 + .. sourcecode:: pycon + + >>> from gensim.topic_coherence.aggregation import arithmetic_mean + >>> arithmetic_mean([1.1, 2.2, 3.3, 4.4]) + 2.75 """ return np.mean(confirmed_measures) diff --git a/gensim/topic_coherence/direct_confirmation_measure.py b/gensim/topic_coherence/direct_confirmation_measure.py index 6482191d9c..d7a7e5f464 100644 --- a/gensim/topic_coherence/direct_confirmation_measure.py +++ b/gensim/topic_coherence/direct_confirmation_measure.py @@ -41,23 +41,26 @@ def log_conditional_probability(segmented_topics, accumulator, with_std=False, w Examples -------- - >>> from gensim.topic_coherence import direct_confirmation_measure, text_analysis - >>> from collections import namedtuple - >>> - >>> # Create dictionary - >>> id2token = {1: 'test', 2: 'doc'} - >>> token2id = {v: k for k, v in id2token.items()} - >>> dictionary = namedtuple('Dictionary', 'token2id, id2token')(token2id, id2token) - >>> - >>> # Initialize segmented topics and accumulator - >>> segmentation = [[(1, 2)]] - >>> - >>> accumulator = text_analysis.InvertedIndexAccumulator({1, 2}, dictionary) - >>> accumulator._inverted_index = {0: {2, 3, 4}, 1: {3, 5}} - >>> accumulator._num_docs = 5 - >>> - >>> # result should be ~ ln(1 / 2) = -0.693147181 - >>> result = direct_confirmation_measure.log_conditional_probability(segmentation, accumulator)[0] + + .. 
sourcecode:: pycon + + >>> from gensim.topic_coherence import direct_confirmation_measure, text_analysis + >>> from collections import namedtuple + >>> + >>> # Create dictionary + >>> id2token = {1: 'test', 2: 'doc'} + >>> token2id = {v: k for k, v in id2token.items()} + >>> dictionary = namedtuple('Dictionary', 'token2id, id2token')(token2id, id2token) + >>> + >>> # Initialize segmented topics and accumulator + >>> segmentation = [[(1, 2)]] + >>> + >>> accumulator = text_analysis.InvertedIndexAccumulator({1, 2}, dictionary) + >>> accumulator._inverted_index = {0: {2, 3, 4}, 1: {3, 5}} + >>> accumulator._num_docs = 5 + >>> + >>> # result should be ~ ln(1 / 2) = -0.693147181 + >>> result = direct_confirmation_measure.log_conditional_probability(segmentation, accumulator)[0] """ topic_coherences = [] @@ -99,13 +102,15 @@ def aggregate_segment_sims(segment_sims, with_std, with_support): Examples --------- - >>> from gensim.topic_coherence import direct_confirmation_measure - >>> - >>> segment_sims = [0.2, 0.5, 1., 0.05] - >>> direct_confirmation_measure.aggregate_segment_sims(segment_sims, True, True) - (0.4375, 0.36293077852394939, 4) - >>> direct_confirmation_measure.aggregate_segment_sims(segment_sims, False, False) - 0.4375 + .. sourcecode:: pycon + + >>> from gensim.topic_coherence import direct_confirmation_measure + >>> + >>> segment_sims = [0.2, 0.5, 1., 0.05] + >>> direct_confirmation_measure.aggregate_segment_sims(segment_sims, True, True) + (0.4375, 0.36293077852394939, 4) + >>> direct_confirmation_measure.aggregate_segment_sims(segment_sims, False, False) + 0.4375 """ mean = np.mean(segment_sims) @@ -155,23 +160,26 @@ def log_ratio_measure(segmented_topics, accumulator, normalize=False, with_std=F Examples -------- - >>> from gensim.topic_coherence import direct_confirmation_measure, text_analysis - >>> from collections import namedtuple - >>> - >>> # Create dictionary - >>> id2token = {1: 'test', 2: 'doc'} - >>> token2id = {v: k for k, v in id2token.items()} - >>> dictionary = namedtuple('Dictionary', 'token2id, id2token')(token2id, id2token) - >>> - >>> # Initialize segmented topics and accumulator - >>> segmentation = [[(1, 2)]] - >>> - >>> accumulator = text_analysis.InvertedIndexAccumulator({1, 2}, dictionary) - >>> accumulator._inverted_index = {0: {2, 3, 4}, 1: {3, 5}} - >>> accumulator._num_docs = 5 - >>> - >>> # result should be ~ ln{(1 / 5) / [(3 / 5) * (2 / 5)]} = -0.182321557 - >>> result = direct_confirmation_measure.log_ratio_measure(segmentation, accumulator)[0] + + .. 
sourcecode:: pycon + + >>> from gensim.topic_coherence import direct_confirmation_measure, text_analysis + >>> from collections import namedtuple + >>> + >>> # Create dictionary + >>> id2token = {1: 'test', 2: 'doc'} + >>> token2id = {v: k for k, v in id2token.items()} + >>> dictionary = namedtuple('Dictionary', 'token2id, id2token')(token2id, id2token) + >>> + >>> # Initialize segmented topics and accumulator + >>> segmentation = [[(1, 2)]] + >>> + >>> accumulator = text_analysis.InvertedIndexAccumulator({1, 2}, dictionary) + >>> accumulator._inverted_index = {0: {2, 3, 4}, 1: {3, 5}} + >>> accumulator._num_docs = 5 + >>> + >>> # result should be ~ ln{(1 / 5) / [(3 / 5) * (2 / 5)]} = -0.182321557 + >>> result = direct_confirmation_measure.log_ratio_measure(segmentation, accumulator)[0] """ topic_coherences = [] diff --git a/gensim/topic_coherence/indirect_confirmation_measure.py b/gensim/topic_coherence/indirect_confirmation_measure.py index fdcbd1565f..76077813d0 100644 --- a/gensim/topic_coherence/indirect_confirmation_measure.py +++ b/gensim/topic_coherence/indirect_confirmation_measure.py @@ -66,22 +66,24 @@ def word2vec_similarity(segmented_topics, accumulator, with_std=False, with_supp Examples -------- - >>> import numpy as np - >>> from gensim.corpora.dictionary import Dictionary - >>> from gensim.topic_coherence import indirect_confirmation_measure - >>> from gensim.topic_coherence import text_analysis - >>> - >>> # create segmentation - >>> segmentation = [[(1, np.array([1, 2])), (2, np.array([1, 2]))]] - >>> - >>> # create accumulator - >>> dictionary = Dictionary() - >>> dictionary.id2token = {1: 'fake', 2: 'tokens'} - >>> accumulator = text_analysis.WordVectorsAccumulator({1, 2}, dictionary) - >>> _ = accumulator.accumulate([['fake', 'tokens'],['tokens', 'fake']], 5) - >>> - >>> # should be (0.726752426218 0.00695475919227) - >>> mean, std = indirect_confirmation_measure.word2vec_similarity(segmentation, accumulator, with_std=True)[0] + .. 
sourcecode:: pycon + + >>> import numpy as np + >>> from gensim.corpora.dictionary import Dictionary + >>> from gensim.topic_coherence import indirect_confirmation_measure + >>> from gensim.topic_coherence import text_analysis + >>> + >>> # create segmentation + >>> segmentation = [[(1, np.array([1, 2])), (2, np.array([1, 2]))]] + >>> + >>> # create accumulator + >>> dictionary = Dictionary() + >>> dictionary.id2token = {1: 'fake', 2: 'tokens'} + >>> accumulator = text_analysis.WordVectorsAccumulator({1, 2}, dictionary) + >>> _ = accumulator.accumulate([['fake', 'tokens'], ['tokens', 'fake']], 5) + >>> + >>> # should be (0.726752426218 0.00695475919227) + >>> mean, std = indirect_confirmation_measure.word2vec_similarity(segmentation, accumulator, with_std=True)[0] """ topic_coherences = [] @@ -141,25 +143,27 @@ def cosine_similarity(segmented_topics, accumulator, topics, measure='nlr', Examples -------- - >>> from gensim.corpora.dictionary import Dictionary - >>> from gensim.topic_coherence import indirect_confirmation_measure, text_analysis - >>> import numpy as np - >>> - >>> # create accumulator - >>> dictionary = Dictionary() - >>> dictionary.id2token = {1: 'fake', 2: 'tokens'} - >>> accumulator = text_analysis.InvertedIndexAccumulator({1, 2}, dictionary) - >>> accumulator._inverted_index = {0: {2, 3, 4}, 1: {3, 5}} - >>> accumulator._num_docs = 5 - >>> - >>> # create topics - >>> topics = [np.array([1, 2])] - >>> - >>> # create segmentation - >>> segmentation = [[(1, np.array([1, 2])), (2, np.array([1, 2]))]] - >>> obtained = indirect_confirmation_measure.cosine_similarity(segmentation, accumulator, topics, 'nlr', 1) - >>> print obtained[0] - 0.623018926945 + .. sourcecode:: pycon + + >>> from gensim.corpora.dictionary import Dictionary + >>> from gensim.topic_coherence import indirect_confirmation_measure, text_analysis + >>> import numpy as np + >>> + >>> # create accumulator + >>> dictionary = Dictionary() + >>> dictionary.id2token = {1: 'fake', 2: 'tokens'} + >>> accumulator = text_analysis.InvertedIndexAccumulator({1, 2}, dictionary) + >>> accumulator._inverted_index = {0: {2, 3, 4}, 1: {3, 5}} + >>> accumulator._num_docs = 5 + >>> + >>> # create topics + >>> topics = [np.array([1, 2])] + >>> + >>> # create segmentation + >>> segmentation = [[(1, np.array([1, 2])), (2, np.array([1, 2]))]] + >>> obtained = indirect_confirmation_measure.cosine_similarity(segmentation, accumulator, topics, 'nlr', 1) + >>> print(obtained[0]) + 0.623018926945 """ context_vectors = ContextVectorComputer(measure, topics, accumulator, gamma) @@ -202,24 +206,26 @@ class ContextVectorComputer(object): Example ------- - >>> from gensim.corpora.dictionary import Dictionary - >>> from gensim.topic_coherence import indirect_confirmation_measure, text_analysis - >>> import numpy as np - >>> - >>> # create measure, topics - >>> measure = 'nlr' - >>> topics = [np.array([1, 2])] - >>> - >>> # create accumulator - >>> dictionary = Dictionary() - >>> dictionary.id2token = {1: 'fake', 2: 'tokens'} - >>> accumulator = text_analysis.WordVectorsAccumulator({1, 2}, dictionary) - >>> _ = accumulator.accumulate([['fake', 'tokens'],['tokens', 'fake']], 5) - >>> cont_vect_comp = indirect_confirmation_measure.ContextVectorComputer(measure, topics, accumulator, 1) - >>> cont_vect_comp.mapping - {1: 0, 2: 1} - >>> cont_vect_comp.vocab_size - 2 + .. 
sourcecode:: pycon + + >>> from gensim.corpora.dictionary import Dictionary + >>> from gensim.topic_coherence import indirect_confirmation_measure, text_analysis + >>> import numpy as np + >>> + >>> # create measure, topics + >>> measure = 'nlr' + >>> topics = [np.array([1, 2])] + >>> + >>> # create accumulator + >>> dictionary = Dictionary() + >>> dictionary.id2token = {1: 'fake', 2: 'tokens'} + >>> accumulator = text_analysis.WordVectorsAccumulator({1, 2}, dictionary) + >>> _ = accumulator.accumulate([['fake', 'tokens'], ['tokens', 'fake']], 5) + >>> cont_vect_comp = indirect_confirmation_measure.ContextVectorComputer(measure, topics, accumulator, 1) + >>> cont_vect_comp.mapping + {1: 0, 2: 1} + >>> cont_vect_comp.vocab_size + 2 """ diff --git a/gensim/topic_coherence/probability_estimation.py b/gensim/topic_coherence/probability_estimation.py index 404310a36c..6aea8fb6fa 100644 --- a/gensim/topic_coherence/probability_estimation.py +++ b/gensim/topic_coherence/probability_estimation.py @@ -34,33 +34,41 @@ def p_boolean_document(corpus, segmented_topics): Examples --------- - >>> from gensim.topic_coherence import probability_estimation - >>> from gensim.corpora.hashdictionary import HashDictionary - >>> - >>> - >>> texts = [ - ... ['human', 'interface', 'computer'], - ... ['eps', 'user', 'interface', 'system'], - ... ['system', 'human', 'system', 'eps'], - ... ['user', 'response', 'time'], - ... ['trees'], - ... ['graph', 'trees'] - ... ] - >>> dictionary = HashDictionary(texts) - >>> w2id = dictionary.token2id - >>> - >>> # create segmented_topics - >>> segmented_topics = [ - ... [(w2id['system'], w2id['graph']),(w2id['computer'], w2id['graph']),(w2id['computer'], w2id['system'])], - ... [(w2id['computer'], w2id['graph']),(w2id['user'], w2id['graph']),(w2id['user'], w2id['computer'])] - ... ] - >>> - >>> # create corpus - >>> corpus = [dictionary.doc2bow(text) for text in texts] - >>> - >>> result = probability_estimation.p_boolean_document(corpus, segmented_topics) - >>> result.index_to_dict() - {10608: set([0]), 12736: set([1, 3]), 18451: set([5]), 5798: set([1, 2])} + .. sourcecode:: pycon + + >>> from gensim.topic_coherence import probability_estimation + >>> from gensim.corpora.hashdictionary import HashDictionary + >>> + >>> + >>> texts = [ + ... ['human', 'interface', 'computer'], + ... ['eps', 'user', 'interface', 'system'], + ... ['system', 'human', 'system', 'eps'], + ... ['user', 'response', 'time'], + ... ['trees'], + ... ['graph', 'trees'] + ... ] + >>> dictionary = HashDictionary(texts) + >>> w2id = dictionary.token2id + >>> + >>> # create segmented_topics + >>> segmented_topics = [ + ... [ + ... (w2id['system'], w2id['graph']), + ... (w2id['computer'], w2id['graph']), + ... (w2id['computer'], w2id['system']) + ... ], + ... [ + ... (w2id['computer'], w2id['graph']), + ... (w2id['user'], w2id['graph']), + ... (w2id['user'], w2id['computer'])] + ... ] + >>> # create corpus + >>> corpus = [dictionary.doc2bow(text) for text in texts] + >>> + >>> result = probability_estimation.p_boolean_document(corpus, segmented_topics) + >>> result.index_to_dict() + {10608: set([0]), 12736: set([1, 3]), 18451: set([5]), 5798: set([1, 2])} """ top_ids = unique_ids_from_segments(segmented_topics) @@ -101,34 +109,42 @@ def p_boolean_sliding_window(texts, segmented_topics, dictionary, window_size, p Examples --------- - >>> from gensim.topic_coherence import probability_estimation - >>> from gensim.corpora.hashdictionary import HashDictionary - >>> - >>> - >>> texts = [ - ... 
['human', 'interface', 'computer'], - ... ['eps', 'user', 'interface', 'system'], - ... ['system', 'human', 'system', 'eps'], - ... ['user', 'response', 'time'], - ... ['trees'], - ... ['graph', 'trees'] - ... ] - >>> dictionary = HashDictionary(texts) - >>> w2id = dictionary.token2id - - >>> - >>> # create segmented_topics - >>> segmented_topics = [ - ... [(w2id['system'], w2id['graph']),(w2id['computer'], w2id['graph']),(w2id['computer'], w2id['system'])], - ... [(w2id['computer'], w2id['graph']),(w2id['user'], w2id['graph']),(w2id['user'], w2id['computer'])] - ... ] - >>> - >>> # create corpus - >>> corpus = [dictionary.doc2bow(text) for text in texts] - >>> accumulator = probability_estimation.p_boolean_sliding_window(texts, segmented_topics, dictionary, 2) - >>> - >>> (accumulator[w2id['computer']], accumulator[w2id['user']], accumulator[w2id['system']]) - (1, 3, 4) + .. sourcecode:: pycon + + >>> from gensim.topic_coherence import probability_estimation + >>> from gensim.corpora.hashdictionary import HashDictionary + >>> + >>> + >>> texts = [ + ... ['human', 'interface', 'computer'], + ... ['eps', 'user', 'interface', 'system'], + ... ['system', 'human', 'system', 'eps'], + ... ['user', 'response', 'time'], + ... ['trees'], + ... ['graph', 'trees'] + ... ] + >>> dictionary = HashDictionary(texts) + >>> w2id = dictionary.token2id + + >>> + >>> # create segmented_topics + >>> segmented_topics = [ + ... [ + ... (w2id['system'], w2id['graph']), + ... (w2id['computer'], w2id['graph']), + ... (w2id['computer'], w2id['system']) + ... ], + ... [ + ... (w2id['computer'], w2id['graph']), + ... (w2id['user'], w2id['graph']), + ... (w2id['user'], w2id['computer'])] + ... ] + >>> # create corpus + >>> corpus = [dictionary.doc2bow(text) for text in texts] + >>> accumulator = probability_estimation.p_boolean_sliding_window(texts, segmented_topics, dictionary, 2) + >>> + >>> (accumulator[w2id['computer']], accumulator[w2id['user']], accumulator[w2id['system']]) + (1, 3, 4) """ top_ids = unique_ids_from_segments(segmented_topics) @@ -166,33 +182,44 @@ def p_word2vec(texts, segmented_topics, dictionary, window_size=None, processes= Examples -------- - >>> from gensim.topic_coherence import probability_estimation - >>> from gensim.corpora.hashdictionary import HashDictionary - >>> from gensim.models import word2vec - >>> - >>> texts = [ - ... ['human', 'interface', 'computer'], - ... ['eps', 'user', 'interface', 'system'], - ... ['system', 'human', 'system', 'eps'], - ... ['user', 'response', 'time'], - ... ['trees'], - ... ['graph', 'trees'] - ... ] - >>> dictionary = HashDictionary(texts) - >>> w2id = dictionary.token2id - - >>> - >>> # create segmented_topics - >>> segmented_topics = [ - ... [(w2id['system'], w2id['graph']),(w2id['computer'], w2id['graph']),(w2id['computer'], w2id['system'])], - ... [(w2id['computer'], w2id['graph']),(w2id['user'], w2id['graph']),(w2id['user'], w2id['computer'])] - ... ] - >>> - >>> # create corpus - >>> corpus = [dictionary.doc2bow(text) for text in texts] - >>> sentences = [['human', 'interface', 'computer'],['survey', 'user', 'computer', 'system', 'response', 'time']] - >>> model = word2vec.Word2Vec(sentences, size=100,min_count=1) - >>> accumulator = probability_estimation.p_word2vec(texts, segmented_topics, dictionary, 2, 1, model) + .. sourcecode:: pycon + + >>> from gensim.topic_coherence import probability_estimation + >>> from gensim.corpora.hashdictionary import HashDictionary + >>> from gensim.models import word2vec + >>> + >>> texts = [ + ... 
['human', 'interface', 'computer'], + ... ['eps', 'user', 'interface', 'system'], + ... ['system', 'human', 'system', 'eps'], + ... ['user', 'response', 'time'], + ... ['trees'], + ... ['graph', 'trees'] + ... ] + >>> dictionary = HashDictionary(texts) + >>> w2id = dictionary.token2id + + >>> + >>> # create segmented_topics + >>> segmented_topics = [ + ... [ + ... (w2id['system'], w2id['graph']), + ... (w2id['computer'], w2id['graph']), + ... (w2id['computer'], w2id['system']) + ... ], + ... [ + ... (w2id['computer'], w2id['graph']), + ... (w2id['user'], w2id['graph']), + ... (w2id['user'], w2id['computer'])] + ... ] + >>> # create corpus + >>> corpus = [dictionary.doc2bow(text) for text in texts] + >>> sentences = [ + ... ['human', 'interface', 'computer'], + ... ['survey', 'user', 'computer', 'system', 'response', 'time'] + ... ] + >>> model = word2vec.Word2Vec(sentences, size=100, min_count=1) + >>> accumulator = probability_estimation.p_word2vec(texts, segmented_topics, dictionary, 2, 1, model) """ top_ids = unique_ids_from_segments(segmented_topics) @@ -216,11 +243,14 @@ def unique_ids_from_segments(segmented_topics): Example ------- - >>> from gensim.topic_coherence import probability_estimation - >>> - >>> segmentation = [[(1, 2)]] - >>> probability_estimation.unique_ids_from_segments(segmentation) - set([1, 2]) + + .. sourcecode:: pycon + + >>> from gensim.topic_coherence import probability_estimation + >>> + >>> segmentation = [[(1, 2)]] + >>> probability_estimation.unique_ids_from_segments(segmentation) + set([1, 2]) """ unique_ids = set() # is a set of all the unique ids contained in topics. diff --git a/gensim/topic_coherence/segmentation.py b/gensim/topic_coherence/segmentation.py index 9629369b42..d02f700547 100644 --- a/gensim/topic_coherence/segmentation.py +++ b/gensim/topic_coherence/segmentation.py @@ -31,12 +31,14 @@ def s_one_pre(topics): Examples -------- - >>> import numpy as np - >>> from gensim.topic_coherence import segmentation - >>> - >>> topics = [np.array([1, 2, 3]), np.array([4, 5, 6])] - >>> segmentation.s_one_pre(topics) - [[(2, 1), (3, 1), (3, 2)], [(5, 4), (6, 4), (6, 5)]] + .. sourcecode:: pycon + + >>> import numpy as np + >>> from gensim.topic_coherence import segmentation + >>> + >>> topics = [np.array([1, 2, 3]), np.array([4, 5, 6])] + >>> segmentation.s_one_pre(topics) + [[(2, 1), (3, 1), (3, 2)], [(5, 4), (6, 4), (6, 5)]] """ s_one_pre_res = [] @@ -68,12 +70,14 @@ def s_one_one(topics): Examples ------- - >>> import numpy as np - >>> from gensim.topic_coherence import segmentation - >>> - >>> topics = [np.array([1, 2, 3]), np.array([4, 5, 6])] - >>> segmentation.s_one_one(topics) - [[(1, 2), (1, 3), (2, 1), (2, 3), (3, 1), (3, 2)], [(4, 5), (4, 6), (5, 4), (5, 6), (6, 4), (6, 5)]] + .. sourcecode:: pycon + + >>> import numpy as np + >>> from gensim.topic_coherence import segmentation + >>> + >>> topics = [np.array([1, 2, 3]), np.array([4, 5, 6])] + >>> segmentation.s_one_one(topics) + [[(1, 2), (1, 3), (2, 1), (2, 3), (3, 1), (3, 2)], [(4, 5), (4, 6), (5, 4), (5, 6), (6, 4), (6, 5)]] """ s_one_one_res = [] @@ -108,12 +112,14 @@ def s_one_set(topics): Examples -------- - >>> import numpy as np - >>> from gensim.topic_coherence import segmentation - >>> - >>> topics = [np.array([9, 10, 7])] - >>> segmentation.s_one_set(topics) - [[(9, array([ 9, 10, 7])), (10, array([ 9, 10, 7])), (7, array([ 9, 10, 7]))]] + .. 
sourcecode:: pycon + + >>> import numpy as np + >>> from gensim.topic_coherence import segmentation + >>> + >>> topics = [np.array([9, 10, 7])] + >>> segmentation.s_one_set(topics) + [[(9, array([ 9, 10, 7])), (10, array([ 9, 10, 7])), (7, array([ 9, 10, 7]))]] """ s_one_set_res = [] diff --git a/gensim/topic_coherence/text_analysis.py b/gensim/topic_coherence/text_analysis.py index 3ec859b6fd..ddd06bde8a 100644 --- a/gensim/topic_coherence/text_analysis.py +++ b/gensim/topic_coherence/text_analysis.py @@ -41,15 +41,17 @@ def _ids_to_words(ids, dictionary): Examples -------- - >>> from gensim.corpora.dictionary import Dictionary - >>> from gensim.topic_coherence import text_analysis - >>> - >>> dictionary = Dictionary() - >>> ids = {1: 'fake', 4: 'cats'} - >>> dictionary.id2token = {1: 'fake', 2: 'tokens', 3: 'rabbids', 4: 'cats'} - >>> - >>> text_analysis._ids_to_words(ids, dictionary) - set(['cats', 'fake']) + .. sourcecode:: pycon + + >>> from gensim.corpora.dictionary import Dictionary + >>> from gensim.topic_coherence import text_analysis + >>> + >>> dictionary = Dictionary() + >>> ids = {1: 'fake', 4: 'cats'} + >>> dictionary.id2token = {1: 'fake', 2: 'tokens', 3: 'rabbids', 4: 'cats'} + >>> + >>> text_analysis._ids_to_words(ids, dictionary) + set(['cats', 'fake']) """ if not dictionary.id2token: # may not be initialized in the standard gensim.corpora.Dictionary @@ -93,12 +95,14 @@ def __init__(self, relevant_ids): Examples -------- - >>> from gensim.topic_coherence import text_analysis - >>> ids = {1: 'fake', 4: 'cats'} - >>> base = text_analysis.BaseAnalyzer(ids) - >>> # should return {1: 'fake', 4: 'cats'} 2 {1: 0, 4: 1} 1000 0 - >>> print base.relevant_ids, base._vocab_size, base.id2contiguous, base.log_every, base._num_docs - {1: 'fake', 4: 'cats'} 2 {1: 0, 4: 1} 1000 0 + .. sourcecode:: pycon + + >>> from gensim.topic_coherence import text_analysis + >>> ids = {1: 'fake', 4: 'cats'} + >>> base = text_analysis.BaseAnalyzer(ids) + >>> # should return {1: 'fake', 4: 'cats'} 2 {1: 0, 4: 1} 1000 0 + >>> print(base.relevant_ids, base._vocab_size, base.id2contiguous, base.log_every, base._num_docs) + {1: 'fake', 4: 'cats'} 2 {1: 0, 4: 1} 1000 0 """ self.relevant_ids = relevant_ids @@ -170,15 +174,17 @@ def __init__(self, relevant_ids, dictionary): Examples -------- - >>> from gensim.topic_coherence import text_analysis - >>> from gensim.corpora.dictionary import Dictionary - >>> - >>> ids = {1: 'foo', 2: 'bar'} - >>> dictionary = Dictionary([['foo','bar','baz'], ['foo','bar','bar','baz']]) - >>> udict = text_analysis.UsesDictionary(ids, dictionary) - >>> - >>> print udict.relevant_words - set([u'foo', u'baz']) + .. sourcecode:: pycon + + >>> from gensim.topic_coherence import text_analysis + >>> from gensim.corpora.dictionary import Dictionary + >>> + >>> ids = {1: 'foo', 2: 'bar'} + >>> dictionary = Dictionary([['foo', 'bar', 'baz'], ['foo', 'bar', 'bar', 'baz']]) + >>> udict = text_analysis.UsesDictionary(ids, dictionary) + >>> + >>> print(udict.relevant_words) + set([u'foo', u'baz']) """ super(UsesDictionary, self).__init__(relevant_ids) @@ -221,13 +227,15 @@ def __init__(self, *args): Examples -------- - >>> from gensim.topic_coherence import text_analysis - >>> - >>> ids = {1: 'fake', 4: 'cats'} - >>> ininb = text_analysis.InvertedIndexBased(ids) - >>> - >>> print ininb._inverted_index - [set([]) set([])] + .. 
sourcecode:: pycon + + >>> from gensim.topic_coherence import text_analysis + >>> + >>> ids = {1: 'fake', 4: 'cats'} + >>> ininb = text_analysis.InvertedIndexBased(ids) + >>> + >>> print(ininb._inverted_index) + [set([]) set([])] """ super(InvertedIndexBased, self).__init__(*args) diff --git a/gensim/utils.py b/gensim/utils.py index 1331c17c6d..0359125db5 100644 --- a/gensim/utils.py +++ b/gensim/utils.py @@ -180,9 +180,11 @@ def deaccent(text): Examples -------- - >>> from gensim.utils import deaccent - >>> deaccent("Šéf chomutovských komunistů dostal poštou bílý prášek") - u'Sef chomutovskych komunistu dostal postou bily prasek' + .. sourcecode:: pycon + + >>> from gensim.utils import deaccent + >>> deaccent("Šéf chomutovských komunistů dostal poštou bílý prášek") + u'Sef chomutovskych komunistu dostal postou bily prasek' """ if not isinstance(text, unicode): @@ -243,9 +245,11 @@ def tokenize(text, lowercase=False, deacc=False, encoding='utf8', errors="strict Examples -------- - >>> from gensim.utils import tokenize - >>> list(tokenize('Nic nemůže letět rychlostí vyšší, než 300 tisíc kilometrů za sekundu!', deacc=True)) - [u'Nic', u'nemuze', u'letet', u'rychlosti', u'vyssi', u'nez', u'tisic', u'kilometru', u'za', u'sekundu'] + .. sourcecode:: pycon + + >>> from gensim.utils import tokenize + >>> list(tokenize('Nic nemůže letět rychlostí vyšší, než 300 tisíc kilometrů za sekundu!', deacc=True)) + [u'Nic', u'nemuze', u'letet', u'rychlosti', u'vyssi', u'nez', u'tisic', u'kilometru', u'za', u'sekundu'] """ lowercase = lowercase or to_lower or lower @@ -840,9 +844,11 @@ def is_corpus(obj): Examples -------- - >>> from gensim.utils import is_corpus - >>> corpus = [[(1, 1.0)], [(2, -0.3), (3, 0.12)]] - >>> corpus_or_not, corpus = is_corpus(corpus) + .. sourcecode:: pycon + + >>> from gensim.utils import is_corpus + >>> corpus = [[(1, 1.0)], [(2, -0.3), (3, 0.12)]] + >>> corpus_or_not, corpus = is_corpus(corpus) Warnings -------- @@ -918,11 +924,13 @@ class RepeatCorpus(SaveLoad): Examples -------- - >>> from gensim.utils import RepeatCorpus - >>> - >>> corpus = [[(1, 2)], []] # 2 documents - >>> list(RepeatCorpus(corpus, 5)) # repeat 2.5 times to get 5 documents - [[(1, 2)], [], [(1, 2)], [], [(1, 2)]] + .. sourcecode:: pycon + + >>> from gensim.utils import RepeatCorpus + >>> + >>> corpus = [[(1, 2)], []] # 2 documents + >>> list(RepeatCorpus(corpus, 5)) # repeat 2.5 times to get 5 documents + [[(1, 2)], [], [(1, 2)], [], [(1, 2)]] """ def __init__(self, corpus, reps): @@ -948,11 +956,13 @@ class RepeatCorpusNTimes(SaveLoad): Examples -------- - >>> from gensim.utils import RepeatCorpusNTimes - >>> - >>> corpus = [[(1, 0.5)], []] - >>> list(RepeatCorpusNTimes(corpus, 3)) # repeat 3 times - [[(1, 0.5)], [], [(1, 0.5)], [], [(1, 0.5)], []] + .. sourcecode:: pycon + + >>> from gensim.utils import RepeatCorpusNTimes + >>> + >>> corpus = [[(1, 0.5)], []] + >>> list(RepeatCorpusNTimes(corpus, 3)) # repeat 3 times + [[(1, 0.5)], [], [(1, 0.5)], [], [(1, 0.5)], []] """ def __init__(self, corpus, n): @@ -1084,15 +1094,17 @@ def decode_htmlentities(text): Examples -------- - >>> from gensim.utils import decode_htmlentities - >>> - >>> u = u'E tu vivrai nel terrore - L'aldilà (1981)' - >>> print(decode_htmlentities(u).encode('UTF-8')) - E tu vivrai nel terrore - L'aldilà (1981) - >>> print(decode_htmlentities("l'eau")) - l'eau - >>> print(decode_htmlentities("foo < bar")) - foo < bar + .. 
sourcecode:: pycon + + >>> from gensim.utils import decode_htmlentities + >>> + >>> u = u'E tu vivrai nel terrore - L'aldilà (1981)' + >>> print(decode_htmlentities(u).encode('UTF-8')) + E tu vivrai nel terrore - L'aldilà (1981) + >>> print(decode_htmlentities("l'eau")) + l'eau + >>> print(decode_htmlentities("foo < bar")) + foo < bar """ def substitute_entity(match): @@ -1141,8 +1153,10 @@ def chunkize_serial(iterable, chunksize, as_numpy=False, dtype=np.float32): Examples -------- - >>> print(list(grouper(range(10), 3))) - [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] + .. sourcecode:: pycon + + >>> print(list(grouper(range(10), 3))) + [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] """ it = iter(iterable) @@ -1314,9 +1328,11 @@ def smart_extension(fname, ext): Examples -------- - >>> from gensim.utils import smart_extension - >>> smart_extension("my_file.pkl.gz", ".vectors") - 'my_file.pkl.vectors.gz' + .. sourcecode:: pycon + + >>> from gensim.utils import smart_extension + >>> smart_extension("my_file.pkl.gz", ".vectors") + 'my_file.pkl.vectors.gz' """ fname, oext = os.path.splitext(fname) @@ -1388,10 +1404,12 @@ def revdict(d): Examples -------- - >>> from gensim.utils import revdict - >>> d = {1: 2, 3: 4} - >>> revdict(d) - {2: 1, 4: 3} + .. sourcecode:: pycon + + >>> from gensim.utils import revdict + >>> d = {1: 2, 3: 4} + >>> revdict(d) + {2: 1, 4: 3} """ return {v: k for (k, v) in iteritems(dict(d))} @@ -1637,17 +1655,21 @@ def lemmatize(content, allowed_tags=re.compile(r'(NN|VB|JJ|RB)'), light=False, Examples -------- - >>> from gensim.utils import lemmatize - >>> lemmatize('Hello World! How is it going?! Nonexistentword, 21') - ['world/NN', 'be/VB', 'go/VB', 'nonexistentword/NN'] + .. sourcecode:: pycon + + >>> from gensim.utils import lemmatize + >>> lemmatize('Hello World! How is it going?! Nonexistentword, 21') + ['world/NN', 'be/VB', 'go/VB', 'nonexistentword/NN'] Note the context-dependent part-of-speech tags between these two examples: - >>> lemmatize('The study ranks high.') - ['study/NN', 'rank/VB', 'high/JJ'] + .. sourcecode:: pycon - >>> lemmatize('The ranks study hard.') - ['rank/NN', 'study/VB', 'hard/RB'] + >>> lemmatize('The study ranks high.') + ['study/NN', 'rank/VB', 'high/JJ'] + + >>> lemmatize('The ranks study hard.') + ['rank/NN', 'study/VB', 'hard/RB'] """ if not has_pattern(): @@ -1869,9 +1891,11 @@ def check_output(stdout=subprocess.PIPE, *popenargs, **kwargs): Examples -------- - >>> from gensim.utils import check_output - >>> check_output(args=['echo', '1']) - '1\n' + .. sourcecode:: pycon + + >>> from gensim.utils import check_output + >>> check_output(args=['echo', '1']) + '1\n' Raises ------ @@ -1938,19 +1962,22 @@ def strided_windows(ndarray, window_size): Examples -------- - >>> from gensim.utils import strided_windows - >>> strided_windows(np.arange(5), 2) - array([[0, 1], - [1, 2], - [2, 3], - [3, 4]]) - >>> strided_windows(np.arange(10), 5) - array([[0, 1, 2, 3, 4], - [1, 2, 3, 4, 5], - [2, 3, 4, 5, 6], - [3, 4, 5, 6, 7], - [4, 5, 6, 7, 8], - [5, 6, 7, 8, 9]]) + + .. 
sourcecode:: pycon + + >>> from gensim.utils import strided_windows + >>> strided_windows(np.arange(5), 2) + array([[0, 1], + [1, 2], + [2, 3], + [3, 4]]) + >>> strided_windows(np.arange(10), 5) + array([[0, 1, 2, 3, 4], + [1, 2, 3, 4, 5], + [2, 3, 4, 5, 6], + [3, 4, 5, 6, 7], + [4, 5, 6, 7, 8], + [5, 6, 7, 8, 9]]) """ ndarray = np.asarray(ndarray) diff --git a/tox.ini b/tox.ini index ed26490654..2eb26717dd 100644 --- a/tox.ini +++ b/tox.ini @@ -9,8 +9,12 @@ platform = linux: linux ignore = E12, W503 max-line-length = 120 show-source = True -builtins = get_ipython +[flake8-rst] +filename = *.rst *.py +max-line-length = 120 +ignore = F821 ; TODO remove me when all examples in docstrings will be executable +exclude=.venv, .git, .tox, dist, doc, build, gensim/models/deprecated [pytest] addopts = -rfxEXs --durations=20 --showlocals --reruns 3 --reruns-delay 1 @@ -45,7 +49,13 @@ commands = recreate = True deps = flake8 -commands = flake8 gensim/ +commands = flake8 gensim/ {posargs} + +[testenv:flake8-docs] +recreate = True +deps = flake8-rst >= 0.4.1 + +commands = flake8-rst gensim/ docs/ {posargs} [testenv:docs] From 367bdbd5eab6f1ebd5b3588d4b7c0a67df48904e Mon Sep 17 00:00:00 2001 From: Patrick Moelk Date: Thu, 4 Oct 2018 14:17:33 +0200 Subject: [PATCH 33/66] Add `common_terms` parameter to `sklearn_api.PhrasesTransformer` (#2074) * add common_terms parameter This parameter is being propagated to the underlying models.Phrases class. * add tests for new common_terms parameter * utilize models.phrases.Phraser class this avoids the following warning: "UserWarning: For a faster implementation, use the gensim.models.phrases.Phraser class" * add testCompareToOld, add pre-trained Phrases model * use pickle to load old PhrasesTransformer * allow setting Phrases model without setting Phraser model A pre-trained Phrases model (self.gensim_model) may be set to avoid using the fit() method. In transform(), the also necessary Phraser model (self.phraser) will be instantiated if it hasn't been before. * open pickle file * add __setstate__ for backward compatibility * use pickle protocol 2 * test loading new phrases transformer --- gensim/sklearn_api/phrases.py | 25 +++++- .../phrases-transformer-new-v3-5-0.pkl | Bin 0 -> 1896 bytes .../test_data/phrases-transformer-v3-5-0.pkl | Bin 0 -> 1432 bytes gensim/test/test_sklearn_api.py | 83 ++++++++++++++++++ 4 files changed, 104 insertions(+), 4 deletions(-) create mode 100644 gensim/test/test_data/phrases-transformer-new-v3-5-0.pkl create mode 100644 gensim/test/test_data/phrases-transformer-v3-5-0.pkl diff --git a/gensim/sklearn_api/phrases.py b/gensim/sklearn_api/phrases.py index e4b2c6c642..1570acf224 100644 --- a/gensim/sklearn_api/phrases.py +++ b/gensim/sklearn_api/phrases.py @@ -32,6 +32,7 @@ from sklearn.exceptions import NotFittedError from gensim import models +from gensim.models.phrases import Phraser class PhrasesTransformer(TransformerMixin, BaseEstimator): @@ -44,7 +45,7 @@ class PhrasesTransformer(TransformerMixin, BaseEstimator): """ def __init__(self, min_count=5, threshold=10.0, max_vocab_size=40000000, - delimiter=b'_', progress_per=10000, scoring='default'): + delimiter=b'_', progress_per=10000, scoring='default', common_terms=frozenset()): """ Parameters @@ -87,15 +88,25 @@ def __init__(self, min_count=5, threshold=10.0, max_vocab_size=40000000, A scoring function without any of these parameters (even if the parameters are not used) will raise a ValueError on initialization of the Phrases class. The scoring function must be pickleable. 
+ common_terms : set of str, optional + List of "stop words" that won't affect frequency count of expressions containing them. + Allow to detect expressions like "bank_of_america" or "eye_of_the_beholder". """ self.gensim_model = None + self.phraser = None self.min_count = min_count self.threshold = threshold self.max_vocab_size = max_vocab_size self.delimiter = delimiter self.progress_per = progress_per self.scoring = scoring + self.common_terms = common_terms + + def __setstate__(self, state): + self.__dict__ = state + self.common_terms = frozenset() + self.phraser = None def fit(self, X, y=None): """Fit the model according to the given training data. @@ -114,8 +125,9 @@ def fit(self, X, y=None): self.gensim_model = models.Phrases( sentences=X, min_count=self.min_count, threshold=self.threshold, max_vocab_size=self.max_vocab_size, delimiter=self.delimiter, - progress_per=self.progress_per, scoring=self.scoring + progress_per=self.progress_per, scoring=self.scoring, common_terms=self.common_terms ) + self.phraser = Phraser(self.gensim_model) return self def transform(self, docs): @@ -139,10 +151,14 @@ def transform(self, docs): "This model has not been fitted yet. Call 'fit' with appropriate arguments before using this method." ) + if self.phraser is None: + self.phraser = Phraser(self.gensim_model) + # input as python lists if isinstance(docs[0], string_types): docs = [docs] - return [self.gensim_model[doc] for doc in docs] + + return [self.phraser[doc] for doc in docs] def partial_fit(self, X): """Train model over a potentially incomplete set of sentences. @@ -166,8 +182,9 @@ def partial_fit(self, X): self.gensim_model = models.Phrases( sentences=X, min_count=self.min_count, threshold=self.threshold, max_vocab_size=self.max_vocab_size, delimiter=self.delimiter, - progress_per=self.progress_per, scoring=self.scoring + progress_per=self.progress_per, scoring=self.scoring, common_terms=self.common_terms ) self.gensim_model.add_vocab(X) + self.phraser = Phraser(self.gensim_model) return self diff --git a/gensim/test/test_data/phrases-transformer-new-v3-5-0.pkl b/gensim/test/test_data/phrases-transformer-new-v3-5-0.pkl new file mode 100644 index 0000000000000000000000000000000000000000..77994180586f35f8f185943fc7a1c191dbd29c18 GIT binary patch literal 1896 zcmZ`)hjtrP5S3-8t)0XPp_f1?aVP>O0Zcd55D^GN01*g})#^#RWQ*=TtrHtW62PK| z-g_^<#g8!acC|)6WRK2~G@b)Ih21NPTknF zlQhz)gUX)M80(`t)7Vt0RH7hOUXsN_jKBJJ&h{_*+sZ= zfU8Cbt}X~T*RGFiL~S==rf^?GwMWF)60RHI`uh07{EolvGr$d*^}}TkqJVv&#)f0; zS>Z;pxrs1qdu6;N4dM>=o~2iec7oUql_*N{_cf-N#!I3oi4`+*BlderHM1b(8Y|_r z(`1?FN%tJwd=$5=;8wz1)?Cxj+lpBk`oG;q)6#7>3wyZ3I_aiKhZ|{BmlL@2;Pmds z7Iq!EC)8r%xlLt)WsSQIRE{*OYqGuDpS2A6U@JR+xaTY>79;Lpp2zXtKJJ_Gc>w*r z{yu-dKj+s9u90Z&muRHFHWEDGc}W;*uNUypIYkz&z~dS`I8vN2i8~G+TEPP0VQJ>p zyqMjLOR3C~>FEfMNQVv(>Kw&XV>4@HhS74S9~Jx{;W3L#J90yn58LAcA0j+q;cc)`eL_$l=d4y2?j?~Q zBZO8i3FT(JRBMAs(3p@|r(%`MyCMf6E!pzzGa~O1vibyju?4s5Ule>%;Nyf7maaT4 zOCnw-ylg|CW-n#h!%n{1I0G`=BxYqOS`+vy%w?mL0MC;U*tMIL!4 ze-!v9!p{~K{@l9k0DcjTUkSg}tC=samu;-XH?Er*bv#L1#XIfy`YyXH%+3qg%J^je Z!OH)0i2k&Ph!35_I&vTX^7;PN{slS=U%&tW literal 0 HcmV?d00001 diff --git a/gensim/test/test_data/phrases-transformer-v3-5-0.pkl b/gensim/test/test_data/phrases-transformer-v3-5-0.pkl new file mode 100644 index 0000000000000000000000000000000000000000..8ffef6763b539975f1035f50d458da257b6e183c GIT binary patch literal 1432 zcmZ{kYj+ey5QcY?kidjXz#HBHF9g&m;9U_F5M)KML>y6UXJ>nN2Ils5&jb?X9Q**k zo2r`GiE@sgX1AWI>Z-S@`@upyvxQId4Sy%Ikt9)n*D}$Tk z1x;HgTt-r66YLue)sm+L=WmJ{``IGJww;=s&&qIM-njntPxc&OjT=39ABWbHrT=l| 
zCTD38Wk&iHZJr8u@iAH;o{r-(%WPbyW#NNPgGn0K0iKDC88>Oh?HLnfWiboz>_a?9 zIJ};$gu6KhH&bz(#q$aG{{rE~(E=k$pEP+?;3cuYOn7Cq$o2zsyGaX{b{fSNuZqVJ z!qGl*=AtUWF|of!czv|Qk*gf3c2hfRJ>C%dCgJ#K$W&Bg9ZTmsye{|z;iO_Irww~c z>~9m^(Q;&ZzHys&3mXF7CA_C#NdnT`;*^+A6V7OHhuGW~&O^(qMi%6Kar%Jpp&ll& zjc4;mV*Z%$iKY~_`iM^jeMb0P(@OPz%hxtHt!QTje?d5>2^SgL%HvD1ent3NgVxx( zQ%f6;^Wt)WaIsfK6PgZxBlr^GvSL|ODfC~n_*NXQ5Ju_{?9?Uae<$!N;d_Pq7pP-rxA))QoZ~`;PIOu-^%{ zG^xaDFTKQn2sea3wOU!cIhv~xiN}}__a5C4=0#v49+ogw551bQnx^Y(CO9Fa{eW2} zqqDy)=nf(4wbJVP%(?J_P%4+@ZbfXPE1@8`{@jkX>){30gl05slI3JI(FaYY#f2GX zCk Date: Thu, 4 Oct 2018 23:39:31 +0500 Subject: [PATCH 34/66] Add `python3.7` support (#2211) * init support for py37 * Add CI jobs for 3.7 * workaround for py37 & travis * revert back pyemd (finger cross) * temporary supress deprecation from numpy --- .travis.yml | 8 ++++++++ appveyor.yml | 5 +++++ gensim/models/utils_any2vec.py | 4 +++- setup.py | 13 ++++++++----- tox.ini | 15 +++++++++++++-- 5 files changed, 37 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index c4c8603f43..e8df82ceec 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,5 +27,13 @@ matrix: - python: '3.6' env: TOXENV="py36-linux" + - python: '3.7' + env: + - TOXENV="py37-linux" + - BOTO_CONFIG="/dev/null" + dist: xenial + sudo: true + + install: pip install tox script: tox -vv diff --git a/appveyor.yml b/appveyor.yml index 04da45cd43..c9bbf02931 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -28,6 +28,11 @@ environment: PYTHON_ARCH: "64" TOXENV: "py36-win" + - PYTHON: "C:\\Python37-x64" + PYTHON_VERSION: "3.7.0" + PYTHON_ARCH: "64" + TOXENV: "py37-win" + init: - "ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%" - "ECHO \"%APPVEYOR_SCHEDULED_BUILD%\"" diff --git a/gensim/models/utils_any2vec.py b/gensim/models/utils_any2vec.py index cc1e057afd..199b8c034f 100644 --- a/gensim/models/utils_any2vec.py +++ b/gensim/models/utils_any2vec.py @@ -209,7 +209,9 @@ def add_word(word, weights): if ch != b'\n': # ignore newlines in front of words (some binary files have) word.append(ch) word = utils.to_unicode(b''.join(word), encoding=encoding, errors=unicode_errors) - weights = fromstring(fin.read(binary_len), dtype=REAL).astype(datatype) + with utils.ignore_deprecation_warning(): + # TODO use frombuffer or something similar + weights = fromstring(fin.read(binary_len), dtype=REAL).astype(datatype) add_word(word, weights) else: for line_no in xrange(vocab_size): diff --git a/setup.py b/setup.py index ff40e5d7dc..da6b56a141 100644 --- a/setup.py +++ b/setup.py @@ -237,11 +237,14 @@ def finalize_options(self): 'Morfessor==2.0.2a4', ] -linux_testenv = win_testenv + [ - 'annoy', - 'tensorflow <= 1.3.0', - 'keras >= 2.0.4, <= 2.1.4', -] +linux_testenv = win_testenv[:] + +if sys.version_info < (3, 7): + linux_testenv.extend([ + 'tensorflow <= 1.3.0', + 'keras >= 2.0.4, <= 2.1.4', + 'annoy', + ]) ext_modules = [ Extension('gensim.models.word2vec_inner', diff --git a/tox.ini b/tox.ini index 2eb26717dd..f380171659 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] minversion = 2.0 -envlist = {py27,py35,py36}-{win,linux}, flake8, docs, docs-upload, download-wheels, upload-wheels, test-pypi +envlist = {py27,py35,py36,py37}-{win,linux}, flake8, docs, docs-upload, download-wheels, upload-wheels, test-pypi skipsdist = True platform = linux: linux win: win64 @@ -23,9 +23,19 @@ addopts = -rfxEXs --durations=20 --showlocals --reruns 3 --reruns-delay 1 recreate = True ; rackcdn host only for windows wheels (numpy, scipy) -install_command = pip install --timeout=60 --trusted-host 
28daf2247a33ed269873-7b1aad3fab3cc330e1fd9d109892382a.r6.cf2.rackcdn.com --find-links http://28daf2247a33ed269873-7b1aad3fab3cc330e1fd9d109892382a.r6.cf2.rackcdn.com/ {opts} numpy==1.11.3 scipy==0.18.1 {packages} +install_command = pip install --timeout=60 --trusted-host 28daf2247a33ed269873-7b1aad3fab3cc330e1fd9d109892382a.r6.cf2.rackcdn.com --find-links http://28daf2247a33ed269873-7b1aad3fab3cc330e1fd9d109892382a.r6.cf2.rackcdn.com/ {env:TOX_PIP_OPTS:} {opts} {packages} deps = + py37: numpy==1.14.5 + py37: scipy==1.1.0 + + py27: numpy==1.11.3 + py27: scipy==0.18.1 + py35: numpy==1.11.3 + py35: scipy==0.18.1 + py36: numpy==1.11.3 + py36: scipy==0.18.1 + linux: .[test] win: .[test-win] @@ -36,6 +46,7 @@ setenv = DTM_PATH={env:DTM_PATH:} MALLET_HOME={env:MALLET_HOME:} SKIP_NETWORK_TESTS={env:SKIP_NETWORK_TESTS:} + BOTO_CONFIG={env:BOTO_CONFIG:} PYTHONHASHSEED=1 commands = From 61a42dab691b8817ae484385c38e71d014e4175a Mon Sep 17 00:00:00 2001 From: Mario Ynocente Castro Date: Thu, 4 Oct 2018 21:36:05 -0500 Subject: [PATCH 35/66] Fix `np.issubdtype` warnings (#2210) * modify check of subsinstances of np.int for subinstances of np.signedinteger and np.unsignedinteger * change to np.integer and np.floating --- gensim/matutils.py | 4 ++-- gensim/test/test_ldamodel.py | 16 ++++++++-------- gensim/test/test_matutils.py | 12 ++++++------ 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/gensim/matutils.py b/gensim/matutils.py index 777de46817..d159490a70 100644 --- a/gensim/matutils.py +++ b/gensim/matutils.py @@ -715,7 +715,7 @@ def unitvec(vec, norm='l2', return_norm=False): if norm == 'l2': veclen = np.sqrt(np.sum(vec.data ** 2)) if veclen > 0.0: - if np.issubdtype(vec.dtype, np.int): + if np.issubdtype(vec.dtype, np.integer): vec = vec.astype(np.float) vec /= veclen if return_norm: @@ -734,7 +734,7 @@ def unitvec(vec, norm='l2', return_norm=False): if norm == 'l2': veclen = blas_nrm2(vec) if veclen > 0.0: - if np.issubdtype(vec.dtype, np.int): + if np.issubdtype(vec.dtype, np.integer): vec = vec.astype(np.float) if return_norm: return blas_scal(1.0 / veclen, vec).astype(vec.dtype), veclen diff --git a/gensim/test/test_ldamodel.py b/gensim/test/test_ldamodel.py index c8f1a6e743..11df414903 100644 --- a/gensim/test/test_ldamodel.py +++ b/gensim/test/test_ldamodel.py @@ -201,14 +201,14 @@ def testTopTopics(self): for v, k in topic: self.assertTrue(isinstance(k, six.string_types)) - self.assertTrue(np.issubdtype(v, float)) + self.assertTrue(np.issubdtype(v, np.floating)) def testGetTopicTerms(self): topic_terms = self.model.get_topic_terms(1) for k, v in topic_terms: self.assertTrue(isinstance(k, numbers.Integral)) - self.assertTrue(np.issubdtype(v, float)) + self.assertTrue(np.issubdtype(v, np.floating)) def testGetDocumentTopics(self): @@ -222,7 +222,7 @@ def testGetDocumentTopics(self): self.assertTrue(isinstance(topic, list)) for k, v in topic: self.assertTrue(isinstance(k, numbers.Integral)) - self.assertTrue(np.issubdtype(v, float)) + self.assertTrue(np.issubdtype(v, np.floating)) # Test case to use the get_document_topic function for the corpus all_topics = model.get_document_topics(self.corpus, per_word_topics=True) @@ -233,7 +233,7 @@ def testGetDocumentTopics(self): self.assertTrue(isinstance(topic, tuple)) for k, v in topic[0]: # list of doc_topics self.assertTrue(isinstance(k, numbers.Integral)) - self.assertTrue(np.issubdtype(v, float)) + self.assertTrue(np.issubdtype(v, np.floating)) for w, topic_list in topic[1]: # list of word_topics self.assertTrue(isinstance(w, 
numbers.Integral)) @@ -257,7 +257,7 @@ def testGetDocumentTopics(self): self.assertTrue(isinstance(topic, tuple)) for k, v in topic[0]: # list of doc_topics self.assertTrue(isinstance(k, numbers.Integral)) - self.assertTrue(np.issubdtype(v, float)) + self.assertTrue(np.issubdtype(v, np.floating)) if len(topic[0]) != 0: doc_topic_count_na += 1 @@ -278,7 +278,7 @@ def testGetDocumentTopics(self): for k, v in doc_topics: self.assertTrue(isinstance(k, numbers.Integral)) - self.assertTrue(np.issubdtype(v, float)) + self.assertTrue(np.issubdtype(v, np.floating)) for w, topic_list in word_topics: self.assertTrue(isinstance(w, numbers.Integral)) @@ -306,7 +306,7 @@ def testTermTopics(self): result = model.get_term_topics(2) for topic_no, probability in result: self.assertTrue(isinstance(topic_no, int)) - self.assertTrue(np.issubdtype(probability, float)) + self.assertTrue(np.issubdtype(probability, np.floating)) # checks if topic '1' is in the result list # FIXME: Fails on osx and win @@ -316,7 +316,7 @@ def testTermTopics(self): result = model.get_term_topics(str(model.id2word[2])) for topic_no, probability in result: self.assertTrue(isinstance(topic_no, int)) - self.assertTrue(np.issubdtype(probability, float)) + self.assertTrue(np.issubdtype(probability, np.floating)) # checks if topic '1' is in the result list # FIXME: Fails on osx and win diff --git a/gensim/test/test_matutils.py b/gensim/test/test_matutils.py index b079df9e43..49988af296 100644 --- a/gensim/test/test_matutils.py +++ b/gensim/test/test_matutils.py @@ -176,14 +176,14 @@ def test_sparse_npint32(self): unit_vector = matutils.unitvec(input_vector) man_unit_vector = manual_unitvec(input_vector) self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3)) - self.assertTrue(np.issubdtype(unit_vector.dtype, float)) + self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating)) def test_sparse_npint64(self): input_vector = sparse.csr_matrix(np.asarray([[1, 0, 0, 0, 3], [0, 0, 4, 3, 0]])).astype(np.int64) unit_vector = matutils.unitvec(input_vector) man_unit_vector = manual_unitvec(input_vector) self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3)) - self.assertTrue(np.issubdtype(unit_vector.dtype, float)) + self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating)) def test_dense_npfloat32(self): input_vector = np.random.uniform(size=(5,)).astype(np.float32) @@ -204,14 +204,14 @@ def test_dense_npint32(self): unit_vector = matutils.unitvec(input_vector) man_unit_vector = manual_unitvec(input_vector) self.assertTrue(np.allclose(unit_vector, man_unit_vector)) - self.assertTrue(np.issubdtype(unit_vector.dtype, float)) + self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating)) def test_dense_npint64(self): input_vector = np.random.randint(10, size=5).astype(np.int32) unit_vector = matutils.unitvec(input_vector) man_unit_vector = manual_unitvec(input_vector) self.assertTrue(np.allclose(unit_vector, man_unit_vector)) - self.assertTrue(np.issubdtype(unit_vector.dtype, float)) + self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating)) def test_sparse_python_float(self): input_vector = sparse.csr_matrix(np.asarray([[1, 0, 0, 0, 3], [0, 0, 4, 3, 0]])).astype(float) @@ -225,7 +225,7 @@ def test_sparse_python_int(self): unit_vector = matutils.unitvec(input_vector) man_unit_vector = manual_unitvec(input_vector) self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3)) - self.assertTrue(np.issubdtype(unit_vector.dtype, float)) + 
self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating)) def test_dense_python_float(self): input_vector = np.random.uniform(size=(5,)).astype(float) @@ -239,7 +239,7 @@ def test_dense_python_int(self): unit_vector = matutils.unitvec(input_vector) man_unit_vector = manual_unitvec(input_vector) self.assertTrue(np.allclose(unit_vector, man_unit_vector)) - self.assertTrue(np.issubdtype(unit_vector.dtype, float)) + self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating)) if __name__ == '__main__': From 2891861d77f9eff2dc703214c099240ef227b7da Mon Sep 17 00:00:00 2001 From: Ivan Menshikh Date: Fri, 5 Oct 2018 14:03:23 +0500 Subject: [PATCH 36/66] Cleanup (get rid py26 stuff) (#2214) * add 3.7 to classifier * drop ez install (outdated & useless) * replace custom _bit_length with naitive bit_length * get rid ez_setup * logging cleanup * revert __name__ for scripts --- ez_setup.py | 405 ---------------------- gensim/__init__.py | 9 +- gensim/downloader.py | 2 +- gensim/models/rpmodel.py | 2 +- gensim/models/wrappers/ldavowpalwabbit.py | 20 +- gensim/summarization/textcleaner.py | 2 +- gensim/test/test_miislita.py | 2 +- setup.py | 4 +- 8 files changed, 7 insertions(+), 439 deletions(-) delete mode 100644 ez_setup.py diff --git a/ez_setup.py b/ez_setup.py deleted file mode 100644 index 4251063fc0..0000000000 --- a/ez_setup.py +++ /dev/null @@ -1,405 +0,0 @@ -#!python -"""Bootstrap setuptools installation - -If you want to use setuptools in your package's setup.py, just include this -file in the same directory with it, and add this to the top of your setup.py:: - - from ez_setup import use_setuptools - use_setuptools() - -If you want to require a specific version of setuptools, set a download -mirror, or use an alternate download directory, you can do so by supplying -the appropriate options to ``use_setuptools()``. - -This file can also be run as a script to install or upgrade setuptools. 
-""" -import os -import shutil -import sys -import tempfile -import tarfile -import optparse -import subprocess -import platform - -from distutils import log - -try: - from site import USER_SITE -except ImportError: - USER_SITE = None - -DEFAULT_VERSION = "1.3.2" -DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" - - -def _python_cmd(*args): - args = (sys.executable,) + args - return subprocess.call(args) == 0 - - -def _check_call_py24(cmd, *args, **kwargs): - res = subprocess.call(cmd, *args, **kwargs) - - class CalledProcessError(Exception): - pass - if not res == 0: - msg = "Command '%s' return non-zero exit status %d" % (cmd, res) - raise CalledProcessError(msg) - - -vars(subprocess).setdefault('check_call', _check_call_py24) - - -def _install(tarball, install_args=()): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # installing - log.warn('Installing Setuptools') - if not _python_cmd('setup.py', 'install', *install_args): - log.warn('Something went wrong during the installation.') - log.warn('See the error message above.') - # exitcode will be 2 - return 2 - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - - -def _build_egg(egg, tarball, to_dir): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # building an egg - log.warn('Building a Setuptools egg in %s', to_dir) - _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) - - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - # returning the result - log.warn(egg) - if not os.path.exists(egg): - raise IOError('Could not build the egg.') - - -def _do_download(version, download_base, to_dir, download_delay): - egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' - % (version, sys.version_info[0], sys.version_info[1])) - if not os.path.exists(egg): - tarball = download_setuptools(version, download_base, - to_dir, download_delay) - _build_egg(egg, tarball, to_dir) - sys.path.insert(0, egg) - - # Remove previously-imported pkg_resources if present (see - # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). - if 'pkg_resources' in sys.modules: - del sys.modules['pkg_resources'] - - import setuptools - setuptools.bootstrap_install_from = egg - - -def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, download_delay=15): - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - was_imported = 'pkg_resources' in sys.modules or \ - 'setuptools' in sys.modules - try: - import pkg_resources - except ImportError: - return _do_download(version, download_base, to_dir, download_delay) - try: - pkg_resources.require("setuptools>=" + version) - return - except pkg_resources.VersionConflict: - e = sys.exc_info()[1] - if was_imported: - sys.stderr.write( - "The required version of setuptools (>=%s) is not available,\n" - "and can't be installed while this script is running. 
Please\n" - "install a more recent version first, using\n" - "'easy_install -U setuptools'." - "\n\n(Currently using %r)\n" % (version, e.args[0])) - sys.exit(2) - else: - del pkg_resources, sys.modules['pkg_resources'] # reload ok - return _do_download(version, download_base, to_dir, - download_delay) - except pkg_resources.DistributionNotFound: - return _do_download(version, download_base, to_dir, - download_delay) - - -def _clean_check(cmd, target): - """ - Run the command to download target. If the command fails, clean up before - re-raising the error. - """ - try: - subprocess.check_call(cmd) - except subprocess.CalledProcessError: - if os.access(target, os.F_OK): - os.unlink(target) - raise - - -def download_file_powershell(url, target): - """ - Download the file at url to target using Powershell (which will validate - trust). Raise an exception if the command cannot complete. - """ - target = os.path.abspath(target) - cmd = [ - 'powershell', - '-Command', - "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), - ] - _clean_check(cmd, target) - - -def has_powershell(): - if platform.system() != 'Windows': - return False - cmd = ['powershell', '-Command', 'echo test'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - finally: - devnull.close() - return True - - -download_file_powershell.viable = has_powershell - - -def download_file_curl(url, target): - cmd = ['curl', url, '--silent', '--output', target] - _clean_check(cmd, target) - - -def has_curl(): - cmd = ['curl', '--version'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - finally: - devnull.close() - return True - - -download_file_curl.viable = has_curl - - -def download_file_wget(url, target): - cmd = ['wget', url, '--quiet', '--output-document', target] - _clean_check(cmd, target) - - -def has_wget(): - cmd = ['wget', '--version'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - finally: - devnull.close() - return True - - -download_file_wget.viable = has_wget - - -def download_file_insecure(url, target): - """ - Use Python to download the file, even though it cannot authenticate the - connection. - """ - try: - from urllib.request import urlopen - except ImportError: - from urllib2 import urlopen - src = dst = None - try: - src = urlopen(url) - # Read/write all in one block, so we don't create a corrupt file - # if the download is interrupted. - data = src.read() - dst = open(target, "wb") - dst.write(data) - finally: - if src: - src.close() - if dst: - dst.close() - - -download_file_insecure.viable = lambda: True - - -def get_best_downloader(): - downloaders = [ - download_file_powershell, - download_file_curl, - download_file_wget, - download_file_insecure, - ] - - for dl in downloaders: - if dl.viable(): - return dl - - -def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, delay=15, - downloader_factory=get_best_downloader): - """Download setuptools from a specified location and return its filename - - `version` should be a valid setuptools version number that is available - as an egg for download under the `download_base` URL (which should end - with a '/'). `to_dir` is the directory where the egg will be downloaded. 
- `delay` is the number of seconds to pause before an actual download - attempt. - - ``downloader_factory`` should be a function taking no arguments and - returning a function for downloading a URL to a target. - """ - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - tgz_name = "setuptools-%s.tar.gz" % version - url = download_base + tgz_name - saveto = os.path.join(to_dir, tgz_name) - if not os.path.exists(saveto): # Avoid repeated downloads - log.warn("Downloading %s", url) - downloader = downloader_factory() - downloader(url, saveto) - return os.path.realpath(saveto) - - -def _extractall(self, path=".", members=None): - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. `path' specifies a different directory - to extract to. `members' is optional and must be a subset of the - list returned by getmembers(). - """ - import copy - import operator - from tarfile import ExtractError - directories = [] - - if members is None: - members = self - - for tarinfo in members: - if tarinfo.isdir(): - # Extract directories with a safe mode. - directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 448 # decimal for oct 0700 - self.extract(tarinfo, path) - - # Reverse sort directories. - if sys.version_info < (2, 4): - def sorter(dir1, dir2): - return cmp(dir1.name, dir2.name) # noqa:F821 - directories.sort(sorter) - directories.reverse() - else: - directories.sort(key=operator.attrgetter('name'), reverse=True) - - # Set correct owner, mtime and filemode on directories. - for tarinfo in directories: - dirpath = os.path.join(path, tarinfo.name) - try: - self.chown(tarinfo, dirpath) - self.utime(tarinfo, dirpath) - self.chmod(tarinfo, dirpath) - except ExtractError: - e = sys.exc_info()[1] - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - -def _build_install_args(options): - """ - Build the arguments to 'python setup.py install' on the setuptools package - """ - install_args = [] - if options.user_install: - if sys.version_info < (2, 6): - log.warn("--user requires Python 2.6 or later") - raise SystemExit(1) - install_args.append('--user') - return install_args - - -def _parse_args(): - """ - Parse the command line for options - """ - parser = optparse.OptionParser() - parser.add_option( - '--user', dest='user_install', action='store_true', default=False, - help='install in user site package (requires Python 2.6 or later)') - parser.add_option( - '--download-base', dest='download_base', metavar="URL", - default=DEFAULT_URL, - help='alternative URL from where to download the setuptools package') - parser.add_option( - '--insecure', dest='downloader_factory', action='store_const', - const=lambda: download_file_insecure, default=get_best_downloader, - help='Use internal, non-validating downloader' - ) - options, args = parser.parse_args() - # positional arguments are ignored - return options - - -def main(version=DEFAULT_VERSION): - """Install or upgrade setuptools and EasyInstall""" - options = _parse_args() - tarball = download_setuptools(download_base=options.download_base, - downloader_factory=options.downloader_factory) - return _install(tarball, _build_install_args(options)) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/gensim/__init__.py b/gensim/__init__.py index 4c55eada1e..280af83834 100644 --- a/gensim/__init__.py +++ b/gensim/__init__.py @@ -8,13 +8,6 @@ __version__ = '3.6.0' -class 
NullHandler(logging.Handler): - """For python versions <= 2.6; same as `logging.NullHandler` in 2.7.""" - - def emit(self, record): - pass - - logger = logging.getLogger('gensim') if len(logger.handlers) == 0: # To ensure reload() doesn't add another one - logger.addHandler(NullHandler()) + logger.addHandler(logging.NullHandler()) diff --git a/gensim/downloader.py b/gensim/downloader.py index d8ec1ac80e..a75a772503 100644 --- a/gensim/downloader.py +++ b/gensim/downloader.py @@ -59,7 +59,7 @@ user_dir = os.path.expanduser('~') base_dir = os.path.join(user_dir, 'gensim-data') -logger = logging.getLogger('gensim.api') +logger = logging.getLogger(__name__) DATA_LIST_URL = "https://raw.githubusercontent.com/RaRe-Technologies/gensim-data/master/list.json" DOWNLOAD_BASE_URL = "https://github.com/RaRe-Technologies/gensim-data/releases/download" diff --git a/gensim/models/rpmodel.py b/gensim/models/rpmodel.py index e946c4acb2..a348d4040b 100644 --- a/gensim/models/rpmodel.py +++ b/gensim/models/rpmodel.py @@ -44,7 +44,7 @@ from gensim import interfaces, matutils, utils -logger = logging.getLogger('gensim.models.rpmodel') +logger = logging.getLogger(__name__) class RpModel(interfaces.TransformationABC): diff --git a/gensim/models/wrappers/ldavowpalwabbit.py b/gensim/models/wrappers/ldavowpalwabbit.py index d62a914d53..6582fe96d6 100644 --- a/gensim/models/wrappers/ldavowpalwabbit.py +++ b/gensim/models/wrappers/ldavowpalwabbit.py @@ -534,7 +534,7 @@ def _get_vw_train_command(self, corpus_size, update=False): # these params are read from model file if updating cmd.extend([ '--lda', str(self.num_topics), - '-b', str(_bit_length(self.num_terms)), + '-b', str(self.num_terms.bit_length()), '--lda_alpha', str(self.alpha), '--lda_rho', str(self.eta) ]) @@ -857,24 +857,6 @@ def _run_vw_command(cmd): return output -# if python2.6 support is ever dropped, can change to using int.bit_length() -def _bit_length(num): - """Get number of bits needed to encode given number. - - Parameters - ---------- - num : int - Number to encode. - - Returns - ------- - int - Number of bits needed to encode given number. - - """ - return len(bin(num).lstrip('-0b')) - - def vwmodel2ldamodel(vw_model, iterations=50): """Convert :class:`~gensim.models.wrappers.ldavowpalwabbit.LdaVowpalWabbit` to :class:`~gensim.models.ldamodel.LdaModel`. 
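For readers unfamiliar with the built-in that replaces the removed `_bit_length` helper in the LdaVowpalWabbit wrapper above: `int.bit_length()` (available since Python 2.7) returns the number of bits needed to represent an integer, which is exactly what the helper computed by stripping the `0b` prefix from `bin(num)`. A minimal illustrative sketch of the equivalence, using a made-up vocabulary size rather than a real model attribute:

.. sourcecode:: pycon

    >>> num_terms = 40000  # hypothetical vocabulary size, standing in for self.num_terms
    >>> len(bin(num_terms).lstrip('-0b'))  # what the removed helper computed
    16
    >>> num_terms.bit_length()  # built-in replacement used by the patch
    16
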
diff --git a/gensim/summarization/textcleaner.py b/gensim/summarization/textcleaner.py index 8bd0158a54..9e0dfdd971 100644 --- a/gensim/summarization/textcleaner.py +++ b/gensim/summarization/textcleaner.py @@ -27,7 +27,7 @@ import re import logging -logger = logging.getLogger('summarizer.preprocessing.cleaner') +logger = logging.getLogger(__name__) try: from pattern.en import tag diff --git a/gensim/test/test_miislita.py b/gensim/test/test_miislita.py index 344da1adb3..d3c4384f95 100644 --- a/gensim/test/test_miislita.py +++ b/gensim/test/test_miislita.py @@ -22,7 +22,7 @@ from gensim import utils, corpora, models, similarities from gensim.test.utils import datapath, get_tmpfile -logger = logging.getLogger('test_miislita') +logger = logging.getLogger(__name__) class CorpusMiislita(corpora.TextCorpus): diff --git a/setup.py b/setup.py index da6b56a141..9982cfaeea 100644 --- a/setup.py +++ b/setup.py @@ -14,15 +14,12 @@ import platform import sys import warnings -import ez_setup from setuptools import setup, find_packages, Extension from setuptools.command.build_ext import build_ext if sys.version_info[:2] < (2, 7) or (sys.version_info[:1] == 3 and sys.version_info[:2] < (3, 5)): raise Exception('This version of gensim needs Python 2.7, 3.5 or later.') -ez_setup.use_setuptools() - # the following code is adapted from tornado's setup.py: # https://github.com/tornadoweb/tornado/blob/master/setup.py # to support installing without the extension on platforms where @@ -334,6 +331,7 @@ def finalize_options(self): 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Scientific/Engineering :: Artificial Intelligence', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Text Processing :: Linguistic', From 5934b1301a959b780d18b9cc8975f8b352cd8d4f Mon Sep 17 00:00:00 2001 From: Stig Johan Berggren Date: Fri, 5 Oct 2018 13:25:27 +0200 Subject: [PATCH 37/66] Use `itertools.chain` instead of `sum` to concatenate lists (#2212) * use itertools.chain to concatenate lists * concatenate lists with chain instead of sum --- gensim/models/keyedvectors.py | 9 +++++---- gensim/models/wrappers/ldamallet.py | 5 +++-- gensim/test/test_corpora_dictionary.py | 3 ++- gensim/test/test_word2vec.py | 16 +++++++++++----- 4 files changed, 21 insertions(+), 12 deletions(-) diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index 442e9ca07a..9954abe75f 100644 --- a/gensim/models/keyedvectors.py +++ b/gensim/models/keyedvectors.py @@ -159,6 +159,7 @@ from __future__ import division # py3 "true division" from collections import deque +from itertools import chain import logging try: @@ -1144,8 +1145,8 @@ def evaluate_word_analogies(self, analogies, restrict_vocab=300000, case_insensi total = { 'section': 'Total accuracy', - 'correct': sum((s['correct'] for s in sections), []), - 'incorrect': sum((s['incorrect'] for s in sections), []), + 'correct': list(chain.from_iterable(s['correct'] for s in sections)), + 'incorrect': list(chain.from_iterable(s['incorrect'] for s in sections)), } oov_ratio = float(oov) / quadruplets_no * 100 @@ -1250,8 +1251,8 @@ def accuracy(self, questions, restrict_vocab=30000, most_similar=most_similar, c total = { 'section': 'total', - 'correct': sum((s['correct'] for s in sections), []), - 'incorrect': sum((s['incorrect'] for s in sections), []), + 'correct': list(chain.from_iterable(s['correct'] for s in sections)), + 'incorrect': 
list(chain.from_iterable(s['incorrect'] for s in sections)), } self.log_accuracy(total) sections.append(total) diff --git a/gensim/models/wrappers/ldamallet.py b/gensim/models/wrappers/ldamallet.py index 6c9487eb37..6639be5d8e 100644 --- a/gensim/models/wrappers/ldamallet.py +++ b/gensim/models/wrappers/ldamallet.py @@ -52,6 +52,7 @@ import tempfile import xml.etree.ElementTree as et import zipfile +from itertools import chain import numpy from smart_open import smart_open @@ -222,9 +223,9 @@ def corpus2mallet(self, corpus, file_like): """ for docno, doc in enumerate(corpus): if self.id2word: - tokens = sum(([self.id2word[tokenid]] * int(cnt) for tokenid, cnt in doc), []) + tokens = chain.from_iterable([self.id2word[tokenid]] * int(cnt) for tokenid, cnt in doc) else: - tokens = sum(([str(tokenid)] * int(cnt) for tokenid, cnt in doc), []) + tokens = chain.from_iterable([str(tokenid)] * int(cnt) for tokenid, cnt in doc) file_like.write(utils.to_utf8("%s 0 %s\n" % (docno, ' '.join(tokens)))) def convert_input(self, corpus, infer=False, serialize_corpus=True): diff --git a/gensim/test/test_corpora_dictionary.py b/gensim/test/test_corpora_dictionary.py index e0b8d1e426..13a16a3cd1 100644 --- a/gensim/test/test_corpora_dictionary.py +++ b/gensim/test/test_corpora_dictionary.py @@ -9,6 +9,7 @@ from collections import Mapping +from itertools import chain import logging import unittest import codecs @@ -258,7 +259,7 @@ def test_from_corpus(self): for document in documents] # remove words that appear only once - all_tokens = sum(texts, []) + all_tokens = list(chain.from_iterable(texts)) tokens_once = set(word for word in set(all_tokens) if all_tokens.count(word) == 1) texts = [[word for word in text if word not in tokens_once] for text in texts] diff --git a/gensim/test/test_word2vec.py b/gensim/test/test_word2vec.py index 9cce7e6fa9..7a7ef31262 100644 --- a/gensim/test/test_word2vec.py +++ b/gensim/test/test_word2vec.py @@ -593,12 +593,18 @@ def testLocking(self): self.assertFalse((unlocked1 == model.wv.vectors[1]).all()) # unlocked vector should vary self.assertTrue((locked0 == model.wv.vectors[0]).all()) # locked vector should not vary - def testAccuracy(self): - """Test Word2Vec accuracy and KeyedVectors accuracy give the same result""" + def testEvaluateWordAnalogies(self): + """Test that evaluating analogies on KeyedVectors give sane results""" model = word2vec.Word2Vec(LeeCorpus()) - w2v_accuracy = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) - kv_accuracy = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) - self.assertEqual(w2v_accuracy, kv_accuracy) + score, sections = model.wv.evaluate_word_analogies(datapath('questions-words.txt')) + self.assertGreaterEqual(score, 0.0) + self.assertLessEqual(score, 1.0) + self.assertGreater(len(sections), 0) + # Check that dict contains the right keys + first_section = sections[0] + self.assertIn('section', first_section) + self.assertIn('correct', first_section) + self.assertIn('incorrect', first_section) def testEvaluateWordPairs(self): """Test Spearman and Pearson correlation coefficients give sane results on similarity datasets""" From 21ef524cc0f2703e9a055110061fd95da9a1f18b Mon Sep 17 00:00:00 2001 From: Joao Moreira <13685125+jagmoreira@users.noreply.github.com> Date: Sun, 7 Oct 2018 22:12:18 -0500 Subject: [PATCH 38/66] Fix `WmdSimilarity` documentation (#2217) * Update documentation for WmdSimilarity. * Fix curly braces issue. * Fix WmdSimilarity docstring example. 
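Before moving on to the WmdSimilarity documentation fix, a quick note on the `itertools.chain` change from the previous patch: `sum(list_of_lists, [])` re-allocates the accumulated list on every addition, so its cost grows quadratically with the number of sub-lists, whereas `chain.from_iterable` concatenates in a single linear pass. A small sketch with a toy `sections` list (the real structure holds analogy-evaluation results, as in the keyedvectors diff above):

.. sourcecode:: pycon

    >>> from itertools import chain
    >>> sections = [{'correct': [1, 2], 'incorrect': [3]}, {'correct': [4], 'incorrect': []}]  # toy data
    >>> sum((s['correct'] for s in sections), [])  # old approach: repeated list re-allocation
    [1, 2, 4]
    >>> list(chain.from_iterable(s['correct'] for s in sections))  # new approach: one linear pass
    [1, 2, 4]
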
--- gensim/similarities/docsim.py | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/gensim/similarities/docsim.py b/gensim/similarities/docsim.py index d033eb4fc7..e301486eb3 100755 --- a/gensim/similarities/docsim.py +++ b/gensim/similarities/docsim.py @@ -826,7 +826,7 @@ def get_similarities(self, query): Parameters ---------- - query : {list of (int, number), iterable of list of (int, number), :class:`scipy.sparse.csr_matrix` + query : {list of (int, number), iterable of list of (int, number), :class:`scipy.sparse.csr_matrix`} Document or collection of documents. Return @@ -938,7 +938,7 @@ def get_similarities(self, query): Parameters ---------- - query : {list of (int, number), iterable of list of (int, number) + query : {list of (int, number), iterable of list of (int, number)} Document or collection of documents. Return @@ -978,7 +978,7 @@ def __str__(self): class WmdSimilarity(interfaces.SimilarityABC): - """Compute negative WMD similarity against a corpus of documents by storing the index matrix in memory. + """Compute negative WMD similarity against a corpus of documents. See :class:`~gensim.models.keyedvectors.WordEmbeddingsKeyedVectors` for more information. Also, tutorial `notebook @@ -999,17 +999,14 @@ class WmdSimilarity(interfaces.SimilarityABC): .. sourcecode:: pycon >>> from gensim.test.utils import common_texts - >>> from gensim.corpora import Dictionary >>> from gensim.models import Word2Vec >>> from gensim.similarities import WmdSimilarity >>> >>> model = Word2Vec(common_texts, size=20, min_count=1) # train word-vectors - >>> dictionary = Dictionary(common_texts) - >>> bow_corpus = [dictionary.doc2bow(document) for document in common_texts] >>> - >>> index = WmdSimilarity(bow_corpus, model) + >>> index = WmdSimilarity(common_texts, model) >>> # Make query. - >>> query = 'trees' + >>> query = ['trees'] >>> sims = index[query] """ @@ -1018,8 +1015,8 @@ def __init__(self, corpus, w2v_model, num_best=None, normalize_w2v_and_replace=T Parameters ---------- - corpus: iterable of list of (int, float) - A list of documents in the BoW format. + corpus: iterable of list of str + A list of documents, each of which is a list of tokens. w2v_model: :class:`~gensim.models.word2vec.Word2VecTrainables` A trained word2vec model. num_best: int, optional @@ -1058,7 +1055,7 @@ def get_similarities(self, query): Parameters ---------- - query : {list of (int, number), iterable of list of (int, number) + query : {list of str, iterable of list of str} Document or collection of documents. Return @@ -1194,7 +1191,7 @@ def get_similarities(self, query): Parameters ---------- - query : {list of (int, number), iterable of list of (int, number), :class:`scipy.sparse.csr_matrix` + query : {list of (int, number), iterable of list of (int, number), :class:`scipy.sparse.csr_matrix`} Document or collection of documents. 
Return From 79b4a448174ab94c3dfa73f2cc4f725043220fd0 Mon Sep 17 00:00:00 2001 From: Anmol Wassan Date: Wed, 10 Oct 2018 01:47:00 +0530 Subject: [PATCH 39/66] Replace `fify -> fifty` in `gensim.parsing.preprocessing.STOPWORDS` (#2220) --- gensim/parsing/preprocessing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gensim/parsing/preprocessing.py b/gensim/parsing/preprocessing.py index 98133b3848..ed77c8e6b6 100644 --- a/gensim/parsing/preprocessing.py +++ b/gensim/parsing/preprocessing.py @@ -45,7 +45,7 @@ STOPWORDS = frozenset([ 'all', 'six', 'just', 'less', 'being', 'indeed', 'over', 'move', 'anyway', 'four', 'not', 'own', 'through', - 'using', 'fify', 'where', 'mill', 'only', 'find', 'before', 'one', 'whose', 'system', 'how', 'somewhere', + 'using', 'fifty', 'where', 'mill', 'only', 'find', 'before', 'one', 'whose', 'system', 'how', 'somewhere', 'much', 'thick', 'show', 'had', 'enough', 'should', 'to', 'must', 'whom', 'seeming', 'yourselves', 'under', 'ours', 'two', 'has', 'might', 'thereafter', 'latterly', 'do', 'them', 'his', 'around', 'than', 'get', 'very', 'de', 'none', 'cannot', 'every', 'un', 'they', 'front', 'during', 'thus', 'now', 'him', 'nor', 'name', 'regarding', From de8657e9b8d5192750296b6765175c31c8bb3298 Mon Sep 17 00:00:00 2001 From: Johann Petrak Date: Mon, 15 Oct 2018 07:30:05 +0100 Subject: [PATCH 40/66] Remove `alpha="auto"` from `LdaMulticore` (not supported yet) (#2225) * Remove the 'auto' parameter value. The 'auto' parameter value for parameter alpha is not supported in LdaMulticore, so remove it from the documentation. * Note that alpha='auto' not available in distributed mode. Fixes #2223 * remove trailing whitespace --- gensim/models/ldamodel.py | 2 +- gensim/models/ldamulticore.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index c62a5aa9d1..c6547de50a 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -385,7 +385,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, Alternatively default prior selecting strategies can be employed by supplying a string: * 'asymmetric': Uses a fixed normalized asymmetric prior of `1.0 / topicno`. - * 'auto': Learns an asymmetric prior from the corpus. + * 'auto': Learns an asymmetric prior from the corpus (not available if `distributed==True`). eta : {float, np.array, str}, optional A-priori belief on word probability, this can be: diff --git a/gensim/models/ldamulticore.py b/gensim/models/ldamulticore.py index ecf043ea29..248cc83abc 100644 --- a/gensim/models/ldamulticore.py +++ b/gensim/models/ldamulticore.py @@ -134,7 +134,6 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, workers=None, Alternatively default prior selecting strategies can be employed by supplying a string: * 'asymmetric': Uses a fixed normalized asymmetric prior of `1.0 / topicno`. - * 'auto': Learns an asymmetric prior from the corpus. 
eta : {float, np.array, str}, optional A-priori belief on word probability, this can be: From 7e4965ee6c9d4e200dae6fb089b46c2ebc27e159 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Radim=20=C5=98eh=C5=AF=C5=99ek?= Date: Tue, 16 Oct 2018 12:26:09 +0200 Subject: [PATCH 41/66] Update Adopters in README (#2234) --- README.md | 40 +++++++++++++++++----------------------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/README.md b/README.md index d2b9e865f5..78c2209f42 100644 --- a/README.md +++ b/README.md @@ -119,29 +119,23 @@ Documentation Adopters -------- - - -| Name | Logo | URL | Description | -|----------------------------------------|--------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| RaRe Technologies | ![rare](docs/src/readme_images/rare.png) | [rare-technologies.com](http://rare-technologies.com) | Machine learning & NLP consulting and training. Creators and maintainers of Gensim. | -| Mindseye | ![mindseye](docs/src/readme_images/mindseye.png) | [mindseye.com](http://www.mindseyesolutions.com/) | Similarities in legal documents | -| Talentpair | ![talent-pair](docs/src/readme_images/talent-pair.png) | [talentpair.com](http://talentpair.com) | Data science driving high-touch recruiting | -| Tailwind | ![tailwind](docs/src/readme_images/tailwind.png)| [Tailwindapp.com](https://www.tailwindapp.com/)| Post interesting and relevant content to Pinterest | -| Issuu | ![issuu](docs/src/readme_images/issuu.png) | [Issuu.com](https://issuu.com/)| Gensim’s LDA module lies at the very core of the analysis we perform on each uploaded publication to figure out what it’s all about. 
-| Sports Authority | ![sports-authority](docs/src/readme_images/sports-authority.png) | [sportsauthority.com](https://en.wikipedia.org/wiki/Sports_Authority)| Text mining of customer surveys and social media sources | -| Search Metrics | ![search-metrics](docs/src/readme_images/search-metrics.png) | [searchmetrics.com](http://www.searchmetrics.com/)| Gensim word2vec used for entity disambiguation in Search Engine Optimisation -| Cisco Security | ![cisco](docs/src/readme_images/cisco.png) | [cisco.com](http://www.cisco.com/c/en/us/products/security/index.html)| Large-scale fraud detection -| 12K Research | ![12k](docs/src/readme_images/12k.png)| [12k.co](https://12k.co/)| Document similarity analysis on media articles -| National Institutes of Health | ![nih](docs/src/readme_images/nih.png) | [github/NIHOPA](https://github.com/NIHOPA/pipeline_word2vec)| Processing grants and publications with word2vec -| Codeq LLC | ![codeq](docs/src/readme_images/codeq.png) | [codeq.com](https://codeq.com)| Document classification with word2vec -| Mass Cognition | ![mass-cognition](docs/src/readme_images/mass-cognition.png) | [masscognition.com](http://www.masscognition.com/) | Topic analysis service for consumer text data and general text data | -| Stillwater Supercomputing | ![stillwater](docs/src/readme_images/stillwater.png) | [stillwater-sc.com](http://www.stillwater-sc.com/) | Document comprehension and association with word2vec | -| Channel 4 | ![channel4](docs/src/readme_images/channel4.png) | [channel4.com](http://www.channel4.com/) | Recommendation engine | -| Amazon | ![amazon](docs/src/readme_images/amazon.png) | [amazon.com](http://www.amazon.com/) | Document similarity| -| SiteGround Hosting | ![siteground](docs/src/readme_images/siteground.png) | [siteground.com](https://www.siteground.com/) | An ensemble search engine which uses different embeddings models and similarities, including word2vec, WMD, and LDA. | -| Juju | ![juju](docs/src/readme_images/juju.png) | [www.juju.com](http://www.juju.com/) | Provide non-obvious related job suggestions. | -| NLPub | ![nlpub](docs/src/readme_images/nlpub.png) | [nlpub.org](https://nlpub.org/) | Distributional semantic models including word2vec. | -|Capital One | ![capitalone](docs/src/readme_images/capitalone.png) | [www.capitalone.com](https://www.capitalone.com/) | Topic modeling for customer complaints exploration. | +| Company | Logo | Industry | Use of Gensim | +|---------|------|----------|---------------| +| [RARE Technologies](http://rare-technologies.com) | ![rare](docs/src/readme_images/rare.png) | ML & NLP consulting | Creators of Gensim – this is us! | +| [Amazon](http://www.amazon.com/) | ![amazon](docs/src/readme_images/amazon.png) | Retail | Document similarity. | +| [National Institutes of Health](https://github.com/NIHOPA/pipeline_word2vec) | ![nih](docs/src/readme_images/nih.png) | Health | Processing grants and publications with word2vec. | +| [Cisco Security](http://www.cisco.com/c/en/us/products/security/index.html) | ![cisco](docs/src/readme_images/cisco.png) | Security | Large-scale fraud detection. | +| [Mindseye](http://www.mindseyesolutions.com/) | ![mindseye](docs/src/readme_images/mindseye.png) | Legal | Similarities in legal documents. | +| [Channel 4](http://www.channel4.com/) | ![channel4](docs/src/readme_images/channel4.png) | Media | Recommendation engine. | +| [Talentpair](http://talentpair.com) | ![talent-pair](docs/src/readme_images/talent-pair.png) | HR | Candidate matching in high-touch recruiting. 
| +| [Juju](http://www.juju.com/) | ![juju](docs/src/readme_images/juju.png) | HR | Provide non-obvious related job suggestions. | +| [Tailwind](https://www.tailwindapp.com/) | ![tailwind](docs/src/readme_images/tailwind.png) | Media | Post interesting and relevant content to Pinterest. | +| [Issuu](https://issuu.com/) | ![issuu](docs/src/readme_images/issuu.png) | Media | Gensim's LDA module lies at the very core of the analysis we perform on each uploaded publication to figure out what it's all about. | +| [Search Metrics](http://www.searchmetrics.com/) | ![search-metrics](docs/src/readme_images/search-metrics.png) | Content Marketing | Gensim word2vec used for entity disambiguation in Search Engine Optimisation. | +| [12K Research](https://12k.co/) | ![12k](docs/src/readme_images/12k.png)| Media | Document similarity analysis on media articles. | +| [Stillwater Supercomputing](http://www.stillwater-sc.com/) | ![stillwater](docs/src/readme_images/stillwater.png) | Hardware | Document comprehension and association with word2vec. | +| [SiteGround](https://www.siteground.com/) | ![siteground](docs/src/readme_images/siteground.png) | Web hosting | An ensemble search engine which uses different embeddings models and similarities, including word2vec, WMD, and LDA. | +| [Capital One](https://www.capitalone.com/) | ![capitalone](docs/src/readme_images/capitalone.png) | Finance | Topic modeling for customer complaints exploration. | ------- From 2ccc82bf50bcfbee44932c160db076a873cf893e Mon Sep 17 00:00:00 2001 From: Gordon Mohr Date: Fri, 30 Nov 2018 10:55:49 -0800 Subject: [PATCH 42/66] correct data of 3.3.0 release per https://github.com/RaRe-Technologies/gensim/releases/tag/3.3.0 --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59ebd74101..3e51b0f8dd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -304,7 +304,7 @@ Apart from the **massive overhaul of all Gensim documentation** (including docst - `gensim.parsing.*` ➡ `gensim.utils.text_utils` -## 3.3.0, 2018-01-02 +## 3.3.0, 2018-02-02 :star2: New features: * Re-designed all "*2vec" implementations (__[@manneshiva](https://github.com/manneshiva)__, [#1777](https://github.com/RaRe-Technologies/gensim/pull/1777)) From 30528a5c01de9c2eb0b2f4d5cf7351adace369d1 Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Tue, 11 Dec 2018 21:25:54 +0500 Subject: [PATCH 43/66] Fix flake8 warnings W605, W504 (#2256) * Fix flake8 warnings W605 * fix W504 * pin flake8-rst (avoid issue from 0.5.0) --- gensim/corpora/wikicorpus.py | 6 +-- gensim/models/atmodel.py | 6 +-- gensim/models/base_any2vec.py | 4 +- gensim/models/coherencemodel.py | 6 +-- gensim/models/deprecated/doc2vec.py | 8 ++-- gensim/models/deprecated/fasttext.py | 8 ++-- gensim/models/deprecated/old_saveload.py | 4 +- gensim/models/deprecated/word2vec.py | 8 ++-- gensim/models/doc2vec.py | 8 ++-- gensim/models/fasttext.py | 8 ++-- gensim/models/hdpmodel.py | 40 +++++++++---------- gensim/models/ldamulticore.py | 6 +-- gensim/models/ldaseqmodel.py | 2 +- gensim/models/logentropy_model.py | 6 +-- gensim/models/normmodel.py | 6 +-- gensim/models/phrases.py | 10 ++--- gensim/models/tfidfmodel.py | 8 ++-- gensim/models/word2vec.py | 8 ++-- gensim/models/wrappers/ldamallet.py | 4 +- gensim/summarization/mz_entropy.py | 6 +-- gensim/test/test_corpora.py | 2 +- gensim/test/test_doc2vec.py | 4 +- gensim/test/test_keyedvectors.py | 3 +- gensim/test/test_matutils.py | 4 +- .../direct_confirmation_measure.py | 10 ++--- 
gensim/topic_coherence/segmentation.py | 8 ++-- gensim/utils.py | 4 +- tox.ini | 2 +- 28 files changed, 99 insertions(+), 100 deletions(-) diff --git a/gensim/corpora/wikicorpus.py b/gensim/corpora/wikicorpus.py index 8cc5ea58a7..a7302fe5c0 100644 --- a/gensim/corpora/wikicorpus.py +++ b/gensim/corpora/wikicorpus.py @@ -81,7 +81,7 @@ """Capture interlinks text and article linked""" RE_P17 = re.compile( r'(\n.{0,4}((bgcolor)|(\d{0,1}[ ]?colspan)|(rowspan)|(style=)|(class=)|(align=)|(scope=))(.*))|' - '(^.{0,2}((bgcolor)|(\d{0,1}[ ]?colspan)|(rowspan)|(style=)|(class=)|(align=))(.*))', + r'(^.{0,2}((bgcolor)|(\d{0,1}[ ]?colspan)|(rowspan)|(style=)|(class=)|(align=))(.*))', re.UNICODE ) """Table markup""" @@ -143,8 +143,8 @@ def filter_example(elem, text, *args, **kwargs): # regex is in the function call so that we do not pollute the wikicorpus # namespace do not do this in production as this function is called for # every element in the wiki dump - _regex_de_excellent = re.compile('.*\{\{(Exzellent.*?)\}\}[\s]*', flags=re.DOTALL) - _regex_de_featured = re.compile('.*\{\{(Lesenswert.*?)\}\}[\s]*', flags=re.DOTALL) + _regex_de_excellent = re.compile(r'.*\{\{(Exzellent.*?)\}\}[\s]*', flags=re.DOTALL) + _regex_de_featured = re.compile(r'.*\{\{(Lesenswert.*?)\}\}[\s]*', flags=re.DOTALL) if text is None: return False diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index a60e657788..e3ca00f7d7 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -376,14 +376,14 @@ def extend_corpus(self, corpus): self.corpus.extend(corpus) def compute_phinorm(self, expElogthetad, expElogbetad): - """Efficiently computes the normalizing factor in phi. + r"""Efficiently computes the normalizing factor in phi. Parameters ---------- expElogthetad: numpy.ndarray Value of variational distribution :math:`q(\theta|\gamma)`. expElogbetad: numpy.ndarray - Value of variational distribution :math:`q(\\beta|\lambda)`. + Value of variational distribution :math:`q(\beta|\lambda)`. Returns ------- @@ -888,7 +888,7 @@ def rho(): del other def bound(self, chunk, chunk_doc_idx=None, subsample_ratio=1.0, author2doc=None, doc2author=None): - """Estimate the variational bound of documents from `corpus`. + r"""Estimate the variational bound of documents from `corpus`. :math:`\mathbb{E_{q}}[\log p(corpus)] - \mathbb{E_{q}}[\log q(corpus)]` diff --git a/gensim/models/base_any2vec.py b/gensim/models/base_any2vec.py index bd74a5a6bd..d72301dccd 100644 --- a/gensim/models/base_any2vec.py +++ b/gensim/models/base_any2vec.py @@ -5,7 +5,7 @@ # Copyright (C) 2018 RaRe Technologies s.r.o. # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html -"""This module contains base classes required for implementing \*2vec algorithms. +r"""This module contains base classes required for implementing \*2vec algorithms. The class hierarchy is designed to facilitate adding more concrete implementations for creating embeddings. In the most general case, the purpose of this class is to transform an arbitrary representation to a numerical vector @@ -56,7 +56,7 @@ class BaseAny2VecModel(utils.SaveLoad): - """Base class for training, using and evaluating \*2vec model. + r"""Base class for training, using and evaluating \*2vec model. Contains implementation for multi-threaded training. 
The purpose of this class is to provide a reference interface for concrete embedding implementations, whether the input space is a corpus diff --git a/gensim/models/coherencemodel.py b/gensim/models/coherencemodel.py index fd42f53359..a46414a1a5 100644 --- a/gensim/models/coherencemodel.py +++ b/gensim/models/coherencemodel.py @@ -460,9 +460,9 @@ def _relevant_ids_will_differ(self, new_topics): return not self._accumulator.relevant_ids.issuperset(new_set) def _topics_differ(self, new_topics): - return (new_topics is not None and - self._topics is not None and - not np.array_equal(new_topics, self._topics)) + return (new_topics is not None + and self._topics is not None + and not np.array_equal(new_topics, self._topics)) def _get_topics(self): """Internal helper function to return topics from a trained topic model.""" diff --git a/gensim/models/deprecated/doc2vec.py b/gensim/models/deprecated/doc2vec.py index 8d8875affe..b841866b93 100644 --- a/gensim/models/deprecated/doc2vec.py +++ b/gensim/models/deprecated/doc2vec.py @@ -242,8 +242,8 @@ def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=N if doctag_locks is None: doctag_locks = model.docvecs.doctag_syn0_lockf - word_vocabs = [model.wv.vocab[w] for w in doc_words if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2**32] + word_vocabs = [model.wv.vocab[w] for w in doc_words if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2**32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) # `b` in the original doc2vec code @@ -298,8 +298,8 @@ def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, if doctag_locks is None: doctag_locks = model.docvecs.doctag_syn0_lockf - word_vocabs = [model.wv.vocab[w] for w in doc_words if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2**32] + word_vocabs = [model.wv.vocab[w] for w in doc_words if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2**32] doctag_len = len(doctag_indexes) if doctag_len != model.dm_tag_count: return 0 # skip doc without expected number of doctag(s) (TODO: warn/pad?) 
diff --git a/gensim/models/deprecated/fasttext.py b/gensim/models/deprecated/fasttext.py index 47e7f1a6a8..836c66d4ca 100644 --- a/gensim/models/deprecated/fasttext.py +++ b/gensim/models/deprecated/fasttext.py @@ -148,8 +148,8 @@ def train_batch_cbow(model, sentences, alpha, work=None, neu1=None): """ result = 0 for sentence in sentences: - word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2**32] + word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2**32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) start = max(0, pos - model.window + reduced_window) @@ -211,8 +211,8 @@ def train_batch_sg(model, sentences, alpha, work=None, neu1=None): """ result = 0 for sentence in sentences: - word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2**32] + word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2**32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) # `b` in the original word2vec code # now go over all words from the (reduced) window, predicting each one in turn diff --git a/gensim/models/deprecated/old_saveload.py b/gensim/models/deprecated/old_saveload.py index 44f4a5619d..c609dd5532 100644 --- a/gensim/models/deprecated/old_saveload.py +++ b/gensim/models/deprecated/old_saveload.py @@ -108,8 +108,8 @@ def _load_specials(self, fname, mmap, compress, subname): """ def mmap_error(obj, filename): return IOError( - 'Cannot mmap compressed object %s in file %s. ' % (obj, filename) + - 'Use `load(fname, mmap=None)` or uncompress files manually.' + 'Cannot mmap compressed object %s in file %s. ' % (obj, filename) + + 'Use `load(fname, mmap=None)` or uncompress files manually.' 
) for attrib in getattr(self, '__recursive_saveloads', []): diff --git a/gensim/models/deprecated/word2vec.py b/gensim/models/deprecated/word2vec.py index 885d77ba66..d647bfb8f1 100644 --- a/gensim/models/deprecated/word2vec.py +++ b/gensim/models/deprecated/word2vec.py @@ -232,8 +232,8 @@ def train_batch_sg(model, sentences, alpha, work=None, compute_loss=False): """ result = 0 for sentence in sentences: - word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2**32] + word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2**32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) # `b` in the original word2vec code @@ -263,8 +263,8 @@ def train_batch_cbow(model, sentences, alpha, work=None, neu1=None, compute_loss """ result = 0 for sentence in sentences: - word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2**32] + word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2**32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) # `b` in the original word2vec code start = max(0, pos - model.window + reduced_window) diff --git a/gensim/models/doc2vec.py b/gensim/models/doc2vec.py index 6a6b3d3ae9..d9b905cb3b 100644 --- a/gensim/models/doc2vec.py +++ b/gensim/models/doc2vec.py @@ -227,8 +227,8 @@ def train_document_dm(model, doc_words, doctag_indexes, alpha, work=None, neu1=N if doctag_locks is None: doctag_locks = model.docvecs.doctag_syn0_lockf - word_vocabs = [model.wv.vocab[w] for w in doc_words if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] + word_vocabs = [model.wv.vocab[w] for w in doc_words if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) # `b` in the original doc2vec code @@ -314,8 +314,8 @@ def train_document_dm_concat(model, doc_words, doctag_indexes, alpha, work=None, if doctag_locks is None: doctag_locks = model.docvecs.doctag_syn0_lockf - word_vocabs = [model.wv.vocab[w] for w in doc_words if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] + word_vocabs = [model.wv.vocab[w] for w in doc_words if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] doctag_len = len(doctag_indexes) if doctag_len != model.dm_tag_count: return 0 # skip doc without expected number of doctag(s) (TODO: warn/pad?) 
diff --git a/gensim/models/fasttext.py b/gensim/models/fasttext.py index f7e9d65556..17b314fec9 100644 --- a/gensim/models/fasttext.py +++ b/gensim/models/fasttext.py @@ -140,8 +140,8 @@ def train_batch_cbow(model, sentences, alpha, work=None, neu1=None): """ result = 0 for sentence in sentences: - word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] + word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) start = max(0, pos - model.window + reduced_window) @@ -199,8 +199,8 @@ def train_batch_sg(model, sentences, alpha, work=None, neu1=None): """ result = 0 for sentence in sentences: - word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] + word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) # `b` in the original word2vec code # now go over all words from the (reduced) window, predicting each one in turn diff --git a/gensim/models/hdpmodel.py b/gensim/models/hdpmodel.py index 3ff2a508d2..3c24fb4561 100755 --- a/gensim/models/hdpmodel.py +++ b/gensim/models/hdpmodel.py @@ -72,7 +72,7 @@ def expect_log_sticks(sticks): - """For stick-breaking hdp, get the :math:`\mathbb{E}[log(sticks)]`. + r"""For stick-breaking hdp, get the :math:`\mathbb{E}[log(sticks)]`. Parameters ---------- @@ -97,7 +97,7 @@ def expect_log_sticks(sticks): def lda_e_step(doc_word_ids, doc_word_counts, alpha, beta, max_iter=100): - """Performs EM-iteration on a single document for calculation of likelihood for a maximum iteration of `max_iter`. + r"""Performs EM-iteration on a single document for calculation of likelihood for a maximum iteration of `max_iter`. Parameters ---------- @@ -115,7 +115,7 @@ def lda_e_step(doc_word_ids, doc_word_counts, alpha, beta, max_iter=100): Returns ------- (numpy.ndarray, numpy.ndarray) - Computed (:math:`likelihood`, :math:`\\gamma`). + Computed (:math:`likelihood`, :math:`\gamma`). """ gamma = np.ones(len(alpha)) @@ -172,7 +172,7 @@ def set_zero(self): class HdpModel(interfaces.TransformationABC, basemodel.BaseTopicModel): - """`Hierarchical Dirichlet Process model `_ + r"""`Hierarchical Dirichlet Process model `_ Topic models promise to help summarize and organize large archives of texts that cannot be easily analyzed by hand. Hierarchical Dirichlet process (HDP) is a powerful mixed-membership model for the unsupervised analysis of grouped @@ -194,7 +194,7 @@ class HdpModel(interfaces.TransformationABC, basemodel.BaseTopicModel): For this assume that there is a restaurant franchise (`corpus`) which has a large number of restaurants (`documents`, `j`) under it. They have a global menu of dishes (`topics`, :math:`\Phi_{k}`) which they serve. Also, a single dish (`topic`, :math:`\Phi_{k}`) is only served at a single table `t` for all the customers - (`words`, :math:`\\theta_{j,i}`) who sit at that table. + (`words`, :math:`\theta_{j,i}`) who sit at that table. So, when a customer enters the restaurant he/she has the choice to make where he/she wants to sit. 
He/she can choose to sit at a table where some customers are already sitting , or he/she can choose to sit at a new table. Here the probability of choosing each option is not same. @@ -213,14 +213,14 @@ class HdpModel(interfaces.TransformationABC, basemodel.BaseTopicModel): share the same set of atoms, :math:`\Phi_{k}`, and only the atom weights :math:`\pi _{jt}` differs. There will be multiple document-level atoms :math:`\psi_{jt}` which map to the same corpus-level atom - :math:`\Phi_{k}`. Here, the :math:`\\beta` signify the weights given to each of the topics globally. Also, each - factor :math:`\\theta_{j,i}` is distributed according to :math:`G_{j}`, i.e., it takes on the value of + :math:`\Phi_{k}`. Here, the :math:`\beta` signify the weights given to each of the topics globally. Also, each + factor :math:`\theta_{j,i}` is distributed according to :math:`G_{j}`, i.e., it takes on the value of :math:`\Phi_{k}` with probability :math:`\pi _{jt}`. :math:`C_{j,t}` is an indicator variable whose value `k` signifies the index of :math:`\Phi`. This helps to map :math:`\psi_{jt}` to :math:`\Phi_{k}`. - The top level (`corpus` level) stick proportions correspond the values of :math:`\\beta`, + The top level (`corpus` level) stick proportions correspond the values of :math:`\beta`, bottom level (`document` level) stick proportions correspond to the values of :math:`\pi`. - The truncation level for the corpus (`K`) and document (`T`) corresponds to the number of :math:`\\beta` + The truncation level for the corpus (`K`) and document (`T`) corresponds to the number of :math:`\beta` and :math:`\pi` which are in existence. Now, whenever coordinate ascent updates are to be performed, they happen at two level. The document level as well @@ -228,7 +228,7 @@ class HdpModel(interfaces.TransformationABC, basemodel.BaseTopicModel): At document level, we update the following: - #. The parameters to the document level sticks, i.e, a and b parameters of :math:`\\beta` distribution of the + #. The parameters to the document level sticks, i.e, a and b parameters of :math:`\beta` distribution of the variable :math:`\pi _{jt}`. #. The parameters to per word topic indicators, :math:`Z_{j,n}`. Here :math:`Z_{j,n}` selects topic parameter :math:`\psi_{jt}`. @@ -236,8 +236,8 @@ class HdpModel(interfaces.TransformationABC, basemodel.BaseTopicModel): At corpus level, we update the following: - #. The parameters to the top level sticks, i.e., the parameters of the :math:`\\beta` distribution for the - corpus level :math:`\\beta`, which signify the topic distribution at corpus level. + #. The parameters to the top level sticks, i.e., the parameters of the :math:`\beta` distribution for the + corpus level :math:`\beta`, which signify the topic distribution at corpus level. #. The parameters to the topics :math:`\Phi_{k}`. Now coming on to the steps involved, procedure for online variational inference for the Hdp model is as follows: @@ -261,14 +261,14 @@ class HdpModel(interfaces.TransformationABC, basemodel.BaseTopicModel): Attributes ---------- lda_alpha : numpy.ndarray - Same as :math:`\\alpha` from :class:`gensim.models.ldamodel.LdaModel`. + Same as :math:`\alpha` from :class:`gensim.models.ldamodel.LdaModel`. lda_beta : numpy.ndarray - Same as :math:`\\beta` from from :class:`gensim.models.ldamodel.LdaModel`. + Same as :math:`\beta` from from :class:`gensim.models.ldamodel.LdaModel`. m_D : int Number of documents in the corpus. 
m_Elogbeta : numpy.ndarray: - Stores value of dirichlet expectation, i.e., compute :math:`E[log \\theta]` for a vector - :math:`\\theta \sim Dir(\\alpha)`. + Stores value of dirichlet expectation, i.e., compute :math:`E[log \theta]` for a vector + :math:`\theta \sim Dir(\alpha)`. m_lambda : {numpy.ndarray, float} Drawn samples from the parameterized gamma distribution. m_lambda_sum : {numpy.ndarray, float} @@ -280,7 +280,7 @@ class HdpModel(interfaces.TransformationABC, basemodel.BaseTopicModel): m_rhot : float Assigns weight to the information obtained from the mini-chunk and its value it between 0 and 1. m_status_up_to_date : bool - Flag to indicate whether `lambda `and :math:`E[log \\theta]` have been updated if True, otherwise - not. + Flag to indicate whether `lambda `and :math:`E[log \theta]` have been updated if True, otherwise - not. m_timestamp : numpy.ndarray Helps to keep track and perform lazy updates on lambda. m_updatect : int @@ -510,13 +510,13 @@ def update_finished(self, start_time, chunks_processed, docs_processed): """ return ( # chunk limit reached - (self.max_chunks and chunks_processed == self.max_chunks) or + (self.max_chunks and chunks_processed == self.max_chunks) # time limit reached - (self.max_time and time.clock() - start_time > self.max_time) or + or (self.max_time and time.clock() - start_time > self.max_time) # no limits and whole corpus has been processed once - (not self.max_chunks and not self.max_time and docs_processed >= self.m_D)) + or (not self.max_chunks and not self.max_time and docs_processed >= self.m_D)) def update_chunk(self, chunk, update=True, opt_o=True): """Performs lazy update on necessary columns of lambda and variational inference for documents in the chunk. diff --git a/gensim/models/ldamulticore.py b/gensim/models/ldamulticore.py index 248cc83abc..d154e367df 100644 --- a/gensim/models/ldamulticore.py +++ b/gensim/models/ldamulticore.py @@ -276,9 +276,9 @@ def process_result_queue(force=False): if (force and merged_new and queue_size[0] == 0) or (not self.batch and (other.numdocs >= updateafter)): self.do_mstep(rho(), other, pass_ > 0) other.reset() - if self.eval_every is not None and \ - ((force and queue_size[0] == 0) or - (self.eval_every != 0 and (self.num_updates / updateafter) % self.eval_every == 0)): + if self.eval_every is not None \ + and ((force and queue_size[0] == 0) + or (self.eval_every != 0 and (self.num_updates / updateafter) % self.eval_every == 0)): self.log_perplexity(chunk, total_docs=lencorpus) chunk_stream = utils.grouper(corpus, self.chunksize, as_numpy=chunks_as_numpy) diff --git a/gensim/models/ldaseqmodel.py b/gensim/models/ldaseqmodel.py index 35c1b64a15..1c9e8a55d9 100644 --- a/gensim/models/ldaseqmodel.py +++ b/gensim/models/ldaseqmodel.py @@ -741,7 +741,7 @@ def update_zeta(self): return self.zeta def compute_post_variance(self, word, chain_variance): - """Get the variance, based on the `Variational Kalman Filtering approach for Approximate Inference (section 3.1) + r"""Get the variance, based on the `Variational Kalman Filtering approach for Approximate Inference (section 3.1) `_. 
This function accepts the word to compute variance for, along with the associated sslm class object, diff --git a/gensim/models/logentropy_model.py b/gensim/models/logentropy_model.py index 6429cbf9e0..bdc726d5fd 100644 --- a/gensim/models/logentropy_model.py +++ b/gensim/models/logentropy_model.py @@ -25,7 +25,7 @@ class LogEntropyModel(interfaces.TransformationABC): - """Objects of this class realize the transformation between word-document co-occurrence matrix (int) + r"""Objects of this class realize the transformation between word-document co-occurrence matrix (int) into a locally/globally weighted matrix (positive floats). This is done by a log entropy normalization, optionally normalizing the resulting documents to unit length. @@ -35,9 +35,9 @@ class LogEntropyModel(interfaces.TransformationABC): local\_weight_{i,j} = log(frequency_{i,j} + 1) - P_{i,j} = \\frac{frequency_{i,j}}{\sum_j frequency_{i,j}} + P_{i,j} = \frac{frequency_{i,j}}{\sum_j frequency_{i,j}} - global\_weight_i = 1 + \\frac{\sum_j P_{i,j} * log(P_{i,j})}{log(number\_of\_documents + 1)} + global\_weight_i = 1 + \frac{\sum_j P_{i,j} * log(P_{i,j})}{log(number\_of\_documents + 1)} final\_weight_{i,j} = local\_weight_{i,j} * global\_weight_i diff --git a/gensim/models/normmodel.py b/gensim/models/normmodel.py index 23853cdafd..3292f6514e 100644 --- a/gensim/models/normmodel.py +++ b/gensim/models/normmodel.py @@ -15,15 +15,15 @@ class NormModel(interfaces.TransformationABC): """Objects of this class realize the explicit normalization of vectors (l1 and l2).""" def __init__(self, corpus=None, norm='l2'): - """Compute the l1 or l2 normalization by normalizing separately for each document in a corpus. + r"""Compute the l1 or l2 normalization by normalizing separately for each document in a corpus. If :math:`v_{i,j}` is the 'i'th component of the vector representing document 'j', the l1 normalization is - .. math:: l1_{i, j} = \\frac{v_{i,j}}{\sum_k |v_{k,j}|} + .. math:: l1_{i, j} = \frac{v_{i,j}}{\sum_k |v_{k,j}|} the l2 normalization is - .. math:: l2_{i, j} = \\frac{v_{i,j}}{\sqrt{\sum_k v_{k,j}^2}} + .. math:: l2_{i, j} = \frac{v_{i,j}}{\sqrt{\sum_k v_{k,j}^2}} Parameters diff --git a/gensim/models/phrases.py b/gensim/models/phrases.py index 9d8a5f5da6..ca8c1cfa3b 100644 --- a/gensim/models/phrases.py +++ b/gensim/models/phrases.py @@ -658,7 +658,7 @@ def __getitem__(self, sentence): def original_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count, corpus_word_count): - """Bigram scoring function, based on the original `Mikolov, et. al: "Distributed Representations + r"""Bigram scoring function, based on the original `Mikolov, et. al: "Distributed Representations of Words and Phrases and their Compositionality" `_. Parameters @@ -678,14 +678,14 @@ def original_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count Notes ----- - Formula: :math:`\\frac{(bigram\_count - min\_count) * len\_vocab }{ (worda\_count * wordb\_count)}`. + Formula: :math:`\frac{(bigram\_count - min\_count) * len\_vocab }{ (worda\_count * wordb\_count)}`. """ return (bigram_count - min_count) / worda_count / wordb_count * len_vocab def npmi_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count, corpus_word_count): - """Calculation NPMI score based on `"Normalized (Pointwise) Mutual Information in Colocation Extraction" + r"""Calculation NPMI score based on `"Normalized (Pointwise) Mutual Information in Colocation Extraction" by Gerlof Bouma `_. 
Parameters @@ -705,8 +705,8 @@ def npmi_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count, co Notes ----- - Formula: :math:`\\frac{ln(prop(word_a, word_b) / (prop(word_a)*prop(word_b)))}{ -ln(prop(word_a, word_b)}`, - where :math:`prob(word) = \\frac{word\_count}{corpus\_word\_count}` + Formula: :math:`\frac{ln(prop(word_a, word_b) / (prop(word_a)*prop(word_b)))}{ -ln(prop(word_a, word_b)}`, + where :math:`prob(word) = \frac{word\_count}{corpus\_word\_count}` """ if bigram_count >= min_count: diff --git a/gensim/models/tfidfmodel.py b/gensim/models/tfidfmodel.py index 8f163b66c4..a4cbedcd22 100644 --- a/gensim/models/tfidfmodel.py +++ b/gensim/models/tfidfmodel.py @@ -83,8 +83,8 @@ def resolve_weights(smartirs): def df2idf(docfreq, totaldocs, log_base=2.0, add=0.0): - """Compute inverse-document-frequency for a term with the given document frequency `docfreq`: - :math:`idf = add + log_{log\_base} \\frac{totaldocs}{docfreq}` + r"""Compute inverse-document-frequency for a term with the given document frequency `docfreq`: + :math:`idf = add + log_{log\_base} \frac{totaldocs}{docfreq}` Parameters ---------- @@ -239,11 +239,11 @@ class TfidfModel(interfaces.TransformationABC): """ def __init__(self, corpus=None, id2word=None, dictionary=None, wlocal=utils.identity, wglobal=df2idf, normalize=True, smartirs=None, pivot=None, slope=0.65): - """Compute TF-IDF by multiplying a local component (term frequency) with a global component + r"""Compute TF-IDF by multiplying a local component (term frequency) with a global component (inverse document frequency), and normalizing the resulting documents to unit length. Formula for non-normalized weight of term :math:`i` in document :math:`j` in a corpus of :math:`D` documents - .. math:: weight_{i,j} = frequency_{i,j} * log_2 \\frac{D}{document\_freq_{i}} + .. 
math:: weight_{i,j} = frequency_{i,j} * log_2 \frac{D}{document\_freq_{i}} or, more generally diff --git a/gensim/models/word2vec.py b/gensim/models/word2vec.py index 098905420b..a961d6f004 100755 --- a/gensim/models/word2vec.py +++ b/gensim/models/word2vec.py @@ -193,8 +193,8 @@ def train_batch_sg(model, sentences, alpha, work=None, compute_loss=False): """ result = 0 for sentence in sentences: - word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] + word_vocabs = [model.wv.vocab[w] for w in sentence if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) # `b` in the original word2vec code @@ -245,8 +245,8 @@ def train_batch_cbow(model, sentences, alpha, work=None, neu1=None, compute_loss result = 0 for sentence in sentences: word_vocabs = [ - model.wv.vocab[w] for w in sentence if w in model.wv.vocab and - model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32 + model.wv.vocab[w] for w in sentence if w in model.wv.vocab + and model.wv.vocab[w].sample_int > model.random.rand() * 2 ** 32 ] for pos, word in enumerate(word_vocabs): reduced_window = model.random.randint(model.window) # `b` in the original word2vec code diff --git a/gensim/models/wrappers/ldamallet.py b/gensim/models/wrappers/ldamallet.py index 6639be5d8e..1c321d1269 100644 --- a/gensim/models/wrappers/ldamallet.py +++ b/gensim/models/wrappers/ldamallet.py @@ -5,7 +5,7 @@ # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html -"""Python wrapper for `Latent Dirichlet Allocation (LDA) `_ +r"""Python wrapper for `Latent Dirichlet Allocation (LDA) `_ from `MALLET, the Java topic modelling toolkit `_ This module allows both LDA model estimation from a training corpus and inference of topic distribution on new, @@ -250,7 +250,7 @@ def convert_input(self, corpus, infer=False, serialize_corpus=True): cmd = \ self.mallet_path + \ " import-file --preserve-case --keep-sequence " \ - "--remove-stopwords --token-regex \"\S+\" --input %s --output %s" + "--remove-stopwords --token-regex \"\\S+\" --input %s --output %s" if infer: cmd += ' --use-pipe-from ' + self.fcorpusmallet() cmd = cmd % (self.fcorpustxt(), self.fcorpusmallet() + '.infer') diff --git a/gensim/summarization/mz_entropy.py b/gensim/summarization/mz_entropy.py index 11437f5c86..492ae7e79a 100644 --- a/gensim/summarization/mz_entropy.py +++ b/gensim/summarization/mz_entropy.py @@ -98,9 +98,9 @@ def marginal_prob(n, m): occurring m times in a given block""" return numpy.exp( - __log_combinations(n, m) + - __log_combinations(n_words - n, blocksize - m) - - __log_combinations(n_words, blocksize) + __log_combinations(n, m) + + __log_combinations(n_words - n, blocksize - m) + - __log_combinations(n_words, blocksize) ) return numpy.frompyfunc(marginal_prob, 2, 1) diff --git a/gensim/test/test_corpora.py b/gensim/test/test_corpora.py index 8eb10faa0e..a9568e4508 100644 --- a/gensim/test/test_corpora.py +++ b/gensim/test/test_corpora.py @@ -786,7 +786,7 @@ def test_two_level_directory(self): def test_filename_filtering(self): dirpath = self.write_one_level('test1.log', 'test1.txt', 'test2.log', 'other1.log') - corpus = textcorpus.TextDirectoryCorpus(dirpath, pattern="test.*\.log") + corpus = textcorpus.TextDirectoryCorpus(dirpath, pattern=r"test.*\.log") filenames = list(corpus.iter_filepaths()) expected = [os.path.join(dirpath, name) for name in 
('test1.log', 'test2.log')] self.assertEqual(sorted(expected), sorted(filenames)) diff --git a/gensim/test/test_doc2vec.py b/gensim/test/test_doc2vec.py index d35b907800..d61adef085 100644 --- a/gensim/test/test_doc2vec.py +++ b/gensim/test/test_doc2vec.py @@ -340,8 +340,8 @@ def test_similarity_unseen_docs(self): model = doc2vec.Doc2Vec(min_count=1) model.build_vocab(corpus) self.assertTrue( - model.docvecs.similarity_unseen_docs(model, rome_str, rome_str) > - model.docvecs.similarity_unseen_docs(model, rome_str, car_str) + model.docvecs.similarity_unseen_docs(model, rome_str, rome_str) + > model.docvecs.similarity_unseen_docs(model, rome_str, car_str) ) def model_sanity(self, model, keep_training=True): diff --git a/gensim/test/test_keyedvectors.py b/gensim/test/test_keyedvectors.py index 927b896c64..0259fea7af 100644 --- a/gensim/test/test_keyedvectors.py +++ b/gensim/test/test_keyedvectors.py @@ -38,8 +38,7 @@ def test_similarity_matrix(self): similarity_matrix = self.vectors.similarity_matrix(dictionary).todense() self.assertTrue((similarity_matrix.T == similarity_matrix).all()) self.assertTrue( - (np.diag(similarity_matrix) == - np.ones(similarity_matrix.shape[0])).all()) + (np.diag(similarity_matrix) == np.ones(similarity_matrix.shape[0])).all()) # checking that thresholding works as expected similarity_matrix = self.vectors.similarity_matrix(dictionary, threshold=0.45).todense() diff --git a/gensim/test/test_matutils.py b/gensim/test/test_matutils.py index 49988af296..cde90b50d9 100644 --- a/gensim/test/test_matutils.py +++ b/gensim/test/test_matutils.py @@ -60,7 +60,7 @@ def mean_absolute_difference(a, b): def dirichlet_expectation(alpha): - """For a vector :math:`\\theta \sim Dir(\\alpha)`, compute :math:`E[log \\theta]`. + r"""For a vector :math:`\theta \sim Dir(\alpha)`, compute :math:`E[log \theta]`. Parameters ---------- @@ -70,7 +70,7 @@ def dirichlet_expectation(alpha): Returns ------- numpy.ndarray: - :math:`E[log \\theta]` + :math:`E[log \theta]` """ if len(alpha.shape) == 1: diff --git a/gensim/topic_coherence/direct_confirmation_measure.py b/gensim/topic_coherence/direct_confirmation_measure.py index d7a7e5f464..710718f3af 100644 --- a/gensim/topic_coherence/direct_confirmation_measure.py +++ b/gensim/topic_coherence/direct_confirmation_measure.py @@ -17,8 +17,8 @@ def log_conditional_probability(segmented_topics, accumulator, with_std=False, with_support=False): - """Calculate the log-conditional-probability measure which is used by coherence measures such as `U_mass`. - This is defined as :math:`m_{lc}(S_i) = log \\frac{P(W', W^{*}) + \epsilon}{P(W^{*})}`. + r"""Calculate the log-conditional-probability measure which is used by coherence measures such as `U_mass`. + This is defined as :math:`m_{lc}(S_i) = log \frac{P(W', W^{*}) + \epsilon}{P(W^{*})}`. Parameters ---------- @@ -124,7 +124,7 @@ def aggregate_segment_sims(segment_sims, with_std, with_support): def log_ratio_measure(segmented_topics, accumulator, normalize=False, with_std=False, with_support=False): - """Compute log ratio measure for `segment_topics`. + r"""Compute log ratio measure for `segment_topics`. Parameters ---------- @@ -146,12 +146,12 @@ def log_ratio_measure(segmented_topics, accumulator, normalize=False, with_std=F ----- If `normalize=False`: Calculate the log-ratio-measure, popularly known as **PMI** which is used by coherence measures such as `c_v`. 
- This is defined as :math:`m_{lr}(S_i) = log \\frac{P(W', W^{*}) + \epsilon}{P(W') * P(W^{*})}` + This is defined as :math:`m_{lr}(S_i) = log \frac{P(W', W^{*}) + \epsilon}{P(W') * P(W^{*})}` If `normalize=True`: Calculate the normalized-log-ratio-measure, popularly knowns as **NPMI** which is used by coherence measures such as `c_v`. - This is defined as :math:`m_{nlr}(S_i) = \\frac{m_{lr}(S_i)}{-log(P(W', W^{*}) + \epsilon)}` + This is defined as :math:`m_{nlr}(S_i) = \frac{m_{lr}(S_i)}{-log(P(W', W^{*}) + \epsilon)}` Returns ------- diff --git a/gensim/topic_coherence/segmentation.py b/gensim/topic_coherence/segmentation.py index d02f700547..94924c8a60 100644 --- a/gensim/topic_coherence/segmentation.py +++ b/gensim/topic_coherence/segmentation.py @@ -12,7 +12,7 @@ def s_one_pre(topics): - """Performs segmentation on a list of topics. + r"""Performs segmentation on a list of topics. Notes ----- @@ -54,9 +54,9 @@ def s_one_pre(topics): def s_one_one(topics): - """Perform segmentation on a list of topics. + r"""Perform segmentation on a list of topics. Segmentation is defined as - :math:`s_{one} = {(W', W^{*}) | W' = {w_i}; W^{*} = {w_j}; w_{i}, w_{j} \in W; i \\neq j}`. + :math:`s_{one} = {(W', W^{*}) | W' = {w_i}; W^{*} = {w_j}; w_{i}, w_{j} \in W; i \neq j}`. Parameters ---------- @@ -96,7 +96,7 @@ def s_one_one(topics): def s_one_set(topics): - """Perform s_one_set segmentation on a list of topics. + r"""Perform s_one_set segmentation on a list of topics. Segmentation is defined as :math:`s_{set} = {(W', W^{*}) | W' = {w_i}; w_{i} \in W; W^{*} = W}` diff --git a/gensim/utils.py b/gensim/utils.py index 0359125db5..a5c7c94ab7 100644 --- a/gensim/utils.py +++ b/gensim/utils.py @@ -447,8 +447,8 @@ def _load_specials(self, fname, mmap, compress, subname): """ def mmap_error(obj, filename): return IOError( - 'Cannot mmap compressed object %s in file %s. ' % (obj, filename) + - 'Use `load(fname, mmap=None)` or uncompress files manually.' + 'Cannot mmap compressed object %s in file %s. ' % (obj, filename) + + 'Use `load(fname, mmap=None)` or uncompress files manually.' 
) for attrib in getattr(self, '__recursive_saveloads', []): diff --git a/tox.ini b/tox.ini index f380171659..c5446a8097 100644 --- a/tox.ini +++ b/tox.ini @@ -64,7 +64,7 @@ commands = flake8 gensim/ {posargs} [testenv:flake8-docs] recreate = True -deps = flake8-rst >= 0.4.1 +deps = flake8-rst == 0.4.3 commands = flake8-rst gensim/ docs/ {posargs} From 54e21646737c20451c1461b1dee042936bb4dd06 Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Wed, 12 Dec 2018 00:27:47 +0500 Subject: [PATCH 44/66] Fix extra list creation in `utils.get_max_id` (#2254) --- gensim/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gensim/utils.py b/gensim/utils.py index a5c7c94ab7..3204664476 100644 --- a/gensim/utils.py +++ b/gensim/utils.py @@ -732,7 +732,8 @@ def get_max_id(corpus): """ maxid = -1 for document in corpus: - maxid = max(maxid, max([-1] + [fieldid for fieldid, _ in document])) # [-1] to avoid exceptions from max(empty) + if document: + maxid = max(maxid, max(fieldid for fieldid, _ in document)) return maxid From d89e767c0973677a829b7a9e20e46fc7d0c6e3c9 Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Wed, 12 Dec 2018 10:53:38 +0500 Subject: [PATCH 45/66] Drop wrong key `-c` from `gensim.downloader` description (#2262) and add Windows specific python extensions to gitignore --- .gitignore | 2 ++ gensim/downloader.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index aef8db9736..c3ba120f37 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,8 @@ *.o *.so *.pyc +*.pyo +*.pyd # Packages # ############ diff --git a/gensim/downloader.py b/gensim/downloader.py index a75a772503..300cbc2f61 100644 --- a/gensim/downloader.py +++ b/gensim/downloader.py @@ -443,7 +443,7 @@ def load(name, return_path=False): ) parser = argparse.ArgumentParser( description="Gensim console API", - usage="python -m gensim.api.downloader [-h] [-d data_name | -i data_name | -c]" + usage="python -m gensim.api.downloader [-h] [-d data_name | -i data_name]" ) group = parser.add_mutually_exclusive_group() From 5fbea23b6c764501c25074112eec993a5dfd7d74 Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Wed, 12 Dec 2018 14:44:06 +0500 Subject: [PATCH 46/66] Replace inline slow equivalent of mean_absolute_difference with fast (#2284) --- gensim/models/atmodel.py | 4 ++-- gensim/models/hdpmodel.py | 4 ++-- gensim/test/simspeed.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index e3ca00f7d7..f51e7848f0 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -67,7 +67,7 @@ from gensim import utils from gensim.models import LdaModel from gensim.models.ldamodel import LdaState -from gensim.matutils import dirichlet_expectation +from gensim.matutils import dirichlet_expectation, mean_absolute_difference from gensim.corpora import MmCorpus from itertools import chain from scipy.special import gammaln # gamma function utils @@ -505,7 +505,7 @@ def inference(self, chunk, author2doc, doc2author, rhot, collect_sstats=False, c # Check for convergence. # Criterion is mean change in "local" gamma. 
- meanchange_gamma = np.mean(abs(tilde_gamma - lastgamma)) + meanchange_gamma = mean_absolute_difference(tilde_gamma.ravel(), lastgamma.ravel()) gamma_condition = meanchange_gamma < self.gamma_threshold if gamma_condition: converged += 1 diff --git a/gensim/models/hdpmodel.py b/gensim/models/hdpmodel.py index 3c24fb4561..47100f5f9a 100755 --- a/gensim/models/hdpmodel.py +++ b/gensim/models/hdpmodel.py @@ -60,7 +60,7 @@ from six.moves import xrange from gensim import interfaces, utils, matutils -from gensim.matutils import dirichlet_expectation +from gensim.matutils import dirichlet_expectation, mean_absolute_difference from gensim.models import basemodel, ldamodel from gensim.utils import deprecated @@ -130,7 +130,7 @@ def lda_e_step(doc_word_ids, doc_word_counts, alpha, beta, max_iter=100): Elogtheta = dirichlet_expectation(gamma) expElogtheta = np.exp(Elogtheta) phinorm = np.dot(expElogtheta, betad) + 1e-100 - meanchange = np.mean(abs(gamma - lastgamma)) + meanchange = mean_absolute_difference(gamma, lastgamma) if meanchange < meanchangethresh: break diff --git a/gensim/test/simspeed.py b/gensim/test/simspeed.py index 7ba25fc2ea..27f52fd276 100755 --- a/gensim/test/simspeed.py +++ b/gensim/test/simspeed.py @@ -119,7 +119,7 @@ unchunksizeed = sims else: queries = math.ceil(1.0 * len(corpus_dense) / chunksize) - diff = np.mean(np.abs(unchunksizeed - sims)) + diff = gensim.matutils.mean_absolute_difference(unchunksizeed, sims) logging.info( "chunksize=%i, time=%.4fs (%.2f docs/s, %.2f queries/s), meandiff=%.3e", chunksize, taken, len(corpus_dense) / taken, queries / taken, diff @@ -161,7 +161,7 @@ unchunksizeed = sims else: queries = math.ceil(1.0 * len(corpus_sparse) / chunksize) - diff = np.mean(np.abs(unchunksizeed - sims)) + diff = gensim.matutils.mean_absolute_difference(unchunksizeed, sims) logging.info( "chunksize=%i, time=%.4fs (%.2f docs/s, %.2f queries/s), meandiff=%.3e", chunksize, taken, len(corpus_sparse) / taken, queries / taken, diff From 15c02c62acf00615c48f2320125b7c4e5ab4d89a Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Wed, 12 Dec 2018 16:37:45 +0500 Subject: [PATCH 47/66] Reuse precalculated updated prior in `ldamodel.update_dir_prior` (#2274) --- gensim/models/ldamodel.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index c6547de50a..08d6e7fda5 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -145,11 +145,11 @@ def update_dir_prior(prior, N, logphat, rho): dprior = -(gradf - b) / q - if all(rho * dprior + prior > 0): - prior += rho * dprior + updated_prior = rho * dprior + prior + if all(updated_prior > 0): + prior = updated_prior else: - logger.warning("updated prior not positive") - + logger.warning("updated prior is not positive") return prior @@ -665,8 +665,9 @@ def inference(self, chunk, collect_sstats=False): # Inference code copied from Hoffman's `onlineldavb.py` (esp. the # Lee&Seung trick which speeds things up by an order of magnitude, compared # to Blei's original LDA-C code, cool!). 
+ integer_types = six.integer_types + (np.integer,) for d, doc in enumerate(chunk): - if len(doc) > 0 and not isinstance(doc[0][0], six.integer_types + (np.integer,)): + if len(doc) > 0 and not isinstance(doc[0][0], integer_types): # make sure the term IDs are ints, otherwise np will get upset ids = [int(idx) for idx, _ in doc] else: From 6fb086f3df289f9ea3ba6b651984a77a21a6567f Mon Sep 17 00:00:00 2001 From: Johann Petrak Date: Thu, 13 Dec 2018 10:21:37 +0000 Subject: [PATCH 48/66] Fix `keep_n` behavior for `Dictionary.filter_extremes` (#2232) * Proper commit for fixing issue #2230 If keep_tokens are specified, this fixes the problem by using a lambda for sorting which returns a fake document frequence equal to the number of documents for all tokens in the keep list. * Add a unit test for keep_n with keep_tokens. --- gensim/corpora/dictionary.py | 3 ++- gensim/test/test_corpora_dictionary.py | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/gensim/corpora/dictionary.py b/gensim/corpora/dictionary.py index 590fe02453..d1c437f3d5 100644 --- a/gensim/corpora/dictionary.py +++ b/gensim/corpora/dictionary.py @@ -363,12 +363,13 @@ def filter_extremes(self, no_below=5, no_above=0.5, keep_n=100000, keep_tokens=N v for v in itervalues(self.token2id) if no_below <= self.dfs.get(v, 0) <= no_above_abs or v in keep_ids ) + good_ids = sorted(good_ids, key=lambda x: self.num_docs if x in keep_ids else self.dfs.get(x), reverse=True) else: good_ids = ( v for v in itervalues(self.token2id) if no_below <= self.dfs.get(v, 0) <= no_above_abs ) - good_ids = sorted(good_ids, key=self.dfs.get, reverse=True) + good_ids = sorted(good_ids, key=self.dfs.get, reverse=True) if keep_n is not None: good_ids = good_ids[:keep_n] bad_words = [(self[idx], self.dfs.get(idx, 0)) for idx in set(self).difference(good_ids)] diff --git a/gensim/test/test_corpora_dictionary.py b/gensim/test/test_corpora_dictionary.py index 13a16a3cd1..a7fb170253 100644 --- a/gensim/test/test_corpora_dictionary.py +++ b/gensim/test/test_corpora_dictionary.py @@ -129,6 +129,20 @@ def testFilterKeepTokens_unseenToken(self): expected = {'graph', 'trees', 'system', 'user'} self.assertEqual(set(d.token2id.keys()), expected) + def testFilterKeepTokens_keepn(self): + # keep_tokens should also work if the keep_n parameter is used, but only + # to keep a maximum of n (so if keep_n < len(keep_n) the tokens to keep are + # still getting removed to reduce the size to keep_n!) + d = Dictionary(self.texts) + # Note: there are four tokens with freq 3, all the others have frequence 2 + # in self.texts. 
In order to make the test result deterministic, we add + # 2 tokens of frequency one + d.add_documents([['worda'], ['wordb']]) + # this should keep the 3 tokens with freq 3 and the one we want to keep + d.filter_extremes(keep_n=5, no_below=0, no_above=1.0, keep_tokens=['worda']) + expected = {'graph', 'trees', 'system', 'user', 'worda'} + self.assertEqual(set(d.token2id.keys()), expected) + def testFilterMostFrequent(self): d = Dictionary(self.texts) d.filter_n_most_frequent(4) From 74533bbc772dc9a023c62bdf6dd039779085a742 Mon Sep 17 00:00:00 2001 From: jeni Shah Date: Thu, 13 Dec 2018 18:45:21 +0530 Subject: [PATCH 49/66] Add documentation about ranges to scoring functions for `Phrases` (#2242) --- gensim/models/phrases.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/gensim/models/phrases.py b/gensim/models/phrases.py index ca8c1cfa3b..e143e364e4 100644 --- a/gensim/models/phrases.py +++ b/gensim/models/phrases.py @@ -676,6 +676,11 @@ def original_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count corpus_word_count : int Not used in this particular scoring technique. + Returns + ------- + float + Score for given bi-gram, greater than or equal to 0. + Notes ----- Formula: :math:`\frac{(bigram\_count - min\_count) * len\_vocab }{ (worda\_count * wordb\_count)}`. @@ -703,6 +708,11 @@ def npmi_scorer(worda_count, wordb_count, bigram_count, len_vocab, min_count, co corpus_word_count : int Total number of words in the corpus. + Returns + ------- + float + Score for given bi-gram, in the range -1 to 1. + Notes ----- Formula: :math:`\frac{ln(prop(word_a, word_b) / (prop(word_a)*prop(word_b)))}{ -ln(prop(word_a, word_b)}`, From c3d2299b74d30b54659ba3274b6f79253a33ff93 Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Fri, 14 Dec 2018 14:59:15 +0500 Subject: [PATCH 50/66] Remove unnecessary creations of lists at all (#2261) * remove unnecessary creations of lists at all gensim * fixes after review: - hanged indent - return assertTrue line - fix bug in _get_average_score in python2 - a bit refactor bm25 --- gensim/corpora/bleicorpus.py | 4 +++- gensim/corpora/sharded_corpus.py | 9 ++++----- gensim/corpora/ucicorpus.py | 4 +++- gensim/corpora/wikicorpus.py | 7 ++++--- gensim/matutils.py | 8 ++++---- gensim/models/atmodel.py | 10 +++++----- gensim/models/basemodel.py | 2 +- gensim/models/coherencemodel.py | 2 +- gensim/models/hdpmodel.py | 16 +++++++--------- gensim/models/ldamodel.py | 17 ++++++++++------- gensim/models/poincare.py | 16 ++++++++-------- gensim/models/rpmodel.py | 4 +++- gensim/models/wrappers/dtmmodel.py | 2 +- gensim/models/wrappers/ldamallet.py | 4 ++-- gensim/models/wrappers/ldavowpalwabbit.py | 2 +- gensim/similarities/docsim.py | 6 +++--- gensim/summarization/bm25.py | 11 ++++++++--- gensim/summarization/keywords.py | 7 ++----- gensim/summarization/summarizer.py | 4 ++-- gensim/test/test_corpora.py | 4 ++-- gensim/test/test_doc2vec.py | 10 +++++----- gensim/test/test_fasttext.py | 2 +- gensim/test/test_ldamodel.py | 2 +- gensim/test/test_sharded_corpus.py | 6 +++--- gensim/test/test_word2vec.py | 2 +- gensim/topic_coherence/text_analysis.py | 4 ++-- 26 files changed, 87 insertions(+), 78 deletions(-) diff --git a/gensim/corpora/bleicorpus.py b/gensim/corpora/bleicorpus.py index b0e5094ac0..43eb5fdb7e 100644 --- a/gensim/corpora/bleicorpus.py +++ b/gensim/corpora/bleicorpus.py @@ -143,8 +143,10 @@ def save_corpus(fname, corpus, id2word=None, metadata=False): logger.info("no word id mapping provided; initializing from 
corpus") id2word = utils.dict_from_corpus(corpus) num_terms = len(id2word) + elif id2word: + num_terms = 1 + max(id2word) else: - num_terms = 1 + max([-1] + id2word.keys()) + num_terms = 0 logger.info("storing corpus in Blei's LDA-C format into %s", fname) with utils.smart_open(fname, 'wb') as fout: diff --git a/gensim/corpora/sharded_corpus.py b/gensim/corpora/sharded_corpus.py index 9be2d02f8f..3d300bfa7f 100644 --- a/gensim/corpora/sharded_corpus.py +++ b/gensim/corpora/sharded_corpus.py @@ -766,13 +766,12 @@ def save(self, *args, **kwargs): """ # Can we save to a different file than output_prefix? Well, why not? if len(args) == 0: - args = tuple([self.output_prefix]) + args = (self.output_prefix,) attrs_to_ignore = ['current_shard', 'current_shard_n', 'current_offset'] - if 'ignore' not in kwargs: - kwargs['ignore'] = frozenset(attrs_to_ignore) - else: - kwargs['ignore'] = frozenset([v for v in kwargs['ignore']] + attrs_to_ignore) + if 'ignore' in kwargs: + attrs_to_ignore.extend(kwargs['ignore']) + kwargs['ignore'] = frozenset(attrs_to_ignore) super(ShardedCorpus, self).save(*args, **kwargs) @classmethod diff --git a/gensim/corpora/ucicorpus.py b/gensim/corpora/ucicorpus.py index 09451581e7..5a89b071de 100644 --- a/gensim/corpora/ucicorpus.py +++ b/gensim/corpora/ucicorpus.py @@ -278,8 +278,10 @@ def save_corpus(fname, corpus, id2word=None, progress_cnt=10000, metadata=False) logger.info("no word id mapping provided; initializing from corpus") id2word = utils.dict_from_corpus(corpus) num_terms = len(id2word) + elif id2word: + num_terms = 1 + max(id2word) else: - num_terms = 1 + max([-1] + list(id2word)) + num_terms = 0 # write out vocabulary fname_vocab = utils.smart_extension(fname, '.vocab') diff --git a/gensim/corpora/wikicorpus.py b/gensim/corpora/wikicorpus.py index a7302fe5c0..b7ad94083d 100644 --- a/gensim/corpora/wikicorpus.py +++ b/gensim/corpora/wikicorpus.py @@ -289,10 +289,10 @@ def remove_template(s): # Find the start and end position of each template by finding the opening # '{{' and closing '}}' n_open, n_close = 0, 0 - starts, ends = [], [] + starts, ends = [], [-1] in_template = False prev_c = None - for i, c in enumerate(iter(s)): + for i, c in enumerate(s): if not in_template: if c == '{' and c == prev_c: starts.append(i - 1) @@ -310,7 +310,8 @@ def remove_template(s): prev_c = c # Remove all the templates - return ''.join([s[end + 1:start] for start, end in zip(starts + [None], [-1] + ends)]) + starts.append(None) + return ''.join(s[end + 1:start] for end, start in zip(ends, starts)) def remove_file(s): diff --git a/gensim/matutils.py b/gensim/matutils.py index d159490a70..92a2f61929 100644 --- a/gensim/matutils.py +++ b/gensim/matutils.py @@ -152,8 +152,8 @@ def corpus2csc(corpus, num_terms=None, dtype=np.float64, num_docs=None, num_nnz= for docno, doc in enumerate(corpus): if printprogress and docno % printprogress == 0: logger.info("PROGRESS: at document #%i", docno) - indices.extend([feature_id for feature_id, _ in doc]) - data.extend([feature_weight for _, feature_weight in doc]) + indices.extend(feature_id for feature_id, _ in doc) + data.extend(feature_weight for _, feature_weight in doc) num_nnz += len(doc) indptr.append(num_nnz) if num_terms is None: @@ -850,8 +850,8 @@ def softcossim(vec1, vec2, similarity_matrix): vec2 = dict(vec2) word_indices = sorted(set(chain(vec1, vec2))) dtype = similarity_matrix.dtype - vec1 = np.array([vec1[i] if i in vec1 else 0 for i in word_indices], dtype=dtype) - vec2 = np.array([vec2[i] if i in vec2 else 0 for i in 
word_indices], dtype=dtype) + vec1 = np.fromiter((vec1[i] if i in vec1 else 0 for i in word_indices), dtype=dtype, count=len(word_indices)) + vec2 = np.fromiter((vec2[i] if i in vec2 else 0 for i in word_indices), dtype=dtype, count=len(word_indices)) dense_matrix = similarity_matrix[[[i] for i in word_indices], word_indices].todense() vec1len = vec1.T.dot(dense_matrix).dot(vec1)[0, 0] vec2len = vec2.T.dot(dense_matrix).dot(vec2)[0, 0] diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index f51e7848f0..a76f6ed046 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -465,10 +465,10 @@ def inference(self, chunk, author2doc, doc2author, rhot, collect_sstats=False, c else: ids = [idx for idx, _ in doc] ids = np.array(ids, dtype=np.int) - cts = np.array([cnt for _, cnt in doc], dtype=np.int) + cts = np.fromiter((cnt for _, cnt in doc), dtype=np.int, count=len(doc)) # Get all authors in current document, and convert the author names to integer IDs. - authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.int) + authors_d = np.fromiter((self.author2id[a] for a in self.doc2author[doc_no]), dtype=np.int) gammad = self.state.gamma[authors_d, :] # gamma of document d before update. tilde_gamma = gammad.copy() # gamma that will be updated. @@ -976,9 +976,9 @@ def bound(self, chunk, chunk_doc_idx=None, subsample_ratio=1.0, author2doc=None, else: doc_no = d # Get all authors in current document, and convert the author names to integer IDs. - authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.int) - ids = np.array([id for id, _ in doc], dtype=np.int) # Word IDs in doc. - cts = np.array([cnt for _, cnt in doc], dtype=np.int) # Word counts. + authors_d = np.fromiter((self.author2id[a] for a in self.doc2author[doc_no]), dtype=np.int) + ids = np.fromiter((id for id, _ in doc), dtype=np.int, count=len(doc)) # Word IDs in doc. + cts = np.fromiter((cnt for _, cnt in doc), dtype=np.int, count=len(doc)) # Word counts. if d % self.chunksize == 0: logger.debug("bound: at document #%i in chunk", d) diff --git a/gensim/models/basemodel.py b/gensim/models/basemodel.py index 371b5b7010..04422f8199 100644 --- a/gensim/models/basemodel.py +++ b/gensim/models/basemodel.py @@ -15,7 +15,7 @@ def print_topic(self, topicno, topn=10): String representation of topic, like '-0.340 * "category" + 0.298 * "$M$" + 0.183 * "algebra" + ... '. """ - return ' + '.join(['%.3f*"%s"' % (v, k) for k, v in self.show_topic(topicno, topn)]) + return ' + '.join('%.3f*"%s"' % (v, k) for k, v in self.show_topic(topicno, topn)) def print_topics(self, num_topics=20, num_words=10): """Get the most significant topics (alias for `show_topics()` method). 
diff --git a/gensim/models/coherencemodel.py b/gensim/models/coherencemodel.py index a46414a1a5..9633a2e62f 100644 --- a/gensim/models/coherencemodel.py +++ b/gensim/models/coherencemodel.py @@ -444,7 +444,7 @@ def _ensure_elements_are_ids(self, topic): try: return np.array([self.dictionary.token2id[token] for token in topic]) except KeyError: # might be a list of token ids already, but let's verify all in dict - topic = [self.dictionary.id2token[_id] for _id in topic] + topic = (self.dictionary.id2token[_id] for _id in topic) return np.array([self.dictionary.token2id[token] for token in topic]) def _update_accumulator(self, new_topics): diff --git a/gensim/models/hdpmodel.py b/gensim/models/hdpmodel.py index 47100f5f9a..47eb997921 100755 --- a/gensim/models/hdpmodel.py +++ b/gensim/models/hdpmodel.py @@ -1042,14 +1042,12 @@ def show_topics(self, num_topics=10, num_words=10, log=False, formatted=True): """ shown = [] - if num_topics < 0: - num_topics = len(self.data) - + num_topics = max(num_topics, 0) num_topics = min(num_topics, len(self.data)) for k in xrange(num_topics): - lambdak = list(self.data[k, :]) - lambdak = lambdak / sum(lambdak) + lambdak = self.data[k, :] + lambdak = lambdak / lambdak.sum() temp = zip(lambdak, xrange(len(lambdak))) temp = sorted(temp, key=lambda x: x[0], reverse=True) @@ -1131,8 +1129,8 @@ def show_topic(self, topic_id, topn=20, log=False, formatted=False, num_words=No ) topn = num_words - lambdak = list(self.data[topic_id, :]) - lambdak = lambdak / sum(lambdak) + lambdak = self.data[topic_id, :] + lambdak = lambdak / lambdak.sum() temp = zip(lambdak, xrange(len(lambdak))) temp = sorted(temp, key=lambda x: x[0], reverse=True) @@ -1186,9 +1184,9 @@ def format_topic(self, topic_id, topic_terms): """ if self.STYLE_GENSIM == self.style: - fmt = ' + '.join(['%.3f*%s' % (weight, word) for (word, weight) in topic_terms]) + fmt = ' + '.join('%.3f*%s' % (weight, word) for (word, weight) in topic_terms) else: - fmt = '\n'.join([' %20s %.8f' % (word, weight) for (word, weight) in topic_terms]) + fmt = '\n'.join(' %20s %.8f' % (word, weight) for (word, weight) in topic_terms) fmt = (topic_id, fmt) return fmt diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index 08d6e7fda5..a1de55d51b 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -567,15 +567,18 @@ def init_dir_prior(self, prior, name): if isinstance(prior, six.string_types): if prior == 'symmetric': logger.info("using symmetric %s at %s", name, 1.0 / self.num_topics) - init_prior = np.asarray([1.0 / self.num_topics for i in xrange(prior_shape)], dtype=self.dtype) + init_prior = np.fromiter((1.0 / self.num_topics for i in xrange(prior_shape)), + dtype=self.dtype, count=prior_shape) elif prior == 'asymmetric': init_prior = \ - np.asarray([1.0 / (i + np.sqrt(prior_shape)) for i in xrange(prior_shape)], dtype=self.dtype) + np.fromiter((1.0 / (i + np.sqrt(prior_shape)) for i in xrange(prior_shape)), + dtype=self.dtype, count=prior_shape) init_prior /= init_prior.sum() logger.info("using asymmetric %s %s", name, list(init_prior)) elif prior == 'auto': is_auto = True - init_prior = np.asarray([1.0 / self.num_topics for i in xrange(prior_shape)], dtype=self.dtype) + init_prior = np.fromiter((1.0 / self.num_topics for i in xrange(prior_shape)), + dtype=self.dtype, count=prior_shape) if name == 'alpha': logger.info("using autotuned %s, starting with %s", name, list(init_prior)) else: @@ -584,8 +587,8 @@ def init_dir_prior(self, prior, name): init_prior = np.asarray(prior, 
dtype=self.dtype) elif isinstance(prior, np.ndarray): init_prior = prior.astype(self.dtype, copy=False) - elif isinstance(prior, np.number) or isinstance(prior, numbers.Real): - init_prior = np.asarray([prior] * prior_shape, dtype=self.dtype) + elif isinstance(prior, (np.number, numbers.Real)): + init_prior = np.fromiter((prior for i in xrange(prior_shape)), dtype=self.dtype) else: raise ValueError("%s must be either a np array of scalars, list of scalars, or scalar" % name) @@ -672,7 +675,7 @@ def inference(self, chunk, collect_sstats=False): ids = [int(idx) for idx, _ in doc] else: ids = [idx for idx, _ in doc] - cts = np.array([cnt for _, cnt in doc], dtype=self.dtype) + cts = np.fromiter((cnt for _, cnt in doc), dtype=self.dtype, count=len(doc)) gammad = gamma[d, :] Elogthetad = Elogtheta[d, :] expElogthetad = expElogtheta[d, :] @@ -1162,7 +1165,7 @@ def show_topics(self, num_topics=10, num_words=10, log=False, formatted=True): bestn = matutils.argsort(topic_, num_words, reverse=True) topic_ = [(self.id2word[id], topic_[id]) for id in bestn] if formatted: - topic_ = ' + '.join(['%.3f*"%s"' % (v, k) for k, v in topic_]) + topic_ = ' + '.join('%.3f*"%s"' % (v, k) for k, v in topic_) shown.append((i, topic_)) if log: diff --git a/gensim/models/poincare.py b/gensim/models/poincare.py index 7a4a33d561..1c4b089077 100644 --- a/gensim/models/poincare.py +++ b/gensim/models/poincare.py @@ -194,8 +194,8 @@ def _load_relations(self): logger.info("loaded %d relations from train data, %d nodes", len(all_relations), len(vocab)) self.kv.vocab = vocab self.kv.index2word = index2word - self.indices_set = set((range(len(index2word)))) # Set of all node indices - self.indices_array = np.array(range(len(index2word))) # Numpy array of all node indices + self.indices_set = set(range(len(index2word))) # Set of all node indices + self.indices_array = np.fromiter(range(len(index2word)), dtype=int) # Numpy array of all node indices self.all_relations = all_relations self.node_relations = node_relations self._init_node_probabilities() @@ -209,11 +209,11 @@ def _init_embeddings(self): def _init_node_probabilities(self): """Initialize a-priori probabilities.""" - counts = np.array([ + counts = np.fromiter(( self.kv.vocab[self.kv.index2word[i]].count for i in range(len(self.kv.index2word)) - ], - dtype=np.float64) + ), + dtype=np.float64, count=len(self.kv.index2word)) self._node_counts_cumsum = np.cumsum(counts) self._node_probabilities = counts / counts.sum() @@ -475,8 +475,8 @@ def _sample_negatives_batch(self, nodes): Parameters ---------- - nodes : list of int - List of node indices for which negative samples are to be returned. + nodes : iterable of int + Iterable of node indices for which negative samples are to be returned. Returns ------- @@ -503,7 +503,7 @@ def _train_on_batch(self, relations, check_gradients=False): The batch that was just trained on, contains computed loss for the batch. 
""" - all_negatives = self._sample_negatives_batch([relation[0] for relation in relations]) + all_negatives = self._sample_negatives_batch(relation[0] for relation in relations) batch = self._prepare_training_batch(relations, all_negatives, check_gradients) self._update_vectors_batch(batch) return batch diff --git a/gensim/models/rpmodel.py b/gensim/models/rpmodel.py index a348d4040b..a07af66112 100644 --- a/gensim/models/rpmodel.py +++ b/gensim/models/rpmodel.py @@ -85,8 +85,10 @@ def initialize(self, corpus): logger.info("no word id mapping provided; initializing from corpus, assuming identity") self.id2word = utils.dict_from_corpus(corpus) self.num_terms = len(self.id2word) + elif self.id2word: + self.num_terms = 1 + max(self.id2word) else: - self.num_terms = 1 + max([-1] + self.id2word.keys()) + self.num_terms = 0 shape = self.num_topics, self.num_terms logger.info("constructing %s random matrix", str(shape)) diff --git a/gensim/models/wrappers/dtmmodel.py b/gensim/models/wrappers/dtmmodel.py index a88d7eb23e..8efd81995a 100644 --- a/gensim/models/wrappers/dtmmodel.py +++ b/gensim/models/wrappers/dtmmodel.py @@ -532,7 +532,7 @@ def print_topic(self, topicid, time, topn=10, num_words=None): warnings.warn("The parameter `num_words` is deprecated, will be removed in 4.0.0, use `topn` instead.") topn = num_words - return ' + '.join(['%.3f*%s' % v for v in self.show_topic(topicid, time, topn=topn)]) + return ' + '.join('%.3f*%s' % v for v in self.show_topic(topicid, time, topn=topn)) def dtm_vis(self, corpus, time): """Get data specified by pyLDAvis format. diff --git a/gensim/models/wrappers/ldamallet.py b/gensim/models/wrappers/ldamallet.py index 1c321d1269..2649eb6953 100644 --- a/gensim/models/wrappers/ldamallet.py +++ b/gensim/models/wrappers/ldamallet.py @@ -340,7 +340,7 @@ def load_word_topics(self): with utils.smart_open(self.fstate()) as fin: _ = next(fin) # header - self.alpha = numpy.array([float(val) for val in next(fin).split()[2:]]) + self.alpha = numpy.fromiter(next(fin).split()[2:], dtype=float) assert len(self.alpha) == self.num_topics, "mismatch between MALLET vs. requested topics" _ = next(fin) # noqa:F841 beta for lineno, line in enumerate(fin): @@ -560,7 +560,7 @@ def read_doctopics(self, fname, eps=1e-6, renorm=True): if renorm: # explicitly normalize weights to sum up to 1.0, just to be sure... - total_weight = float(sum([weight for _, weight in doc])) + total_weight = float(sum(weight for _, weight in doc)) if total_weight: doc = [(id_, float(weight) / total_weight) for id_, weight in doc] yield doc diff --git a/gensim/models/wrappers/ldavowpalwabbit.py b/gensim/models/wrappers/ldavowpalwabbit.py index 6582fe96d6..44450340f6 100644 --- a/gensim/models/wrappers/ldavowpalwabbit.py +++ b/gensim/models/wrappers/ldavowpalwabbit.py @@ -376,7 +376,7 @@ def print_topic(self, topicid, topn=10): Topic `topicid` in text representation. """ - return ' + '.join(['{0:.3f}*{1}'.format(v[0], v[1]) for v in self.show_topic(topicid, topn)]) + return ' + '.join('{0:.3f}*{1}'.format(v[0], v[1]) for v in self.show_topic(topicid, topn)) def show_topic(self, topicid, topn=10): """Get `num_words` most probable words for the given `topicid`. 
diff --git a/gensim/similarities/docsim.py b/gensim/similarities/docsim.py index e301486eb3..5efada4608 100755 --- a/gensim/similarities/docsim.py +++ b/gensim/similarities/docsim.py @@ -333,7 +333,7 @@ def __init__(self, output_prefix, corpus, num_features, num_best=None, chunksize def __len__(self): """Get length of index.""" - return len(self.fresh_docs) + sum([len(shard) for shard in self.shards]) + return len(self.fresh_docs) + sum(len(shard) for shard in self.shards) def __str__(self): return "Similarity index with %i documents in %i shards (stored under %s)" % ( @@ -467,11 +467,11 @@ def query_shards(self, query): Query results. """ - args = zip([query] * len(self.shards), self.shards) + args = izip([query] * len(self.shards), self.shards) if PARALLEL_SHARDS and PARALLEL_SHARDS > 1: logger.debug("spawning %i query processes", PARALLEL_SHARDS) pool = multiprocessing.Pool(PARALLEL_SHARDS) - result = pool.imap(query_shard, args, chunksize=1 + len(list(args)) / PARALLEL_SHARDS) + result = pool.imap(query_shard, args, chunksize=1 + len(self.shards) / PARALLEL_SHARDS) else: # serial processing, one shard after another pool = None diff --git a/gensim/summarization/bm25.py b/gensim/summarization/bm25.py index e516d625f6..7385078304 100644 --- a/gensim/summarization/bm25.py +++ b/gensim/summarization/bm25.py @@ -79,7 +79,7 @@ def __init__(self, corpus): """ self.corpus_size = len(corpus) - self.avgdl = sum(float(len(x)) for x in corpus) / self.corpus_size + self.avgdl = 0 self.corpus = corpus self.f = [] self.df = {} @@ -89,9 +89,12 @@ def __init__(self, corpus): def initialize(self): """Calculates frequencies of terms in documents and in corpus. Also computes inverse document frequencies.""" + num_doc = 0 for document in self.corpus: - frequencies = {} + num_doc += len(document) self.doc_len.append(len(document)) + + frequencies = {} for word in document: if word not in frequencies: frequencies[word] = 0 @@ -103,6 +106,8 @@ def initialize(self): self.df[word] = 0 self.df[word] += 1 + self.avgdl = float(num_doc) / self.corpus_size + for word, freq in iteritems(self.df): self.idf[word] = math.log(self.corpus_size - freq + 0.5) - math.log(freq + 0.5) @@ -213,7 +218,7 @@ def get_bm25_weights(corpus, n_jobs=1): """ bm25 = BM25(corpus) - average_idf = sum(float(val) for val in bm25.idf.values()) / len(bm25.idf) + average_idf = float(sum(val for val in bm25.idf.values())) / len(bm25.idf) n_processes = effective_n_jobs(n_jobs) if n_processes == 1: diff --git a/gensim/summarization/keywords.py b/gensim/summarization/keywords.py index 4d0523c9fb..1d963de71d 100644 --- a/gensim/summarization/keywords.py +++ b/gensim/summarization/keywords.py @@ -426,11 +426,8 @@ def _get_average_score(concept, _keywords): """ word_list = concept.split() - word_counter = 0 - total = 0 - for word in word_list: - total += _keywords[word] - word_counter += 1 + word_counter = len(word_list) + total = float(sum(_keywords[word] for word in word_list)) return total / word_counter diff --git a/gensim/summarization/summarizer.py b/gensim/summarization/summarizer.py index 50c77ab6c2..d7250363a0 100644 --- a/gensim/summarization/summarizer.py +++ b/gensim/summarization/summarizer.py @@ -145,7 +145,7 @@ def _get_doc_length(doc): Length of document. 
""" - return sum([item[1] for item in doc]) + return sum(item[1] for item in doc) @deprecated("Function will be removed in 4.0.0") @@ -303,7 +303,7 @@ def _format_results(extracted_sentences, split): """ if split: return [sentence.text for sentence in extracted_sentences] - return "\n".join([sentence.text for sentence in extracted_sentences]) + return "\n".join(sentence.text for sentence in extracted_sentences) def _build_hasheable_corpus(corpus): diff --git a/gensim/test/test_corpora.py b/gensim/test/test_corpora.py index a9568e4508..ac5581ad6f 100644 --- a/gensim/test/test_corpora.py +++ b/gensim/test/test_corpora.py @@ -726,7 +726,7 @@ def reject_all(elem, *args, **kwargs): return False corpus = self.corpus_class(self.enwiki, filter_articles=reject_all) texts = corpus.get_texts() - self.assertTrue(all([not t for t in texts])) + self.assertFalse(any(texts)) def keep_some(elem, title, *args, **kwargs): return title[0] == 'C' @@ -847,7 +847,7 @@ def test_non_trivial_structure(self): corpus = textcorpus.TextDirectoryCorpus(dirpath) filenames = list(corpus.iter_filepaths()) - base_names = sorted([name[len(dirpath) + 1:] for name in filenames]) + base_names = sorted(name[len(dirpath) + 1:] for name in filenames) expected = sorted([ '0.txt', 'a_folder/1.txt', diff --git a/gensim/test/test_doc2vec.py b/gensim/test/test_doc2vec.py index d61adef085..a320bd72a1 100644 --- a/gensim/test/test_doc2vec.py +++ b/gensim/test/test_doc2vec.py @@ -717,7 +717,7 @@ def __getitem__(self, token): def __str__(self): """Abbreviated name, built from submodels' names""" - return "+".join([str(model) for model in self.models]) + return "+".join(str(model) for model in self.models) @property def epochs(self): @@ -812,11 +812,11 @@ def read_su_sentiment_rotten_tomatoes(dirname, lowercase=True): split = [None, 'train', 'test', 'dev'][split_i] phrases[id] = SentimentPhrase(words, [id], split, sentiment, sentence_id) - assert len([phrase for phrase in phrases if phrase.sentence_id is not None]) == len(info_by_sentence) # all + assert sum(1 for phrase in phrases if phrase.sentence_id is not None) == len(info_by_sentence) # all # counts don't match 8544, 2210, 1101 because 13 TRAIN and 1 DEV sentences are duplicates - assert len([phrase for phrase in phrases if phrase.split == 'train']) == 8531 # 'train' - assert len([phrase for phrase in phrases if phrase.split == 'test']) == 2210 # 'test' - assert len([phrase for phrase in phrases if phrase.split == 'dev']) == 1100 # 'dev' + assert sum(1 for phrase in phrases if phrase.split == 'train') == 8531 # 'train' + assert sum(1 for phrase in phrases if phrase.split == 'test') == 2210 # 'test' + assert sum(1 for phrase in phrases if phrase.split == 'dev') == 1100 # 'dev' logging.info( "loaded corpus with %i sentences and %i phrases from %s", diff --git a/gensim/test/test_fasttext.py b/gensim/test/test_fasttext.py index 4cca6d5d1a..c9935431e4 100644 --- a/gensim/test/test_fasttext.py +++ b/gensim/test/test_fasttext.py @@ -674,7 +674,7 @@ def online_sanity(self, model): terro.append(l) else: others.append(l) - self.assertTrue(all(['terrorism' not in l for l in others])) + self.assertTrue(all('terrorism' not in l for l in others)) model.build_vocab(others) model.train(others, total_examples=model.corpus_count, epochs=model.epochs) # checks that `vectors` is different from `vectors_vocab` diff --git a/gensim/test/test_ldamodel.py b/gensim/test/test_ldamodel.py index 11df414903..4fb0a1d5d8 100644 --- a/gensim/test/test_ldamodel.py +++ b/gensim/test/test_ldamodel.py @@ -272,7 +272,7 @@ 
def testGetDocumentTopics(self): word_phi_count_na += 1 self.assertTrue(model.state.numdocs > doc_topic_count_na) - self.assertTrue(sum([len(i) for i in corpus]) > word_phi_count_na) + self.assertTrue(sum(len(i) for i in corpus) > word_phi_count_na) doc_topics, word_topics, word_phis = model.get_document_topics(self.corpus[1], per_word_topics=True) diff --git a/gensim/test/test_sharded_corpus.py b/gensim/test/test_sharded_corpus.py index 70aba1eabe..a7fcbe0aa0 100644 --- a/gensim/test/test_sharded_corpus.py +++ b/gensim/test/test_sharded_corpus.py @@ -25,7 +25,7 @@ class TestShardedCorpus(unittest.TestCase): # cls.dim = 1000 # cls.data = mock_data(dim=cls.dim) # - # random_string = ''.join([random.choice('1234567890') for _ in xrange(8)]) + # random_string = ''.join(random.choice('1234567890') for _ in xrange(8)) # # cls.tmp_dir = 'test-temp-' + random_string # os.makedirs(cls.tmp_dir) @@ -39,7 +39,7 @@ class TestShardedCorpus(unittest.TestCase): def setUp(self): self.dim = 1000 - self.random_string = ''.join([random.choice('1234567890') for _ in xrange(8)]) + self.random_string = ''.join(random.choice('1234567890') for _ in xrange(8)) self.tmp_dir = 'test-temp-' + self.random_string os.makedirs(self.tmp_dir) @@ -157,7 +157,7 @@ def test_getitem_sparse2sparse(self): self.assertTrue(isinstance(dslice, sparse.csr_matrix)) self.assertEqual(dslice.shape, (4, corpus.dim)) - expected_nnz = sum([len(self.data[i]) for i in range(2, 6)]) + expected_nnz = sum(len(self.data[i]) for i in range(2, 6)) self.assertEqual(dslice.getnnz(), expected_nnz) ilist = corpus[[2, 3, 4, 5]] diff --git a/gensim/test/test_word2vec.py b/gensim/test/test_word2vec.py index 7a7ef31262..de8abd702a 100644 --- a/gensim/test/test_word2vec.py +++ b/gensim/test/test_word2vec.py @@ -246,7 +246,7 @@ def onlineSanity(self, model, trained_model=False): terro.append(l) else: others.append(l) - self.assertTrue(all(['terrorism' not in l for l in others])) + self.assertTrue(all('terrorism' not in l for l in others)) model.build_vocab(others, update=trained_model) model.train(others, total_examples=model.corpus_count, epochs=model.epochs) self.assertFalse('terrorism' in model.wv.vocab) diff --git a/gensim/topic_coherence/text_analysis.py b/gensim/topic_coherence/text_analysis.py index ddd06bde8a..79ffc132fd 100644 --- a/gensim/topic_coherence/text_analysis.py +++ b/gensim/topic_coherence/text_analysis.py @@ -302,10 +302,10 @@ def _iter_texts(self, texts): dtype = np.uint16 if np.iinfo(np.uint16).max >= self._vocab_size else np.uint32 for text in texts: if self.text_is_relevant(text): - yield np.array([ + yield np.fromiter(( self.id2contiguous[self.token2id[w]] if w in self.relevant_words else self._none_token - for w in text], dtype=dtype) + for w in text), dtype=dtype, count=len(text)) def text_is_relevant(self, text): """Check if the text has any relevant words.""" From 5f6b28c538d7509138eb090c41917cb59e4709af Mon Sep 17 00:00:00 2001 From: Rupal Sharma <39291744+rsdel2007@users.noreply.github.com> Date: Sat, 15 Dec 2018 14:10:23 +0530 Subject: [PATCH 51/66] Fix deprecation warning `np.sum(generator)` (#2296) --- gensim/models/ldamodel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index a1de55d51b..6d5cfa209f 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -1094,7 +1094,7 @@ def bound(self, corpus, gamma=None, subsample_ratio=1.0): assert Elogthetad.dtype == self.dtype # E[log p(doc | theta, beta)] - score += np.sum(cnt * 
logsumexp(Elogthetad + Elogbeta[:, int(id)]) for id, cnt in doc) + score += sum(cnt * logsumexp(Elogthetad + Elogbeta[:, int(id)]) for id, cnt in doc) # E[log p(theta | alpha) - log q(theta | gamma)]; assumes alpha is a vector score += np.sum((self.alpha - gammad) * Elogthetad) From b8219aca7fbab22724fbd35c64dde0386d364048 Mon Sep 17 00:00:00 2001 From: Rupal Sharma <39291744+rsdel2007@users.noreply.github.com> Date: Sun, 16 Dec 2018 20:01:09 +0530 Subject: [PATCH 52/66] Fix typos in `gensim/models/keyedvectors.py` (#2290) --- gensim/models/keyedvectors.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index 9954abe75f..ac41cae7da 100644 --- a/gensim/models/keyedvectors.py +++ b/gensim/models/keyedvectors.py @@ -16,7 +16,7 @@ and *vectors*. Each entity is identified by its string id, so this is a mapping between {str => 1D numpy array}. The entity typically corresponds to a word (so the mapping maps words to 1D vectors), -but for some models, they key can also correspond to a document, a graph node etc. To generalize +but for some models, the key can also correspond to a document, a graph node etc. To generalize over different use-cases, this module calls the keys **entities**. Each entity is always represented by its string id, no matter whether the entity is a word, a document or a graph node. @@ -767,7 +767,7 @@ def wmdistance(self, document1, document2): if len(document1) == 0 or len(document2) == 0: logger.info( - "At least one of the documents had no words that werein the vocabulary. " + "At least one of the documents had no words that were in the vocabulary. " "Aborting (returning inf)." ) return float('inf') @@ -1401,7 +1401,7 @@ def save_word2vec_format(self, fname, fvocab=None, binary=False, total_vec=None) fvocab : str, optional Optional file path used to save the vocabulary binary : bool, optional - If True, the data wil be saved in binary word2vec format, else it will be saved in plain text. + If True, the data will be saved in binary word2vec format, else it will be saved in plain text. total_vec : int, optional Optional parameter to explicitly specify total no. of vectors (in case word vectors are appended with document vectors afterwards). @@ -1839,7 +1839,7 @@ def save_word2vec_format(self, fname, prefix='*dt_', fvocab=None, Explicitly specify total no. of vectors (in case word vectors are appended with document vectors afterwards) binary : bool, optional - If True, the data wil be saved in binary word2vec format, else it will be saved in plain text. + If True, the data will be saved in binary word2vec format, else it will be saved in plain text. write_first_line : bool, optional Whether to print the first line in the file. Useful when saving doc-vectors after word-vectors. From 3d5a21c1c8128cb8dd4f6e51e9ef3dc5af000871 Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Mon, 17 Dec 2018 10:19:42 +0500 Subject: [PATCH 53/66] Improve `six` usage (`xrange`, `map, `zip`) (#2264) * replace xrange with range + import from six.moves Now the code contains both range and xrange. It's not consistent. Since Python2 support will end soon, it's better to make the code more like in Python3. 
* fix build --- docs/notebooks/Wordrank_comparisons.ipynb | 2 +- docs/notebooks/translation_matrix.ipynb | 6 +++--- gensim/corpora/_mmreader.pyx | 6 +++--- gensim/corpora/bleicorpus.py | 4 ++-- gensim/corpora/dictionary.py | 7 +++---- gensim/corpora/lowcorpus.py | 4 ++-- gensim/corpora/sharded_corpus.py | 14 ++++++------- gensim/corpora/ucicorpus.py | 4 ++-- gensim/interfaces.py | 4 ++-- gensim/matutils.py | 8 ++++---- gensim/models/atmodel.py | 8 ++++---- gensim/models/base_any2vec.py | 6 +++--- gensim/models/deprecated/doc2vec.py | 6 +++--- gensim/models/deprecated/keyedvectors.py | 8 ++++---- gensim/models/deprecated/word2vec.py | 22 ++++++++++---------- gensim/models/doc2vec.py | 4 ++-- gensim/models/hdpmodel.py | 14 ++++++------- gensim/models/keyedvectors.py | 6 +++--- gensim/models/ldamodel.py | 25 +++++++++++------------ gensim/models/ldamulticore.py | 4 ++-- gensim/models/lsimodel.py | 10 ++++----- gensim/models/poincare.py | 1 + gensim/models/utils_any2vec.py | 6 +++--- gensim/models/word2vec.py | 18 ++++++++-------- gensim/parsing/porter.py | 4 ++-- gensim/similarities/docsim.py | 10 ++++----- gensim/summarization/bm25.py | 6 +++--- gensim/summarization/keywords.py | 10 ++++----- gensim/summarization/pagerank_weighted.py | 6 +++--- gensim/summarization/summarizer.py | 10 ++++----- gensim/summarization/textcleaner.py | 4 ++-- gensim/test/test_doc2vec.py | 4 ++-- gensim/test/test_sharded_corpus.py | 14 ++++++------- gensim/utils.py | 10 ++++----- 34 files changed, 137 insertions(+), 138 deletions(-) diff --git a/docs/notebooks/Wordrank_comparisons.ipynb b/docs/notebooks/Wordrank_comparisons.ipynb index 26bac2e880..968e481cd7 100644 --- a/docs/notebooks/Wordrank_comparisons.ipynb +++ b/docs/notebooks/Wordrank_comparisons.ipynb @@ -1071,7 +1071,7 @@ " # sort analogies according to their mean frequences \n", " copy_mean_freq = sorted(copy_mean_freq.items(), key=lambda x: x[1][1])\n", " # prepare analogies buckets according to given size\n", - " for centre_p in xrange(bucket_size//2, len(copy_mean_freq), bucket_size):\n", + " for centre_p in range(bucket_size//2, len(copy_mean_freq), bucket_size):\n", " bucket = copy_mean_freq[centre_p-bucket_size//2:centre_p+bucket_size//2]\n", " b_acc = 0\n", " # calculate current bucket accuracy with b_acc count\n", diff --git a/docs/notebooks/translation_matrix.ipynb b/docs/notebooks/translation_matrix.ipynb index 2a82098752..5439005a8c 100644 --- a/docs/notebooks/translation_matrix.ipynb +++ b/docs/notebooks/translation_matrix.ipynb @@ -417,7 +417,7 @@ "duration = []\n", "sizeofword = []\n", "\n", - "for idx in xrange(0, test_case):\n", + "for idx in range(0, test_case):\n", " sub_pair = word_pair[: (idx + 1) * step]\n", "\n", " startTime = time.time()\n", @@ -1450,7 +1450,7 @@ "small_train_docs = train_docs[:15000]\n", "# train for small corpus\n", "model1.build_vocab(small_train_docs)\n", - "for epoch in xrange(50):\n", + "for epoch in range(50):\n", " shuffle(small_train_docs)\n", " model1.train(small_train_docs, total_examples=len(small_train_docs), epochs=1)\n", "model.save(\"small_doc_15000_iter50.bin\")\n", @@ -1458,7 +1458,7 @@ "large_train_docs = train_docs + test_docs\n", "# train for large corpus\n", "model2.build_vocab(large_train_docs)\n", - "for epoch in xrange(50):\n", + "for epoch in range(50):\n", " shuffle(large_train_docs)\n", " model2.train(large_train_docs, total_examples=len(train_docs), epochs=1)\n", "# save the model\n", diff --git a/gensim/corpora/_mmreader.pyx b/gensim/corpora/_mmreader.pyx index f4844127a3..36cf11a1b9 
100644 --- a/gensim/corpora/_mmreader.pyx +++ b/gensim/corpora/_mmreader.pyx @@ -8,7 +8,7 @@ from __future__ import with_statement from gensim import utils from six import string_types -from six.moves import xrange +from six.moves import range import logging cimport cython @@ -148,7 +148,7 @@ cdef class MmReader(object): # return implicit (empty) documents between previous id and new id # too, to keep consistent document numbering and corpus length - for previd in xrange(previd + 1, docid): + for previd in range(previd + 1, docid): yield previd, [] # from now on start adding fields to a new document, with a new id @@ -163,7 +163,7 @@ cdef class MmReader(object): # return empty documents between the last explicit document and the number # of documents as specified in the header - for previd in xrange(previd + 1, self.num_docs): + for previd in range(previd + 1, self.num_docs): yield previd, [] def docbyoffset(self, offset): diff --git a/gensim/corpora/bleicorpus.py b/gensim/corpora/bleicorpus.py index 43eb5fdb7e..701831b1b1 100644 --- a/gensim/corpora/bleicorpus.py +++ b/gensim/corpora/bleicorpus.py @@ -14,7 +14,7 @@ from gensim import utils from gensim.corpora import IndexedCorpus -from six.moves import xrange +from six.moves import range logger = logging.getLogger(__name__) @@ -161,7 +161,7 @@ def save_corpus(fname, corpus, id2word=None, metadata=False): fname_vocab = utils.smart_extension(fname, '.vocab') logger.info("saving vocabulary of %i words to %s", num_terms, fname_vocab) with utils.smart_open(fname_vocab, 'wb') as fout: - for featureid in xrange(num_terms): + for featureid in range(num_terms): fout.write(utils.to_utf8("%s\n" % id2word.get(featureid, '---'))) return offsets diff --git a/gensim/corpora/dictionary.py b/gensim/corpora/dictionary.py index d1c437f3d5..8d2ce58364 100644 --- a/gensim/corpora/dictionary.py +++ b/gensim/corpora/dictionary.py @@ -16,8 +16,7 @@ from gensim import utils from six import PY3, iteritems, iterkeys, itervalues, string_types -from six.moves import xrange -from six.moves import zip as izip +from six.moves import zip, range if sys.version_info[0] >= 3: unicode = str @@ -466,7 +465,7 @@ def compactify(self): logger.debug("rebuilding dictionary, shrinking gaps") # build mapping from old id -> new id - idmap = dict(izip(sorted(itervalues(self.token2id)), xrange(len(self.token2id)))) + idmap = dict(zip(sorted(itervalues(self.token2id)), range(len(self.token2id)))) # reassign mappings to new ids self.token2id = {token: idmap[tokenid] for token, tokenid in iteritems(self.token2id)} @@ -703,7 +702,7 @@ def from_corpus(corpus, id2word=None): if id2word is None: # make sure length(result) == get_max_id(corpus) + 1 - result.token2id = {unicode(i): i for i in xrange(max_id + 1)} + result.token2id = {unicode(i): i for i in range(max_id + 1)} else: # id=>word mapping given: simply copy it result.token2id = {utils.to_unicode(token): idx for idx, token in iteritems(id2word)} diff --git a/gensim/corpora/lowcorpus.py b/gensim/corpora/lowcorpus.py index 9986c780f3..2944aafd27 100644 --- a/gensim/corpora/lowcorpus.py +++ b/gensim/corpora/lowcorpus.py @@ -14,7 +14,7 @@ from gensim import utils from gensim.corpora import IndexedCorpus from six import iterkeys -from six.moves import xrange, zip as izip +from six.moves import zip, range logger = logging.getLogger(__name__) @@ -109,7 +109,7 @@ def __init__(self, fname, id2word=None, line2words=split_on_space): all_terms.update(word for word, wordCnt in doc) all_terms = sorted(all_terms) # sort the list of all words; rank in 
that list = word's integer id # build a mapping of word id(int) -> word (string) - self.id2word = dict(izip(xrange(len(all_terms)), all_terms)) + self.id2word = dict(zip(range(len(all_terms)), all_terms)) else: logger.info("using provided word mapping (%i ids)", len(id2word)) self.id2word = id2word diff --git a/gensim/corpora/sharded_corpus.py b/gensim/corpora/sharded_corpus.py index 3d300bfa7f..754cc7bbf4 100644 --- a/gensim/corpora/sharded_corpus.py +++ b/gensim/corpora/sharded_corpus.py @@ -26,7 +26,7 @@ import scipy.sparse as sparse import time -from six.moves import xrange +from six.moves import range import gensim from gensim.corpora import IndexedCorpus @@ -432,7 +432,7 @@ def resize_shards(self, shardsize): new_shard_names = [] new_offsets = [0] - for new_shard_idx in xrange(n_new_shards): + for new_shard_idx in range(n_new_shards): new_start = shardsize * new_shard_idx new_stop = new_start + shardsize @@ -461,7 +461,7 @@ def resize_shards(self, shardsize): # Move old shard files out, new ones in. Complicated due to possibility # of exceptions. - old_shard_names = [self._shard_name(n) for n in xrange(self.n_shards)] + old_shard_names = [self._shard_name(n) for n in range(self.n_shards)] try: for old_shard_n, old_shard_name in enumerate(old_shard_names): os.remove(old_shard_name) @@ -644,7 +644,7 @@ def __getitem__(self, offset): s_result = self.__add_to_slice(s_result, result_start, result_stop, shard_start, shard_stop) # First and last get special treatment, these are in between - for shard_n in xrange(first_shard + 1, last_shard): + for shard_n in range(first_shard + 1, last_shard): self.load_shard(shard_n) result_start = result_stop @@ -735,7 +735,7 @@ def row_sparse2gensim(row_idx, csr_matrix): g_row = [(col_idx, csr_matrix[row_idx, col_idx]) for col_idx in indices] return g_row - output = (row_sparse2gensim(i, result) for i in xrange(result.shape[0])) + output = (row_sparse2gensim(i, result) for i in range(result.shape[0])) return output @@ -745,7 +745,7 @@ def _getitem_dense2gensim(self, result): output = gensim.matutils.full2sparse(result) else: output = (gensim.matutils.full2sparse(result[i]) - for i in xrange(result.shape[0])) + for i in range(result.shape[0])) return output # Overriding the IndexedCorpus and other corpus superclass methods @@ -754,7 +754,7 @@ def __iter__(self): Yield dataset items one by one (generator). 
""" - for i in xrange(len(self)): + for i in range(len(self)): yield self[i] def save(self, *args, **kwargs): diff --git a/gensim/corpora/ucicorpus.py b/gensim/corpora/ucicorpus.py index 5a89b071de..9831c7bba3 100644 --- a/gensim/corpora/ucicorpus.py +++ b/gensim/corpora/ucicorpus.py @@ -17,7 +17,7 @@ from gensim.corpora import IndexedCorpus from gensim.matutils import MmReader from gensim.matutils import MmWriter -from six.moves import xrange +from six.moves import range logger = logging.getLogger(__name__) @@ -287,7 +287,7 @@ def save_corpus(fname, corpus, id2word=None, progress_cnt=10000, metadata=False) fname_vocab = utils.smart_extension(fname, '.vocab') logger.info("saving vocabulary of %i words to %s", num_terms, fname_vocab) with utils.smart_open(fname_vocab, 'wb') as fout: - for featureid in xrange(num_terms): + for featureid in range(num_terms): fout.write(utils.to_utf8("%s\n" % id2word.get(featureid, '---'))) logger.info("storing corpus in UCI Bag-of-Words format: %s", fname) diff --git a/gensim/interfaces.py b/gensim/interfaces.py index 56c71da747..3fd266eb62 100644 --- a/gensim/interfaces.py +++ b/gensim/interfaces.py @@ -19,7 +19,7 @@ import logging from gensim import utils, matutils -from six.moves import xrange +from six.moves import range logger = logging.getLogger(__name__) @@ -385,7 +385,7 @@ def __iter__(self): # assumes `self.corpus` holds the index as a 2-d numpy array. # this is true for MatrixSimilarity and SparseMatrixSimilarity, but # may not be true for other (future) classes..? - for chunk_start in xrange(0, self.index.shape[0], self.chunksize): + for chunk_start in range(0, self.index.shape[0], self.chunksize): # scipy.sparse doesn't allow slicing beyond real size of the matrix # (unlike numpy). so, clip the end of the chunk explicitly to make # scipy.sparse happy diff --git a/gensim/matutils.py b/gensim/matutils.py index 92a2f61929..74c0107cde 100644 --- a/gensim/matutils.py +++ b/gensim/matutils.py @@ -24,7 +24,7 @@ from scipy.special import psi # gamma function utils from six import iteritems, itervalues, string_types -from six.moves import xrange, zip as izip +from six.moves import zip, range logger = logging.getLogger(__name__) @@ -586,7 +586,7 @@ def __iter__(self): Document in BoW format. 
""" - for indprev, indnow in izip(self.sparse.indptr, self.sparse.indptr[1:]): + for indprev, indnow in zip(self.sparse.indptr, self.sparse.indptr[1:]): yield list(zip(self.sparse.indices[indprev:indnow], self.sparse.data[indprev:indnow])) def __len__(self): @@ -1516,7 +1516,7 @@ def __iter__(self): # return implicit (empty) documents between previous id and new id # too, to keep consistent document numbering and corpus length - for previd in xrange(previd + 1, docid): + for previd in range(previd + 1, docid): yield previd, [] # from now on start adding fields to a new document, with a new id @@ -1531,7 +1531,7 @@ def __iter__(self): # return empty documents between the last explicit document and the number # of documents as specified in the header - for previd in xrange(previd + 1, self.num_docs): + for previd in range(previd + 1, self.num_docs): yield previd, [] def docbyoffset(self, offset): diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index a76f6ed046..dd0d7ffbd5 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -71,7 +71,7 @@ from gensim.corpora import MmCorpus from itertools import chain from scipy.special import gammaln # gamma function utils -from six.moves import xrange +from six.moves import range import six logger = logging.getLogger(__name__) @@ -482,7 +482,7 @@ def inference(self, chunk, author2doc, doc2author, rhot, collect_sstats=False, c phinorm = self.compute_phinorm(expElogthetad, expElogbetad) # Iterate between gamma and phi until convergence - for _ in xrange(self.iterations): + for _ in range(self.iterations): lastgamma = tilde_gamma.copy() # Update gamma. @@ -699,7 +699,7 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, # Just keep training on the already available data. # Assumes self.update() has been called before with input documents and corresponding authors. assert self.total_docs > 0, 'update() was called with no documents to train on.' - train_corpus_idx = [d for d in xrange(self.total_docs)] + train_corpus_idx = [d for d in range(self.total_docs)] num_input_authors = len(self.author2doc) else: if doc2author is None and author2doc is None: @@ -816,7 +816,7 @@ def update(self, corpus=None, author2doc=None, doc2author=None, chunksize=None, def rho(): return pow(offset + pass_ + (self.num_updates / chunksize), -decay) - for pass_ in xrange(passes): + for pass_ in range(passes): if self.dispatcher: logger.info('initializing %s workers', self.numworkers) self.dispatcher.reset(self.state) diff --git a/gensim/models/base_any2vec.py b/gensim/models/base_any2vec.py index d72301dccd..48461b23e4 100644 --- a/gensim/models/base_any2vec.py +++ b/gensim/models/base_any2vec.py @@ -36,7 +36,7 @@ import logging from timeit import default_timer import threading -from six.moves import xrange +from six.moves import range from six import itervalues, string_types from gensim import matutils from numpy import float32 as REAL, ones, random, dtype, zeros @@ -284,7 +284,7 @@ def _job_producer(self, data_iterator, job_queue, cur_epoch=0, total_examples=No ) # give the workers heads up that they can finish -- no more work! 
- for _ in xrange(self.workers): + for _ in range(self.workers): job_queue.put(None) logger.debug("job loop exiting, total %i jobs", job_no) @@ -472,7 +472,7 @@ def _train_epoch(self, data_iterable, cur_epoch=0, total_examples=None, total_wo threading.Thread( target=self._worker_loop, args=(job_queue, progress_queue,)) - for _ in xrange(self.workers) + for _ in range(self.workers) ] workers.append(threading.Thread( diff --git a/gensim/models/deprecated/doc2vec.py b/gensim/models/deprecated/doc2vec.py index b841866b93..76e4a7e2d4 100644 --- a/gensim/models/deprecated/doc2vec.py +++ b/gensim/models/deprecated/doc2vec.py @@ -81,7 +81,7 @@ from gensim.models.deprecated.old_saveload import SaveLoad from gensim import matutils # utility fnc for pickling, common scipy operations etc -from six.moves import xrange, zip +from six.moves import zip, range from six import string_types, integer_types logger = logging.getLogger(__name__) @@ -490,7 +490,7 @@ def reset_weights(self, model): self.doctag_syn0 = empty((length, model.vector_size), dtype=REAL) self.doctag_syn0_lockf = ones((length,), dtype=REAL) # zeros suppress learning - for i in xrange(length): + for i in range(length): # construct deterministic seed from index AND model seed seed = "%d %s" % (model.seed, self.index_to_doctag(i)) self.doctag_syn0[i] = model.seeded_vector(seed) @@ -510,7 +510,7 @@ def init_sims(self, replace=False): if getattr(self, 'doctag_syn0norm', None) is None or replace: logger.info("precomputing L2-norms of doc weight vectors") if replace: - for i in xrange(self.doctag_syn0.shape[0]): + for i in range(self.doctag_syn0.shape[0]): self.doctag_syn0[i, :] /= sqrt((self.doctag_syn0[i, :] ** 2).sum(-1)) self.doctag_syn0norm = self.doctag_syn0 else: diff --git a/gensim/models/deprecated/keyedvectors.py b/gensim/models/deprecated/keyedvectors.py index 4dbb3cd70c..8f5ccaf355 100644 --- a/gensim/models/deprecated/keyedvectors.py +++ b/gensim/models/deprecated/keyedvectors.py @@ -98,7 +98,7 @@ from gensim import utils, matutils # utility fnc for pickling, common scipy operations etc from gensim.corpora.dictionary import Dictionary from six import string_types, iteritems -from six.moves import xrange +from six.moves import range from scipy import stats @@ -239,7 +239,7 @@ def add_word(word, weights): if binary: binary_len = dtype(REAL).itemsize * vector_size - for _ in xrange(vocab_size): + for _ in range(vocab_size): # mixed text and binary: read text first, then binary word = [] while True: @@ -254,7 +254,7 @@ def add_word(word, weights): weights = fromstring(fin.read(binary_len), dtype=REAL) add_word(word, weights) else: - for line_no in xrange(vocab_size): + for line_no in range(vocab_size): line = fin.readline() if line == b'': raise EOFError("unexpected end of input; is count incorrect or file otherwise damaged?") @@ -1084,7 +1084,7 @@ def init_sims(self, replace=False): if getattr(self, 'syn0norm', None) is None or replace: logger.info("precomputing L2-norms of word weight vectors") if replace: - for i in xrange(self.syn0.shape[0]): + for i in range(self.syn0.shape[0]): self.syn0[i, :] /= sqrt((self.syn0[i, :] ** 2).sum(-1)) self.syn0norm = self.syn0 else: diff --git a/gensim/models/deprecated/word2vec.py b/gensim/models/deprecated/word2vec.py index d647bfb8f1..b8b04d4c10 100644 --- a/gensim/models/deprecated/word2vec.py +++ b/gensim/models/deprecated/word2vec.py @@ -157,7 +157,7 @@ from gensim import utils from gensim import matutils # utility fnc for pickling, common scipy operations etc from six import iteritems, 
itervalues, string_types -from six.moves import xrange +from six.moves import range from types import GeneratorType logger = logging.getLogger(__name__) @@ -658,10 +658,10 @@ def make_cum_table(self, power=0.75, domain=2**31 - 1): self.cum_table = zeros(vocab_size, dtype=uint32) # compute sum of all power (Z in paper) train_words_pow = 0.0 - for word_index in xrange(vocab_size): + for word_index in range(vocab_size): train_words_pow += self.wv.vocab[self.wv.index2word[word_index]].count**power cumulative = 0.0 - for word_index in xrange(vocab_size): + for word_index in range(vocab_size): cumulative += self.wv.vocab[self.wv.index2word[word_index]].count**power self.cum_table[word_index] = round(cumulative / train_words_pow * domain) if len(self.cum_table) > 0: @@ -678,7 +678,7 @@ def create_binary_tree(self): # build the huffman tree heap = list(itervalues(self.wv.vocab)) heapq.heapify(heap) - for i in xrange(len(self.wv.vocab) - 1): + for i in range(len(self.wv.vocab) - 1): min1, min2 = heapq.heappop(heap), heapq.heappop(heap) heapq.heappush( heap, Vocab(count=min1.count + min2.count, index=i + len(self.wv.vocab), left=min1, right=min2) @@ -1135,7 +1135,7 @@ def job_producer(): ) # give the workers heads up that they can finish -- no more work! - for _ in xrange(self.workers): + for _ in range(self.workers): job_queue.put(None) logger.debug("job loop exiting, total %i jobs", job_no) @@ -1143,7 +1143,7 @@ def job_producer(): job_queue = Queue(maxsize=queue_factor * self.workers) progress_queue = Queue(maxsize=(queue_factor + 1) * self.workers) - workers = [threading.Thread(target=worker_loop) for _ in xrange(self.workers)] + workers = [threading.Thread(target=worker_loop) for _ in range(self.workers)] unfinished_worker_count = len(workers) workers.append(threading.Thread(target=job_producer)) @@ -1280,7 +1280,7 @@ def worker_loop(): job_queue = Queue(maxsize=queue_factor * self.workers) progress_queue = Queue(maxsize=(queue_factor + 1) * self.workers) - workers = [threading.Thread(target=worker_loop) for _ in xrange(self.workers)] + workers = [threading.Thread(target=worker_loop) for _ in range(self.workers)] for thread in workers: thread.daemon = True # make interrupting the process with ctrl+c easier thread.start() @@ -1307,7 +1307,7 @@ def worker_loop(): job_queue.put(items) except StopIteration: logger.info("reached end of input; waiting to finish %i outstanding jobs", job_no - done_jobs + 1) - for _ in xrange(self.workers): + for _ in range(self.workers): job_queue.put(None) # give the workers heads up that they can finish -- no more work! 
push_done = True try: @@ -1354,7 +1354,7 @@ def update_weights(self): newsyn0 = empty((gained_vocab, self.vector_size), dtype=REAL) # randomize the remaining words - for i in xrange(len(self.wv.syn0), len(self.wv.vocab)): + for i in range(len(self.wv.syn0), len(self.wv.vocab)): # construct deterministic seed from word AND seed argument newsyn0[i - len(self.wv.syn0)] = self.seeded_vector(self.wv.index2word[i] + str(self.seed)) @@ -1381,7 +1381,7 @@ def reset_weights(self): logger.info("resetting layer weights") self.wv.syn0 = empty((len(self.wv.vocab), self.vector_size), dtype=REAL) # randomize weights vector by vector, rather than materializing a huge random matrix in RAM at once - for i in xrange(len(self.wv.vocab)): + for i in range(len(self.wv.vocab)): # construct deterministic seed from word AND seed argument self.wv.syn0[i] = self.seeded_vector(self.wv.index2word[i] + str(self.seed)) if self.hs: @@ -1421,7 +1421,7 @@ def intersect_word2vec_format(self, fname, lockf=0.0, binary=False, encoding='ut # TOCONSIDER: maybe mismatched vectors still useful enough to merge (truncating/padding)? if binary: binary_len = dtype(REAL).itemsize * vector_size - for _ in xrange(vocab_size): + for _ in range(vocab_size): # mixed text and binary: read text first, then binary word = [] while True: diff --git a/gensim/models/doc2vec.py b/gensim/models/doc2vec.py index d9b905cb3b..a4fb34d1fa 100644 --- a/gensim/models/doc2vec.py +++ b/gensim/models/doc2vec.py @@ -81,7 +81,7 @@ from gensim import utils, matutils # utility fnc for pickling, common scipy operations etc from gensim.models.word2vec import Word2VecKeyedVectors, Word2VecVocab, Word2VecTrainables, train_cbow_pair,\ train_sg_pair, train_batch_sg -from six.moves import xrange +from six.moves import range from six import string_types, integer_types, itervalues from gensim.models.base_any2vec import BaseWordEmbeddingsModel from gensim.models.keyedvectors import Doc2VecKeyedVectors @@ -1450,7 +1450,7 @@ def reset_doc_weights(self, docvecs): docvecs.vectors_docs = empty((length, docvecs.vector_size), dtype=REAL) self.vectors_docs_lockf = ones((length,), dtype=REAL) # zeros suppress learning - for i in xrange(length): + for i in range(length): # construct deterministic seed from index AND model seed seed = "%d %s" % ( self.seed, Doc2VecKeyedVectors._index_to_doctag(i, docvecs.offset2doctag, docvecs.max_rawint)) diff --git a/gensim/models/hdpmodel.py b/gensim/models/hdpmodel.py index 47eb997921..dde0126209 100755 --- a/gensim/models/hdpmodel.py +++ b/gensim/models/hdpmodel.py @@ -57,7 +57,7 @@ import numpy as np from scipy.special import gammaln, psi # gamma function utils -from six.moves import xrange +from six.moves import zip, range from gensim import interfaces, utils, matutils from gensim.matutils import dirichlet_expectation, mean_absolute_difference @@ -123,7 +123,7 @@ def lda_e_step(doc_word_ids, doc_word_counts, alpha, beta, max_iter=100): betad = beta[:, doc_word_ids] phinorm = np.dot(expElogtheta, betad) + 1e-100 counts = np.array(doc_word_counts) - for _ in xrange(max_iter): + for _ in range(max_iter): lastgamma = gamma gamma = alpha + expElogtheta * np.dot(counts / phinorm, betad.T) @@ -740,7 +740,7 @@ def update_expectations(self): print out the topics we've learned we'll get the correct behavior. 
""" - for w in xrange(self.m_W): + for w in range(self.m_W): self.m_lambda[:, w] *= np.exp(self.m_r[-1] - self.m_r[self.m_timestamp[w]]) self.m_Elogbeta = \ psi(self.m_eta + self.m_lambda) - psi(self.m_W * self.m_eta + self.m_lambda_sum[:, np.newaxis]) @@ -889,7 +889,7 @@ def hdp_to_lda(self): sticks = self.m_var_sticks[0] / (self.m_var_sticks[0] + self.m_var_sticks[1]) alpha = np.zeros(self.m_T) left = 1.0 - for i in xrange(0, self.m_T - 1): + for i in range(0, self.m_T - 1): alpha[i] = sticks[i] * left left = left - alpha[i] alpha[self.m_T - 1] = left @@ -1045,11 +1045,11 @@ def show_topics(self, num_topics=10, num_words=10, log=False, formatted=True): num_topics = max(num_topics, 0) num_topics = min(num_topics, len(self.data)) - for k in xrange(num_topics): + for k in range(num_topics): lambdak = self.data[k, :] lambdak = lambdak / lambdak.sum() - temp = zip(lambdak, xrange(len(lambdak))) + temp = zip(lambdak, range(len(lambdak))) temp = sorted(temp, key=lambda x: x[0], reverse=True) topic_terms = self.show_topic_terms(temp, num_words) @@ -1132,7 +1132,7 @@ def show_topic(self, topic_id, topn=20, log=False, formatted=False, num_words=No lambdak = self.data[topic_id, :] lambdak = lambdak / lambdak.sum() - temp = zip(lambdak, xrange(len(lambdak))) + temp = zip(lambdak, range(len(lambdak))) temp = sorted(temp, key=lambda x: x[0], reverse=True) topic_terms = self.show_topic_terms(temp, topn) diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index ac41cae7da..0b2be1d732 100644 --- a/gensim/models/keyedvectors.py +++ b/gensim/models/keyedvectors.py @@ -182,7 +182,7 @@ from gensim import utils, matutils # utility fnc for pickling, common scipy operations etc from gensim.corpora.dictionary import Dictionary from six import string_types, integer_types -from six.moves import xrange, zip +from six.moves import zip, range from scipy import sparse, stats from gensim.utils import deprecated from gensim.models.utils_any2vec import _save_word2vec_format, _load_word2vec_format, _compute_ngrams, _ft_hash @@ -1378,7 +1378,7 @@ def init_sims(self, replace=False): if getattr(self, 'vectors_norm', None) is None or replace: logger.info("precomputing L2-norms of word weight vectors") if replace: - for i in xrange(self.vectors.shape[0]): + for i in range(self.vectors.shape[0]): self.vectors[i, :] /= sqrt((self.vectors[i, :] ** 2).sum(-1)) self.vectors_norm = self.vectors else: @@ -1595,7 +1595,7 @@ def init_sims(self, replace=False): if getattr(self, 'vectors_docs_norm', None) is None or replace: logger.info("precomputing L2-norms of doc weight vectors") if replace: - for i in xrange(self.vectors_docs.shape[0]): + for i in range(self.vectors_docs.shape[0]): self.vectors_docs[i, :] /= sqrt((self.vectors_docs[i, :] ** 2).sum(-1)) self.vectors_docs_norm = self.vectors_docs else: diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index 6d5cfa209f..74ff567d20 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -92,7 +92,7 @@ import six from scipy.special import gammaln, psi # gamma function utils from scipy.special import polygamma -from six.moves import xrange +from six.moves import range from collections import defaultdict from gensim import interfaces, utils, matutils @@ -567,18 +567,17 @@ def init_dir_prior(self, prior, name): if isinstance(prior, six.string_types): if prior == 'symmetric': logger.info("using symmetric %s at %s", name, 1.0 / self.num_topics) - init_prior = np.fromiter((1.0 / self.num_topics for i in xrange(prior_shape)), - 
dtype=self.dtype, count=prior_shape) + init_prior = np.fromiter((1.0 / self.num_topics for i in range(prior_shape)), + dtype=self.dtype, count=prior_shape) elif prior == 'asymmetric': - init_prior = \ - np.fromiter((1.0 / (i + np.sqrt(prior_shape)) for i in xrange(prior_shape)), - dtype=self.dtype, count=prior_shape) + init_prior = np.fromiter((1.0 / (i + np.sqrt(prior_shape)) for i in range(prior_shape)), + dtype=self.dtype, count=prior_shape) init_prior /= init_prior.sum() logger.info("using asymmetric %s %s", name, list(init_prior)) elif prior == 'auto': is_auto = True - init_prior = np.fromiter((1.0 / self.num_topics for i in xrange(prior_shape)), - dtype=self.dtype, count=prior_shape) + init_prior = np.fromiter((1.0 / self.num_topics for i in range(prior_shape)), + dtype=self.dtype, count=prior_shape) if name == 'alpha': logger.info("using autotuned %s, starting with %s", name, list(init_prior)) else: @@ -588,7 +587,7 @@ def init_dir_prior(self, prior, name): elif isinstance(prior, np.ndarray): init_prior = prior.astype(self.dtype, copy=False) elif isinstance(prior, (np.number, numbers.Real)): - init_prior = np.fromiter((prior for i in xrange(prior_shape)), dtype=self.dtype) + init_prior = np.fromiter((prior for i in range(prior_shape)), dtype=self.dtype) else: raise ValueError("%s must be either a np array of scalars, list of scalars, or scalar" % name) @@ -688,7 +687,7 @@ def inference(self, chunk, collect_sstats=False): phinorm = np.dot(expElogthetad, expElogbetad) + eps # Iterate between gamma and phi until convergence - for _ in xrange(self.iterations): + for _ in range(self.iterations): lastgamma = gammad # We represent phi implicitly to save memory and time. # Substituting the value of the optimal phi back into @@ -951,7 +950,7 @@ def rho(): # initialize metrics list to store metric values after every epoch self.metrics = defaultdict(list) - for pass_ in xrange(passes): + for pass_ in range(passes): if self.dispatcher: logger.info('initializing %s workers', self.numworkers) self.dispatcher.reset(self.state) @@ -1457,8 +1456,8 @@ def diff(self, other, distance="kullback_leibler", num_words=100, t1_size, t2_size = d1.shape[0], d2.shape[0] annotation_terms = None - fst_topics = [{w for (w, _) in self.show_topic(topic, topn=num_words)} for topic in xrange(t1_size)] - snd_topics = [{w for (w, _) in other.show_topic(topic, topn=num_words)} for topic in xrange(t2_size)] + fst_topics = [{w for (w, _) in self.show_topic(topic, topn=num_words)} for topic in range(t1_size)] + snd_topics = [{w for (w, _) in other.show_topic(topic, topn=num_words)} for topic in range(t2_size)] if distance == "jaccard": d1, d2 = fst_topics, snd_topics diff --git a/gensim/models/ldamulticore.py b/gensim/models/ldamulticore.py index d154e367df..87108d039c 100644 --- a/gensim/models/ldamulticore.py +++ b/gensim/models/ldamulticore.py @@ -90,7 +90,7 @@ from gensim.models.ldamodel import LdaModel, LdaState import six -from six.moves import queue, xrange +from six.moves import queue, range from multiprocessing import Pool, Queue, cpu_count logger = logging.getLogger(__name__) @@ -258,7 +258,7 @@ def rho(): logger.info("training LDA model using %i processes", self.workers) pool = Pool(self.workers, worker_e_step, (job_queue, result_queue,)) - for pass_ in xrange(self.passes): + for pass_ in range(self.passes): queue_size, reallen = [0], 0 other = LdaState(self.eta, self.state.sstats.shape) diff --git a/gensim/models/lsimodel.py b/gensim/models/lsimodel.py index 8547f0f153..95cdb6302a 100644 --- 
a/gensim/models/lsimodel.py +++ b/gensim/models/lsimodel.py @@ -67,7 +67,7 @@ import scipy.sparse from scipy.sparse import sparsetools from six import iterkeys -from six.moves import xrange +from six.moves import range from gensim import interfaces, matutils, utils from gensim.models import basemodel @@ -314,7 +314,7 @@ def merge(self, other, decay=1.0): # make each column of U start with a non-negative number (to force canonical decomposition) if self.u.shape[0] > 0: - for i in xrange(self.u.shape[1]): + for i in range(self.u.shape[1]): if self.u[0, i] < 0.0: self.u[:, i] *= -1.0 @@ -693,7 +693,7 @@ def show_topics(self, num_topics=-1, num_words=10, log=False, formatted=True): shown = [] if num_topics < 0: num_topics = self.num_topics - for i in xrange(min(num_topics, self.num_topics)): + for i in range(min(num_topics, self.num_topics)): if i < len(self.projection.s): if formatted: topic = self.print_topic(i, topn=num_words) @@ -937,7 +937,7 @@ def stochastic_svd(corpus, rank, num_terms, chunksize=20000, extra_dims=None, q, _ = matutils.qr_destroy(y) # orthonormalize the range logger.debug("running %i power iterations", power_iters) - for _ in xrange(power_iters): + for _ in range(power_iters): q = corpus.T * q q = [corpus * q] q, _ = matutils.qr_destroy(q) # orthonormalize the range after each power iteration step @@ -963,7 +963,7 @@ def stochastic_svd(corpus, rank, num_terms, chunksize=20000, extra_dims=None, y = [y] q, _ = matutils.qr_destroy(y) # orthonormalize the range - for power_iter in xrange(power_iters): + for power_iter in range(power_iters): logger.info("running power iteration #%i", power_iter + 1) yold = q.copy() q[:] = 0.0 diff --git a/gensim/models/poincare.py b/gensim/models/poincare.py index 1c4b089077..060ff63e20 100644 --- a/gensim/models/poincare.py +++ b/gensim/models/poincare.py @@ -52,6 +52,7 @@ from numpy import random as np_random from scipy.stats import spearmanr from six import string_types +from six.moves import zip, range from smart_open import smart_open from gensim import utils, matutils diff --git a/gensim/models/utils_any2vec.py b/gensim/models/utils_any2vec.py index 199b8c034f..fd047f93eb 100644 --- a/gensim/models/utils_any2vec.py +++ b/gensim/models/utils_any2vec.py @@ -12,7 +12,7 @@ from numpy import zeros, dtype, float32 as REAL, ascontiguousarray, fromstring -from six.moves import xrange +from six.moves import range from six import iteritems logger = logging.getLogger(__name__) @@ -197,7 +197,7 @@ def add_word(word, weights): if binary: binary_len = dtype(REAL).itemsize * vector_size - for _ in xrange(vocab_size): + for _ in range(vocab_size): # mixed text and binary: read text first, then binary word = [] while True: @@ -214,7 +214,7 @@ def add_word(word, weights): weights = fromstring(fin.read(binary_len), dtype=REAL).astype(datatype) add_word(word, weights) else: - for line_no in xrange(vocab_size): + for line_no in range(vocab_size): line = fin.readline() if line == b'': raise EOFError("unexpected end of input; is count incorrect or file otherwise damaged?") diff --git a/gensim/models/word2vec.py b/gensim/models/word2vec.py index a961d6f004..15dc777f42 100755 --- a/gensim/models/word2vec.py +++ b/gensim/models/word2vec.py @@ -147,7 +147,7 @@ from gensim import utils, matutils # utility fnc for pickling, common scipy operations etc from gensim.utils import deprecated from six import iteritems, itervalues, string_types -from six.moves import xrange +from six.moves import range logger = logging.getLogger(__name__) @@ -986,7 +986,7 @@ def 
worker_loop(): job_queue = Queue(maxsize=queue_factor * self.workers) progress_queue = Queue(maxsize=(queue_factor + 1) * self.workers) - workers = [threading.Thread(target=worker_loop) for _ in xrange(self.workers)] + workers = [threading.Thread(target=worker_loop) for _ in range(self.workers)] for thread in workers: thread.daemon = True # make interrupting the process with ctrl+c easier thread.start() @@ -1013,7 +1013,7 @@ def worker_loop(): job_queue.put(items) except StopIteration: logger.info("reached end of input; waiting to finish %i outstanding jobs", job_no - done_jobs + 1) - for _ in xrange(self.workers): + for _ in range(self.workers): job_queue.put(None) # give the workers heads up that they can finish -- no more work! push_done = True try: @@ -1083,7 +1083,7 @@ def intersect_word2vec_format(self, fname, lockf=0.0, binary=False, encoding='ut # TOCONSIDER: maybe mismatched vectors still useful enough to merge (truncating/padding)? if binary: binary_len = dtype(REAL).itemsize * vector_size - for _ in xrange(vocab_size): + for _ in range(vocab_size): # mixed text and binary: read text first, then binary word = [] while True: @@ -1779,7 +1779,7 @@ def create_binary_tree(self, wv): # build the huffman tree heap = list(itervalues(wv.vocab)) heapq.heapify(heap) - for i in xrange(len(wv.vocab) - 1): + for i in range(len(wv.vocab) - 1): min1, min2 = heapq.heappop(heap), heapq.heappop(heap) heapq.heappush( heap, Vocab(count=min1.count + min2.count, index=i + len(wv.vocab), left=min1, right=min2) @@ -1817,10 +1817,10 @@ def make_cum_table(self, wv, domain=2**31 - 1): self.cum_table = zeros(vocab_size, dtype=uint32) # compute sum of all power (Z in paper) train_words_pow = 0.0 - for word_index in xrange(vocab_size): + for word_index in range(vocab_size): train_words_pow += wv.vocab[wv.index2word[word_index]].count**self.ns_exponent cumulative = 0.0 - for word_index in xrange(vocab_size): + for word_index in range(vocab_size): cumulative += wv.vocab[wv.index2word[word_index]].count**self.ns_exponent self.cum_table[word_index] = round(cumulative / train_words_pow * domain) if len(self.cum_table) > 0: @@ -1853,7 +1853,7 @@ def reset_weights(self, hs, negative, wv): logger.info("resetting layer weights") wv.vectors = empty((len(wv.vocab), wv.vector_size), dtype=REAL) # randomize weights vector by vector, rather than materializing a huge random matrix in RAM at once - for i in xrange(len(wv.vocab)): + for i in range(len(wv.vocab)): # construct deterministic seed from word AND seed argument wv.vectors[i] = self.seeded_vector(wv.index2word[i] + str(self.seed), wv.vector_size) if hs: @@ -1871,7 +1871,7 @@ def update_weights(self, hs, negative, wv): newvectors = empty((gained_vocab, wv.vector_size), dtype=REAL) # randomize the remaining words - for i in xrange(len(wv.vectors), len(wv.vocab)): + for i in range(len(wv.vectors), len(wv.vocab)): # construct deterministic seed from word AND seed argument newvectors[i - len(wv.vectors)] = self.seeded_vector(wv.index2word[i] + str(self.seed), wv.vector_size) diff --git a/gensim/parsing/porter.py b/gensim/parsing/porter.py index 7d298d7486..b70377855d 100644 --- a/gensim/parsing/porter.py +++ b/gensim/parsing/porter.py @@ -29,7 +29,7 @@ """ -from six.moves import xrange +from six.moves import range class PorterStemmer(object): @@ -160,7 +160,7 @@ def _vowelinstem(self): True """ - return not all(self._cons(i) for i in xrange(self.j + 1)) + return not all(self._cons(i) for i in range(self.j + 1)) def _doublec(self, j): """Check if b[j - 1: j + 1] contain a 
double consonant letter. diff --git a/gensim/similarities/docsim.py b/gensim/similarities/docsim.py index 5efada4608..1b44e300d2 100755 --- a/gensim/similarities/docsim.py +++ b/gensim/similarities/docsim.py @@ -76,7 +76,7 @@ import scipy.sparse from gensim import interfaces, utils, matutils -from six.moves import map as imap, xrange, zip as izip +from six.moves import map, range, zip logger = logging.getLogger(__name__) @@ -467,7 +467,7 @@ def query_shards(self, query): Query results. """ - args = izip([query] * len(self.shards), self.shards) + args = zip([query] * len(self.shards), self.shards) if PARALLEL_SHARDS and PARALLEL_SHARDS > 1: logger.debug("spawning %i query processes", PARALLEL_SHARDS) pool = multiprocessing.Pool(PARALLEL_SHARDS) @@ -475,7 +475,7 @@ def query_shards(self, query): else: # serial processing, one shard after another pool = None - result = imap(query_shard, args) + result = map(query_shard, args) return pool, result def __getitem__(self, query): @@ -547,7 +547,7 @@ def convert(shard_no, doc): shard_result = [convert(shard_no, doc) for doc in result] results.append(shard_result) result = [] - for parts in izip(*results): + for parts in zip(*results): merged = heapq.nlargest(self.num_best, itertools.chain(*parts), key=lambda item: item[1]) result.append(merged) if pool: @@ -674,7 +674,7 @@ def iter_chunks(self, chunksize=None): for shard in self.shards: query = shard.get_index().index - for chunk_start in xrange(0, query.shape[0], chunksize): + for chunk_start in range(0, query.shape[0], chunksize): # scipy.sparse doesn't allow slicing beyond real size of the matrix # (unlike numpy). so, clip the end of the chunk explicitly to make # scipy.sparse happy diff --git a/gensim/summarization/bm25.py b/gensim/summarization/bm25.py index 7385078304..d27043b1cb 100644 --- a/gensim/summarization/bm25.py +++ b/gensim/summarization/bm25.py @@ -39,7 +39,7 @@ import math from six import iteritems -from six.moves import xrange +from six.moves import range from functools import partial from multiprocessing import Pool from ..utils import effective_n_jobs @@ -156,7 +156,7 @@ def get_scores(self, document, average_idf): """ scores = [] - for index in xrange(self.corpus_size): + for index in range(self.corpus_size): score = self.get_score(document, index, average_idf) scores.append(score) return scores @@ -182,7 +182,7 @@ def _get_scores(bm25, document, average_idf): """ scores = [] - for index in xrange(bm25.corpus_size): + for index in range(bm25.corpus_size): score = bm25.get_score(document, index, average_idf) scores.append(score) return scores diff --git a/gensim/summarization/keywords.py b/gensim/summarization/keywords.py index 1d963de71d..80f293517c 100644 --- a/gensim/summarization/keywords.py +++ b/gensim/summarization/keywords.py @@ -42,7 +42,7 @@ from gensim.utils import to_unicode from itertools import combinations as _combinations from six.moves.queue import Queue as _Queue -from six.moves import xrange +from six.moves import range from six import iteritems @@ -235,7 +235,7 @@ def _process_text(graph, tokens, split_text): """ queue = _init_queue(split_text) - for i in xrange(WINDOW_SIZE, len(split_text)): + for i in range(WINDOW_SIZE, len(split_text)): word = split_text[i] _process_word(graph, tokens, queue, word) _update_queue(queue, word) @@ -256,7 +256,7 @@ def _queue_iterator(queue): """ iterations = queue.qsize() - for _ in xrange(iterations): + for _ in range(iterations): var = queue.get() yield var queue.put(var) @@ -391,13 +391,13 @@ def 
_get_combined_keywords(_keywords, split_text): result = [] _keywords = _keywords.copy() len_text = len(split_text) - for i in xrange(len_text): + for i in range(len_text): word = _strip_word(split_text[i]) if word in _keywords: combined_word = [word] if i + 1 == len_text: result.append(word) # appends last word if keyword and doesn't iterate - for j in xrange(i + 1, len_text): + for j in range(i + 1, len_text): other_word = _strip_word(split_text[j]) if other_word in _keywords and other_word == split_text[j] and other_word not in combined_word: combined_word.append(other_word) diff --git a/gensim/summarization/pagerank_weighted.py b/gensim/summarization/pagerank_weighted.py index e49d43fa6c..f961d6b729 100644 --- a/gensim/summarization/pagerank_weighted.py +++ b/gensim/summarization/pagerank_weighted.py @@ -41,7 +41,7 @@ from scipy.linalg import eig from scipy.sparse import csr_matrix from scipy.sparse.linalg import eigs -from six.moves import xrange +from six.moves import range def pagerank_weighted(graph, damping=0.85): @@ -91,10 +91,10 @@ def build_adjacency_matrix(graph): nodes = graph.nodes() length = len(nodes) - for i in xrange(length): + for i in range(length): current_node = nodes[i] neighbors_sum = sum(graph.edge_weight((current_node, neighbor)) for neighbor in graph.neighbors(current_node)) - for j in xrange(length): + for j in range(length): edge_weight = float(graph.edge_weight((current_node, nodes[j]))) if i != j and edge_weight != 0.0: row.append(i) diff --git a/gensim/summarization/summarizer.py b/gensim/summarization/summarizer.py index d7250363a0..3c81d99d5b 100644 --- a/gensim/summarization/summarizer.py +++ b/gensim/summarization/summarizer.py @@ -61,7 +61,7 @@ from gensim.summarization.bm25 import get_bm25_weights as _bm25_weights from gensim.corpora import Dictionary from math import log10 as _log10 -from six.moves import xrange +from six.moves import range INPUT_MIN_LENGTH = 10 @@ -84,8 +84,8 @@ def _set_graph_edge_weights(graph): documents = graph.nodes() weights = _bm25_weights(documents) - for i in xrange(len(documents)): - for j in xrange(len(documents)): + for i in range(len(documents)): + for j in range(len(documents)): if i == j or weights[i][j] < WEIGHT_THRESHOLD: continue @@ -117,8 +117,8 @@ def _create_valid_graph(graph): """ nodes = graph.nodes() - for i in xrange(len(nodes)): - for j in xrange(len(nodes)): + for i in range(len(nodes)): + for j in range(len(nodes)): if i == j: continue diff --git a/gensim/summarization/textcleaner.py b/gensim/summarization/textcleaner.py index 9e0dfdd971..fbbf486cf4 100644 --- a/gensim/summarization/textcleaner.py +++ b/gensim/summarization/textcleaner.py @@ -23,7 +23,7 @@ from gensim.summarization.syntactic_unit import SyntacticUnit from gensim.parsing.preprocessing import preprocess_documents from gensim.utils import tokenize -from six.moves import xrange +from six.moves import range import re import logging @@ -201,7 +201,7 @@ def merge_syntactic_units(original_units, filtered_units, tags=None): """ units = [] - for i in xrange(len(original_units)): + for i in range(len(original_units)): if filtered_units[i] == '': continue diff --git a/gensim/test/test_doc2vec.py b/gensim/test/test_doc2vec.py index a320bd72a1..d9af7070d3 100644 --- a/gensim/test/test_doc2vec.py +++ b/gensim/test/test_doc2vec.py @@ -16,7 +16,7 @@ import os import six -from six.moves import zip as izip +from six.moves import zip, range from collections import namedtuple from testfixtures import log_capture @@ -776,7 +776,7 @@ def 
read_su_sentiment_rotten_tomatoes(dirname, lowercase=True): with open(os.path.join(dirname, 'datasetSplit.txt'), 'r') as splits: next(sentences) # legend next(splits) # legend - for sentence_line, split_line in izip(sentences, splits): + for sentence_line, split_line in zip(sentences, splits): (id, text) = sentence_line.split('\t') id = int(id) text = text.rstrip() diff --git a/gensim/test/test_sharded_corpus.py b/gensim/test/test_sharded_corpus.py index a7fcbe0aa0..3a56f240e2 100644 --- a/gensim/test/test_sharded_corpus.py +++ b/gensim/test/test_sharded_corpus.py @@ -13,7 +13,7 @@ from gensim.utils import is_corpus from gensim.corpora.sharded_corpus import ShardedCorpus -from gensim.utils import mock_data, xrange +from gensim.utils import mock_data, range ############################################################################# @@ -25,7 +25,7 @@ class TestShardedCorpus(unittest.TestCase): # cls.dim = 1000 # cls.data = mock_data(dim=cls.dim) # - # random_string = ''.join(random.choice('1234567890') for _ in xrange(8)) + # random_string = ''.join(random.choice('1234567890') for _ in range(8)) # # cls.tmp_dir = 'test-temp-' + random_string # os.makedirs(cls.tmp_dir) @@ -39,7 +39,7 @@ class TestShardedCorpus(unittest.TestCase): def setUp(self): self.dim = 1000 - self.random_string = ''.join(random.choice('1234567890') for _ in xrange(8)) + self.random_string = ''.join(random.choice('1234567890') for _ in range(8)) self.tmp_dir = 'test-temp-' + self.random_string os.makedirs(self.tmp_dir) @@ -80,7 +80,7 @@ def test_getitem(self): self.assertEqual((7, self.corpus.dim), item.shape) self.assertEqual(self.corpus.current_shard_n, 2) - for i in xrange(220, 227): + for i in range(220, 227): self.assertTrue(np.array_equal(self.corpus[i], item[i - 220])) def test_sparse_serialization(self): @@ -229,11 +229,11 @@ def test_getitem_dense2gensim(self): # From generators to lists self.assertEqual(len(ilist), len(dslice)) - for i in xrange(len(ilist)): + for i in range(len(ilist)): self.assertEqual(len(ilist[i]), len(dslice[i]), "Row %d: dims %d/%d" % (i, len(ilist[i]), len(dslice[i]))) - for j in xrange(len(ilist[i])): + for j in range(len(ilist[i])): self.assertEqual(ilist[i][j], dslice[i][j], "ilist[%d][%d] = %s ,dslice[%d][%d] = %s" % ( i, j, str(ilist[i][j]), i, j, @@ -252,7 +252,7 @@ def test_resize(self): dataset.resize_shards(250) self.assertEqual(4, dataset.n_shards) - for n in xrange(dataset.n_shards): + for n in range(dataset.n_shards): fname = dataset._shard_name(n) self.assertTrue(os.path.isfile(fname)) diff --git a/gensim/utils.py b/gensim/utils.py index 3204664476..e1540af562 100644 --- a/gensim/utils.py +++ b/gensim/utils.py @@ -40,7 +40,7 @@ import scipy.sparse from six import iterkeys, iteritems, itervalues, u, string_types, unichr -from six.moves import xrange +from six.moves import range from smart_open import smart_open @@ -773,7 +773,7 @@ def iteritems(self): Pair of (id, token). 
""" - for i in xrange(self.num_terms): + for i in range(self.num_terms): yield i, str(i) def keys(self): @@ -981,7 +981,7 @@ def __init__(self, corpus, n): self.n = n def __iter__(self): - for _ in xrange(self.n): + for _ in range(self.n): for document in self.corpus: yield document @@ -1719,7 +1719,7 @@ def mock_data_row(dim=1000, prob_nnz=0.5, lam=1.0): """ nnz = np.random.uniform(size=(dim,)) - return [(i, float(np.random.poisson(lam=lam) + 1.0)) for i in xrange(dim) if nnz[i] < prob_nnz] + return [(i, float(np.random.poisson(lam=lam) + 1.0)) for i in range(dim) if nnz[i] < prob_nnz] def mock_data(n_items=1000, dim=1000, prob_nnz=0.5, lam=1.0): @@ -1743,7 +1743,7 @@ def mock_data(n_items=1000, dim=1000, prob_nnz=0.5, lam=1.0): Gensim-style corpus. """ - return [mock_data_row(dim=dim, prob_nnz=prob_nnz, lam=lam) for _ in xrange(n_items)] + return [mock_data_row(dim=dim, prob_nnz=prob_nnz, lam=lam) for _ in range(n_items)] def prune_vocab(vocab, min_reduce, trim_rule=None): From ce403d38373be6c3254417f5fac6c41a38fd5dd1 Mon Sep 17 00:00:00 2001 From: Rupal Sharma <39291744+rsdel2007@users.noreply.github.com> Date: Wed, 19 Dec 2018 09:58:07 +0530 Subject: [PATCH 54/66] Fix broken link in `tutorials.md` (#2302) Fix broken link in `tutorials.md` --- tutorials.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tutorials.md b/tutorials.md index 648ce5f61c..062fe5b1be 100644 --- a/tutorials.md +++ b/tutorials.md @@ -39,7 +39,7 @@ * [Colouring words by topic in a document, print words in a topics](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/topic_methods.ipynb) * [Topic Coherence, a metric that correlates that human judgement on topic quality.](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/topic_coherence_tutorial.ipynb) * [Compare topics and documents using Jaccard, Kullback-Leibler and Hellinger similarities](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/distance_metrics.ipynb) -* [America's Next Topic Model slides](https://speakerdeck.com/tmylk/americas-next-topic-model?slide=6) -- How to choose your next topic model, presented at Pydata London 5 July 2016 by Lev Konstantinovsky +* [America's Next Topic Model slides](https://speakerdeck.com/tmylk/americas-next-topic-model-at-pydata-berlin-august-2016?slide=7) -- How to choose your next topic model, presented at Pydata Berlin 10 August 2016 by Lev Konstantinovsky * [Classification of News Articles using Topic Modeling](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/gensim_news_classification.ipynb) * [LDA: pre-processing and training tips](https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/lda_training_tips.ipynb) From 9c5215afe3bc4edba7dde565b6f2db982bba5113 Mon Sep 17 00:00:00 2001 From: Ivan Menshikh Date: Mon, 7 Jan 2019 16:07:48 +0300 Subject: [PATCH 55/66] Fix gensim build (docs & pyemd issues) (#2318) * disable-pyemd * revert pyemd to setup.py (it still works on linux) * extend 'catch' on import * correct test skipping * fix flake8 * fix docs building * correct skipping if pyemd not available * fix typo * upd * pin sphinx * revert sphinx pin * disable -W for sphinx (REVERT ME), issue not reproduced locally, only here * more verbosity * MOAR verbosity * try to use different path * build binaries before docs * pin previous version of programoutput (avoid bug from 0.13) * revert Makefile * fix * disable programoutput sphinx plugin * revert pinning * one more attempt * cleanup * cleanup[2] * fix --- 
.circleci/config.yml | 2 +- docs/src/conf.py | 2 +- gensim/models/deprecated/keyedvectors.py | 2 +- gensim/models/keyedvectors.py | 2 +- gensim/test/test_fasttext.py | 8 ++++++ gensim/test/test_fasttext_wrapper.py | 9 ++++++ gensim/test/test_similarities.py | 36 ++++++++++-------------- gensim/test/test_word2vec.py | 15 ++++------ tox.ini | 8 ++++++ 9 files changed, 49 insertions(+), 35 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d2125123c3..fd4dc7f12f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -30,7 +30,7 @@ jobs: name: Build documentation command: | source venv/bin/activate - tox -e docs -vv + tox -e compile,docs -vv - store_artifacts: path: docs/src/_build diff --git a/docs/src/conf.py b/docs/src/conf.py index 3ba4ae06b2..da7d0a1994 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -17,7 +17,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath('.')) +sys.path.insert(0, os.path.abspath('../..')) # -- General configuration ----------------------------------------------------- diff --git a/gensim/models/deprecated/keyedvectors.py b/gensim/models/deprecated/keyedvectors.py index 8f5ccaf355..5ead121e48 100644 --- a/gensim/models/deprecated/keyedvectors.py +++ b/gensim/models/deprecated/keyedvectors.py @@ -86,7 +86,7 @@ try: from pyemd import emd PYEMD_EXT = True -except ImportError: +except (ImportError, ValueError): PYEMD_EXT = False from numpy import dot, zeros, dtype, float32 as REAL,\ diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index 0b2be1d732..7911fe5805 100644 --- a/gensim/models/keyedvectors.py +++ b/gensim/models/keyedvectors.py @@ -172,7 +172,7 @@ try: from pyemd import emd PYEMD_EXT = True -except ImportError: +except (ImportError, ValueError): PYEMD_EXT = False from numpy import dot, float32 as REAL, empty, memmap as np_memmap, \ diff --git a/gensim/test/test_fasttext.py b/gensim/test/test_fasttext.py index c9935431e4..1bb3c80e4b 100644 --- a/gensim/test/test_fasttext.py +++ b/gensim/test/test_fasttext.py @@ -18,6 +18,13 @@ from gensim.models.keyedvectors import Word2VecKeyedVectors from gensim.test.utils import datapath, get_tmpfile, temporary_file, common_texts as sentences + +try: + from pyemd import emd # noqa:F401 + PYEMD_EXT = True +except (ImportError, ValueError): + PYEMD_EXT = False + logger = logging.getLogger(__name__) IS_WIN32 = (os.name == "nt") and (struct.calcsize('P') * 8 == 32) @@ -357,6 +364,7 @@ def test_contains(self): self.assertFalse('nights' in self.test_model.wv.vocab) self.assertTrue('nights' in self.test_model.wv) + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def test_wm_distance(self): doc = ['night', 'payment'] oov_doc = ['nights', 'forests', 'payments'] diff --git a/gensim/test/test_fasttext_wrapper.py b/gensim/test/test_fasttext_wrapper.py index bc995f8159..66dd7b47c5 100644 --- a/gensim/test/test_fasttext_wrapper.py +++ b/gensim/test/test_fasttext_wrapper.py @@ -18,6 +18,14 @@ from gensim.models import keyedvectors from gensim.test.utils import datapath, get_tmpfile + +try: + from pyemd import emd # noqa:F401 + PYEMD_EXT = True +except (ImportError, ValueError): + PYEMD_EXT = False + + logger = logging.getLogger(__name__) @@ -311,6 +319,7 @@ def testContains(self): self.assertFalse('a!@' in 
self.test_model.wv.vocab) self.assertFalse('a!@' in self.test_model) + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def testWmdistance(self): """Tests wmdistance for docs with in-vocab and out-of-vocab words""" doc = ['night', 'payment'] diff --git a/gensim/test/test_similarities.py b/gensim/test/test_similarities.py index 4965d96d6f..e1f876e216 100644 --- a/gensim/test/test_similarities.py +++ b/gensim/test/test_similarities.py @@ -29,7 +29,7 @@ try: from pyemd import emd # noqa:F401 PYEMD_EXT = True -except ImportError: +except (ImportError, ValueError): PYEMD_EXT = False sentences = [doc2vec.TaggedDocument(words, [i]) for i, words in enumerate(texts)] @@ -78,9 +78,8 @@ def testFull(self, num_best=None, shardsize=100): index.destroy() def testNumBest(self): - if self.cls == similarities.WmdSimilarity and not PYEMD_EXT: - return + self.skipTest("pyemd not installed or have some issues") for num_best in [None, 0, 1, 9, 1000]: self.testFull(num_best=num_best) @@ -110,6 +109,9 @@ def test_scipy2scipy_clipped(self): def testEmptyQuery(self): index = self.factoryMethod() + if isinstance(index, similarities.WmdSimilarity) and not PYEMD_EXT: + self.skipTest("pyemd not installed or have some issues") + query = [] try: sims = index[query] @@ -166,7 +168,7 @@ def testIter(self): def testPersistency(self): if self.cls == similarities.WmdSimilarity and not PYEMD_EXT: - return + self.skipTest("pyemd not installed or have some issues") fname = get_tmpfile('gensim_similarities.tst.pkl') index = self.factoryMethod() @@ -186,7 +188,7 @@ def testPersistency(self): def testPersistencyCompressed(self): if self.cls == similarities.WmdSimilarity and not PYEMD_EXT: - return + self.skipTest("pyemd not installed or have some issues") fname = get_tmpfile('gensim_similarities.tst.pkl.gz') index = self.factoryMethod() @@ -206,7 +208,7 @@ def testPersistencyCompressed(self): def testLarge(self): if self.cls == similarities.WmdSimilarity and not PYEMD_EXT: - return + self.skipTest("pyemd not installed or have some issues") fname = get_tmpfile('gensim_similarities.tst.pkl') index = self.factoryMethod() @@ -228,7 +230,7 @@ def testLarge(self): def testLargeCompressed(self): if self.cls == similarities.WmdSimilarity and not PYEMD_EXT: - return + self.skipTest("pyemd not installed or have some issues") fname = get_tmpfile('gensim_similarities.tst.pkl.gz') index = self.factoryMethod() @@ -250,7 +252,7 @@ def testLargeCompressed(self): def testMmap(self): if self.cls == similarities.WmdSimilarity and not PYEMD_EXT: - return + self.skipTest("pyemd not installed or have some issues") fname = get_tmpfile('gensim_similarities.tst.pkl') index = self.factoryMethod() @@ -273,7 +275,7 @@ def testMmap(self): def testMmapCompressed(self): if self.cls == similarities.WmdSimilarity and not PYEMD_EXT: - return + self.skipTest("pyemd not installed or have some issues") fname = get_tmpfile('gensim_similarities.tst.pkl.gz') index = self.factoryMethod() @@ -298,12 +300,10 @@ def factoryMethod(self): # Override factoryMethod. return self.cls(texts, self.w2v_model) + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def testFull(self, num_best=None): # Override testFull. 
- if not PYEMD_EXT: - return - index = self.cls(texts, self.w2v_model) index.num_best = num_best query = texts[0] @@ -319,15 +319,13 @@ def testFull(self, num_best=None): self.assertTrue(numpy.alltrue(sims[1:] > 0.0)) self.assertTrue(numpy.alltrue(sims[1:] < 1.0)) + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def testNonIncreasing(self): ''' Check that similarities are non-increasing when `num_best` is not `None`.''' # NOTE: this could be implemented for other similarities as well (i.e. # in _TestSimilarityABC). - if not PYEMD_EXT: - return - index = self.cls(texts, self.w2v_model, num_best=3) query = texts[0] sims = index[query] @@ -337,12 +335,10 @@ def testNonIncreasing(self): cond = sum(numpy.diff(sims2) < 0) == len(sims2) - 1 self.assertTrue(cond) + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def testChunking(self): # Override testChunking. - if not PYEMD_EXT: - return - index = self.cls(texts, self.w2v_model) query = texts[:3] sims = index[query] @@ -358,12 +354,10 @@ def testChunking(self): self.assertTrue(numpy.alltrue(sim > 0.0)) self.assertTrue(numpy.alltrue(sim <= 1.0)) + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def testIter(self): # Override testIter. - if not PYEMD_EXT: - return - index = self.cls(texts, self.w2v_model) for sims in index: self.assertTrue(numpy.alltrue(sims >= 0.0)) diff --git a/gensim/test/test_word2vec.py b/gensim/test/test_word2vec.py index de8abd702a..11257bebb1 100644 --- a/gensim/test/test_word2vec.py +++ b/gensim/test/test_word2vec.py @@ -26,7 +26,7 @@ try: from pyemd import emd # noqa:F401 PYEMD_EXT = True -except ImportError: +except (ImportError, ValueError): PYEMD_EXT = False @@ -1023,12 +1023,11 @@ def test_compute_training_loss(self): # endclass TestWord2VecModel class TestWMD(unittest.TestCase): + + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def testNonzero(self): '''Test basic functionality with a test sentence.''' - if not PYEMD_EXT: - return - model = word2vec.Word2Vec(sentences, min_count=2, seed=42, workers=1) sentence1 = ['human', 'interface', 'computer'] sentence2 = ['survey', 'user', 'computer', 'system', 'response', 'time'] @@ -1037,12 +1036,10 @@ def testNonzero(self): # Check that distance is non-zero. 
self.assertFalse(distance == 0.0) + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def testSymmetry(self): '''Check that distance is symmetric.''' - if not PYEMD_EXT: - return - model = word2vec.Word2Vec(sentences, min_count=2, seed=42, workers=1) sentence1 = ['human', 'interface', 'computer'] sentence2 = ['survey', 'user', 'computer', 'system', 'response', 'time'] @@ -1050,12 +1047,10 @@ def testSymmetry(self): distance2 = model.wv.wmdistance(sentence2, sentence1) self.assertTrue(np.allclose(distance1, distance2)) + @unittest.skipIf(PYEMD_EXT is False, "pyemd not installed or have some issues") def testIdenticalSentences(self): '''Check that the distance from a sentence to itself is zero.''' - if not PYEMD_EXT: - return - model = word2vec.Word2Vec(sentences, min_count=1) sentence = ['survey', 'user', 'computer', 'system', 'response', 'time'] distance = model.wv.wmdistance(sentence, sentence) diff --git a/tox.ini b/tox.ini index c5446a8097..eb7db07013 100644 --- a/tox.ini +++ b/tox.ini @@ -69,6 +69,14 @@ deps = flake8-rst == 0.4.3 commands = flake8-rst gensim/ docs/ {posargs} +[testenv:compile] +basepython = python2 +recreate = True + +deps = numpy == 1.11.3 +commands = python setup.py build_ext --inplace + + [testenv:docs] basepython = python2 recreate = True From e0bfb3f7ea5aca8b82e00ac6164e8027f4cfd497 Mon Sep 17 00:00:00 2001 From: Dmitry Persiyanov Date: Tue, 8 Jan 2019 06:09:27 +0300 Subject: [PATCH 56/66] Update `Doc2Vec` documentation: how tags are assigned in `corpus_file` mode (#2320) * add clarification regarding tags of documents in corpus_file mode for Doc2Vec * based on -> equal to --- gensim/models/doc2vec.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/gensim/models/doc2vec.py b/gensim/models/doc2vec.py index a4fb34d1fa..3b5b9c960c 100644 --- a/gensim/models/doc2vec.py +++ b/gensim/models/doc2vec.py @@ -487,7 +487,8 @@ def __init__(self, documents=None, corpus_file=None, dm_mean=None, dm=1, dbow_wo corpus_file : str, optional Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or - `corpus_file` arguments need to be passed (or none of them). + `corpus_file` arguments need to be passed (or none of them). Documents' tags are assigned automatically + and are equal to line number, as in :class:`~gensim.models.doc2vec.TaggedLineDocument`. dm : {1,0}, optional Defines the training algorithm. If `dm=1`, 'distributed memory' (PV-DM) is used. Otherwise, `distributed bag of words` (PV-DBOW) is employed. @@ -761,7 +762,8 @@ def train(self, documents=None, corpus_file=None, total_examples=None, total_wor corpus_file : str, optional Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or - `corpus_file` arguments need to be passed (not both of them). + `corpus_file` arguments need to be passed (not both of them). Documents' tags are assigned automatically + and are equal to line number, as in :class:`~gensim.models.doc2vec.TaggedLineDocument`. total_examples : int, optional Count of sentences. total_words : int, optional @@ -1140,7 +1142,8 @@ def build_vocab(self, documents=None, corpus_file=None, update=False, progress_p corpus_file : str, optional Path to a corpus file in :class:`~gensim.models.word2vec.LineSentence` format. 
You may use this argument instead of `sentences` to get performance boost. Only one of `sentences` or - `corpus_file` arguments need to be passed (not both of them). + `corpus_file` arguments need to be passed (not both of them). Documents' tags are assigned automatically + and are equal to a line number, as in :class:`~gensim.models.doc2vec.TaggedLineDocument`. update : bool If true, the new words in `sentences` will be added to model's vocab. progress_per : int From 02e8bf52c35d03bf30ce6c058763e7f8c7ef8ac6 Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Tue, 8 Jan 2019 15:32:39 +0500 Subject: [PATCH 57/66] Optimize `remove_unreachable_nodes` in `gensim.summarization` (#2263) * slightly optimize remove_unreachable_nodes * add test --- gensim/summarization/commons.py | 2 +- gensim/test/test_summarization.py | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/gensim/summarization/commons.py b/gensim/summarization/commons.py index 0735a357e7..133551f8f7 100644 --- a/gensim/summarization/commons.py +++ b/gensim/summarization/commons.py @@ -63,5 +63,5 @@ def remove_unreachable_nodes(graph): """ for node in graph.nodes(): - if sum(graph.edge_weight((node, other)) for other in graph.neighbors(node)) == 0: + if all(graph.edge_weight((node, other)) == 0 for other in graph.neighbors(node)): graph.del_node(node) diff --git a/gensim/test/test_summarization.py b/gensim/test/test_summarization.py index 45c0bce8ee..7024385bba 100644 --- a/gensim/test/test_summarization.py +++ b/gensim/test/test_summarization.py @@ -19,6 +19,30 @@ from gensim import utils from gensim.corpora import Dictionary from gensim.summarization import summarize, summarize_corpus, keywords, mz_keywords +from gensim.summarization.commons import remove_unreachable_nodes, build_graph + + +class TestCommons(unittest.TestCase): + + def _build_graph(self): + graph = build_graph(['a', 'b', 'c', 'd']) + graph.add_edge(('a', 'b')) + graph.add_edge(('b', 'c')) + graph.add_edge(('c', 'a')) + return graph + + def test_remove_unreachable_nodes(self): + graph = self._build_graph() + self.assertTrue(graph.has_node('d')) + remove_unreachable_nodes(graph) + self.assertFalse(graph.has_node('d')) + + graph = self._build_graph() + graph.add_edge(('d', 'a'), wt=0.0) + graph.add_edge(('b', 'd'), wt=0) + self.assertTrue(graph.has_node('d')) + remove_unreachable_nodes(graph) + self.assertFalse(graph.has_node('d')) class TestSummarizationTest(unittest.TestCase): From e8933dfcf18801635d50cb0361c5b33c18548578 Mon Sep 17 00:00:00 2001 From: Rupal Sharma <39291744+rsdel2007@users.noreply.github.com> Date: Tue, 8 Jan 2019 17:28:07 +0530 Subject: [PATCH 58/66] Fixed typos in notebooks (#2322) * Fixed typo * Fixed typo * Fixed typo * Fixed typo * Fixed typo * Fixed typo * Fixed Typo --- docs/notebooks/FastText_Tutorial.ipynb | 2 +- docs/notebooks/Poincare Evaluation.ipynb | 2 +- docs/notebooks/Tensorboard_visualizations.ipynb | 7 ++++--- docs/notebooks/Topics_and_Transformations.ipynb | 4 ++-- docs/notebooks/WMD_tutorial.ipynb | 4 ++-- docs/notebooks/Wordrank_comparisons.ipynb | 2 +- 6 files changed, 11 insertions(+), 10 deletions(-) diff --git a/docs/notebooks/FastText_Tutorial.ipynb b/docs/notebooks/FastText_Tutorial.ipynb index f547009215..bc964b2829 100644 --- a/docs/notebooks/FastText_Tutorial.ipynb +++ b/docs/notebooks/FastText_Tutorial.ipynb @@ -134,7 +134,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Hyperparameters for training the model follow the same pattern as Word2Vec. 
FastText supports the folllowing parameters from the original word2vec - \n", + "Hyperparameters for training the model follow the same pattern as Word2Vec. FastText supports the following parameters from the original word2vec - \n", " - model: Training architecture. Allowed values: `cbow`, `skipgram` (Default `cbow`)\n", " - size: Size of embeddings to be learnt (Default 100)\n", " - alpha: Initial learning rate (Default 0.025)\n", diff --git a/docs/notebooks/Poincare Evaluation.ipynb b/docs/notebooks/Poincare Evaluation.ipynb index 0d3f8bb851..d2dd4bfac5 100644 --- a/docs/notebooks/Poincare Evaluation.ipynb +++ b/docs/notebooks/Poincare Evaluation.ipynb @@ -1706,7 +1706,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "1. The model can be investigated further to understand why it doesn't produce results as good as the paper. It is possible that this might be due to training details not present in the paper, or due to us incorrectly interpreting some ambiguous parts of the paper. We have not been able to clarify all such ambiguitities in communication with the authors.\n", + "1. The model can be investigated further to understand why it doesn't produce results as good as the paper. It is possible that this might be due to training details not present in the paper, or due to us incorrectly interpreting some ambiguous parts of the paper. We have not been able to clarify all such ambiguities in communication with the authors.\n", "2. Optimizing the training process further - with a model size of 50 dimensions and a dataset with ~700k relations and ~80k nodes, the Gensim implementation takes around 45 seconds to complete an epoch (~15k relations per second), whereas the open source C++ implementation takes around 1/6th the time (~95k relations per second).\n", "3. Implementing the variant of the model mentioned in the paper for symmetric graphs and evaluating on the scientific collaboration datasets described earlier in the report." ] diff --git a/docs/notebooks/Tensorboard_visualizations.ipynb b/docs/notebooks/Tensorboard_visualizations.ipynb index a2d88e9619..f65083b938 100644 --- a/docs/notebooks/Tensorboard_visualizations.ipynb +++ b/docs/notebooks/Tensorboard_visualizations.ipynb @@ -844,7 +844,7 @@ "- **T-SNE**: The idea of T-SNE is to place the local neighbours close to each other, and almost completely ignoring the global structure. It is useful for exploring local neighborhoods and finding local clusters. But the global trends are not represented accurately and the separation between different groups is often not preserved (see the t-sne plots of our data below which testify the same).\n", "\n", "\n", - "- **Custom Projections**: This is a custom bethod based on the text searches you define for different directions. It could be useful for finding meaningful directions in the vector space, for example, female to male, currency to country etc.\n", + "- **Custom Projections**: This is a custom method based on the text searches you define for different directions. It could be useful for finding meaningful directions in the vector space, for example, female to male, currency to country etc.\n", "\n", "You can refer to this [doc](https://www.tensorflow.org/get_started/embedding_viz) for instructions on how to use and navigate through different panels available in TensorBoard." ] @@ -1112,9 +1112,10 @@ "\n", "The above plot was generated with perplexity 11, learning rate 10 and iteration 1100. 
Though the results could vary on successive runs, and you may not get the exact plot as above even with same hyperparameter settings. But some small clusters will start forming as above, with different orientations.\n", "\n", - "I named some clusters above based on the genre of it's movies and also using the `show_topic()` to see relevant terms of the topic which was most prevelant in a cluster. Most of the clusters had doocumets belonging dominantly to a single topic. For ex. The cluster with movies belonging primarily to topic 0 could be named Fantasy/Romance based on terms displayed below for topic 0. You can play with the visualization yourself on this [link](http://projector.tensorflow.org/?config=https://raw.githubusercontent.com/parulsethi/LdaProjector/master/doc_lda_config.json) and try to conclude a label for clusters based on movies it has and their dominant topic. You can see the top 5 topics of every point by hovering over it.\n", + "I named some clusters above based on the genre of it's movies and also using the `show_topic()` to see relevant terms of the topic which was most prevalent in a cluster. Most of the clusters had documents belonging dominantly to a single topic. For ex. The cluster with movies belonging primarily to topic 0 could be named Fantasy/Romance based on terms displayed below for topic 0. You can play with the visualization yourself on this [link](http://projector.tensorflow.org/?config=https://raw.githubusercontent.com/parulsethi/LdaProjector/master/doc_lda_config.json) and try to conclude a label for clusters based on movies it has and + dominant topic. You can see the top 5 topics of every point by hovering over it.\n", "\n", - "Now, we can notice that their are more than 10 clusters in the above image, whereas we trained our model for `num_topics=10`. It's because their are few clusters, which has documents belonging to more than one topic with an approximately close topic probability values." + "Now, we can notice that there are more than 10 clusters in the above image, whereas we trained our model for `num_topics=10`. It's because there are few clusters, which has documents belonging to more than one topic with an approximately close topic probability values." ] }, { diff --git a/docs/notebooks/Topics_and_Transformations.ipynb b/docs/notebooks/Topics_and_Transformations.ipynb index 5a8ec7f985..b8b2ff129f 100644 --- a/docs/notebooks/Topics_and_Transformations.ipynb +++ b/docs/notebooks/Topics_and_Transformations.ipynb @@ -199,7 +199,7 @@ "In this particular case, we are transforming the same corpus that we used for training, but this is only incidental. Once the transformation model has been initialized, it can be used on any vectors (provided they come from the same vector space, of course), even if they were not used in the training corpus at all. This is achieved by a process called folding-in for LSA, by topic inference for LDA etc.\n", "\n", "> Note: \n", - "> Calling model[corpus] only creates a wrapper around the old corpus document stream – actual conversions are done on-the-fly, during document iteration. We cannot convert the entire corpus at the time of calling corpus_transformed = model[corpus], because that would mean storing the result in main memory, and that contradicts gensim’s objective of memory-indepedence. 
If you will be iterating over the transformed corpus_transformed multiple times, and the transformation is costly, serialize the resulting corpus to disk first and continue using that.\n", + "> Calling model[corpus] only creates a wrapper around the old corpus document stream – actual conversions are done on-the-fly, during document iteration. We cannot convert the entire corpus at the time of calling corpus_transformed = model[corpus], because that would mean storing the result in main memory, and that contradicts gensim’s objective of memory-independence. If you will be iterating over the transformed corpus_transformed multiple times, and the transformation is costly, serialize the resulting corpus to disk first and continue using that.\n", "\n", "Transformations can also be serialized, one on top of another, in a sort of chain:" ] @@ -332,7 +332,7 @@ "metadata": {}, "source": [ "### [Latent Semantic Indexing, LSI (or sometimes LSA)](http://en.wikipedia.org/wiki/Latent_semantic_indexing) \n", - "LSI transforms documents from either bag-of-words or (preferrably) TfIdf-weighted space into a latent space of a lower dimensionality. For the toy corpus above we used only 2 latent dimensions, but on real corpora, target dimensionality of 200–500 is recommended as a “golden standard” [1]." + "LSI transforms documents from either bag-of-words or (preferably) TfIdf-weighted space into a latent space of a lower dimensionality. For the toy corpus above we used only 2 latent dimensions, but on real corpora, target dimensionality of 200–500 is recommended as a “golden standard” [1]." ] }, { diff --git a/docs/notebooks/WMD_tutorial.ipynb b/docs/notebooks/WMD_tutorial.ipynb index 3a529f471e..8f627c37ce 100644 --- a/docs/notebooks/WMD_tutorial.ipynb +++ b/docs/notebooks/WMD_tutorial.ipynb @@ -14,7 +14,7 @@ "\n", "WMD is a method that allows us to assess the \"distance\" between two documents in a meaningful way, even when they have no words in common. It uses [word2vec](http://rare-technologies.com/word2vec-tutorial/) [4] vector embeddings of words. It been shown to outperform many of the state-of-the-art methods in *k*-nearest neighbors classification [3].\n", "\n", - "WMD is illustrated below for two very similar sentences (illustration taken from [Vlad Niculae's blog](http://vene.ro/blog/word-movers-distance-in-python.html)). The sentences have no words in common, but by matching the relevant words, WMD is able to accurately measure the (dis)similarity between the two sentences. The method also uses the bag-of-words representation of the documents (simply put, the word's frequencies in the documents), noted as $d$ in the figure below. The intution behind the method is that we find the minimum \"traveling distance\" between documents, in other words the most efficient way to \"move\" the distribution of document 1 to the distribution of document 2.\n", + "WMD is illustrated below for two very similar sentences (illustration taken from [Vlad Niculae's blog](http://vene.ro/blog/word-movers-distance-in-python.html)). The sentences have no words in common, but by matching the relevant words, WMD is able to accurately measure the (dis)similarity between the two sentences. The method also uses the bag-of-words representation of the documents (simply put, the word's frequencies in the documents), noted as $d$ in the figure below. 
The intuition behind the method is that we find the minimum \"traveling distance\" between documents, in other words the most efficient way to \"move\" the distribution of document 1 to the distribution of document 2.\n", "\n", "\n", "\n", @@ -639,4 +639,4 @@ }, "nbformat": 4, "nbformat_minor": 0 -} \ No newline at end of file +} diff --git a/docs/notebooks/Wordrank_comparisons.ipynb b/docs/notebooks/Wordrank_comparisons.ipynb index 968e481cd7..a3ab167cc1 100644 --- a/docs/notebooks/Wordrank_comparisons.ipynb +++ b/docs/notebooks/Wordrank_comparisons.ipynb @@ -1174,7 +1174,7 @@ "source": [ "This shows the results for text8(17 million tokens). Following points can be observed in this case-\n", "\n", - "1. For Semantic analogies, all the models perform comparitively poor on rare words and also when the word frequency is high towards the end.\n", + "1. For Semantic analogies, all the models perform comparatively poor on rare words and also when the word frequency is high towards the end.\n", "2. For Syntactic Analogies, FastText performance is fairly well on rare words but then falls steeply at highly frequent words.\n", "3. WordRank and Word2Vec perform very similar with low accuracy for rare and highly frequent words in Syntactic Analogies.\n", "4. FastText is again better in total analogies case due to the same reason described previously. Here the total no. of Semantic analogies is 7416 and Syntactic Analogies is 10411.\n", From ebc79717cb201eccb92ea39b4aae1550a77ef295 Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Tue, 8 Jan 2019 19:33:45 +0500 Subject: [PATCH 59/66] Fix `malletmodel2ldamodel` conversion (#2288) * Fixes #2069: wrong malletmodel2ldamodel `malletmodel2ldamodel` sets up expElogbeta attribute but LdaModel.show_topics uses inner not dirichleted state instead. And moreover LdaState and LdaModel were not synced. * add test * fix linter * replace sklearn with gensim + use larger dataset & num topics (for more strict check) * remove sklearn import --- gensim/models/wrappers/ldamallet.py | 6 ++++-- gensim/test/test_ldamallet_wrapper.py | 21 ++++++++++++++++++++- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/gensim/models/wrappers/ldamallet.py b/gensim/models/wrappers/ldamallet.py index 2649eb6953..e1bf2a85e8 100644 --- a/gensim/models/wrappers/ldamallet.py +++ b/gensim/models/wrappers/ldamallet.py @@ -588,9 +588,11 @@ def malletmodel2ldamodel(mallet_model, gamma_threshold=0.001, iterations=50): """ model_gensim = LdaModel( id2word=mallet_model.id2word, num_topics=mallet_model.num_topics, - alpha=mallet_model.alpha, iterations=iterations, + alpha=mallet_model.alpha, eta=0, + iterations=iterations, gamma_threshold=gamma_threshold, dtype=numpy.float64 # don't loose precision when converting from MALLET ) - model_gensim.expElogbeta[:] = mallet_model.wordtopics + model_gensim.state.sstats[...] 
= mallet_model.wordtopics + model_gensim.sync_state() return model_gensim diff --git a/gensim/test/test_ldamallet_wrapper.py b/gensim/test/test_ldamallet_wrapper.py index 42ed6890d4..f8f432bc96 100644 --- a/gensim/test/test_ldamallet_wrapper.py +++ b/gensim/test/test_ldamallet_wrapper.py @@ -19,9 +19,11 @@ from gensim.corpora import mmcorpus, Dictionary from gensim.models.wrappers import ldamallet from gensim import matutils +from gensim.utils import simple_preprocess from gensim.models import ldamodel from gensim.test import basetmtests from gensim.test.utils import datapath, get_tmpfile, common_texts +import gensim.downloader as api dictionary = Dictionary(common_texts) corpus = [dictionary.doc2bow(text) for text in common_texts] @@ -90,6 +92,10 @@ def testMallet2Model(self): tm1 = ldamallet.LdaMallet(self.mallet_path, corpus=corpus, num_topics=2, id2word=dictionary) tm2 = ldamallet.malletmodel2ldamodel(tm1) + + # set num_topics=-1 to exclude random influence + self.assertEqual(tm1.show_topics(-1, 10), tm2.show_topics(-1, 10)) + for document in corpus: element1_1, element1_2 = tm1[document][0] element2_1, element2_2 = tm2[document][0] @@ -101,7 +107,20 @@ def testMallet2Model(self): self.assertAlmostEqual(element1_2, element2_2, 1) logging.debug('%d %d', element1_1, element2_1) logging.debug('%d %d', element1_2, element2_2) - logging.debug('%d %d', tm1[document][1], tm2[document][1]) + logging.debug('%s %s', tm1[document][1], tm2[document][1]) + + def testMallet2ModelOn20NewsGroups(self): + corpus = [simple_preprocess(doc["data"]) for doc in api.load("20-newsgroups")] + dictionary = Dictionary(corpus) + + corpus = [dictionary.doc2bow(text) for text in corpus] + + lda_mallet_model = ldamallet.LdaMallet( + self.mallet_path, corpus=corpus, + num_topics=20, id2word=dictionary, iterations=500) + + lda_gensim_model = ldamallet.malletmodel2ldamodel(lda_mallet_model, iterations=1000) + self.assertEqual(lda_mallet_model.show_topics(20, 50), lda_gensim_model.show_topics(20, 50)) def testPersistence(self): if not self.mallet_path: From 24e541ace7e18a23e4f9fd0ff4ca438a6def167b Mon Sep 17 00:00:00 2001 From: Rupal Sharma <39291744+rsdel2007@users.noreply.github.com> Date: Tue, 8 Jan 2019 23:30:28 +0530 Subject: [PATCH 60/66] Fix typos in `gensim.models` (#2323) --- gensim/models/atmodel.py | 4 ++-- gensim/models/doc2vec.py | 4 ++-- gensim/models/fasttext.py | 2 +- gensim/models/ldamodel.py | 6 +++--- gensim/models/phrases.py | 2 +- gensim/models/translation_matrix.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/gensim/models/atmodel.py b/gensim/models/atmodel.py index dd0d7ffbd5..c4f0f3232f 100755 --- a/gensim/models/atmodel.py +++ b/gensim/models/atmodel.py @@ -910,7 +910,7 @@ def bound(self, chunk, chunk_doc_idx=None, subsample_ratio=1.0, author2doc=None, Assigns the value for document index. subsample_ratio : float, optional Used for calculation of word score for estimation of variational bound. - author2doc : dict of (str, list of int), optinal + author2doc : dict of (str, list of int), optional A dictionary where keys are the names of authors and values are lists of documents that the author contributes to. doc2author : dict of (int, list of str), optional @@ -1094,7 +1094,7 @@ def rollback_new_author_chages(): gamma_new = self.random_state.gamma(100., 1. / 100., (num_new_authors, self.num_topics)) self.state.gamma = np.vstack([self.state.gamma, gamma_new]) - # Should not record the sstats, as we are goint to delete the new author after calculated. 
+ # Should not record the sstats, as we are going to delete the new author after calculated. try: gammat, _ = self.inference( corpus, self.author2doc, self.doc2author, rho(), diff --git a/gensim/models/doc2vec.py b/gensim/models/doc2vec.py index 3b5b9c960c..e0af132c23 100644 --- a/gensim/models/doc2vec.py +++ b/gensim/models/doc2vec.py @@ -1043,7 +1043,7 @@ def save_word2vec_format(self, fname, doctag_vec=False, word_vec=True, prefix='* fvocab : str, optional Optional file path used to save the vocabulary. binary : bool, optional - If True, the data wil be saved in binary word2vec format, otherwise - will be saved in plain text. + If True, the data will be saved in binary word2vec format, otherwise - will be saved in plain text. """ total_vec = len(self.wv.vocab) + len(self.docvecs) @@ -1265,7 +1265,7 @@ def __init__(self, max_vocab_size=None, min_count=5, sample=1e-3, sorted_vocab=T if there are more unique words than this, then prune the infrequent ones. Every 10 million word types need about 1GB of RAM, set to `None` for no limit. min_count : int - Words with frequency lower than this limit will be discarded form the vocabulary. + Words with frequency lower than this limit will be discarded from the vocabulary. sample : float, optional The threshold for configuring which higher-frequency words are randomly downsampled, useful range is (0, 1e-5). diff --git a/gensim/models/fasttext.py b/gensim/models/fasttext.py index 17b314fec9..cd61c3f0a6 100644 --- a/gensim/models/fasttext.py +++ b/gensim/models/fasttext.py @@ -14,7 +14,7 @@ This module contains a fast native C implementation of Fasttext with Python interfaces. It is **not** only a wrapper around Facebook's implementation. -For a tutorial see `this noteboook +For a tutorial see `this notebook `_. **Make sure you have a C compiler before installing Gensim, to use the optimized (compiled) Fasttext diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index 74ff567d20..51f2710fe9 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -371,7 +371,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, Mapping from word IDs to words. It is used to determine the vocabulary size, as well as for debugging and topic printing. distributed : bool, optional - Whether distributed computing should be used to accerelate training. + Whether distributed computing should be used to accelerate training. chunksize : int, optional Number of documents to be used in each training chunk. passes : int, optional @@ -1101,7 +1101,7 @@ def bound(self, corpus, gamma=None, subsample_ratio=1.0): score += gammaln(np.sum(self.alpha)) - gammaln(np.sum(gammad)) # Compensate likelihood for when `corpus` above is only a sample of the whole corpus. This ensures - # that the likelihood is always rougly on the same scale. + # that the likelihood is always roughly on the same scale. score *= subsample_ratio # E[log p(beta | eta) - log q (beta | lambda)]; assumes eta is a scalar @@ -1303,7 +1303,7 @@ def get_document_topics(self, bow, minimum_probability=None, minimum_phi_value=N Most probable topics per word. Each element in the list is a pair of a word's id, and a list of topics sorted by their relevance to this word. Only returned if `per_word_topics` was set to True. list of (int, list of float), optional - Phi relevance values, multipled by the feature length, for each word-topic combination. + Phi relevance values, multiplied by the feature length, for each word-topic combination. 
Each element in the list is a pair of a word's id and a list of the phi values between this word and each topic. Only returned if `per_word_topics` was set to True. diff --git a/gensim/models/phrases.py b/gensim/models/phrases.py index e143e364e4..c10aeed0e7 100644 --- a/gensim/models/phrases.py +++ b/gensim/models/phrases.py @@ -533,7 +533,7 @@ def add_vocab(self, sentences): # uses a separate vocab to collect the token counts from `sentences`. # this consumes more RAM than merging new sentences into `self.vocab` # directly, but gives the new sentences a fighting chance to collect - # sufficient counts, before being pruned out by the (large) accummulated + # sufficient counts, before being pruned out by the (large) accumulated # counts collected in previous learn_vocab runs. min_reduce, vocab, total_words = self.learn_vocab( sentences, self.max_vocab_size, self.delimiter, self.progress_per, self.common_terms) diff --git a/gensim/models/translation_matrix.py b/gensim/models/translation_matrix.py index 8969d02bc2..8ae96e678e 100644 --- a/gensim/models/translation_matrix.py +++ b/gensim/models/translation_matrix.py @@ -280,7 +280,7 @@ def translate(self, source_words, topn=5, gc=0, sample_num=None, source_lang_vec source_words : {str, list of str} Single word or a list of words to be translated topn : int, optional - Number of words than will be returned as translation for each `source_words` + Number of words that will be returned as translation for each `source_words` gc : int, optional Define translation algorithm, if `gc == 0` - use standard NN retrieval, otherwise, use globally corrected neighbour retrieval method (as described in [1]_). From 9af941627a1290e5a75fcb41dcb680d7d2c8421b Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Wed, 9 Jan 2019 00:03:47 +0500 Subject: [PATCH 61/66] Optimize `mz_entropy` from `gensim.summarization` (#2267) * optimize mz_entropy - add cache for __analytic_entropy - optimise time of creation the word_count remaking the method of word counting * fix cache --- gensim/summarization/mz_entropy.py | 68 ++++++++++++++++++++++-------- 1 file changed, 50 insertions(+), 18 deletions(-) diff --git a/gensim/summarization/mz_entropy.py b/gensim/summarization/mz_entropy.py index 492ae7e79a..75d667e906 100644 --- a/gensim/summarization/mz_entropy.py +++ b/gensim/summarization/mz_entropy.py @@ -6,7 +6,7 @@ from gensim.summarization.textcleaner import tokenize_by_word as _tokenize_by_word from gensim.utils import to_unicode -import numpy +import numpy as np import scipy @@ -57,20 +57,15 @@ def mz_keywords(text, blocksize=1024, scores=False, split=False, weighted=True, text = to_unicode(text) words = [word for word in _tokenize_by_word(text)] vocab = sorted(set(words)) - word_counts = numpy.array( - [ - [words[i:i + blocksize].count(word) for word in vocab] - for i in range(0, len(words), blocksize) - ] - ).astype('d') + word_counts = count_freqs_by_blocks(words, vocab, blocksize) n_blocks = word_counts.shape[0] totals = word_counts.sum(axis=0) n_words = totals.sum() p = word_counts / totals - log_p = numpy.log2(p) - h = numpy.nan_to_num(p * log_p).sum(axis=0) + log_p = np.log2(p) + h = np.nan_to_num(p * log_p).sum(axis=0) analytic = __analytic_entropy(blocksize, n_blocks, n_words) - h += analytic(totals).astype('d') + h += analytic(totals).astype('d', copy=False) if weighted: h *= totals / n_words if threshold == 'auto': @@ -83,12 +78,41 @@ def mz_keywords(text, blocksize=1024, scores=False, split=False, weighted=True, return result +def 
count_freqs_by_blocks(words, vocab, blocksize): + """Count word frequencies in chunks + + Parameters + ---------- + words: list(str) + List of all words. + vocab: list(str) + List of words in vocabulary. + blocksize: int + Size of blocks to use for count. + + Returns + ------- + results: numpy.array(list(double)) + Array of list of word frequencies in one chunk. + The order of word frequencies is the same as words in vocab. + """ + word2ind = {word: i for i, word in enumerate(vocab)} + + word_counts = [] + for i in range(0, len(words), blocksize): + counts = [0] * len(vocab) + for word in words[i: i + blocksize]: + counts[word2ind[word]] += 1 + word_counts.append(counts) + return np.array(word_counts, dtype=np.double) + + def __log_combinations_inner(n, m): """Calculates the logarithm of n!/m!(n-m)!""" - return -(numpy.log(n + 1) + scipy.special.betaln(n - m + 1, m + 1)) + return -(np.log(n + 1) + scipy.special.betaln(n - m + 1, m + 1)) -__log_combinations = numpy.frompyfunc(__log_combinations_inner, 2, 1) +__log_combinations = np.frompyfunc(__log_combinations_inner, 2, 1) def __marginal_prob(blocksize, n_words): @@ -97,23 +121,31 @@ def marginal_prob(n, m): """Marginal probability of a word that occurs n times in the document occurring m times in a given block""" - return numpy.exp( + return np.exp( __log_combinations(n, m) + __log_combinations(n_words - n, blocksize - m) - __log_combinations(n_words, blocksize) ) - return numpy.frompyfunc(marginal_prob, 2, 1) + return np.frompyfunc(marginal_prob, 2, 1) def __analytic_entropy(blocksize, n_blocks, n_words): marginal = __marginal_prob(blocksize, n_words) + cache = {1: 0.0} # special case def analytic_entropy(n): """Predicted entropy for a word that occurs n times in the document""" - m = numpy.arange(1, min(blocksize, n) + 1).astype('d') + n = int(n) + if n in cache: + return cache[n] + m = np.arange(1, min(blocksize, n) + 1, dtype=np.double) p = m / n - elements = numpy.nan_to_num(p * numpy.log2(p)) * marginal(n, m) - return -n_blocks * elements.sum() + # m >= 1, so p > 0 and np.log2(p) != nan + elements = (p * np.log2(p)) * marginal(n, m) + result = -n_blocks * elements.sum() + + cache[n] = result + return result - return numpy.frompyfunc(analytic_entropy, 1, 1) + return np.frompyfunc(analytic_entropy, 1, 1) From 1b07f81b8276a0ced35c11824deb961ed128246a Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Wed, 9 Jan 2019 10:52:04 +0500 Subject: [PATCH 62/66] Replace custom epsilons with numpy equivalent in `LdaModel` (#2308) * Fix #2115: Replace custom epsilons with automatic numpy equivalent * fix typo --- gensim/models/ldamodel.py | 22 +++++----------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/gensim/models/ldamodel.py b/gensim/models/ldamodel.py index 51f2710fe9..503c2b48e3 100755 --- a/gensim/models/ldamodel.py +++ b/gensim/models/ldamodel.py @@ -106,13 +106,6 @@ logger = logging.getLogger(__name__) -# Epsilon (very small) values used by each expected data type instead of 0, to avoid Arithmetic Errors. -DTYPE_TO_EPS = { - np.float16: 1e-5, - np.float32: 1e-35, - np.float64: 1e-100, -} - def update_dir_prior(prior, N, logphat, rho): """Update a given prior using Newton's method, described in @@ -426,12 +419,7 @@ def __init__(self, corpus=None, num_topics=100, id2word=None, Data-type to use during calculations inside model. All inputs are also converted. 
""" - if dtype not in DTYPE_TO_EPS: - raise ValueError( - "Incorrect 'dtype', please choose one of {}".format( - ", ".join("numpy.{}".format(tp.__name__) for tp in sorted(DTYPE_TO_EPS)))) - - self.dtype = dtype + self.dtype = np.finfo(dtype).dtype # store user-supplied parameters self.id2word = id2word @@ -668,6 +656,7 @@ def inference(self, chunk, collect_sstats=False): # Lee&Seung trick which speeds things up by an order of magnitude, compared # to Blei's original LDA-C code, cool!). integer_types = six.integer_types + (np.integer,) + epsilon = np.finfo(self.dtype).eps for d, doc in enumerate(chunk): if len(doc) > 0 and not isinstance(doc[0][0], integer_types): # make sure the term IDs are ints, otherwise np will get upset @@ -683,8 +672,7 @@ def inference(self, chunk, collect_sstats=False): # The optimal phi_{dwk} is proportional to expElogthetad_k * expElogbetad_w. # phinorm is the normalizer. # TODO treat zeros explicitly, instead of adding epsilon? - eps = DTYPE_TO_EPS[self.dtype] - phinorm = np.dot(expElogthetad, expElogbetad) + eps + phinorm = np.dot(expElogthetad, expElogbetad) + epsilon # Iterate between gamma and phi until convergence for _ in range(self.iterations): @@ -695,7 +683,7 @@ def inference(self, chunk, collect_sstats=False): gammad = self.alpha + expElogthetad * np.dot(cts / phinorm, expElogbetad.T) Elogthetad = dirichlet_expectation(gammad) expElogthetad = np.exp(Elogthetad) - phinorm = np.dot(expElogthetad, expElogbetad) + eps + phinorm = np.dot(expElogthetad, expElogbetad) + epsilon # If gamma hasn't changed much, we're done. meanchange = mean_absolute_difference(gammad, lastgamma) if meanchange < self.gamma_threshold: @@ -1289,7 +1277,7 @@ def get_document_topics(self, bow, minimum_probability=None, minimum_phi_value=N minimum_probability : float Topics with an assigned probability lower than this threshold will be discarded. minimum_phi_value : float - f `per_word_topics` is True, this represents a lower bound on the term probabilities that are included. + If `per_word_topics` is True, this represents a lower bound on the term probabilities that are included. If set to None, a value of 1e-8 is used to prevent 0s. per_word_topics : bool If True, this function will also return two extra lists as explained in the "Returns" section. 
From 7cbf715fc08b1608b275e73219883d00aec02bec Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Wed, 9 Jan 2019 12:09:57 +0500 Subject: [PATCH 63/66] Improve `filter_extremes` methods in `Dictionary` and `HashDictionary` (#2303) - use search by set instead of list - refine default value of dict.get() method - inplace sort --- gensim/corpora/dictionary.py | 14 +++++++------- gensim/corpora/hashdictionary.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/gensim/corpora/dictionary.py b/gensim/corpora/dictionary.py index 8d2ce58364..c08d4e31b8 100644 --- a/gensim/corpora/dictionary.py +++ b/gensim/corpora/dictionary.py @@ -357,18 +357,18 @@ def filter_extremes(self, no_below=5, no_above=0.5, keep_n=100000, keep_tokens=N # determine which tokens to keep if keep_tokens: - keep_ids = [self.token2id[v] for v in keep_tokens if v in self.token2id] - good_ids = ( + keep_ids = {self.token2id[v] for v in keep_tokens if v in self.token2id} + good_ids = [ v for v in itervalues(self.token2id) if no_below <= self.dfs.get(v, 0) <= no_above_abs or v in keep_ids - ) - good_ids = sorted(good_ids, key=lambda x: self.num_docs if x in keep_ids else self.dfs.get(x), reverse=True) + ] + good_ids.sort(key=lambda x: self.num_docs if x in keep_ids else self.dfs.get(x, 0), reverse=True) else: - good_ids = ( + good_ids = [ v for v in itervalues(self.token2id) if no_below <= self.dfs.get(v, 0) <= no_above_abs - ) - good_ids = sorted(good_ids, key=self.dfs.get, reverse=True) + ] + good_ids.sort(key=self.dfs.get, reverse=True) if keep_n is not None: good_ids = good_ids[:keep_n] bad_words = [(self[idx], self.dfs.get(idx, 0)) for idx in set(self).difference(good_ids)] diff --git a/gensim/corpora/hashdictionary.py b/gensim/corpora/hashdictionary.py index 141f384271..433f61aa42 100644 --- a/gensim/corpora/hashdictionary.py +++ b/gensim/corpora/hashdictionary.py @@ -303,7 +303,7 @@ def filter_extremes(self, no_below=5, no_above=0.5, keep_n=100000): tokenid: {token for token in tokens if token in self.dfs_debug} for tokenid, tokens in iteritems(self.id2token) } - self.dfs = {tokenid: freq for tokenid, freq in iteritems(self.dfs) if self.id2token.get(tokenid, set())} + self.dfs = {tokenid: freq for tokenid, freq in iteritems(self.dfs) if self.id2token.get(tokenid, False)} # for word->document frequency logger.info( From 9b28532787ecc0a9f54b2a197336de95d3dc498d Mon Sep 17 00:00:00 2001 From: horpto <__Singleton__@hackerdom.ru> Date: Wed, 9 Jan 2019 14:05:58 +0500 Subject: [PATCH 64/66] Refactor `BM25` (#2275) * Refactor BM25 - remove unnecessary attributes - move calculation of average_idf in _initialize - more readable names * fix PEP8 * fix build --- gensim/summarization/bm25.py | 86 +++++++++++++++++------------------- 1 file changed, 40 insertions(+), 46 deletions(-) diff --git a/gensim/summarization/bm25.py b/gensim/summarization/bm25.py index d27043b1cb..18af369be7 100644 --- a/gensim/summarization/bm25.py +++ b/gensim/summarization/bm25.py @@ -58,14 +58,10 @@ class BM25(object): Size of corpus (number of documents). avgdl : float Average length of document in `corpus`. - corpus : list of list of str - Corpus of documents. - f : list of dicts of int + doc_freqs : list of dicts of int Dictionary with terms frequencies for each document in `corpus`. Words used as keys and frequencies as values. - df : dict - Dictionary with terms frequencies for whole `corpus`. Words used as keys and frequencies as values. idf : dict - Dictionary with inversed terms frequencies for whole `corpus`. 
Words used as keys and frequencies as values. + Dictionary with inversed documents frequencies for whole `corpus`. Words used as keys and frequencies as values. doc_len : list of int List of document lengths. """ @@ -80,38 +76,50 @@ def __init__(self, corpus): """ self.corpus_size = len(corpus) self.avgdl = 0 - self.corpus = corpus - self.f = [] - self.df = {} + self.doc_freqs = [] self.idf = {} self.doc_len = [] - self.initialize() + self._initialize(corpus) - def initialize(self): + def _initialize(self, corpus): """Calculates frequencies of terms in documents and in corpus. Also computes inverse document frequencies.""" + nd = {} # word -> number of documents with word num_doc = 0 - for document in self.corpus: - num_doc += len(document) + for document in corpus: self.doc_len.append(len(document)) + num_doc += len(document) frequencies = {} for word in document: if word not in frequencies: frequencies[word] = 0 frequencies[word] += 1 - self.f.append(frequencies) + self.doc_freqs.append(frequencies) for word, freq in iteritems(frequencies): - if word not in self.df: - self.df[word] = 0 - self.df[word] += 1 + if word not in nd: + nd[word] = 0 + nd[word] += 1 self.avgdl = float(num_doc) / self.corpus_size - - for word, freq in iteritems(self.df): - self.idf[word] = math.log(self.corpus_size - freq + 0.5) - math.log(freq + 0.5) - - def get_score(self, document, index, average_idf): + # collect idf sum to calculate an average idf for epsilon value + idf_sum = 0 + # collect words with negative idf to set them a special epsilon value. + # idf can be negative if word is contained in more than half of documents + negative_idfs = [] + for word, freq in iteritems(nd): + idf = math.log(self.corpus_size - freq + 0.5) - math.log(freq + 0.5) + self.idf[word] = idf + idf_sum += idf + if idf < 0: + negative_idfs.append(word) + self.average_idf = float(idf_sum) / len(self.idf) + + eps = EPSILON * self.average_idf + for word in negative_idfs: + self.idf[word] = eps + + def get_score(self, document, index): """Computes BM25 score of given `document` in relation to item of corpus selected by `index`. Parameters @@ -120,8 +128,6 @@ def get_score(self, document, index, average_idf): Document to be scored. index : int Index of document in corpus selected to score with `document`. - average_idf : float - Average idf in corpus. Returns ------- @@ -130,15 +136,15 @@ def get_score(self, document, index, average_idf): """ score = 0 + doc_freqs = self.doc_freqs[index] for word in document: - if word not in self.f[index]: + if word not in doc_freqs: continue - idf = self.idf[word] if self.idf[word] >= 0 else EPSILON * average_idf - score += (idf * self.f[index][word] * (PARAM_K1 + 1) - / (self.f[index][word] + PARAM_K1 * (1 - PARAM_B + PARAM_B * self.doc_len[index] / self.avgdl))) + score += (self.idf[word] * doc_freqs[word] * (PARAM_K1 + 1) + / (doc_freqs[word] + PARAM_K1 * (1 - PARAM_B + PARAM_B * self.doc_len[index] / self.avgdl))) return score - def get_scores(self, document, average_idf): + def get_scores(self, document): """Computes and returns BM25 scores of given `document` in relation to every item in corpus. @@ -146,8 +152,6 @@ def get_scores(self, document, average_idf): ---------- document : list of str Document to be scored. - average_idf : float - Average idf in corpus. Returns ------- @@ -155,14 +159,11 @@ def get_scores(self, document, average_idf): BM25 scores. 
""" - scores = [] - for index in range(self.corpus_size): - score = self.get_score(document, index, average_idf) - scores.append(score) + scores = [self.get_score(document, index) for index in range(self.corpus_size)] return scores -def _get_scores(bm25, document, average_idf): +def _get_scores(bm25, document): """Helper function for retrieving bm25 scores of given `document` in parallel in relation to every item in corpus. @@ -172,8 +173,6 @@ def _get_scores(bm25, document, average_idf): BM25 object fitted on the corpus where documents are retrieved. document : list of str Document to be scored. - average_idf : float - Average idf in corpus. Returns ------- @@ -181,11 +180,7 @@ def _get_scores(bm25, document, average_idf): BM25 scores. """ - scores = [] - for index in range(bm25.corpus_size): - score = bm25.get_score(document, index, average_idf) - scores.append(score) - return scores + return bm25.get_scores(document) def get_bm25_weights(corpus, n_jobs=1): @@ -218,14 +213,13 @@ def get_bm25_weights(corpus, n_jobs=1): """ bm25 = BM25(corpus) - average_idf = float(sum(val for val in bm25.idf.values())) / len(bm25.idf) n_processes = effective_n_jobs(n_jobs) if n_processes == 1: - weights = [bm25.get_scores(doc, average_idf) for doc in corpus] + weights = [bm25.get_scores(doc) for doc in corpus] return weights - get_score = partial(_get_scores, bm25, average_idf=average_idf) + get_score = partial(_get_scores, bm25) pool = Pool(n_processes) weights = pool.map(get_score, corpus) pool.close() From 8b558c1e3628b31f2768c9abcf45126fa178df54 Mon Sep 17 00:00:00 2001 From: jeni Shah Date: Wed, 9 Jan 2019 19:18:10 +0530 Subject: [PATCH 65/66] Fix poincate viz incompatibility with `plotly>=3.0.0` (#2226) * modify phrasegrams * fix bug * revert non-relevant phrases change --- gensim/viz/poincare.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gensim/viz/poincare.py b/gensim/viz/poincare.py index 41ea3d2eab..f20fd8ab2d 100644 --- a/gensim/viz/poincare.py +++ b/gensim/viz/poincare.py @@ -59,7 +59,7 @@ def poincare_2d_visualization(model, tree, figure_title, num_nodes=50, show_node mode='markers', marker=dict(color='rgb(30, 100, 200)'), text=node_labels, - textposition='bottom' + textposition='bottom center' ) nodes_x, nodes_y, node_labels = [], [], [] @@ -73,7 +73,7 @@ def poincare_2d_visualization(model, tree, figure_title, num_nodes=50, show_node mode='markers+text', marker=dict(color='rgb(200, 100, 200)'), text=node_labels, - textposition='bottom' + textposition='bottom center' ) node_out_degrees = Counter(hypernym_pair[1] for hypernym_pair in tree) @@ -92,7 +92,7 @@ def poincare_2d_visualization(model, tree, figure_title, num_nodes=50, show_node edges_x += [vector_u[0], vector_v[0], None] edges_y += [vector_u[1], vector_v[1], None] edges = go.Scatter( - x=edges_x, y=edges_y, mode="line", hoverinfo=False, + x=edges_x, y=edges_y, mode="lines", hoverinfo='none', line=dict(color='rgb(50,50,50)', width=1)) layout = go.Layout( From 6af20b6fc1590a34eea18b3d8fae50c93482d3f5 Mon Sep 17 00:00:00 2001 From: Ram Prakash Date: Wed, 9 Jan 2019 23:13:34 +0530 Subject: [PATCH 66/66] Fix pyemd import (#2240) * Fixed pyemd import * Fixed pythonic-equality and typo * Updated pyemd import error * Updated import error for pyemd again --- gensim/models/keyedvectors.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/gensim/models/keyedvectors.py b/gensim/models/keyedvectors.py index 7911fe5805..4ba4683c88 100644 --- a/gensim/models/keyedvectors.py +++ 
b/gensim/models/keyedvectors.py @@ -167,14 +167,6 @@ except ImportError: from Queue import Queue, Empty # noqa:F401 -# If pyemd C extension is available, import it. -# If pyemd is attempted to be used, but isn't installed, ImportError will be raised in wmdistance -try: - from pyemd import emd - PYEMD_EXT = True -except (ImportError, ValueError): - PYEMD_EXT = False - from numpy import dot, float32 as REAL, empty, memmap as np_memmap, \ double, array, zeros, vstack, sqrt, newaxis, integer, \ ndarray, sum as np_sum, prod, argmax, divide as np_divide @@ -752,8 +744,10 @@ def wmdistance(self, document1, document2): If `pyemd `_ isn't installed. """ - if not PYEMD_EXT: - raise ImportError("Please install pyemd Python package to compute WMD.") + + # If pyemd C extension is available, import it. + # If pyemd is attempted to be used, but isn't installed, ImportError will be raised in wmdistance + from pyemd import emd # Remove out-of-vocabulary words. len_pre_oov1 = len(document1) @@ -765,7 +759,7 @@ def wmdistance(self, document1, document2): if diff1 > 0 or diff2 > 0: logger.info('Removed %d and %d OOV words from document 1 and 2 (respectively).', diff1, diff2) - if len(document1) == 0 or len(document2) == 0: + if not document1 or not document2: logger.info( "At least one of the documents had no words that were in the vocabulary. " "Aborting (returning inf)."
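[Editor's note] The pyemd change above boils down to the deferred-import pattern: the optional dependency is imported inside the method that needs it, so the module itself imports cleanly without pyemd installed and a plain ImportError surfaces only when WMD is actually computed. A minimal self-contained sketch of that pattern follows; the wrapper name and arguments are invented for illustration, and only `pyemd.emd` itself is the real third-party API:

    import numpy as np

    def wmd_sketch(hist1, hist2, distance_matrix):
        # Deferred import: if pyemd is missing, the ImportError is raised here,
        # at call time, rather than when the enclosing module is first imported.
        from pyemd import emd
        return emd(
            np.asarray(hist1, dtype=np.double),
            np.asarray(hist2, dtype=np.double),
            np.asarray(distance_matrix, dtype=np.double),
        )

    # The module defining wmd_sketch imports fine without pyemd; only a call like
    # wmd_sketch([1.0, 0.0], [0.0, 1.0], [[0.0, 1.0], [1.0, 0.0]]) requires it.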