Fix additional warnings from the CI test suite
Witiko committed Sep 9, 2020
1 parent aa4771b commit e724bd0
Showing 7 changed files with 9 additions and 11 deletions.
6 changes: 3 additions & 3 deletions gensim/matutils.py
@@ -191,9 +191,9 @@ def pad(mat, padrow, padcol):
     if padcol < 0:
         padcol = 0
     rows, cols = mat.shape
-    return np.bmat([
-        [mat, np.matrix(np.zeros((rows, padcol)))],
-        [np.matrix(np.zeros((padrow, cols + padcol)))],
+    return np.block([
+        [mat, np.zeros((rows, padcol))],
+        [np.zeros((padrow, cols + padcol))],
     ])


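np.matrix (and np.bmat, which returns one) triggers a PendingDeprecationWarning on recent NumPy, which is presumably the warning silenced here; np.block assembles the same zero-padded array from plain ndarrays. A minimal sketch of the new call, with an illustrative 2x3 matrix and made-up padding sizes:

    import numpy as np

    # np.block concatenates the blocks row by row, just like the old np.bmat
    # call, but returns an ndarray instead of a deprecated np.matrix.
    mat = np.arange(6, dtype=float).reshape(2, 3)
    rows, cols = mat.shape
    padrow, padcol = 1, 2
    padded = np.block([
        [mat, np.zeros((rows, padcol))],
        [np.zeros((padrow, cols + padcol))],
    ])
    print(padded.shape)  # (3, 5): the original matrix plus one zero row and two zero columns
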
2 changes: 1 addition & 1 deletion gensim/models/keyedvectors.py
@@ -1475,7 +1475,7 @@ def save_word2vec_format(self, fname, fvocab=None, binary=False, total_vec=None,
             row = self[key]
             if binary:
                 row = row.astype(REAL)
-                fout.write(utils.to_utf8(prefix + str(key)) + b" " + row.tostring())
+                fout.write(utils.to_utf8(prefix + str(key)) + b" " + row.tobytes())
             else:
                 fout.write(utils.to_utf8("%s%s %s\n" % (prefix, str(key), ' '.join(repr(val) for val in row))))

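ndarray.tostring() is a deprecated alias of ndarray.tobytes() and emits a DeprecationWarning on NumPy 1.19+, which is likely the warning addressed here; both return the same raw buffer that the word2vec binary format expects. A small round-trip illustration (the vector values are made up):

    import numpy as np

    row = np.array([0.1, 0.2, 0.3], dtype=np.float32)
    # tobytes() yields exactly the bytes tostring() used to, minus the warning.
    raw = row.tobytes()
    restored = np.frombuffer(raw, dtype=np.float32)
    assert (restored == row).all()
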
3 changes: 1 addition & 2 deletions gensim/models/wrappers/wordrank.py
@@ -254,8 +254,7 @@ def load_wordrank_model(cls, model_file, vocab_file=None, context_file=None, sor
             If 1 - use ensemble of word and context vectors.
 
         """
-        glove2word2vec(model_file, model_file + '.w2vformat')
-        model = cls.load_word2vec_format('%s.w2vformat' % model_file)
+        model = cls.load_word2vec_format(model_file, binary=False, no_header=True)
         if ensemble and context_file:
             model.ensemble_embedding(model_file, context_file)
         if sorted_vocab and vocab_file:
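
WordRank writes GloVe-style text vectors, which are the word2vec text format minus the leading "vocab_size dimensions" header line. With no_header=True (a load_word2vec_format flag available in gensim 4.x), such a file can be read directly, so the intermediate '.w2vformat' copy produced by glove2word2vec is no longer needed. A rough sketch under those assumptions, using a hypothetical two-word vectors file:

    from gensim.models import KeyedVectors

    # GloVe-style text format: one word per line, no header line.
    with open('tiny_vectors.txt', 'w') as fout:
        fout.write('apple 0.1 0.2 0.3\n')
        fout.write('banana 0.4 0.5 0.6\n')

    # no_header=True makes gensim infer the vocabulary size and vector
    # dimensionality by scanning the file instead of reading a header line.
    kv = KeyedVectors.load_word2vec_format('tiny_vectors.txt', binary=False, no_header=True)
    print(kv['apple'])
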
3 changes: 2 additions & 1 deletion gensim/test/test_fasttext.py
@@ -1375,7 +1375,8 @@ def test_in_vocab(self):
 
     def test_out_of_vocab(self):
         model = train_gensim(bucket=0)
-        self.assertRaises(KeyError, model.wv.word_vec, 'streamtrain')
+        with self.assertRaises(KeyError):
+            model.wv.get_vector('streamtrain')
 
     def test_cbow_neg(self):
         """See `gensim.test.test_word2vec.TestWord2VecModel.test_cbow_neg`."""
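
In gensim 4.x, wv.word_vec() is deprecated in favour of wv.get_vector(), and the context-manager form of assertRaises marks exactly which statement is expected to fail. A self-contained sketch of the pattern, with a plain dict standing in for a model's vocabulary:

    import unittest

    class OutOfVocabTest(unittest.TestCase):
        def test_missing_key(self):
            vectors = {'in_vocab': [0.1, 0.2]}
            # Only the lookup inside the with-block may raise the expected KeyError.
            with self.assertRaises(KeyError):
                vectors['streamtrain']

    if __name__ == '__main__':
        unittest.main()
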
2 changes: 1 addition & 1 deletion gensim/test/test_lsimodel.py
@@ -66,7 +66,7 @@ def testTransformFloat32(self):
     def testCorpusTransform(self):
         """Test lsi[corpus] transformation."""
         model = self.model
-        got = np.vstack(matutils.sparse2full(doc, 2) for doc in model[self.corpus])
+        got = np.vstack([matutils.sparse2full(doc, 2) for doc in model[self.corpus]])
         expected = np.array([
             [0.65946639, 0.14211544],
             [2.02454305, -0.42088759],
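
Recent NumPy deprecates passing a bare generator to np.vstack ("arrays to stack must be passed as a sequence"), which is likely the warning being fixed; a list comprehension builds the same stacked array without it. A toy example:

    import numpy as np

    # Materialise the rows in a list before stacking; a generator here would
    # trigger a deprecation warning on recent NumPy.
    got = np.vstack([np.full(2, i, dtype=float) for i in range(3)])
    print(got.shape)  # (3, 2)
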
2 changes: 1 addition & 1 deletion gensim/test/test_phrases.py
@@ -236,7 +236,7 @@ def testBigramConstructionFromArray(self):
         bigram1_seen = False
         bigram2_seen = False
 
-        for s in self.bigram[np.array(self.sentences)]:
+        for s in self.bigram[np.array(self.sentences, dtype=object)]:
            if not bigram1_seen and self.bigram1 in s:
                bigram1_seen = True
            if not bigram2_seen and self.bigram2 in s:
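
The test sentences are token lists of different lengths, and NumPy 1.19+ emits a VisibleDeprecationWarning (newer releases refuse outright) when asked to build an array from such ragged nested sequences without an explicit dtype; dtype=object keeps each sentence as an ordinary Python list. A standalone illustration with made-up sentences:

    import numpy as np

    sentences = [['graph', 'minors'], ['human', 'interface', 'computer']]
    # Without dtype=object NumPy would try to form a rectangular array from
    # the ragged input and warn; with it, the result is a 1-d object array.
    arr = np.array(sentences, dtype=object)
    print(arr.shape)  # (2,)
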
2 changes: 0 additions & 2 deletions gensim/test/test_wordrank_wrapper.py
@@ -39,15 +39,13 @@ def testLoadWordrankFormat(self):
         vocab_size, dim = 76, 50
         self.assertEqual(model.vectors.shape, (vocab_size, dim))
         self.assertEqual(len(model), vocab_size)
-        os.remove(self.wr_file + '.w2vformat')
 
     def testEnsemble(self):
         """Test ensemble of two embeddings"""
         if not self.wr_path:
             return
         new_emb = self.test_model.ensemble_embedding(self.wr_file, self.wr_file)
         self.assertEqual(new_emb.shape, (76, 50))
-        os.remove(self.wr_file + '.w2vformat')
 
     def testPersistence(self):
         """Test storing/loading the entire model"""
