How to use word2vec2tensor in gensim?
I was able to solve this by exporting the embeddings to TensorBoard's projector format myself with the following script:
# Export gensim word2vec embeddings to TensorBoard's embedding projector:
# builds a (vocab_size, vector_size) matrix + a metadata.tsv of words, then
# saves a TF checkpoint and projector config under ./projections.
import os

import gensim
import numpy as np
import tensorflow as tf
from tensorflow.contrib.tensorboard.plugins import projector

model = gensim.models.keyedvectors.KeyedVectors.load(file_name)

# Use the full vocabulary. (The original `len(...) - 1` silently dropped the
# last word from both the embedding matrix and the metadata file.)
max_size = len(model.wv.vocab)

# `layer1_size` only exists on the full Word2Vec training model; a loaded
# KeyedVectors object exposes the embedding dimensionality as `vector_size`.
w2v = np.zeros((max_size, model.wv.vector_size))

if not os.path.exists('projections'):
    os.makedirs('projections')

# One metadata row per vocabulary word, in the same order as the rows of w2v,
# so TensorBoard can label each embedding point.
with open("projections/metadata.tsv", 'w+') as file_metadata:
    for i, word in enumerate(model.wv.index2word[:max_size]):
        # store the embeddings of the word
        w2v[i] = model.wv[word]
        # write the word to a file
        file_metadata.write(word + '\n')

sess = tf.InteractiveSession()

# Keep the embedding variable on CPU: it's only loaded for visualization.
with tf.device("/cpu:0"):
    embedding = tf.Variable(w2v, trainable=False, name='embedding')

tf.global_variables_initializer().run()
saver = tf.train.Saver()
writer = tf.summary.FileWriter('projections', sess.graph)

# Wire the checkpoint tensor to the metadata file for the projector UI.
# metadata_path is relative to the log directory ('projections').
config = projector.ProjectorConfig()
embed = config.embeddings.add()
embed.tensor_name = 'embedding'
embed.metadata_path = 'metadata.tsv'

projector.visualize_embeddings(writer, config)
saver.save(sess, 'projections/model.ckpt', global_step=max_size)