Understanding word embeddings: a code example
Example: using pre-trained GloVe word embeddings as document features for a text classifier
from sklearn.linear_model import LogisticRegression
from zeugma.embeddings import EmbeddingTransformer

# Load pre-trained GloVe embeddings; each document in corpus_train (a list of
# raw text strings) is mapped to the average of its word vectors, giving a
# fixed-length feature vector.
glove = EmbeddingTransformer('glove')
x_train = glove.transform(corpus_train)

# Train a simple linear classifier on the embedded documents.
model = LogisticRegression()
model.fit(x_train, y_train)

# Embed the test documents with the same transformer, then predict.
x_test = glove.transform(corpus_test)
model.predict(x_test)
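To check how well the embedded features work, the predictions can be scored against held-out labels. A minimal sketch, assuming a y_test array (not part of the original example) holds the true labels for corpus_test:

from sklearn.metrics import accuracy_score

# y_test is assumed here: the true labels for the test documents.
predictions = model.predict(x_test)
print(accuracy_score(y_test, predictions))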