embedding_lookup
import tensorflow as tf
embedding = tf.get_variable("embedding", initializer=tf.ones(shape=[10, 5]))
look_up = tf.nn.embedding_lookup(embedding, [1, 2, 3, 4])  # gathers rows 1-4 of the embedding matrix
# embedding_lookup is like applying ... to the variables in the other rows:
# only the looked-up rows take part in the computation and receive gradients
w1 = tf.get_variable("w", shape=[5, 1])
z = tf.matmul(look_up, w1)
opt = tf.train.GradientDescentOptimizer(0.1)
# Gradients are computed and applied exactly as before; there is nothing special to watch out for
gradients = tf.gradients(z, xs=[embedding])
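# gradients[0] is a tf.IndexedSlices that only covers the looked-up rows (1-4)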
train = opt.apply_gradients([(gradients[0], embedding)])
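The graph above can be checked end to end in a TF1-style session. Below is a minimal sketch (the session setup and printouts are illustrative additions, not part of the original code): after one SGD step, only the looked-up rows 1-4 of embedding move away from their initial value of 1, while the remaining rows are untouched.
# Minimal sketch (illustrative): run one training step and see which rows changed
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(gradients[0]))   # an IndexedSlicesValue covering only rows 1-4
    sess.run(train)
    print(sess.run(embedding))      # rows 1-4 updated; row 0 and rows 5-9 are still all ones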