I am building a BiLSTM model using ELMo embeddings with TensorFlow Hub.
My code looks like this:
import tensorflow as tf
import tensorflow_hub as hub

import keras.backend as K
from keras.layers import Layer
from keras.layers import Dense, Input
from keras.models import Model
class ElmoEmbeddingLayer(Layer):
    """Keras layer wrapping the TF-Hub ELMo v3 module.

    Takes a string tensor of shape (batch, 1) and returns the module's
    'default' output — a fixed 1024-dimensional sentence embedding per
    input string. NOTE(review): hub.Module is a TF1 graph-mode API; this
    layer will not work under TF2 eager execution — confirm the runtime.
    """

    def __init__(self, **kwargs):
        # The ELMo 'default' signature emits a fixed 1024-dim vector.
        self.dimensions = 1024
        super(ElmoEmbeddingLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # Instantiate the hub module once; naming it after the layer keeps
        # variable scopes unique when multiple ELMo layers coexist.
        self.elmo = hub.Module(
            'https://tfhub.dev/google/elmo/3',
            trainable=self.trainable,
            name="{}_module".format(self.name),
        )
        # FIX: register the module's variables with Keras. Without this the
        # hub variables are invisible to the layer (untracked and untrained
        # even when trainable=True).
        self._trainable_weights += tf.trainable_variables(
            scope="^{}_module/.*".format(self.name))
        super(ElmoEmbeddingLayer, self).build(input_shape)

    def call(self, x, mask=None):
        # Squeeze (batch, 1) -> (batch,) string tensor, the shape the
        # module's 'default' signature expects.
        result = self.elmo(
            K.squeeze(K.cast(x, tf.string), axis=1),
            as_dict=True,
            signature='default',
        )['default']
        return result

    def compute_mask(self, inputs, mask=None):
        # Positions equal to the sentinel string '--PAD--' are masked out.
        return K.not_equal(inputs, '--PAD--')

    def compute_output_shape(self, input_shape):
        # (batch, 1024) regardless of input sequence content.
        return (input_shape[0], self.dimensions)
# FIX for the FailedPreconditionError ("Could not find variable ..."):
# hub.Module creates TF1 variables that are NOT initialized by Keras.
# Bind an explicit session before building the model, then run the
# variable and table initializers before training.
sess = tf.Session()
K.set_session(sess)

# Graph: (batch, 1) string input -> 1024-d ELMo embedding -> classifier.
input_text = Input(shape=(1,), dtype=tf.string)
embedding = ElmoEmbeddingLayer()(input_text)
dense = Dense(256, activation='relu')(embedding)
pred = Dense(1, activation='sigmoid')(dense)
model = Model(inputs=[input_text], outputs=pred)
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# Initialize the hub module's variables and lookup tables in the session
# Keras will use — this is what resolves the FailedPreconditionError.
sess.run(tf.global_variables_initializer())
sess.run(tf.tables_initializer())

# NOTE(review): train_text / train_label / test_text / test_label are
# assumed to be defined elsewhere (train_text as an array of strings with
# shape (n, 1)) — confirm against the data-loading code.
model.fit(train_text,
          train_label,
          validation_data=(test_text, test_label),
          epochs=5,
          batch_size=32)
I am unable to train the model because of the error:
FailedPreconditionError: Could not find variable elmo_embedding_layer_6_module/bilm/CNN_high_0/b_carry. This could mean that the variable has been deleted. In TF1, it can also mean the variable is uninitialized. Debug info: container=localhost, status error message=Resource localhost/elmo_embedding_layer_7_module/bilm/CNN/W_cnn_3/N10tensorflow3VarE does not exist [[{{node elmo_embedding_layer_7/elmo_embedding_layer_7_module_apply_default/bilm/CNN/Conv2D_3/ReadVariableOp}}]]
Can someone explain what this error means? I tried updating all the modules, but that did not solve the issue.