import tensorflow as tf
from transformers import BertModel, TFBertModel
def create_model(bert_model, max_len):
    """Build a Keras classification model on top of a TensorFlow BERT encoder.

    Args:
        bert_model: A TensorFlow/Keras BERT model (e.g. ``TFBertModel``).
            It must be the TF implementation — a PyTorch ``BertModel``
            cannot consume Keras symbolic tensors and raises
            ``AttributeError: 'KerasTensor' object has no attribute 'size'``.
        max_len: Fixed input sequence length (number of tokens).

    Returns:
        A compiled ``tf.keras.Model`` taking ``[input_ids, attention_masks]``
        and producing a 17-way softmax.
    """
    input_ids = tf.keras.Input(shape=(max_len,), dtype='int32')
    attention_masks = tf.keras.Input(shape=(max_len,), dtype='int32')
    bert_output = bert_model(input_ids, attention_mask=attention_masks, return_dict=True)
    # NOTE(review): last_hidden_state is (batch, max_len, hidden), so the Dense
    # head below emits a prediction per token. If one label per sequence is
    # intended, use bert_output.pooler_output (or last_hidden_state[:, 0, :])
    # instead — confirm against how labels are shaped.
    embedding = tf.keras.layers.Dropout(0.3)(bert_output.last_hidden_state)
    output = tf.keras.layers.Dense(17, activation='softmax')(embedding)
    model = tf.keras.models.Model(inputs=[input_ids, attention_masks], outputs=[output])
    # `lr=` was removed from Keras optimizers; `learning_rate=` is the
    # supported keyword in current TensorFlow releases.
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.00001),
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model
# Load the TensorFlow implementation of BERT. The traceback below comes from
# using `BertModel` (the PyTorch class): PyTorch modules call `.size()` on
# their inputs, which Keras symbolic tensors do not have. `TFBertModel` is a
# Keras layer and composes with tf.keras.Input correctly.
bert_model = TFBertModel.from_pretrained('bert-base-uncased')

# Define the maximum sequence length (tokens per example).
MAX_LEN = 128

# Create the model with the instantiated BERT model
model = create_model(bert_model, MAX_LEN)

# Display the model summary
model.summary()
I expected this to print the model summary, but instead it raises the following error:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-119-af021ea2919e> in <cell line: 27>()
25
26 # Create the model with the instantiated BERT model
---> 27 model = create_model(bert_model, MAX_LEN)
28
29 # Display the model summary
3 frames
/usr/local/lib/python3.10/dist-packages/transformers/models/bert/modeling_bert.py in forward(self, input_ids, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, encoder_hidden_states, encoder_attention_mask, past_key_values, use_cache, output_attentions, output_hidden_states, return_dict)
959 elif input_ids is not None:
960 self.warn_if_padding_and_no_attention_mask(input_ids, attention_mask)
--> 961 input_shape = input_ids.size()
962 elif inputs_embeds is not None:
963 input_shape = inputs_embeds.size()[:-1]
AttributeError: 'KerasTensor' object has no attribute 'size'