from keras.models import Sequential
from keras.layers import Dense, Dropout, LSTM, BatchNormalization,Flatten
from keras.optimizers import adam
import tensorflow as tf
import numpy as np
a = np.array([[1,2,3,4,4,5,61,2,3,4,4,5,6],[1,2,3,4,4,5,61,2,3,4,4,5,6],[1,2,3,4,4,5,61,2,3,4,4,5,6],[1,2,3,4,4,5,61,2,3,4,4,5,6]])
y = np.array([[1],[1],[1],[1]])
model = Sequential([
Dense(20, activation="relu"),
Dense(100, activation="relu"),
Dense(1, activation="sigmoid")
])
print(np.shape(a))
def generator_aa(a, y, batch_size):
while True:
indices = np.random.permutation(len(a))
for i in range(0, len(indices), batch_size):
batch_indices = indices[i:i+batch_size]
yield a[batch_indices], y[batch_indices]
my_opt = adam(learning_rate=0.01)
model.compile(loss='binary_crossentropy', optimizer=my_opt, metrics=['accuracy'])
dataset = tf.data.Dataset.from_generator(generator, args=(a,y,2),
output_types=('float32', 'float32'),
)
model.fit(dataset, epochs=3)
The above code has the following error. I believe it is related to the tf.data.Dataset, the error is like this:
I tried the following, it works fine, not sure what is the issue with my tf.data.Dataset.from_generator.
batch_size=2
model.fit(generator_aa(a,y,2),epochs=2,steps_per_epoch=2)