I was training a network and decided to add more training data. The new data set is selected from a different source, but both data sets have shape (460, 620, 3) and dtype uint8. However, when I train my network with the combined data, I get this error:

Epoch 1/40
  1/100 [..............................] - ETA: 8:10 - loss: 10312.7480 - X_coordinate_loss: 5268.6304 - Y_coordinate_loss: 5044.1172 - X_coordinate_mae: 382.9972 - Y_coordinate_mae: 382.5627
---------------------------------------------------------------------------
InvalidArgumentError                      Traceback (most recent call last)
<ipython-input-14-0695a4e6d1ee> in <module>()
      5     callbacks=callbacks,
      6     validation_data=valid_dataloader,
----> 7     validation_steps=20,
      8 )

1 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/execute.py in quick_execute(op_name, num_outputs, inputs, attrs, ctx, name)
     53     ctx.ensure_initialized()
     54     tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,
---> 55                                         inputs, attrs, num_outputs)
     56   except core._NotOkStatusException as e:
     57     if name is not None:

InvalidArgumentError: Graph execution error:

TypeError: `generator` yielded an element of shape (8, 0) where an element of shape (None,) was expected.
Traceback (most recent call last):

  File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/script_ops.py", line 271, in __call__
    ret = func(*args)

  File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/autograph/impl/api.py", line 642, in wrapper
    return func(*args, **kwargs)

  File "/usr/local/lib/python3.7/dist-packages/tensorflow/python/data/ops/dataset_ops.py", line 1048, in generator_py_func
    f"`generator` yielded an element of shape {ret_array.shape} "

TypeError: `generator` yielded an element of shape (8, 0) where an element of shape (None,) was expected.


     [[{{node PyFunc}}]]
     [[IteratorGetNext]] [Op:__inference_train_function_3420]

My batch size is 8, which matches the 8 in the (8, 0) shape from the error, and my network is:


from tensorflow.keras.layers import (Input, Conv2D, Activation, BatchNormalization,
                                     MaxPooling2D, Dropout, Flatten, Dense)
from tensorflow.keras.models import Model


class MultiOutputModel():
    def make_default_hidden_layers(self, inputs):
        x = Conv2D(16, (3, 3), padding="same")(inputs)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=-1)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = Dropout(0.25)(x)
        x = Conv2D(32, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=-1)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
        x = Dropout(0.25)(x)
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=-1)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
        x = Dropout(0.25)(x)
        return x

    def build_X_coordinate(self, inputs):
        x = self.make_default_hidden_layers(inputs)
        x = Flatten()(x)
        x = Dense(100)(x)
        x = Activation("relu")(x)
        x = BatchNormalization()(x)
        x = Dropout(0.5)(x)
        x = Dense(1)(x)
        x = Activation("linear", name="X_coordinate")(x)
        return x

    def build_Y_coordinate(self, inputs):   
        x = self.make_default_hidden_layers(inputs)
        x = Flatten()(x)
        x = Dense(100)(x)
        x = Activation("relu")(x)
        x = BatchNormalization()(x)
        x = Dropout(0.5)(x)
        x = Dense(1)(x)
        x = Activation("linear", name="Y_coordinate")(x)
        return x

    def assemble_full_model(self, width, height):
        input_shape = (height, width, 3)
        inputs = Input(shape=input_shape)
        X_branch = self.build_X_coordinate(inputs)
        Y_branch = self.build_Y_coordinate(inputs)
        model = Model(inputs=inputs, outputs=[X_branch, Y_branch])
        return model
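
For context, I assemble and compile the model like this (a simplified sketch of my setup; the optimizer and losses shown here are placeholders, the exact settings are in the Colab notebook linked below):

model = MultiOutputModel().assemble_full_model(width=620, height=460)

model.compile(
    optimizer="adam",                                      # placeholder optimizer
    loss={"X_coordinate": "mse", "Y_coordinate": "mse"},   # placeholder losses, one per output
    metrics={"X_coordinate": "mae", "Y_coordinate": "mae"},
)
model.summary()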

I should mention that before adding the new data, training worked well. My data counts are len(train) = 1043, len(test) = 80, len(valid) = 37. Thank you a lot.

Here is my Colab link: https://colab.research.google.com/drive/1f0PdSyxoQV1b8Loob0qgD2SAuA3LdeHG?usp=sharing

Answer:
You didn't show the generator or the output signature passed in the calling code, so nobody can see exactly what went wrong. I had the same problem (the one in the title), so in case somebody needs it, here is a simplified example:

# Import TensorFlow and NumPy
import tensorflow as tf
import numpy as np

def fn_t():
    for x in range(1, 10):
        tt = x + 2
        t1 = tf.convert_to_tensor(np.array([[0, 4], [tt, 5]]))
        # With generators, each yielded item should be an (input, label) pair
        yield x, t1

# Build a dataset from the generator; output_signature must describe exactly
# what fn_t yields: a scalar input and a (2, None)-shaped label
dataset = tf.data.Dataset.from_generator(
    fn_t,
    output_signature=(
        tf.TensorSpec(shape=(), dtype=tf.int32),
        tf.TensorSpec(shape=(2, None), dtype=tf.int32),
    ),
)

ds = dataset.take(2)
labels = list(map(lambda item: item[1], ds))
print(labels, "\n=========\n")

I kept getting the error until the shapes yielded by fn_t matched the output_signature in the from_generator call that consumes them. The code above works for the simplified t1 (it raised the same error before my corrections). I hope it is helpful.
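
A quick way to spot a mismatch like the one in the question is to compare the dataset's element_spec (what the output_signature promises to model.fit) with what the generator actually yields:

# What the dataset promises to downstream consumers such as model.fit
print(dataset.element_spec)
# (TensorSpec(shape=(), dtype=tf.int32, name=None),
#  TensorSpec(shape=(2, None), dtype=tf.int32, name=None))

# What the generator really produces
sample_x, sample_t1 = next(fn_t())
print(sample_x, sample_t1.shape)   # 1 (2, 2) -- compatible with shapes () and (2, None)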

P.S. The example above is for a supervised dataset: each yielded item is an (input, label) pair.
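
Applied back to the question (batch size 8, images of shape (460, 620, 3), and two scalar coordinate targets), the yield and the output_signature would have to agree along these lines. The arrays below are only placeholders, since the real loading code is not shown:

import numpy as np
import tensorflow as tf

BATCH_SIZE = 8
HEIGHT, WIDTH = 460, 620

def train_batches():
    # Placeholder batches: substitute your own image loading and coordinate labels here
    for _ in range(100):
        images = np.zeros((BATCH_SIZE, HEIGHT, WIDTH, 3), dtype=np.float32)
        x_coords = np.zeros((BATCH_SIZE,), dtype=np.float32)   # must not end up empty, i.e. not shape (8, 0)
        y_coords = np.zeros((BATCH_SIZE,), dtype=np.float32)
        yield images, (x_coords, y_coords)

train_dataset = tf.data.Dataset.from_generator(
    train_batches,
    output_signature=(
        tf.TensorSpec(shape=(None, HEIGHT, WIDTH, 3), dtype=tf.float32),
        (tf.TensorSpec(shape=(None,), dtype=tf.float32),    # X_coordinate targets
         tf.TensorSpec(shape=(None,), dtype=tf.float32)),   # Y_coordinate targets
    ),
)
# model.fit(train_dataset, steps_per_epoch=100, ...) can then consume this dataset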