ValueError: Dimensions must be equal keras


I have a model with the following structure:

# imports implied by the code below (TF-bundled Keras, per the traceback)
from tensorflow import keras
from tensorflow.keras import layers, optimizers
from tensorflow.keras.layers import GRU, add, concatenate
from tensorflow.keras.models import Model

filter_size = (3, 3)
filters = 32
pool = 2

input_layer = keras.Input(shape=(100, 300, 1))

conv_extractor = layers.Conv2D(filters, filter_size, activation='relu')(input_layer)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
conv_extractor = layers.Conv2D(filters, filter_size, activation='relu')(conv_extractor)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)

#conv_extractor = layers.Reshape(target_shape=(100 // (pool ** 2), (100 // (pool ** 2)) * filters))(conv_extractor)
shape = ((100 // 4), (300 // 4) * 32)
#conv_extractor = layers.Dense(512, activation='relu')(conv_extractor)
# the conv/pool stack ends at (None, 23, 73, 32), and 73 * 32 = 2336
conv_extractor = layers.Reshape(target_shape=(23, 2336))(conv_extractor)

gru_1 = GRU(512, return_sequences=True)(conv_extractor)
gru_1b = GRU(512, return_sequences=True, go_backwards=True)(conv_extractor)
gru1_merged = add([gru_1, gru_1b])
gru_2 = GRU(512, return_sequences=True)(gru1_merged)
gru_2b = GRU(512, return_sequences=True, go_backwards=True)(gru1_merged)

inner = layers.Dense(30, activation='LeakyReLU')(concatenate([gru_2, gru_2b]))
inner = layers.Dense(10, activation='LeakyReLU')(inner)
inner = layers.Dense(3, activation='LeakyReLU')(inner)
model = Model(input_layer, inner)
model.compile(loss="poisson", optimizer=optimizers.Adam(2e-4), metrics=['accuracy'])

All of the above seems to work, but when I try to train with model.fit(x_train, y_train, epochs=3) I get the following error:

ValueError                                Traceback (most recent call last)
/var/folders/nc/c4mgwn897qbg8g52tp3mhbjr0000gp/T/ipykernel_3907/1977739458.py in <module>
----> 1 model.fit(x_train, y_train,epochs=3)

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
   1181                 _r=1):
   1182               callbacks.on_train_batch_begin(step)
-> 1183               tmp_logs = self.train_function(iterator)
   1184               if data_handler.should_sync:
   1185                 context.async_wait()

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
    887 
    888       with OptionalXlaContext(self._jit_compile):
--> 889         result = self._call(*args, **kwds)
    890 
    891       new_tracing_count = self.experimental_get_tracing_count()

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
    931       # This is the first call of __call__, so we have to initialize.
    932       initializers = []
--> 933       self._initialize(args, kwds, add_initializers_to=initializers)
    934     finally:
    935       # At this point we know that the initialization is complete (or less

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
    761     self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
    762     self._concrete_stateful_fn = (
--> 763         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
    764             *args, **kwds))
    765 

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   3048       args, kwargs = None, None
   3049     with self._lock:
-> 3050       graph_function, _ = self._maybe_define_function(args, kwargs)
   3051     return graph_function
   3052 

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
   3442 
   3443           self._function_cache.missed.add(call_context_key)
-> 3444           graph_function = self._create_graph_function(args, kwargs)
   3445           self._function_cache.primary[cache_key] = graph_function
   3446 

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   3277     arg_names = base_arg_names + missing_arg_names
   3278     graph_function = ConcreteFunction(
-> 3279         func_graph_module.func_graph_from_py_func(
   3280             self._name,
   3281             self._python_function,

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    997         _, original_func = tf_decorator.unwrap(python_func)
    998 
--> 999       func_outputs = python_func(*func_args, **func_kwargs)
   1000 
   1001       # invariant: `func_outputs` contains only Tensors, CompositeTensors,

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
    670         # the function a weak reference to itself to avoid a reference cycle.
    671         with OptionalXlaContext(compile_with_xla):
--> 672           out = weak_wrapped_fn().__wrapped__(*args, **kwds)
    673         return out
    674 

~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    984           except Exception as e:  # pylint:disable=broad-except
    985             if hasattr(e, "ag_error_metadata"):
--> 986               raise e.ag_error_metadata.to_exception(e)
    987             else:
    988               raise

ValueError: in user code:

    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:855 train_function  *
        return step_function(self, iterator)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:845 step_function  **
        outputs = model.distribute_strategy.run(run_step, args=(data,))
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:1285 run
        return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica
        return self._call_for_each_replica(fn, args, kwargs)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica
        return fn(*args, **kwargs)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:838 run_step  **
        outputs = model.train_step(data)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:800 train_step
        self.compiled_metrics.update_state(y, y_pred, sample_weight)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/compile_utils.py:460 update_state
        metric_obj.update_state(y_t, y_p, sample_weight=mask)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/utils/metrics_utils.py:86 decorated
        update_op = update_state_fn(*args, **kwargs)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:177 update_state_fn
        return ag_update_state(*args, **kwargs)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:664 update_state  **
        matches = ag_fn(y_true, y_pred, **self._fn_kwargs)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/util/dispatch.py:206 wrapper
        return target(*args, **kwargs)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:3485 sparse_categorical_accuracy
        return math_ops.cast(math_ops.equal(y_true, y_pred), backend.floatx())
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/util/dispatch.py:206 wrapper
        return target(*args, **kwargs)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/ops/math_ops.py:1729 equal
        return gen_math_ops.equal(x, y, name=name)
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/ops/gen_math_ops.py:3228 equal
        _, _, _op, _outputs = _op_def_library._apply_op_helper(
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/op_def_library.py:748 _apply_op_helper
        op = g._create_op_internal(op_type_name, inputs, dtypes=None,
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py:599 _create_op_internal
        return super(FuncGraph, self)._create_op_internal(  # pylint: disable=protected-access
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:3557 _create_op_internal
        ret = Operation(
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:2041 __init__
        self._c_op = _create_c_op(self._graph, node_def, inputs,
    /Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:1883 _create_c_op
        raise ValueError(str(e))

    ValueError: Dimensions must be equal, but are 3 and 23 for '{{node Equal}} = Equal[T=DT_FLOAT, incompatible_shape_error=true](Cast_1, Cast_2)' with input shapes: [?,3], [?,23].

FYI: the shape of x_train is (2000, 100, 300, 1) and the shape of y_train is (2000, 3).
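
For anyone reproducing this: random arrays with the same shapes trigger the identical error (the data below is purely synthetic; only the shapes matter):

import numpy as np

# synthetic stand-ins matching the shapes stated above; values are random
x_train = np.random.rand(2000, 100, 300, 1).astype("float32")
y_train = np.random.rand(2000, 3).astype("float32")

model.fit(x_train, y_train, epochs=3)  # raises the ValueError shown above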

CodePudding user response:

Your model output is (None, 23, 3), while it should be (None, 3) to match your target variable y_train, which has shape (2000, 3).

Since the input to your Dense layers is 3-dimensional (the output of the concatenate layer), their output is also 3D: (None, 23, 3). Simply add a Flatten layer before the Dense layers:

gru_2b = layers.GRU(512, return_sequences=True, go_backwards=True)(gru1_merged)
x = layers.concatenate([gru_2, gru_2b])   # concatenation moved to its own line
x = layers.Flatten()(x)                   # add this: collapses (None, 23, 1024) into (None, 23552)
inner = layers.Dense(30, activation='LeakyReLU')(x)
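
As a quick sanity check (a sketch that just rebuilds the rest of the head from the question), the model output should now match the target shape:

# rebuild the remaining Dense layers exactly as in the question (sketch)
inner = layers.Dense(10, activation='LeakyReLU')(inner)
inner = layers.Dense(3, activation='LeakyReLU')(inner)
model = Model(input_layer, inner)
print(model.output_shape)  # expected: (None, 3), matching y_train's (2000, 3)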

Or you can remove return_sequences=True from your last GRU layers, like this:

gru_2 = layers.GRU(512)(gru1_merged)                      # removed return_sequences=True
gru_2b = layers.GRU(512, go_backwards=True)(gru1_merged)  # removed return_sequences=True
inner = layers.Dense(30, activation='LeakyReLU')(layers.concatenate([gru_2, gru_2b]))
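
The difference between the two fixes: Flatten keeps the features of all 23 timesteps, so the first Dense layer sees a (None, 23552) vector, while dropping return_sequences=True keeps only each GRU's final state, so the concatenated input is just (None, 1024). Both variants end in a (None, 3) output that matches y_train; the second one simply uses far fewer parameters in the first Dense layer.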