Home > OS >  Input 0 of layer dense is incompatible with the layer (Newbie Question)
Input 0 of layer dense is incompatible with the layer (Newbie Question)

Time:03-14

How to fix the error "Input 0 of layer dense is incompatible with the layer: expected axis -1 of input shape to have value 150528 but received input with shape [224, 672]".

I'm having trouble building a sequential model. I hope you all can help me this time.

def decode_csv(csv_row):
    """Parse one CSV line into an (image tensor, label string) pair.

    NOTE(review): `read_and_decode` is defined elsewhere; if it does not
    actually resize the image to [IMG_HEIGHT, IMG_WIDTH], each example keeps
    its on-disk size and will not match the model's Flatten input shape —
    the likely cause of the reported "expected ... 150528" error. Confirm.
    """
    # The defaults list doubles as per-column type/默认-value spec for decode_csv.
    record_defaults = ["path", "flower"]
    filename, label_string = tf.io.decode_csv(csv_row, record_defaults)
    img = read_and_decode(filename, [IMG_HEIGHT, IMG_WIDTH])
    return img, label_string

# Build the train/eval input pipelines from the CSV manifests.
# BUG FIX: the original train_dataset line was missing the "=" after the
# variable name, which is a syntax error (it read `train_dataset (...)`).
train_dataset = (tf.data.TextLineDataset("/home/pi/Downloads/ml_code/train_set.csv").map(decode_csv)).take(500)
eval_dataset = (tf.data.TextLineDataset("/home/pi/Downloads/ml_code/eval_set.csv").map(decode_csv)).take(50)

# Minimal linear classifier: flatten each image, then one softmax layer
# with one output unit per class.
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS)),
    tf.keras.layers.Dense(len(CLASS_NAME), activation="softmax"),
])

# Labels are sparse integer class indices, hence SparseCategoricalCrossentropy;
# from_logits=False because the Dense layer already applies softmax.
model.compile(optimizer="adam",
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
              metrics=["accuracy"])

model.summary()

tf.keras.utils.plot_model(model, show_shapes=True, show_layer_names=False, to_file="model.jpg")

history = model.fit(train_dataset, validation_data=eval_dataset, epochs=10)

model.save("first")

# BUG FIX: use a context manager so the history file is flushed and closed —
# the original `json.dump(..., open(...))` leaked the file handle.
with open("First_History", "w") as f:
    json.dump(history.history, f)

CodePudding user response:

Try something like this:

import pandas as pd

# Create dummy data: five random 64x64 RGB images saved to disk, plus a CSV
# mapping each image path to a string class label.
# (A loop replaces the five duplicated save_img calls — identical output.)
for i in range(1, 6):
    tf.keras.utils.save_img(f'image{i}.png', tf.random.normal((64, 64, 3)))

df = pd.DataFrame(data={'path': [f'/content/image{i}.png' for i in range(1, 6)],
                        'label': ['0', '1', '2', '3', '2']})
df.to_csv('data.csv', index=False)

Preprocess data and train:

import tensorflow as tf


def decode_csv(csv_row):
    """Turn one CSV line into an (image, integer-label) pair."""
    defaults = ["path", "label"]
    path, raw_label = tf.io.decode_csv(csv_row, defaults)
    png_bytes = tf.io.read_file(path)
    image = tf.io.decode_png(png_bytes, channels=3)
    label = tf.strings.to_number(raw_label, out_type=tf.int32)
    return image, label


# The first CSV line is the header, so drop it before parsing.
train_dataset = (
    tf.data.TextLineDataset("/content/data.csv")
    .skip(1)
    .map(decode_csv)
    .batch(2)
)

# One-layer softmax classifier over flattened 64x64 RGB images.
layers = [
    tf.keras.layers.Flatten(input_shape=(64, 64, 3)),
    tf.keras.layers.Dense(4, activation="softmax"),
]
model = tf.keras.Sequential(layers)

# Integer labels -> sparse cross-entropy; softmax output -> from_logits=False.
model.compile(
    optimizer="adam",
    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
    metrics=["accuracy"],
)

model.summary()

tf.keras.utils.plot_model(model, show_shapes=True, show_layer_names=False, to_file="model.jpg")

history = model.fit(train_dataset, epochs=2)
model.save("first")
Model: "sequential_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 flatten_2 (Flatten)         (None, 12288)             0         
                                                                 
 dense_2 (Dense)             (None, 4)                 49156     
                                                                 
=================================================================
Total params: 49,156
Trainable params: 49,156
Non-trainable params: 0
_________________________________________________________________
Epoch 1/2
3/3 [==============================] - 1s 62ms/step - loss: 623.7551 - accuracy: 0.4000
Epoch 2/2
3/3 [==============================] - 0s 7ms/step - loss: 1710.6586 - accuracy: 0.2000
INFO:tensorflow:Assets written to: first/assets

CodePudding user response:

My final, fixed code is here:

def read_and_decode(filename, reshape_dims):
    """Load a JPEG, scale values to [0, 1], and resize to `reshape_dims`.

    Args:
        filename: scalar string tensor holding the image path.
        reshape_dims: [height, width] target size for the decoded image.

    Returns:
        A float32 image tensor of shape reshape_dims + [3] with values in [0, 1].
    """
    img = tf.io.read_file(filename)
    # Decoded as uint8 in range [0, 255].
    img = tf.image.decode_jpeg(img, channels = 3)
    # Convert into range [0, 1] for ML flexibility.
    img = tf.image.convert_image_dtype(img, tf.float32)
    # BUG FIX: reshape_dims was accepted but never used, so images kept their
    # on-disk size and could mismatch the model's Flatten input shape —
    # exactly the "expected ... 150528" error from the question.
    img = tf.image.resize(img, reshape_dims)
    return img

def decode_csv(csv_row):
    """Parse one CSV line into (image, sparse integer class index).

    The label column holds a class-name string; comparing it against
    CLASS_NAME broadcasts to a boolean vector, and argmax of its int cast
    yields the integer index expected by SparseCategoricalCrossentropy.
    """
    record_defaults = ["path", "flower"]
    filenames, label_string = tf.io.decode_csv(csv_row, record_defaults, field_delim = ",")

    # read_and_decode is expected to return a float32 image of shape
    # [IMG_HEIGHT, IMG_WIDTH, 3] — confirm it actually resizes the image.
    img = read_and_decode(filenames, [IMG_HEIGHT, IMG_WIDTH])

    return img, tf.argmax(tf.cast(label_string == CLASS_NAME, tf.int32))

def _build_pipeline(csv_path):
    # Drop the header row, parse each remaining line, and batch in pairs.
    lines = tf.data.TextLineDataset(csv_path).skip(1)
    return lines.map(decode_csv).batch(2)

train_dataset = _build_pipeline("/home/pi/Downloads/ml_code/train_set.csv")
eval_dataset = _build_pipeline("/home/pi/Downloads/ml_code/eval_set.csv")

# Minimal linear classifier: flatten each RGB image, then one softmax layer
# with one output unit per class name.
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(IMG_HEIGHT, IMG_WIDTH, 3)),
    tf.keras.layers.Dense(len(CLASS_NAME), activation="softmax"),
])

# Sparse integer labels; from_logits=False because softmax is already applied.
model.compile(optimizer="adam",
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
              metrics=["accuracy"])

model.summary()

tf.keras.utils.plot_model(model, show_shapes=True, show_layer_names=False, to_file="model.jpg")

history = model.fit(train_dataset, validation_data=eval_dataset, epochs=2)

model.save("first")

# BUG FIX: use a context manager so the history file is flushed and closed —
# the original `json.dump(..., open(...))` leaked the file handle.
with open("First_History", "w") as f:
    json.dump(history.history, f)

All credit goes to AloneTogether — thanks for your help! ^_^

  • Related