Home > database >  how to extract label in processing image with tf.data
how to extract label in processing image with tf.data

Time:08-01

I am using map function to apply preprocessing on dataset in order to read and extract labels from file paths using tf.data but it returns the same label for all images the file path is on the following shape where A is the label /content/drive/MyDrive/prom02/dataset/train/A0_jpg.rf.292a080422ba984985192f413101af41.jpg

images_ds = tf.data.Dataset.list_files('/content/drive/MyDrive/prom02/dataset/train/*', shuffle=True)

images_ds = images_ds.shuffle(200)

train_size = int(image_count*0.8)
train_ds = images_ds.take(train_size)
test_ds = images_ds.skip(train_size)
len(train_ds),len(test_ds)

def hot_encode(label):
    import string
    alphabet = list(string.ascii_uppercase)
    i=0
    while i<26:
      if label==alphabet[i]:
        label=i
        break
      i =1
    label=tf.one_hot(label,26)
    return label
def get_label(file_path):
    import os
    label=(str(file_path).split('/')[-1][0])
    label= hot_encode(label)
    return label
def scale(image,label):
    return image/255, label
def process_image(file_path):
    label = tf.cast(get_label(file_path),tf.float32)
    img = tf.io.read_file(file_path) # load the raw data from the file as a string
    img = tf.image.decode_jpeg(img)
    img = tf.image.resize(img, [320, 320])
    return tf.cast(img,tf.float32), label

train_ds = train_ds.map(process_image).map(scale).batch(32).cache().prefetch(tf.data.AUTOTUNE)
test_ds = test_ds.map(process_image).map(scale).batch(32).prefetch(tf.data.AUTOTUNE)
for img,label in train_ds.take(1):
  print(label.numpy())

the result is always the encoded tensor of letter T as label while when i use

for img in images_ds.take(2):
  print(get_label(img.numpy()))

it returns the true label

CodePudding user response:

I suggest to introduce some modifications, in order to make your code clean do not extract labels inside map function, create list of both path and labels, try this one:

from glob import glob
import string

alphabet = list(string.ascii_uppercase)
idx = range(26)
#create dict alphabet -> idx
dict_alpha = dict(tuple(zip(alphabet, idx)))

paths = glob('/content/drive/MyDrive/prom02/dataset/train/*.jpg')
labels = []
for path in paths:
  alpha = path.split("/")[-1][0]
  labels.append(dict_alpha[alpha])

#split paths, labels using train_test_split sklearn
train_ds = tf.data.Dataset.from_tensor_slices((paths, labels))
train_ds = train_ds.shuffle(len(paths))

def process_image(file_path, label):
  img = tf.io.read_file(file_path)
  img = tf.image.decode_jpeg(img)
  img = tf.cast(img,tf.float32)
  img = tf.image.resize(img, [320, 320], method = 'nearest')
  return img, tf.one_hot(label, 26)
        
    
batch = 1 #for test   

train_ds = train_ds.map(process_image).batch(batch).cache().prefetch(tf.data.AUTOTUNE)
    
for img, label in train_ds.take(1):
  print(label)

Observations:

  • You can just replace the scale mapping by img = tf.cast(img, tf.float32) inside process_image
  • tf.image.resize(img, [320, 320]) by default the resizing method is 'bilinear' which is going change pixel values, for images i prefer tf.image.resize(img, [320, 320], method='nearest') which has no effect on the pixel values, it is based on duplications.
  • Related