Does fit pass the whole batch to the loss function in TensorFlow?


I am trying to train a model with TensorFlow. I have a custom loss function and set the batch size to 1960, but fit only seems to be giving my loss function one value at a time, even though my loss function is written to process the whole batch. Code:

import math

import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Dense, Dropout

chunksize = 40
stepsize = 1961

x = tf.data.Dataset.from_tensor_slices(np.random.rand(9000, 40, 7))
y = tf.data.Dataset.from_tensor_slices(np.random.rand(9000, 40, 7))
dataset = tf.data.Dataset.zip((x, y)).batch(stepsize - 1)


def Generator():
    model_m = keras.models.Sequential()
    model_m.add(tf.keras.layers.Conv1D(3, 1, input_shape=(40, 7)))
    model_m.add(tf.compat.v1.keras.layers.CuDNNLSTM(units=50, return_sequences=True))
    model_m.add(Dropout(0.2))
    model_m.add(tf.compat.v1.keras.layers.CuDNNLSTM(units=50, return_sequences=True))
    model_m.add(Dropout(0.2))
    model_m.add(tf.compat.v1.keras.layers.CuDNNLSTM(units=50, return_sequences=True))
    model_m.add(Dropout(0.2))
    model_m.add(tf.compat.v1.keras.layers.CuDNNLSTM(units=50))
    model_m.add(Dropout(0.2))
    model_m.add(Dense(units=1, activation="sigmoid"))
    return model_m


model = Generator()


def generator_loss(target, genor_output1):
    dat = tf.cast(tf.reshape(target, (len(target), 1)), tf.float32)
    mult = tf.reduce_sum(dat)
    dat = tf.math.multiply(genor_output1, dat)
    dat2 = tf.reduce_sum(dat)
    dat2 = tf.math.divide(
        tf.math.add(tf.math.add(tf.math.abs(mult), 1), mult),
        tf.math.add(tf.math.add(tf.math.abs(mult), 1), dat2),
    )
    return dat2, dat


generator_optimizer = tf.keras.optimizers.Adam(learning_rate=0.00007, epsilon=2e-4, beta_1=0.5)
model.compile(loss=generator_loss, optimizer="Adam")
model.fit(dataset, epochs=100, batch_size=(stepsize - 1))
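
For reference, here is a minimal, self-contained sketch (assuming TensorFlow 2.x; debug_loss and the tiny model below are hypothetical stand-ins, not the code above) that prints the batch shapes a custom loss actually receives from fit. Note that when x is a tf.data.Dataset, the batches are defined by .batch(...) on the dataset, and the batch_size argument to fit should not be passed:

import numpy as np
import tensorflow as tf

# Toy data with the same shapes as above, batched to 1960 samples per step.
x = tf.data.Dataset.from_tensor_slices(np.random.rand(9000, 40, 7))
y = tf.data.Dataset.from_tensor_slices(np.random.rand(9000, 40, 7))
dataset = tf.data.Dataset.zip((x, y)).batch(1960)

def debug_loss(y_true, y_pred):
    # Print the runtime shapes the loss receives on every training step.
    tf.print("y_true shape:", tf.shape(y_true), "y_pred shape:", tf.shape(y_pred))
    # Shape-safe placeholder loss: collapse the (batch, 40, 7) targets to
    # (batch,) and compare them with the (batch, 1) predictions.
    target = tf.reduce_mean(tf.cast(y_true, tf.float32), axis=[1, 2])
    return tf.reduce_mean(tf.square(target - tf.squeeze(y_pred, axis=-1)))

model = tf.keras.Sequential()
model.add(tf.keras.Input(shape=(40, 7)))
model.add(tf.keras.layers.GlobalAveragePooling1D())
model.add(tf.keras.layers.Dense(1, activation="sigmoid"))

model.compile(loss=debug_loss, optimizer="adam")
model.fit(dataset.take(2), epochs=1)  # prints shapes like [1960 40 7] and [1960 1]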

Also, why am I getting this error?

ValueError: Shapes must be equal rank, but are 0 and 2
        From merging shape 0 with other shapes. for '{{node generator_loss/weighted_loss/packed}} = Pack[N=2, T=DT_FLOAT, axis=0](generator_loss/truediv, generator_loss/Mul)' with input shapes: [], [?,1].
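
Regarding the ValueError: a Keras custom loss is expected to return a single tensor, but generator_loss returns the tuple (dat2, dat). The Pack node in the message appears to be Keras trying to stack the scalar truediv result (dat2, shape []) with the [?, 1]-shaped Mul result (dat), which have different ranks. Below is a minimal sketch of the same computation returning only the scalar, assuming (as the reshape implies) one target value per sample:

import tensorflow as tf

def generator_loss(target, genor_output1):
    # (-1, 1) instead of (len(target), 1): len() can fail on symbolic
    # tensors whose batch dimension is unknown at graph-construction time.
    dat = tf.cast(tf.reshape(target, (-1, 1)), tf.float32)
    mult = tf.reduce_sum(dat)
    weighted = tf.math.multiply(genor_output1, dat)
    dat2 = tf.reduce_sum(weighted)
    # Return only the scalar ratio: returning (dat2, dat) makes Keras try to
    # pack a rank-0 and a rank-2 tensor together, producing the error above.
    return tf.math.divide(
        tf.math.add(tf.math.add(tf.math.abs(mult), 1), mult),
        tf.math.add(tf.math.add(tf.math.abs(mult), 1), dat2),
    )

# Quick check with a toy batch of 4 samples; prints a scalar tensor.
print(generator_loss(tf.ones((4, 1)), tf.fill((4, 1), 0.5)))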
