neural network gives TypeError: ('Keyword argument not understood:', 'training')


I was trying to train a fully connected neural network (FCNN) model with TensorFlow Probability, but I get an error that I do not understand. The neural network is:

import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
import timeit
import tensorflow as tf
from tqdm import tqdm_notebook as tqdm
import tensorflow_probability as tfp
from tensorflow.keras.callbacks import TensorBoard
import datetime,os
tfd = tfp.distributions
from tensorflow.keras.layers import Dropout
from tensorflow.keras.layers import Input
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam

def normal_exp(params): 
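  # first output unit is the mean; exp() of the second keeps the scale (stddev) positive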
  return tfd.Normal(loc=params[:,0:1], scale=tf.math.exp(params[:,1:2]))

def NLL(y, distr): 
  return -distr.log_prob(y) 

def create_model():
  return tf.keras.models.Sequential([
    Input(shape=(1,)),
    Dense(200,activation="relu"),
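    # NOTE: training=True in the constructor calls below is what triggers the TypeError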
    Dropout(0.1, training=True),
    Dense(500,activation="relu"),
    Dropout(0.1, training=True),
    Dense(500,activation="relu"),
    Dropout(0.1, training=True),
    Dense(200,activation="relu"),
    Dropout(0.1, training=True),
    Dense(2),
    tfp.layers.DistributionLambda(normal_exp, name='normal_exp')
])
def train_model():
    model = create_model() 
    model.compile(Adam(learning_rate=0.0002), loss=NLL)
    logdir = os.path.join("logs", datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
    tensorboard_callback = tf.keras.callbacks.TensorBoard(logdir, histogram_freq=1)
    model.fit(x=X_train, y=y_train, epochs=1500, validation_data=(X_val, y_val), callbacks=[tensorboard_callback])

train_model()

and the error says:

/usr/local/lib/python3.7/dist-packages/keras/utils/generic_utils.py in validate_kwargs(kwargs, allowed_kwargs, error_message)
   1172 for kwarg in kwargs:
   1173   if kwarg not in allowed_kwargs:
-> 1174     raise TypeError(error_message, kwarg)
   1175
   1176

TypeError: ('Keyword argument not understood:', 'training')

I tried modifying the way the neural network is defined inside Sequential(), but I do not know where the problem is.

CodePudding user response:

In the Sequential API you can't pass training=True to a layer's constructor as a **kwargs entry; training is an argument of the layer's call, not of its constructor. In the Functional API you can pass training=True when calling the layer, like below:

inputs = Input(shape=(1,))
x = Dense(200, activation="relu")(inputs)
x = Dropout(0.1)(x, training=True)  # training is passed at call time, not to the constructor
x = Dense(2)(x)
out = tfp.layers.DistributionLambda(normal_exp, name='normal_exp')(x)
model = Model(inputs, out)
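
If you want dropout to stay active at inference time (Monte Carlo dropout) but keep the Sequential API, another option is to subclass Dropout so it always runs in training mode. MCDropout is just an illustrative name for this sketch:

class MCDropout(tf.keras.layers.Dropout):
    # Dropout that stays active at inference time as well (Monte Carlo dropout).
    def call(self, inputs, training=None):
        # Ignore the training flag Keras passes in and always drop units.
        return super().call(inputs, training=True)

You can then use MCDropout(0.1) in the Sequential list exactly where you had Dropout(0.1, training=True).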

Your code, corrected for the Sequential API:

import numpy as np
import datetime, os
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
from tensorflow.keras.layers import Dropout, Input, Dense
from tensorflow.keras.optimizers import Adam

def normal_exp(params): 
  return tfd.Normal(loc=params[:,0:1], scale=tf.math.exp(params[:,1:2]))

def NLL(y, distr): 
  return -distr.log_prob(y) 

def create_model():
  return tf.keras.models.Sequential([
    Input(shape=(1,)),
    Dense(200,activation="relu"),
    Dropout(0.1),
    Dense(500,activation="relu"),
    Dropout(0.1),
    Dense(500,activation="relu"),
    Dropout(0.1),
    Dense(200,activation="relu"),
    Dropout(0.1),
    Dense(2),
    tfp.layers.DistributionLambda(normal_exp, name='normal_exp')
])
def train_model():
    model = create_model() 
    model.compile(Adam(learning_rate=0.0002), loss=NLL)
    logdir = os.path.join("logs", datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
    tensorboard_callback = tf.keras.callbacks.TensorBoard(logdir, histogram_freq=1)
    model.fit(x=X_train, y=y_train, epochs=2, validation_data=(X_val, y_val), callbacks=[tensorboard_callback])

    
X_train = np.random.rand(10, 1)
y_train = np.random.rand(10)

X_val = np.random.rand(10, 1)
y_val = np.random.rand(10)

train_model()

Output:

Epoch 1/2
1/1 [==============================] - 1s 1s/step - loss: 1.1478 - val_loss: 1.0427
Epoch 2/2
1/1 [==============================] - 0s 158ms/step - loss: 1.1299 - val_loss: 1.0281
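
Since the last layer is a DistributionLambda, calling the model returns a tfd.Normal rather than a plain tensor, so you can read the predictive mean and standard deviation off directly. A minimal sketch, assuming you add return model at the end of train_model():

model = train_model()            # assumes train_model() now ends with `return model`
dist = model(X_val)              # a tfd.Normal instance, not a plain tensor
pred_mean = dist.mean().numpy()  # predicted mean for each input
pred_std = dist.stddev().numpy() # predicted standard deviation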

CodePudding user response:

It's because Dropout's constructor doesn't accept a training argument; training belongs to the layer's call. When using model.fit, training will be appropriately set to True automatically, and in other contexts you can set the kwarg explicitly to True when calling the layer:

tf.keras.layers.Dropout(0.2, noise_shape=None, seed=None)(dense, training=True)
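
To see what the call-time flag does, you can run a Dropout layer eagerly both ways (a quick sketch):

import tensorflow as tf

layer = tf.keras.layers.Dropout(0.5)
x = tf.ones((1, 8))
print(layer(x, training=False).numpy())  # inference mode: input passes through unchanged
print(layer(x, training=True).numpy())   # training mode: ~half the units zeroed, the rest scaled by 2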