It seems that KerasClassifier wraps the user-defined model in some way, but I can't figure out how to get the underlying model back out.
I want to move my LSTM model below from being built directly to being wrapped with a Keras wrapper such as KerasClassifier, i.e. from:
model1 = Sequential()
model1.add(LSTM(units=60, activation='relu', input_shape=(60, 1),
                return_sequences=True, recurrent_dropout=0.1))
model1.add(LSTM(units=30))
model1.add(Dense(units=1, activation='sigmoid'))
model1.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
to
def create_model():
    model = Sequential()
    model.add(LSTM(units=60, activation='relu', input_shape=(60, 1),
                   return_sequences=True, recurrent_dropout=0.1))
    model.add(LSTM(units=30))
    model.add(Dense(units=1, activation='sigmoid'))
    model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
    return model

modelk = KerasClassifier(build_fn=create_model,
                         epochs=10,
                         batch_size=30,
                         verbose=0)
If I call model1.summary() on model1 created by the first approach, I get something like:
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
lstm (LSTM) (None, 60, 60) 14880
lstm_1 (LSTM) (None, 30) 10920
dense (Dense) (None, 1) 31
=================================================================
Total params: 25,831
Trainable params: 25,831
Non-trainable params: 0
But if I call modelk.summary() on modelk returned by the second approach, I get an error:
'KerasClassifier' object has no attribute 'summary'
CodePudding user response:
Try modelk.build_fn().summary():
from keras.models import Sequential
from keras.layers import Dense, LSTM
from keras.wrappers.scikit_learn import KerasClassifier

def create_model():
    model = Sequential()
    model.add(LSTM(units=60, activation='relu', input_shape=(60, 1),
                   return_sequences=True, recurrent_dropout=0.1))
    model.add(LSTM(units=30))
    model.add(Dense(units=1, activation='sigmoid'))
    model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
    return model

modelk = KerasClassifier(build_fn=create_model,
                         epochs=10,
                         batch_size=30,
                         verbose=0)

print(modelk.build_fn().summary())
Model: "sequential_2"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
lstm_4 (LSTM) (None, 60, 60) 14880
lstm_5 (LSTM) (None, 30) 10920
dense_2 (Dense) (None, 1) 31
=================================================================
Total params: 25,831
Trainable params: 25,831
Non-trainable params: 0
_________________________________________________________________
None
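Note that build_fn() builds a fresh, untrained model each time it is called, so the summary above comes from a new instance rather than a fitted one. If you have already fitted the wrapper, the underlying trained Keras model should be reachable through the wrapper's model attribute (an assumption based on the old keras.wrappers.scikit_learn implementation); a minimal sketch with dummy data:

import numpy as np

# Dummy data matching input_shape=(60, 1); purely for illustration.
X = np.random.random((50, 60, 1))
y = np.random.randint(0, 2, size=(50,))

modelk.fit(X, y)
# Assumption: after fit(), the old keras.wrappers.scikit_learn wrapper
# stores the built Keras model on the `model` attribute.
modelk.model.summary()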
What you could also do is use model.summary inside create_model, and the summary will be printed when model.fit is called internally:
from keras.models import Sequential
from keras.layers import Dense, LSTM
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV
import numpy as np

def create_model(optimizer='rmsprop'):
    model = Sequential()
    model.add(LSTM(units=60, activation='relu', input_shape=(60, 1),
                   return_sequences=True, recurrent_dropout=0.1))
    model.add(LSTM(units=30))
    model.add(Dense(units=1, activation='sigmoid'))
    model.compile(optimizer=optimizer, loss='binary_crossentropy', metrics=['accuracy'])
    print(model.summary())
    return model

modelk = KerasClassifier(build_fn=create_model,
                         epochs=10,
                         batch_size=25,
                         verbose=0)

optimizers = ['rmsprop', 'adam']
param_grid = dict(optimizer=optimizers)
grid = GridSearchCV(estimator=modelk, param_grid=param_grid)

X = np.random.random((50, 60, 1))
Y = np.random.random((50,))
grid_result = grid.fit(X, Y)
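Once the search has finished, the best wrapper can be inspected in the same way. This is a sketch under the assumptions that GridSearchCV was left at its default refit=True and that the fitted wrapper exposes the model attribute mentioned above:

# Assumption: refit=True (the default), so best_estimator_ is a refitted
# KerasClassifier whose underlying Keras model lives on .model.
best_wrapper = grid_result.best_estimator_
best_wrapper.model.summary()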