We trained a binary classification neural network that reaches roughly 90% accuracy and 0.20 loss. When we call model.predict() on the testing data (20% of our entire dataset), we get a relatively even distribution of 1's and 0's. But when we pass individual points from the testing data as a NumPy array, we only get 0's, regardless of whether the data is shuffled. Can anyone help us explain why we are getting this behavior?
When we use X_test (from the split above) instead of dummyTest, binary_pred equals 1 for the entry corresponding to the dummyTest data point, but when we feed dummyTest's values on their own, binary_pred only equals 0.
Full Code Shown Below:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from pandas.plotting import scatter_matrix
from sklearn import model_selection
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.optimizers import Adam
import os
full_dataset = pd.read_csv('noQMarkDataset.csv')
data = full_dataset[~full_dataset.isin(['?'])]
data = data.dropna(axis=0)
data = data.apply(pd.to_numeric)
X = np.array(data.drop(columns=['target']))  # drop(['target'], 1) is no longer accepted by newer pandas
y = np.array(data['target'])
# standardize the features to zero mean / unit variance
mean = X.mean(axis=0)
X -= mean
std = X.std(axis=0)
X /= std
X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, random_state = 0, test_size = 0.2, shuffle=False) #stratify = y
# convert data to categorical labels
Y_train = to_categorical(y_train, num_classes=None)
Y_test = to_categorical(y_test, num_classes=None)
Y_train_binary = y_train.copy()
Y_test_binary = y_test.copy()
Y_train_binary[Y_train_binary > 0] = 1
Y_test_binary[Y_test_binary > 0] = 1
def create_binary_model():
    model = Sequential()
    model.add(Dense(16, input_dim=7, activation='relu'))  # prev input_dim = 13
    model.add(Dropout(0.25))
    model.add(Dense(16, activation='relu'))
    model.add(Dropout(0.25))
    model.add(Dense(16))
    model.add(Dropout(0.25))
    model.add(Dense(12, activation='relu'))
    model.add(Dense(12, activation='relu'))
    model.add(Dropout(0.25))
    model.add(Dense(8, activation='relu'))
    model.add(Dropout(0.25))
    model.add(Dense(1, activation='sigmoid'))
    adam = Adam(learning_rate=0.001)  # defined but not passed to compile below
    model.compile(loss='binary_crossentropy', optimizer='rmsprop', metrics=['accuracy'])  # optimizer = 'rmsprop' previously
    return model
binary_model = create_binary_model()
history=binary_model.fit(X_train, Y_train_binary, validation_data=(X_test, Y_test_binary), epochs=130, batch_size=15) #epochs = 130, batch_size = 20 previously (best)
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('Model Accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'])
plt.show()
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('Model Loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'])
plt.show()
from sklearn.metrics import classification_report, accuracy_score
dummyTest = np.array([[67.0,1.0,4.0,0.0,2.0,129.0,2.0]])
binary_pred = np.round(binary_model.predict(dummyTest)).astype(int)
#binary_pred = np.round(binary_model.predict(X_test)).astype(int)
print(f"Binary Pred: {binary_pred}")
binary_model.save(os.path.join(".", "test_model.h5"))
#after loading
from tensorflow.keras.models import load_model
model2 = load_model(os.path.join(".", "test_model.h5"))
binary_pred2 = np.round(model2.predict(dummyTest)).astype(int)
#binary_pred2 = np.round(model2.predict(X_test)).astype(int)
print(f"Binary Pred2 after load: {binary_pred2}")
Full Dataset Shown Below:
age,sex,cp,fbs,restecg,thalach,ca,target
63.0,1.0,1.0,1.0,2.0,150.0,0.0,0
67.0,1.0,4.0,0.0,2.0,108.0,3.0,2
67.0,1.0,4.0,0.0,2.0,129.0,2.0,1
37.0,1.0,3.0,0.0,0.0,187.0,0.0,0
41.0,0.0,2.0,0.0,2.0,172.0,0.0,0
56.0,1.0,2.0,0.0,0.0,178.0,0.0,0
62.0,0.0,4.0,0.0,2.0,160.0,2.0,3
57.0,0.0,4.0,0.0,0.0,163.0,0.0,0
63.0,1.0,4.0,0.0,2.0,147.0,1.0,2
53.0,1.0,4.0,1.0,2.0,155.0,0.0,1
57.0,1.0,4.0,0.0,0.0,148.0,0.0,0
56.0,0.0,2.0,0.0,2.0,153.0,0.0,0
56.0,1.0,3.0,1.0,2.0,142.0,1.0,2
44.0,1.0,2.0,0.0,0.0,173.0,0.0,0
52.0,1.0,3.0,1.0,0.0,162.0,0.0,0
57.0,1.0,3.0,0.0,0.0,174.0,0.0,0
48.0,1.0,2.0,0.0,0.0,168.0,0.0,1
54.0,1.0,4.0,0.0,0.0,160.0,0.0,0
48.0,0.0,3.0,0.0,0.0,139.0,0.0,0
49.0,1.0,2.0,0.0,0.0,171.0,0.0,0
64.0,1.0,1.0,0.0,2.0,144.0,0.0,0
58.0,0.0,1.0,1.0,2.0,162.0,0.0,0
58.0,1.0,2.0,0.0,2.0,160.0,0.0,1
58.0,1.0,3.0,0.0,2.0,173.0,2.0,3
60.0,1.0,4.0,0.0,2.0,132.0,2.0,4
50.0,0.0,3.0,0.0,0.0,158.0,0.0,0
58.0,0.0,3.0,0.0,0.0,172.0,0.0,0
66.0,0.0,1.0,0.0,0.0,114.0,0.0,0
43.0,1.0,4.0,0.0,0.0,171.0,0.0,0
40.0,1.0,4.0,0.0,2.0,114.0,0.0,3
69.0,0.0,1.0,0.0,0.0,151.0,2.0,0
60.0,1.0,4.0,1.0,0.0,160.0,2.0,2
64.0,1.0,3.0,0.0,0.0,158.0,0.0,1
59.0,1.0,4.0,0.0,0.0,161.0,0.0,0
44.0,1.0,3.0,0.0,0.0,179.0,0.0,0
42.0,1.0,4.0,0.0,0.0,178.0,0.0,0
43.0,1.0,4.0,0.0,2.0,120.0,0.0,3
57.0,1.0,4.0,0.0,2.0,112.0,1.0,1
55.0,1.0,4.0,0.0,0.0,132.0,1.0,3
61.0,1.0,3.0,1.0,0.0,137.0,0.0,0
65.0,0.0,4.0,0.0,2.0,114.0,3.0,4
40.0,1.0,1.0,0.0,0.0,178.0,0.0,0
71.0,0.0,2.0,0.0,0.0,162.0,2.0,0
59.0,1.0,3.0,1.0,0.0,157.0,0.0,0
61.0,0.0,4.0,0.0,2.0,169.0,0.0,1
58.0,1.0,3.0,0.0,2.0,165.0,1.0,4
51.0,1.0,3.0,0.0,0.0,123.0,0.0,0
50.0,1.0,4.0,0.0,2.0,128.0,0.0,4
65.0,0.0,3.0,1.0,2.0,157.0,1.0,0
53.0,1.0,3.0,1.0,2.0,152.0,0.0,0
41.0,0.0,2.0,0.0,0.0,168.0,1.0,0
65.0,1.0,4.0,0.0,0.0,140.0,0.0,0
44.0,1.0,4.0,0.0,2.0,153.0,1.0,2
44.0,1.0,2.0,0.0,2.0,188.0,0.0,0
60.0,1.0,4.0,0.0,0.0,144.0,1.0,1
54.0,1.0,4.0,0.0,2.0,109.0,1.0,1
50.0,1.0,3.0,0.0,0.0,163.0,1.0,1
41.0,1.0,4.0,0.0,2.0,158.0,0.0,1
54.0,1.0,3.0,0.0,2.0,152.0,1.0,0
51.0,1.0,1.0,0.0,2.0,125.0,1.0,0
51.0,0.0,4.0,0.0,0.0,142.0,0.0,2
46.0,0.0,3.0,0.0,2.0,160.0,0.0,0
58.0,1.0,4.0,0.0,2.0,131.0,3.0,1
54.0,0.0,3.0,1.0,0.0,170.0,0.0,0
54.0,1.0,4.0,0.0,0.0,113.0,1.0,2
60.0,1.0,4.0,0.0,2.0,142.0,2.0,2
60.0,1.0,3.0,0.0,2.0,155.0,0.0,1
54.0,1.0,3.0,0.0,2.0,165.0,0.0,0
59.0,1.0,4.0,0.0,2.0,140.0,0.0,2
46.0,1.0,3.0,0.0,0.0,147.0,0.0,1
65.0,0.0,3.0,0.0,0.0,148.0,0.0,0
67.0,1.0,4.0,1.0,0.0,163.0,2.0,3
62.0,1.0,4.0,0.0,0.0,99.0,2.0,1
65.0,1.0,4.0,0.0,2.0,158.0,2.0,1
44.0,1.0,4.0,0.0,2.0,177.0,1.0,1
65.0,0.0,3.0,0.0,2.0,151.0,0.0,0
60.0,1.0,4.0,0.0,2.0,141.0,1.0,1
51.0,0.0,3.0,0.0,2.0,142.0,1.0,0
48.0,1.0,2.0,0.0,2.0,180.0,0.0,0
58.0,1.0,4.0,0.0,2.0,111.0,0.0,3
45.0,1.0,4.0,0.0,2.0,148.0,0.0,0
53.0,0.0,4.0,0.0,2.0,143.0,0.0,0
39.0,1.0,3.0,0.0,2.0,182.0,0.0,0
68.0,1.0,3.0,1.0,2.0,150.0,0.0,3
52.0,1.0,2.0,0.0,0.0,172.0,0.0,0
44.0,1.0,3.0,0.0,2.0,180.0,0.0,0
47.0,1.0,3.0,0.0,2.0,156.0,0.0,0
53.0,0.0,3.0,0.0,2.0,115.0,0.0,0
53.0,0.0,4.0,0.0,2.0,160.0,0.0,0
51.0,0.0,3.0,0.0,2.0,149.0,0.0,0
66.0,1.0,4.0,0.0,2.0,151.0,0.0,0
62.0,0.0,4.0,0.0,2.0,145.0,3.0,3
62.0,1.0,3.0,0.0,0.0,146.0,3.0,0
44.0,0.0,3.0,0.0,0.0,175.0,0.0,0
63.0,0.0,3.0,0.0,2.0,172.0,0.0,0
52.0,1.0,4.0,0.0,0.0,161.0,1.0,1
59.0,1.0,4.0,0.0,2.0,142.0,1.0,2
60.0,0.0,4.0,0.0,2.0,157.0,2.0,3
52.0,1.0,2.0,0.0,0.0,158.0,1.0,0
48.0,1.0,4.0,0.0,2.0,186.0,0.0,0
45.0,1.0,4.0,0.0,2.0,185.0,0.0,0
34.0,1.0,1.0,0.0,2.0,174.0,0.0,0
57.0,0.0,4.0,0.0,2.0,159.0,1.0,0
71.0,0.0,3.0,1.0,2.0,130.0,1.0,0
49.0,1.0,3.0,0.0,0.0,139.0,3.0,3
54.0,1.0,2.0,0.0,0.0,156.0,0.0,0
59.0,1.0,4.0,0.0,0.0,162.0,1.0,2
57.0,1.0,3.0,0.0,2.0,150.0,1.0,1
61.0,1.0,4.0,0.0,0.0,140.0,1.0,2
39.0,1.0,4.0,0.0,0.0,140.0,0.0,3
61.0,0.0,4.0,0.0,2.0,146.0,0.0,1
56.0,1.0,4.0,1.0,2.0,144.0,1.0,1
52.0,1.0,1.0,0.0,2.0,190.0,0.0,0
43.0,0.0,4.0,1.0,2.0,136.0,0.0,2
62.0,0.0,3.0,0.0,0.0,97.0,1.0,2
41.0,1.0,2.0,0.0,0.0,132.0,0.0,0
58.0,1.0,3.0,1.0,2.0,165.0,0.0,0
35.0,0.0,4.0,0.0,0.0,182.0,0.0,0
63.0,1.0,4.0,1.0,2.0,132.0,3.0,3
65.0,1.0,4.0,0.0,2.0,127.0,1.0,2
48.0,1.0,4.0,1.0,2.0,150.0,2.0,3
63.0,0.0,4.0,0.0,2.0,154.0,3.0,4
51.0,1.0,3.0,0.0,0.0,143.0,0.0,0
55.0,1.0,4.0,0.0,0.0,111.0,0.0,3
65.0,1.0,1.0,1.0,2.0,174.0,1.0,1
45.0,0.0,2.0,0.0,2.0,175.0,0.0,0
56.0,0.0,4.0,1.0,2.0,133.0,2.0,3
54.0,1.0,4.0,0.0,0.0,126.0,1.0,3
44.0,1.0,2.0,0.0,0.0,170.0,0.0,0
62.0,0.0,4.0,0.0,0.0,163.0,0.0,0
54.0,1.0,3.0,0.0,2.0,147.0,0.0,0
51.0,1.0,3.0,0.0,0.0,154.0,1.0,0
29.0,1.0,2.0,0.0,2.0,202.0,0.0,0
51.0,1.0,4.0,0.0,2.0,186.0,0.0,0
43.0,0.0,3.0,0.0,0.0,165.0,0.0,0
55.0,0.0,2.0,0.0,2.0,161.0,0.0,0
70.0,1.0,4.0,0.0,0.0,125.0,0.0,4
62.0,1.0,2.0,0.0,2.0,103.0,1.0,3
35.0,1.0,4.0,0.0,0.0,130.0,0.0,1
51.0,1.0,3.0,1.0,2.0,166.0,0.0,0
59.0,1.0,2.0,0.0,0.0,164.0,0.0,0
59.0,1.0,1.0,0.0,2.0,159.0,0.0,1
52.0,1.0,2.0,1.0,0.0,184.0,0.0,0
64.0,1.0,3.0,0.0,0.0,131.0,0.0,1
58.0,1.0,3.0,0.0,2.0,154.0,0.0,0
47.0,1.0,3.0,0.0,0.0,152.0,0.0,1
57.0,1.0,4.0,1.0,2.0,124.0,3.0,4
41.0,1.0,3.0,0.0,0.0,179.0,0.0,0
45.0,1.0,2.0,0.0,2.0,170.0,0.0,0
60.0,0.0,3.0,0.0,0.0,160.0,1.0,0
52.0,1.0,1.0,1.0,0.0,178.0,0.0,0
42.0,0.0,4.0,0.0,2.0,122.0,0.0,0
67.0,0.0,3.0,0.0,2.0,160.0,0.0,0
55.0,1.0,4.0,0.0,2.0,145.0,1.0,4
64.0,1.0,4.0,0.0,2.0,96.0,1.0,3
70.0,1.0,4.0,0.0,2.0,109.0,3.0,1
51.0,1.0,4.0,0.0,0.0,173.0,0.0,1
58.0,1.0,4.0,0.0,2.0,171.0,2.0,1
60.0,1.0,4.0,0.0,2.0,170.0,2.0,2
68.0,1.0,3.0,0.0,0.0,151.0,1.0,0
46.0,1.0,2.0,1.0,0.0,156.0,0.0,0
77.0,1.0,4.0,0.0,2.0,162.0,3.0,4
54.0,0.0,3.0,0.0,0.0,158.0,0.0,0
58.0,0.0,4.0,0.0,2.0,122.0,0.0,0
48.0,1.0,3.0,1.0,0.0,175.0,2.0,0
57.0,1.0,4.0,0.0,0.0,168.0,0.0,0
54.0,0.0,2.0,1.0,2.0,159.0,1.0,0
35.0,1.0,4.0,0.0,2.0,156.0,0.0,1
45.0,0.0,2.0,0.0,0.0,138.0,0.0,0
70.0,1.0,3.0,0.0,0.0,112.0,1.0,3
53.0,1.0,4.0,0.0,2.0,111.0,0.0,0
59.0,0.0,4.0,0.0,0.0,143.0,0.0,1
62.0,0.0,4.0,0.0,2.0,157.0,0.0,0
64.0,1.0,4.0,0.0,2.0,132.0,2.0,4
57.0,1.0,4.0,0.0,0.0,88.0,1.0,1
52.0,1.0,4.0,1.0,0.0,147.0,3.0,0
56.0,1.0,4.0,0.0,2.0,105.0,1.0,1
43.0,1.0,3.0,0.0,0.0,162.0,1.0,0
53.0,1.0,3.0,1.0,2.0,173.0,3.0,0
48.0,1.0,4.0,0.0,2.0,166.0,0.0,3
56.0,0.0,4.0,0.0,2.0,150.0,2.0,2
42.0,1.0,1.0,0.0,2.0,178.0,2.0,0
59.0,1.0,1.0,0.0,2.0,145.0,0.0,0
60.0,0.0,4.0,0.0,2.0,161.0,0.0,1
63.0,0.0,2.0,0.0,0.0,179.0,2.0,0
42.0,1.0,3.0,1.0,0.0,194.0,0.0,0
66.0,1.0,2.0,0.0,0.0,120.0,3.0,2
54.0,1.0,2.0,0.0,2.0,195.0,1.0,1
69.0,1.0,3.0,0.0,2.0,146.0,3.0,2
50.0,1.0,3.0,0.0,0.0,163.0,0.0,0
51.0,1.0,4.0,0.0,0.0,122.0,3.0,3
62.0,0.0,4.0,1.0,0.0,106.0,3.0,2
68.0,0.0,3.0,0.0,2.0,115.0,0.0,0
67.0,1.0,4.0,0.0,2.0,125.0,2.0,3
69.0,1.0,1.0,1.0,2.0,131.0,1.0,0
45.0,0.0,4.0,0.0,2.0,152.0,0.0,0
50.0,0.0,2.0,0.0,0.0,162.0,0.0,0
59.0,1.0,1.0,0.0,2.0,125.0,0.0,1
50.0,0.0,4.0,0.0,2.0,159.0,0.0,0
64.0,0.0,4.0,0.0,0.0,154.0,0.0,0
57.0,1.0,3.0,1.0,0.0,173.0,1.0,0
64.0,0.0,3.0,0.0,0.0,133.0,0.0,0
43.0,1.0,4.0,0.0,0.0,161.0,0.0,0
45.0,1.0,4.0,0.0,2.0,147.0,3.0,3
58.0,1.0,4.0,0.0,2.0,130.0,2.0,3
50.0,1.0,4.0,0.0,2.0,126.0,0.0,3
55.0,1.0,2.0,0.0,0.0,155.0,0.0,0
62.0,0.0,4.0,0.0,0.0,154.0,0.0,1
37.0,0.0,3.0,0.0,0.0,170.0,0.0,0
38.0,1.0,1.0,0.0,0.0,182.0,0.0,4
41.0,1.0,3.0,0.0,2.0,168.0,0.0,0
66.0,0.0,4.0,1.0,0.0,165.0,2.0,3
52.0,1.0,4.0,0.0,0.0,160.0,1.0,1
56.0,1.0,1.0,0.0,2.0,162.0,0.0,0
46.0,0.0,2.0,0.0,0.0,172.0,0.0,0
46.0,0.0,4.0,0.0,2.0,152.0,0.0,0
64.0,0.0,4.0,0.0,0.0,122.0,2.0,0
59.0,1.0,4.0,0.0,2.0,182.0,0.0,0
41.0,0.0,3.0,0.0,2.0,172.0,0.0,0
54.0,0.0,3.0,0.0,2.0,167.0,0.0,0
39.0,0.0,3.0,0.0,0.0,179.0,0.0,0
53.0,1.0,4.0,0.0,0.0,95.0,2.0,3
63.0,0.0,4.0,0.0,0.0,169.0,2.0,1
34.0,0.0,2.0,0.0,0.0,192.0,0.0,0
47.0,1.0,4.0,0.0,0.0,143.0,0.0,0
67.0,0.0,3.0,0.0,0.0,172.0,1.0,0
54.0,1.0,4.0,0.0,2.0,108.0,1.0,3
66.0,1.0,4.0,0.0,2.0,132.0,1.0,2
52.0,0.0,3.0,0.0,2.0,169.0,0.0,0
55.0,0.0,4.0,0.0,1.0,117.0,0.0,2
49.0,1.0,3.0,0.0,2.0,126.0,3.0,1
74.0,0.0,2.0,0.0,2.0,121.0,1.0,0
54.0,0.0,3.0,0.0,0.0,163.0,1.0,0
54.0,1.0,4.0,0.0,2.0,116.0,2.0,3
56.0,1.0,4.0,1.0,2.0,103.0,0.0,2
46.0,1.0,4.0,0.0,2.0,144.0,0.0,1
49.0,0.0,2.0,0.0,0.0,162.0,0.0,0
42.0,1.0,2.0,0.0,0.0,162.0,0.0,0
41.0,1.0,2.0,0.0,0.0,153.0,0.0,0
41.0,0.0,2.0,0.0,0.0,163.0,0.0,0
49.0,0.0,4.0,0.0,0.0,163.0,0.0,0
61.0,1.0,1.0,0.0,0.0,145.0,2.0,2
60.0,0.0,3.0,1.0,0.0,96.0,0.0,0
67.0,1.0,4.0,0.0,0.0,71.0,0.0,2
58.0,1.0,4.0,0.0,0.0,156.0,1.0,2
47.0,1.0,4.0,0.0,2.0,118.0,1.0,1
52.0,1.0,4.0,0.0,0.0,168.0,2.0,3
62.0,1.0,2.0,1.0,2.0,140.0,0.0,0
57.0,1.0,4.0,0.0,0.0,126.0,0.0,0
58.0,1.0,4.0,0.0,0.0,105.0,1.0,1
64.0,1.0,4.0,0.0,0.0,105.0,1.0,0
51.0,0.0,3.0,0.0,2.0,157.0,0.0,0
43.0,1.0,4.0,0.0,0.0,181.0,0.0,0
42.0,0.0,3.0,0.0,0.0,173.0,0.0,0
67.0,0.0,4.0,0.0,0.0,142.0,2.0,0
76.0,0.0,3.0,0.0,1.0,116.0,0.0,0
70.0,1.0,2.0,0.0,2.0,143.0,0.0,0
57.0,1.0,2.0,0.0,0.0,141.0,0.0,1
44.0,0.0,3.0,0.0,0.0,149.0,1.0,0
58.0,0.0,2.0,1.0,2.0,152.0,2.0,3
60.0,0.0,1.0,0.0,0.0,171.0,0.0,0
44.0,1.0,3.0,0.0,0.0,169.0,0.0,0
61.0,1.0,4.0,0.0,2.0,125.0,1.0,4
42.0,1.0,4.0,0.0,0.0,125.0,0.0,2
52.0,1.0,4.0,1.0,0.0,156.0,0.0,2
59.0,1.0,3.0,1.0,0.0,134.0,1.0,2
40.0,1.0,4.0,0.0,0.0,181.0,0.0,1
42.0,1.0,3.0,0.0,0.0,150.0,0.0,0
61.0,1.0,4.0,0.0,2.0,138.0,1.0,1
66.0,1.0,4.0,0.0,2.0,138.0,0.0,0
46.0,1.0,4.0,0.0,0.0,120.0,2.0,2
71.0,0.0,4.0,0.0,0.0,125.0,0.0,0
59.0,1.0,1.0,0.0,0.0,162.0,2.0,1
64.0,1.0,1.0,0.0,2.0,155.0,0.0,0
66.0,0.0,3.0,0.0,2.0,152.0,1.0,0
39.0,0.0,3.0,0.0,0.0,152.0,0.0,0
57.0,1.0,2.0,0.0,2.0,164.0,1.0,1
58.0,0.0,4.0,0.0,0.0,131.0,0.0,0
57.0,1.0,4.0,0.0,0.0,143.0,1.0,2
47.0,1.0,3.0,0.0,0.0,179.0,0.0,0
55.0,0.0,4.0,0.0,1.0,130.0,1.0,3
35.0,1.0,2.0,0.0,0.0,174.0,0.0,0
61.0,1.0,4.0,0.0,0.0,161.0,1.0,2
58.0,1.0,4.0,0.0,1.0,140.0,3.0,4
58.0,0.0,4.0,1.0,2.0,146.0,2.0,2
56.0,1.0,2.0,0.0,2.0,163.0,0.0,0
56.0,1.0,2.0,0.0,0.0,169.0,0.0,0
67.0,1.0,3.0,0.0,2.0,150.0,0.0,1
55.0,0.0,2.0,0.0,0.0,166.0,0.0,0
44.0,1.0,4.0,0.0,0.0,144.0,0.0,2
63.0,1.0,4.0,0.0,2.0,144.0,2.0,2
63.0,0.0,4.0,0.0,0.0,136.0,0.0,1
41.0,1.0,2.0,0.0,0.0,182.0,0.0,0
59.0,1.0,4.0,1.0,2.0,90.0,2.0,3
57.0,0.0,4.0,0.0,0.0,123.0,0.0,1
45.0,1.0,1.0,0.0,0.0,132.0,0.0,1
68.0,1.0,4.0,1.0,0.0,141.0,2.0,2
57.0,1.0,4.0,0.0,0.0,115.0,1.0,3
57.0,0.0,2.0,0.0,2.0,174.0,1.0,1
66.0,0.0,3.0,0.0,2.0,152.0,1.0,0
39.0,0.0,3.0,0.0,0.0,152.0,0.0,0
57.0,1.0,2.0,0.0,2.0,164.0,1.0,1
58.0,0.0,4.0,0.0,0.0,131.0,0.0,0
57.0,1.0,4.0,0.0,0.0,143.0,1.0,2
47.0,1.0,3.0,0.0,0.0,179.0,0.0,0
55.0,0.0,4.0,0.0,1.0,130.0,1.0,3
35.0,1.0,2.0,0.0,0.0,174.0,0.0,0
61.0,1.0,4.0,0.0,0.0,161.0,1.0,2
58.0,1.0,4.0,0.0,1.0,140.0,3.0,4
46.0,1.0,4.0,0.0,0.0,120.0,2.0,2
71.0,0.0,4.0,0.0,0.0,125.0,0.0,0
59.0,1.0,1.0,0.0,0.0,162.0,2.0,1
64.0,1.0,1.0,0.0,2.0,155.0,0.0,0
66.0,0.0,3.0,0.0,2.0,152.0,1.0,0
39.0,0.0,3.0,0.0,0.0,152.0,0.0,0
57.0,1.0,2.0,0.0,2.0,164.0,1.0,1
58.0,0.0,4.0,0.0,0.0,131.0,0.0,0
57.0,1.0,4.0,0.0,0.0,143.0,1.0,2
47.0,1.0,3.0,0.0,0.0,179.0,0.0,0
55.0,0.0,4.0,0.0,1.0,130.0,1.0,3
35.0,1.0,2.0,0.0,0.0,174.0,0.0,0
61.0,1.0,4.0,0.0,0.0,161.0,1.0,2
58.0,1.0,4.0,0.0,1.0,140.0,3.0,4
58.0,0.0,4.0,1.0,2.0,146.0,2.0,2
39.0,0.0,3.0,0.0,0.0,179.0,0.0,0
53.0,1.0,4.0,0.0,0.0,95.0,2.0,3
63.0,0.0,4.0,0.0,0.0,169.0,2.0,1
34.0,0.0,2.0,0.0,0.0,192.0,0.0,0
47.0,1.0,4.0,0.0,0.0,143.0,0.0,0
67.0,0.0,3.0,0.0,0.0,172.0,1.0,0
54.0,1.0,4.0,0.0,2.0,108.0,1.0,3
66.0,1.0,4.0,0.0,2.0,132.0,1.0,2
52.0,0.0,3.0,0.0,2.0,169.0,0.0,0
55.0,0.0,4.0,0.0,1.0,117.0,0.0,2
49.0,1.0,3.0,0.0,2.0,126.0,3.0,1
74.0,0.0,2.0,0.0,2.0,121.0,1.0,0
54.0,0.0,3.0,0.0,0.0,163.0,1.0,0
54.0,1.0,4.0,0.0,2.0,116.0,2.0,3
56.0,1.0,4.0,1.0,2.0,103.0,0.0,2
46.0,1.0,4.0,0.0,2.0,144.0,0.0,1
49.0,0.0,2.0,0.0,0.0,162.0,0.0,0
42.0,1.0,2.0,0.0,0.0,162.0,0.0,0
45.0,1.0,4.0,0.0,2.0,185.0,0.0,0
34.0,1.0,1.0,0.0,2.0,174.0,0.0,0
57.0,0.0,4.0,0.0,2.0,159.0,1.0,0
71.0,0.0,3.0,1.0,2.0,130.0,1.0,0
49.0,1.0,3.0,0.0,0.0,139.0,3.0,3
54.0,1.0,2.0,0.0,0.0,156.0,0.0,0
59.0,1.0,4.0,0.0,0.0,162.0,1.0,2
57.0,1.0,3.0,0.0,2.0,150.0,1.0,1
61.0,1.0,4.0,0.0,0.0,140.0,1.0,2
39.0,1.0,4.0,0.0,0.0,140.0,0.0,3
61.0,0.0,4.0,0.0,2.0,146.0,0.0,1
56.0,1.0,4.0,1.0,2.0,144.0,1.0,1
52.0,1.0,1.0,0.0,2.0,190.0,0.0,0
43.0,0.0,4.0,1.0,2.0,136.0,0.0,2
67.0,1.0,4.0,0.0,2.0,108.0,3.0,2
67.0,1.0,4.0,0.0,2.0,129.0,2.0,1
37.0,1.0,3.0,0.0,0.0,187.0,0.0,0
41.0,0.0,2.0,0.0,2.0,172.0,0.0,0
56.0,1.0,2.0,0.0,0.0,178.0,0.0,0
62.0,0.0,4.0,0.0,2.0,160.0,2.0,3
57.0,0.0,4.0,0.0,0.0,163.0,0.0,0
63.0,1.0,4.0,0.0,2.0,147.0,1.0,2
53.0,1.0,4.0,1.0,2.0,155.0,0.0,1
57.0,1.0,4.0,0.0,0.0,148.0,0.0,0
56.0,0.0,2.0,0.0,2.0,153.0,0.0,0
56.0,1.0,3.0,1.0,2.0,142.0,1.0,2
44.0,1.0,2.0,0.0,0.0,173.0,0.0,0
52.0,1.0,3.0,1.0,0.0,162.0,0.0,0
57.0,1.0,3.0,0.0,0.0,174.0,0.0,0
48.0,1.0,2.0,0.0,0.0,168.0,0.0,1
54.0,1.0,4.0,0.0,0.0,160.0,0.0,0
48.0,0.0,3.0,0.0,0.0,139.0,0.0,0
50.0,1.0,2.0,0.0,0.0,160.0,0.0,0
54.0,0.0,2.0,0.0,0.0,150.0,1.0,0
54.0,0.0,2.0,0.0,1.0,155.0,1.0,0
54.0,0.0,2.0,0.0,0.0,130.0,1.0,0
54.0,0.0,2.0,0.0,0.0,130.0,1.0,0
54.0,1.0,1.0,0.0,0.0,137.0,1.0,0
54.0,1.0,2.0,0.0,0.0,142.0,1.0,0
54.0,1.0,2.0,0.0,0.0,154.0,1.0,0
54.0,1.0,2.0,0.0,0.0,110.0,1.0,0
54.0,1.0,2.0,0.0,1.0,130.0,1.0,0
59.0,1.0,4.0,0.0,0.0,140.0,0.0,0
47.0,1.0,4.0,0.0,0.0,98.0,0.0,1
56.0,1.0,4.0,0.0,0.0,120.0,0.0,1
59.0,1.0,4.0,0.0,1.0,131.0,0.0,0
CodePudding user response:
You are training your model with normalized data but predicting on data that is not normalized. X_test is normalized, so the predictions on it are as expected; dummyTest is not normalized. If you normalize dummyTest before feeding it to your neural network, like so:
dummyTest[0] -= mean
dummyTest[0] /= std
you will receive the expected output (1).
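For completeness, here is a minimal sketch of the end-to-end fix, assuming mean, std, and binary_model from the code above are still in scope; raw_sample is just a hypothetical new patient row (the same values as dummyTest):
import numpy as np
# raw feature values for one patient: age, sex, cp, fbs, restecg, thalach, ca
raw_sample = np.array([[67.0, 1.0, 4.0, 0.0, 2.0, 129.0, 2.0]])
# apply the same standardization that was applied to the training features
# (mean and std were computed from X before the train/test split)
scaled_sample = (raw_sample - mean) / std
prob = binary_model.predict(scaled_sample)    # sigmoid output in [0, 1]
binary_pred = int(np.round(prob[0][0]))       # threshold at 0.5
print(f"Probability: {prob[0][0]:.3f}, Binary Pred: {binary_pred}")
If you plan to reuse the saved .h5 model later, consider also persisting mean and std (for example with np.save) alongside it, since a reloaded model has no knowledge of the preprocessing statistics it was trained with.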