Use an LSTM model to predict stock trends, manually tuning batch_size, activation and the number of internal units


# import the standard libraries
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import TimeSeriesSplit
from sklearn.metrics import mean_squared_error, r2_score
import matplotlib.dates as mdates
from sklearn import linear_model
from config import batch_size

training_set = pd.read_csv("stock_lstm.csv")
training_set = training_set.iloc[:, 1:2].values

dataset = pd.read_csv("stock_lstm.csv", na_values=['null'], index_col='Date', parse_dates=True, infer_datetime_format=True)
# plot the stock trend of recent years
dataset['Adj Close'].plot()
X = dataset.drop(['Adj Close', 'Close'], axis=1)
test = dataset
# set the feature columns and the target (dependent variable) vector
target_adj_close = pd.DataFrame(dataset['Adj Close'])
feature_columns = ['Open', 'High', 'Low', 'Volume']
scaler = MinMaxScaler()
feature_minmax_transform_data = scaler.fit_transform(test[feature_columns])
feature_minmax_transform = pd.DataFrame(columns=feature_columns, data=feature_minmax_transform_data, index=test.index)
feature_minmax_transform.head()
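MinMaxScaler rescales every feature column to the [0, 1] range, and the fitted scaler can later be reused on new rows with transform() rather than being refit. A minimal sanity check, assuming the code above has already run (new_rows is a hypothetical DataFrame, not part of the original):

# all scaled feature values should now lie between 0 and 1
print(feature_minmax_transform.min().min(), feature_minmax_transform.max().max())
# reuse the already-fitted scaler on later data instead of fitting it again
# new_scaled = scaler.transform(new_rows[feature_columns])  # new_rows is hypothetical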
# separate training set and test set
ts_split = TimeSeriesSplit(n_splits=10)
for train_index, test_index in ts_split.split(feature_minmax_transform):
    X_train, X_test = feature_minmax_transform[:len(train_index)], feature_minmax_transform[len(train_index):(len(train_index) + len(test_index))]
    y_train, y_test = target_adj_close[:len(train_index)].values.ravel(), target_adj_close[len(train_index):(len(train_index) + len(test_index))].values.ravel()

X_train = np.array(X_train)
X_test = np.array(X_test)

X_train_3 = X_train.reshape(X_train.shape[0], 1, X_train.shape[1])
X_test_3 = X_test.reshape(X_test.shape[0], 1, X_test.shape[1])
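Keras LSTM layers expect 3-D input of shape (samples, timesteps, features); the reshape above turns each row into a single-timestep sample with four features. A quick shape check, assuming the code above has run:

# LSTM input must be 3-D: (samples, timesteps, features)
print(X_train.shape)    # 2-D: (n_samples, n_features)
print(X_train_3.shape)  # 3-D: (n_samples, 1, n_features)
assert X_train_3.shape == (X_train.shape[0], 1, X_train.shape[1])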

from keras.models import Sequential
from keras.layers import Dense
import keras.backend as K
from keras.callbacks import EarlyStopping
from keras.optimizers import Adam
from keras.models import load_model
from keras.layers import LSTM

K.clear_session()
model_lstm = Sequential()
model_lstm.add(LSTM(16, input_shape=(1, X_train.shape[1]), activation='relu', return_sequences=False))
model_lstm.add(Dense(1))
model_lstm.compile(loss='mean_squared_error', optimizer='adam')
early_stop = EarlyStopping(monitor='loss', patience=5, verbose=1)
history_model_lstm = model_lstm.fit(X_train_3, y_train, epochs=200, batch_size=8, verbose=1, shuffle=False, callbacks=[early_stop])
# batch_size, activation and the number of LSTM units are the hyperparameters tuned by hand; a tuning sketch follows
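The title mentions adjusting batch_size, the activation and the number of internal units manually, but the post does not show that loop. One possible sketch of such a manual search is given below; the candidate values and the helper name build_lstm are assumptions, not part of the original code:

# hypothetical manual search over units, activation and batch_size
def build_lstm(units, activation):
    m = Sequential()
    m.add(LSTM(units, input_shape=(1, X_train.shape[1]), activation=activation, return_sequences=False))
    m.add(Dense(1))
    m.compile(loss='mean_squared_error', optimizer='adam')
    return m

results = []
for units in [8, 16, 32]:                 # candidate internal unit counts (assumed)
    for activation in ['relu', 'tanh']:   # candidate activations (assumed)
        for bs in [4, 8, 16]:             # candidate batch sizes (assumed)
            m = build_lstm(units, activation)
            stop = EarlyStopping(monitor='loss', patience=5, verbose=0)
            m.fit(X_train_3, y_train, epochs=50, batch_size=bs, verbose=0, shuffle=False, callbacks=[stop])
            results.append((units, activation, bs, r2_score(y_test, m.predict(X_test_3))))
# keep the combination with the highest R2 on the held-out split
print(max(results, key=lambda t: t[3]))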

# evaluate the model
y_pred_test_lstm = model_lstm.predict(X_test_3)
y_train_pred_lstm = model_lstm.predict(X_train_3)
print("The R2 score on the Train set is:\t{:0.3f}".format(r2_score(y_train, y_train_pred_lstm)))
r2_train = r2_score(y_train, y_train_pred_lstm)
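The same metric can be reported for the held-out split of the last fold, and mean_squared_error is already imported, so the test RMSE comes out directly (a small addition reusing the variables defined above):

# score the test split of the last fold as well
r2_test = r2_score(y_test, y_pred_test_lstm)
rmse_test = np.sqrt(mean_squared_error(y_test, y_pred_test_lstm))
print("The R2 score on the Test set is:\t{:0.3f}".format(r2_test))
print("The RMSE on the Test set is:\t{:0.3f}".format(rmse_test))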

# draw the model forecast figure
plt.plot(y_test, label='True', color='grey')
plt.plot(y_pred_test_lstm, label='LSTM_pred', color='orange')
plt.title("LSTM's Prediction")
plt.xlabel('features')
plt.ylabel('adj_Close')
plt.legend()
plt.show()