Early stopping during GridSearch does not stop LSTM training

Question

I am developing an LSTM network with Keras. I am optimizing the hyperparameters with GridSearchCV, and since I don't want to grid-search over the number of epochs, I decided to add an EarlyStopping callback. Unfortunately, training never stops, even when I set min_delta very high and patience very low. It looks like the training phase simply ignores the early-stopping callback.

Could it be that GridSearchCV and early stopping are incompatible?
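For reference, outside of GridSearchCV the same callback settings do interrupt a plain model.fit call. Below is a minimal sketch, assuming toy random data and an illustrative layer size (neither comes from the code further down):

import numpy as np
from keras.models import Sequential
from keras.layers import LSTM, Dense
from keras.callbacks import EarlyStopping

# toy data: 100 samples, 1 time step, 4 features (shapes are illustrative)
X = np.random.rand(100, 1, 4)
y = np.random.rand(100, 1)

model = Sequential()
model.add(LSTM(8, input_shape=(1, 4)))
model.add(Dense(1, activation="linear"))
model.compile(optimizer="adam", loss="mean_squared_error")

# with an absurdly large min_delta, no epoch ever counts as an improvement,
# so training should stop after only a few epochs instead of running all 1000
early_stop = EarlyStopping(monitor="loss", mode="min", min_delta=1000, patience=1, verbose=1)
model.fit(X, y, epochs=1000, batch_size=10, callbacks=[early_stop])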

My code is below:

import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import LSTM, Dense
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error
from sklearn.metrics import make_scorer

from sklearn.model_selection import GridSearchCV
from time import time

#for earlystop implementation
from keras.callbacks import EarlyStopping

def create_model(optimizer, hl1_nodes, input_shape):
    # creation of the NN - Electric Load
    # LSTM layers followed by other LSTM layer must have the parameter "return_sequences" set at True
    model = Sequential()
    model.add(LSTM(units = hl1_nodes , input_shape=input_shape, return_sequences=False))
    model.add(Dense(1, activation="linear"))  # output layer
    model.compile(optimizer=optimizer, loss='mean_squared_error', metrics=['mean_absolute_error'])
    model.summary()
    return model

def LSTM_1HL_method(X_train, X_test, Y_train, Y_test):

    # normalize X and Y data
    mmsx = MinMaxScaler()
    mmsy = MinMaxScaler()

    X_train = mmsx.fit_transform(X_train)
    X_test = mmsx.transform(X_test)
    Y_train = mmsy.fit_transform(Y_train)
    Y_test = mmsy.transform(Y_test)  

    # NN for Electric Load
    # LSTM  Input Shape
    time_steps = 1  # number of time-steps you are feeding a sequence (?)
    inputs_numb = X_train.shape[1]  # number of inputs
    input_shape=(time_steps, inputs_numb)

    model = KerasRegressor(build_fn=create_model,verbose=1,input_shape=input_shape)

    #GridSearch code
    start=time()
    optimizers = ['adam']
    epochs = np.array([1000])
    hl1_nodes = np.array([32, 64, 128])
    btcsz = np.array([1,X_train.shape[0]])

    earlyStop=[EarlyStopping(monitor="loss",verbose=1,mode='min',min_delta=1000,patience=1)] #early stop setting


    param_grid = dict(optimizer=optimizers, hl1_nodes=hl1_nodes, nb_epoch=epochs,batch_size=btcsz, callbacks=[earlyStop])
    scoring = make_scorer(mean_squared_error) #in order to use a metric as a scorer
    grid = GridSearchCV(estimator=model, param_grid=param_grid, scoring = scoring)

    # NN training
    X_train = X_train.reshape(X_train.shape[0], 1, X_train.shape[1])
    grid_result = grid.fit(X_train, Y_train)

    # Predictions - Electric Load
    Yhat_train = grid_result.predict(X_train)
    X_test = X_test.reshape(X_test.shape[0], 1, X_test.shape[1])
    Yhat_test = grid_result.predict(X_test)


    # Denormalization - Electric Load
    Yhat_train=Yhat_train.reshape(-1,1)
    Yhat_test=Yhat_test.reshape(-1,1)
    Yhat_train = mmsy.inverse_transform(Yhat_train)
    Yhat_test = mmsy.inverse_transform(Yhat_test)


    return Yhat_train, Yhat_test
Tags: lstm, exit, gridsearchcv
1 Answer

I usually use mouse-position input to stop training early in a safe way.

(This is not a well-known approach, but I'll put it here anyway.)

import keras

def queryMousePosition():
    # Windows-only: read the current cursor position through the Win32 API
    from ctypes import windll, Structure, c_long, byref

    class POINT(Structure):
        _fields_ = [("x", c_long), ("y", c_long)]

    pt = POINT()
    windll.user32.GetCursorPos(byref(pt))
    return pt.x, pt.y


class TerminateOnFlag(keras.callbacks.Callback):
    # checked after every batch: stop training once the cursor reaches the left screen edge
    def on_batch_end(self, batch, logs=None):
        mouse_x, mouse_y = queryMousePosition()
        if mouse_x < 10:
            self.model.stop_training = True


callbacks = [TerminateOnFlag()]

model.fit_generator(..., callbacks=callbacks, ...)
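Two caveats with this trick: windll.user32.GetCursorPos makes it Windows-only, and because the check runs in on_batch_end it is evaluated after every batch, so dragging the cursor to the left edge of the screen (x < 10) interrupts whichever fit call happens to be running at that moment.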