custom callbacks
traderpedroso
I'm trying to use a custom callback:
callbacks = [
    EarlyStopping(monitor='val_accuracy',
                  min_delta=1e-3,
                  patience=5,
                  mode='max',
                  restore_best_weights=True,
                  verbose=1),
]
It runs fine, but the dashboard does not show the epochs, and the running state only shows: "Optimization results will appear as soon as they are available."
Sharing the full code below, starting with the Training code:
from keras.callbacks import EarlyStopping

early_stopping_callback = [
    EarlyStopping(monitor='val_accuracy',
                  min_delta=1e-3,
                  patience=5,
                  mode='max',
                  restore_best_weights=True,
                  verbose=1),
]

# A function that builds train and validation sequences.
# You can define your custom data augmentation based on the original train and validation sequences
#   build_train_sequence_with_batch_size - function that returns train data sequence depending on
#                                          batch size
#   build_validation_sequence_with_batch_size - function that returns validation data sequence depending on
#                                          batch size
def build_sequences(build_train_sequence_with_batch_size, build_validation_sequence_with_batch_size):
    batch_size = 64
    train_sequence = build_train_sequence_with_batch_size(batch_size)
    validation_sequence = build_validation_sequence_with_batch_size(batch_size)
    return train_sequence, validation_sequence

# A function that contains a call to fit a model.
#   model - compiled model
#   train_sequence - train data sequence, returned in build_sequences
#   validation_sequence - validation data sequence, returned in build_sequences
#   base_callbacks - a list of Dataiku callbacks, that are not to be removed.
#                    User callbacks can be added to this list
def fit_model(model, train_sequence, validation_sequence, base_callbacks):
    epochs = 50
    model.fit_generator(train_sequence,
                        epochs=epochs,
                        callbacks=early_stopping_callback,
                        validation_data=(validation_sequence),
                        shuffle=True)
And for the model architecture:
from keras.layers import Input, Dense
from keras.models import Model
from keras import Sequential
from keras.layers import Embedding, Bidirectional, LSTM, Dense
from keras.optimizers import Adam

# Define the keras architecture of your model in 'build_model' and return it.
# Compilation must be done in 'compile_model'.
#   input_shapes - dictionary of shapes per input as defined in features handling
#   n_classes - For classification, number of target classes
def build_model(input_shapes, n_classes=None):

    # This input will receive all the preprocessed features
    # sent to 'main'
    input_main = Input(shape=input_shapes["main"], name="main")
    #input_main = Input(shape=(50), name="name_preprocessed")

    num_alphabets = 27
    name_length = 50
    embedding_dim = 256

    x = Sequential()
    x = Embedding(num_alphabets, embedding_dim, input_length=name_length)(input_main)
    x = Bidirectional(LSTM(units=128, recurrent_dropout=0.2, dropout=0.2))(x)
    x = Dense(1, activation="sigmoid")(x)
    predictions = Dense(1, activation='sigmoid')(x)

    # The 'inputs' parameter of your model must contain the
    # full list of inputs used in the architecture
    model = Model(inputs=[input_main], outputs=predictions)

    return model

# Compile your model and return it
#   model - model defined in 'build_model'
def compile_model(model):
    # The loss function depends on the type of problem you solve.
    # 'binary_crossentropy' is appropriate for a binary classification.
    model.compile(loss='binary_crossentropy',
                  optimizer=Adam(learning_rate=0.001),
                  metrics=['accuracy'])
    return model
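For reference, a quick way to sanity-check this architecture outside of Dataiku is to call both functions with a hypothetical input shape and print the summary. This is only a sketch: the (50,) shape below is an assumption taken from the commented-out Input(shape=(50)) line, not something the platform guarantees.

# Minimal sketch, run outside of Dataiku, assuming the 'main' input is a
# sequence of 50 integer-encoded characters (see the commented-out Input line above).
if __name__ == "__main__":
    model = build_model({"main": (50,)})
    model = compile_model(model)
    model.summary()  # prints layer output shapes and parameter counts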
Operating system used: Linux Ubuntu
Best Answer
I found the solution: I didn't know that base_callbacks is fundamental for saving the model, so the fix is to append the custom callback to the base list!
from keras.callbacks import EarlyStopping

def build_sequences(build_train_sequence_with_batch_size, build_validation_sequence_with_batch_size):
    batch_size = 64
    train_sequence = build_train_sequence_with_batch_size(batch_size)
    validation_sequence = build_validation_sequence_with_batch_size(batch_size)
    return train_sequence, validation_sequence

def fit_model(model, train_sequence, validation_sequence, base_callbacks):
    epochs = 1
    early_stopping_callback = EarlyStopping(monitor='val_accuracy',
                                            min_delta=1e-3,
                                            patience=5,
                                            mode='max',
                                            restore_best_weights=True,
                                            verbose=1)
    base_callbacks.append(early_stopping_callback)
    model.fit_generator(train_sequence,
                        epochs=epochs,
                        callbacks=base_callbacks,
                        validation_data=(validation_sequence),
                        shuffle=True)
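As a side note, fit_generator is deprecated in recent TensorFlow/Keras releases, where fit accepts Sequence objects directly. If your code environment is on such a version, the same fit_model can be written as below; this is only a sketch of that variant, and the appended-callback logic is unchanged.

from keras.callbacks import EarlyStopping

def fit_model(model, train_sequence, validation_sequence, base_callbacks):
    epochs = 1
    early_stopping_callback = EarlyStopping(monitor='val_accuracy',
                                            min_delta=1e-3,
                                            patience=5,
                                            mode='max',
                                            restore_best_weights=True,
                                            verbose=1)
    # Keep the Dataiku base callbacks (they feed the dashboard and save the model)
    # and append the custom callback on top of them.
    base_callbacks.append(early_stopping_callback)
    # model.fit accepts a keras.utils.Sequence directly in recent Keras versions.
    model.fit(train_sequence,
              epochs=epochs,
              callbacks=base_callbacks,
              validation_data=validation_sequence,
              shuffle=True)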
Answers
CoreyS
Thank you for sharing your solution with us, @traderpedroso!