Warning: Tensorflow: Can save best model only with f1 available, skipping

  keras, python, tensorflow

I am doing Hyper parameter tuning using Keras tuners. I have made a custom objective function-

from keras import backend as K

def recall_m(y_true, y_pred):
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    recall = true_positives / (possible_positives + K.epsilon())
    return recall

def precision_m(y_true, y_pred):
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
    precision = true_positives / (predicted_positives + K.epsilon())
    return precision

def f1_m(y_true, y_pred):
    precision = precision_m(y_true, y_pred)
    recall = recall_m(y_true, y_pred)
    f1_m = 2*((precision*recall)/(precision+recall+K.epsilon())) 
    return f1_m

The compile function is –

model.compile(
            optimizer=keras.optimizers.Adam(
                hp.Float(
                    "learning_rate",
                    min_value=1e-6,
                    max_value=1e-2,
                    sampling="LOG",
                    default=1e-3,
                )
            ),
            loss="binary_crossentropy",
            metrics=[f1_m],
        )

This is where Iam calling the tuners

import time

from kerastuner.tuners import (
    BayesianOptimization,
    Hyperband,
    RandomSearch,
)
from loguru import logger
from pathlib import Path



SEED = 1

NUM_CLASSES = 1
INPUT_SHAPE = (25,1)

N_EPOCH_SEARCH = 40
HYPERBAND_MAX_EPOCHS = 40
MAX_TRIALS = 20
EXECUTION_PER_TRIAL = 2
BAYESIAN_NUM_INITIAL_POINTS = 1


def run_hyperparameter_tuning():
    

    hypermodel = CNNHyperModel(input_shape=INPUT_SHAPE, num_classes=NUM_CLASSES)

    output_dir = Path("drive/MyDrive/New_Tuning_upload_latest_3/")
    tuners = define_tuners(
        hypermodel, directory=output_dir, project_name="simple_cnn_tuning_part3"
    )

    results = []
    for tuner in tuners:

        elapsed_time, loss, accuracy = tuner_evaluation(
            tuner, X_test, X_train, y_test, y_train
        )
        logger.info(
            f"Elapsed time = {elapsed_time:10.4f} s, accuracy = {accuracy}, loss = {loss}"
        )
        results.append([elapsed_time, loss, accuracy])
    logger.info(results)


def tuner_evaluation(tuner, X_test, X_train, y_test, y_train):
    set_gpu_config()

    # Overview of the task
    tuner.search_space_summary()

    # Performs the hyperparameter tuning
    logger.info("Start hyperparameter tuning")
    search_start = time.time()
    tuner.search(X_train, y_train, epochs=N_EPOCH_SEARCH, validation_split=0.1)
    search_end = time.time()
    elapsed_time = search_end - search_start

    # Show a summary of the search
    tuner.results_summary()

    # Retrieve the best model.
    best_model = tuner.get_best_models(num_models=1)[0]

    # Evaluate the best model.
    loss, accuracy = best_model.evaluate(X_test, y_test)
    return elapsed_time, loss, accuracy


def define_tuners(hypermodel, directory, project_name):
    random_tuner = RandomSearch(
        hypermodel,
        objective= Objective("f1", direction='max'),
        seed=SEED,
        max_trials=MAX_TRIALS,
        executions_per_trial=EXECUTION_PER_TRIAL,
        directory=f"{directory}_random_search",
        project_name=project_name,
    )
    hyperband_tuner = Hyperband(
        hypermodel,
        max_epochs=HYPERBAND_MAX_EPOCHS,
        objective= Objective("f1", direction='max'),
        seed=SEED,
        executions_per_trial=EXECUTION_PER_TRIAL,
        directory=f"{directory}_hyperband",
        project_name=project_name,
    )
    
    return [random_tuner, hyperband_tuner]


if __name__ == "__main__":
    run_hyperparameter_tuning()

When I am training for every epoch the warning is coming

Source: Python Questions

LEAVE A COMMENT