I want to run hyperparameter tuning for the learning rate, but I get an error that I do not know how to solve.
I used the tensorflow.keras package.
import tensorflow as tf
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.datasets.mnist import load_data
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import (Flatten, BatchNormalization, Dropout, Dense)
from keras.wrappers.scikit_learn import KerasClassifier
(x_train_all, y_train_all), (x_test, y_test) = load_data()
x_train, x_valid, x_test = x_train_all[5000:]/255.0, x_train_all[:5000]/255.0, x_test/255.0
y_train, y_valid = y_train_all[5000:], y_train_all[:5000]
tf.cast(x_train, tf.float32)
tf.cast(x_valid, tf.float32)
tf.cast(x_test, tf.float32)
def my_model(learning_rate = 5e-3):
    model = Sequential([
        Flatten(input_shape = (28, 28)),
        BatchNormalization(),
        Dropout(rate = 0.2),
        Dense(300, activation = "elu", kernel_initializer = "he_normal"),
        Dropout(rate = 0.2),
        BatchNormalization(),
        Dense(300, activation = "elu", kernel_initializer = "he_normal"),
        Dropout(rate = 0.2),
        BatchNormalization(),
        Dense(10, activation = "softmax", kernel_initializer = "he_normal")])
    opt = Adam(lr = learning_rate)
    model.summary()
    model.compile(loss = "sparse_categorical_crossentropy", optimizer = opt, learning_rate = learning_rate, metrics = ["accuracy"])
    return model
from sklearn.model_selection import RandomizedSearchCV
keras_classifier = KerasClassifier(my_model)
param_distribs = {"learning_rate": [1e-5, 5e-5, 1e-4, 5e-4, 1e-3, 5e-3]}
rnd_search_cv = RandomizedSearchCV(keras_classifier, param_distribs, n_iter = 10, cv = 3)
rnd_search_cv.fit(x_train, y_train, epochs = 10, validation_data = (x_valid, y_valid))
I got the following ValueError:
ValueError: Session keyword arguments are not support during eager execution. You passed: {'learning_rate': 1e-05}
Posting the solution here (even though it is present in the comments section) for the benefit of the community.
The issue is resolved by removing learning_rate = learning_rate from model.compile. compile does not accept a learning_rate argument; any unrecognized keyword argument is forwarded as a session argument, which is not supported under eager execution. The learning rate is already set on the Adam optimizer, so passing it to compile is unnecessary.
The corrected code is below:
def my_model(learning_rate = 5e-3):
    model = Sequential([
        Flatten(input_shape = (28, 28)),
        BatchNormalization(),
        Dropout(rate = 0.2),
        Dense(300, activation = "elu", kernel_initializer = "he_normal"),
        Dropout(rate = 0.2),
        BatchNormalization(),
        Dense(300, activation = "elu", kernel_initializer = "he_normal"),
        Dropout(rate = 0.2),
        BatchNormalization(),
        Dense(10, activation = "softmax", kernel_initializer = "he_normal")])
    opt = Adam(lr = learning_rate)
    model.summary()
    model.compile(loss = "sparse_categorical_crossentropy", optimizer = opt, metrics = ["accuracy"])
    return model
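With the corrected my_model, the search from the question runs as expected. As a minimal sketch (assuming the same MNIST split and imports from the question), you can then read back the best learning rate and cross-validated score; n_iter is set to 6 here simply because only six candidate values are listed:

keras_classifier = KerasClassifier(my_model)
param_distribs = {"learning_rate": [1e-5, 5e-5, 1e-4, 5e-4, 1e-3, 5e-3]}
rnd_search_cv = RandomizedSearchCV(keras_classifier, param_distribs, n_iter = 6, cv = 3)
rnd_search_cv.fit(x_train, y_train, epochs = 10, validation_data = (x_valid, y_valid))
print(rnd_search_cv.best_params_)  # best learning rate found by the search
print(rnd_search_cv.best_score_)   # mean cross-validated accuracy of the best model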