Step-by-Step Code Implementation of **Keras Tuner**
A walkthrough of the code implementation of **Keras Tuner**.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import tensorflow
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Dropout
from tensorflow.keras.layers import Flatten
from tensorflow.keras.datasets import fashion_mnist
print(tensorflow.__version__)
(X_train,y_train),(X_test,y_test) = fashion_mnist.load_data()
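Fashion-MNIST ships with 60,000 training images and 10,000 test images, each a 28x28 grayscale image labelled with one of 10 clothing classes. A quick sanity check of the loaded arrays (an optional step, not part of the original walkthrough):

print(X_train.shape, y_train.shape)  # (60000, 28, 28) (60000,)
print(X_test.shape, y_test.shape)    # (10000, 28, 28) (10000,)
print(np.unique(y_train))            # labels 0-9, one per clothing class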
# plot the first 25 training images in a 5x5 grid
plt.figure(figsize=(12,8))
for i in range(25):
    # define subplot
    plt.subplot(5, 5, i+1)
    # plot raw pixel data
    plt.imshow(X_train[i], cmap=plt.get_cmap('gray'))
# show the figure
plt.show()
# scale pixel values to the [0, 1] range
X_train = X_train/255
X_test = X_test/255
model = Sequential([
    # flattening the 28x28 images into 784-element vectors
    Flatten(input_shape=(28,28)),
    # adding first hidden layer
    Dense(256, activation='relu'),
    # adding second hidden layer
    Dense(128, activation='relu'),
    # adding third hidden layer
    Dense(64, activation='relu'),
    # adding output layer, one unit per class
    Dense(10, activation='softmax')
])
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# fitting the model
model.fit(X_train, y_train, epochs=10)
# baseline test loss and accuracy, for comparison with the tuned models
model.evaluate(X_test, y_test)
'''
Now let's tune the following hyperparameters in the model:
1. Number of hidden layers
2. Number of neurons in each hidden layer
3. Learning rate
4. Activation function
'''
!pip install keras-tuner
from tensorflow import keras
from keras_tuner import RandomSearch
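Keras Tuner exposes each search dimension through the hp object passed to the model-building function: hp.Int samples an integer from a range, hp.Choice picks from a fixed list, and hp.Float samples a continuous value. A minimal standalone illustration of the three methods (the names 'units', 'learning_rate', and 'dropout' are just placeholders):

from keras_tuner import HyperParameters
hp = HyperParameters()
units = hp.Int('units', min_value=32, max_value=512, step=32)  # integer range
lr = hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])     # fixed set of values
rate = hp.Float('dropout', min_value=0.0, max_value=0.5)       # continuous range
print(units, lr, rate)  # on a fresh HyperParameters object each returns its default (the minimum / first value)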
def build_model(hp): # hp holds the hyperparameters for the current trial
    model = Sequential()
    model.add(Flatten(input_shape=(28,28)))
    # Providing a range for the number of neurons in the hidden layer
    model.add(Dense(units=hp.Int('num_of_neurons', min_value=32, max_value=512, step=32),
                    activation='relu'))
    # Output layer
    model.add(Dense(10, activation='softmax'))
    # Compiling the model, tuning the learning rate as well
    model.compile(optimizer=keras.optimizers.Adam(hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])),
                  loss='sparse_categorical_crossentropy', metrics=['accuracy'])
    return model
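A quick way to confirm the function builds and compiles without errors is to call it with a fresh HyperParameters object, which uses the default of every range (an optional check, not part of the original flow):

from keras_tuner import HyperParameters
test_model = build_model(HyperParameters())  # defaults: 32 neurons, learning rate 1e-2
test_model.summary()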
tuner = RandomSearch(build_model,
                     objective='val_accuracy',
                     max_trials=5,
                     executions_per_trial=3,
                     directory='tuner1',
                     project_name='Clothing')
# RandomSearch will fit (5 trials * 3 executions) = 15 models, each for 10 epochs
# the search space here has 2 hyperparameters: number of neurons and learning rate
tuner.search_space_summary()
tuner.search(X_train, y_train, epochs = 10, validation_data = (X_test, y_test))
tuner.results_summary()
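Once the search finishes, the best model and the hyperparameter values that produced it can be retrieved (standard Keras Tuner calls; the variable names are our own):

# retrieve the best model and the winning hyperparameters
best_model = tuner.get_best_models(num_models=1)[0]
best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]
print(best_hps.get('num_of_neurons'), best_hps.get('learning_rate'))
best_model.evaluate(X_test, y_test)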
'''
Now let's tune some more parameters.
This time we also provide a range for the number of hidden
layers in the model, between 2 and 20.
'''
def build_model(hp): # hp holds the hyperparameters for the current trial
    model = Sequential()
    model.add(Flatten(input_shape=(28,28)))
    # Providing the range for the number of hidden layers
    for i in range(hp.Int('num_of_layers', 2, 20)):
        # Providing a range for the number of neurons in each hidden layer
        model.add(Dense(units=hp.Int('num_of_neurons' + str(i), min_value=32, max_value=512, step=32),
                        activation='relu'))
    model.add(Dense(10, activation='softmax')) # Output layer
    # Compiling the model, tuning the learning rate
    model.compile(optimizer=keras.optimizers.Adam(hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])), # tuning learning rate
                  loss='sparse_categorical_crossentropy', metrics=['accuracy'])
    return model
tuner = RandomSearch(build_model,
                     objective='val_accuracy',
                     max_trials=5,
                     executions_per_trial=3,
                     directory='project',
                     project_name='Clothing')
# the search space now has 3 hyperparameters: number of layers, neurons per layer, and learning rate
tuner.search_space_summary()
tuner.search(X_train, y_train, epochs = 10, validation_data = (X_test, y_test))
tuner.results_summary()
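The hyperparameter list at the start also mentioned the activation function, which both runs above keep fixed at 'relu'. A minimal sketch of tuning it with hp.Choice, reusing the same pattern (our own variation with an assumed set of choices, not part of the original runs):

def build_model(hp):
    model = Sequential()
    model.add(Flatten(input_shape=(28,28)))
    # tune one activation function shared by all hidden layers (assumed choices)
    activation = hp.Choice('activation', values=['relu', 'tanh', 'elu'])
    for i in range(hp.Int('num_of_layers', 2, 20)):
        model.add(Dense(units=hp.Int('num_of_neurons' + str(i), min_value=32, max_value=512, step=32),
                        activation=activation))
    model.add(Dense(10, activation='softmax'))
    model.compile(optimizer=keras.optimizers.Adam(hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])),
                  loss='sparse_categorical_crossentropy', metrics=['accuracy'])
    return model

With this version the search space grows to 4 hyperparameters: layers, neurons per layer, learning rate, and activation.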