# Import the necessary libraries
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, AveragePooling2D, Dense, Dropout, Flatten
from tensorflow.keras.datasets import mnist
from tensorflow.keras.optimizers import Adam
from sklearn.metrics import accuracy_score
# Define the structure of the model (LeNet-5 architecture)
def LeNet5():
    model = Sequential()
    model.add(Conv2D(filters=6, kernel_size=(5, 5), strides=(1, 1), activation="tanh", input_shape=(32, 32, 1)))
    model.add(AveragePooling2D((2, 2)))
    model.add(Conv2D(filters=16, kernel_size=(5, 5), strides=(1, 1), activation="tanh"))
    model.add(AveragePooling2D((2, 2)))
    model.add(Flatten())
    model.add(Dense(units=120, activation="tanh"))
    model.add(Dropout(0.2))
    model.add(Dense(units=84, activation="tanh"))
    model.add(Dropout(0.2))
    model.add(Dense(units=10, activation="softmax"))
    adam = Adam(learning_rate=0.01)
    # Categorical cross-entropy is the appropriate loss for 10-class one-hot labels
    model.compile(optimizer=adam, loss='categorical_crossentropy', metrics=['accuracy'])
    return model
# Function to prepare the data
def prepare_dataSet(x, y):
    # Pad the images from 28x28 to 32x32 (LeNet-5 expects 32x32 inputs)
    x = np.pad(x, ((0, 0), (2, 2), (2, 2)))
    # Reshape to add the single channel dimension
    x = x.reshape((x.shape[0], 32, 32, 1))
    # One-hot encode the labels
    y = to_categorical(y)
    # Normalize the pixel values to [0, 1]
    x = x.astype('float32')
    x /= 255.0
    return x, y
# Function to display the accuracy of our model
def displayAccuracy(history):
    plt.plot(history.history['accuracy'])
    plt.plot(history.history['val_accuracy'])
    plt.title('model accuracy')
    plt.ylabel('accuracy')
    plt.xlabel('epoch')
    plt.legend(['train', 'validation'], loc='upper left')
    plt.show()
# Function to display the loss of our model
def displayLoss(history):
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    plt.title('model loss')
    plt.ylabel('loss')
    plt.xlabel('epoch')
    plt.legend(['train', 'validation'], loc='upper left')
    plt.show()
if __name__ == "__main__":
    # Build the model and display its architecture
    model = LeNet5()
    print("The architecture of the model is :")
    model.summary()
    # Load the dataset
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # Prepare the data
    x_train, y_train = prepare_dataSet(x_train, y_train)
    x_test, y_test = prepare_dataSet(x_test, y_test)
    # Train the model, holding out 33% of the training data for validation
    history = model.fit(x_train, y_train, validation_split=0.33, epochs=10, batch_size=100)
    # Display the model accuracy
    displayAccuracy(history)
    # Display the model loss
    displayLoss(history)
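    # A minimal sketch of a final evaluation step: score the trained model on the
    # held-out test set using the accuracy_score helper imported above. The variable
    # names y_pred / y_true are illustrative; labels are recovered from the softmax
    # outputs and the one-hot targets with argmax.
    y_pred = np.argmax(model.predict(x_test), axis=1)
    y_true = np.argmax(y_test, axis=1)
    print("Test accuracy :", accuracy_score(y_true, y_pred))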