VGG16 feature extraction and training on the Medico 2018 medical dataset
# -*- coding: utf-8 -*-
'''Import libraries'''
import keras
from keras.preprocessing import image
from keras.applications.vgg16 import VGG16
from keras.applications.vgg16 import preprocess_input
from sklearn.model_selection import train_test_split
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout
import matplotlib.pyplot as plt

# Label indices 0, 1, 2, 3 correspond to these classes, in order.
classes = ["blurry-nothing", "colon-clear", "polyps", "esophagitis"]
# Images for each class live under path + class_name + "/"
path = "Medico_2018_development_set/"
images_per_class = 150

# Convolutional base only (include_top=False), used as a fixed feature extractor.
vgg_model = VGG16(include_top=False)
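# Optional sanity check (a sketch added here, not part of the original gist):
# with 224x224 RGB inputs the convolutional base emits (1, 7, 7, 512) feature
# maps, i.e. 25088 values once flattened -- the input size assumed by the
# classifier further down.
print(vgg_model.predict(np.zeros((1, 224, 224, 3))).shape)  # (1, 7, 7, 512)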
def extract_features():
    '''Run every image through the VGG16 convolutional base and
    return (features, labels) as NumPy arrays.'''
    vgg16_feature_list = []
    labels = []
    for j, class_ in enumerate(classes):
        for i in range(1, images_per_class + 1):
            current_path = path + class_ + "/" + str(i) + ".jpg"
            img = image.load_img(current_path, target_size=(224, 224))
            img_data = image.img_to_array(img)
            img_data = np.expand_dims(img_data, axis=0)  # add the batch dimension
            img_data = preprocess_input(img_data)
            ith_feature = vgg_model.predict(img_data)  # shape (1, 7, 7, 512)
            vgg16_feature_list.append(ith_feature.flatten())  # 25088 values
            labels.append(j)
    return (np.array(vgg16_feature_list), np.array(labels))
dataset, labels = extract_features()
# With 4 classes and 150 images per class:
# dataset.shape == (600, 25088), labels.shape == (600,)
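# Feature extraction is the slow step, so the arrays can be cached to disk
# and reloaded on later runs instead of recomputed. A minimal sketch -- the
# .npy file names here are arbitrary choices, not part of the original gist.
np.save("vgg16_features.npy", dataset)
np.save("vgg16_labels.npy", labels)
# On a later run: dataset = np.load("vgg16_features.npy"), and likewise for labels.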
# -------- Prepare data --------
# (stratify=labels would keep the class proportions identical across the split)
x_train, x_test, y_train, y_test = train_test_split(
    dataset, labels, test_size=0.25, random_state=4)
num_classes = len(classes)
# One-hot encode the integer labels
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# -------- Classifier on the extracted features --------
input_shape = (25088,)  # 7 * 7 * 512 flattened VGG16 features
model = Sequential()
model.add(Dense(256, activation='relu', input_shape=input_shape))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])
history = model.fit(x_train, y_train, epochs=20,
                    validation_data=(x_test, y_test))
# Note: newer Keras versions record these under 'accuracy' / 'val_accuracy'.
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()

plt.figure()
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
# Since validation_data above is this same split, these match the final val_* values.
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
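# Beyond aggregate accuracy, a per-class breakdown is informative for a
# 4-class medical task. A minimal sketch using scikit-learn's standard
# confusion_matrix; this step is an added suggestion, not from the original gist.
from sklearn.metrics import confusion_matrix
y_pred = model.predict(x_test)
# Rows are true classes, columns predicted classes, ordered as in `classes`.
print(confusion_matrix(y_test.argmax(axis=1), y_pred.argmax(axis=1)))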