In [18]:
import glob
import shutil
import os
import random
from PIL import Image
import seaborn as sns
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split,StratifiedKFold
In [104]:
from tensorflow.keras.layers import Dense, Input, Activation, Flatten
from tensorflow.keras.layers import BatchNormalization,Add,Dropout
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.layers import LeakyReLU, ReLU, Conv2D, MaxPooling2D, BatchNormalization, Conv2DTranspose, UpSampling2D
from tensorflow.keras import callbacks
from tensorflow.keras import backend as K
In [6]:
# Build a per-mask-file table: lesion pixel coverage and a binary lesion label.
mask_filenames = glob.glob('../input/imagedata128/masks/*')

mask_df = pd.DataFrame(index=pd.Index(mask_filenames, name='filename'))
# Fraction of mask pixels that are "on": masks appear to be 128x128 images with
# foreground at 255, so sum / (128*128*255) lands in [0, 1] — TODO confirm.
# Vectorized build instead of the original per-row .loc assignment loop.
mask_df['mask_percentage'] = [
    np.array(Image.open(f)).sum() / (128 * 128 * 255) for f in mask_filenames
]
# Label is 1 when the mask contains any foreground pixels, else 0.
mask_df['labels'] = (mask_df.mask_percentage > 0).astype(int)
In [11]:
# Stratified train/validation split on whether each image's mask has a lesion.
train_valid_filenames = glob.glob('../input/imagedata128/train/*')
# BUG FIX: mask_df is indexed by *mask* paths produced by a separate glob()
# call, whose ordering is filesystem-dependent — passing mask_df.labels
# directly does not guarantee alignment with train_valid_filenames. Look each
# label up via the image's corresponding mask path instead.
stratify_labels = mask_df.loc[
    [f.replace('/train', '/masks') for f in train_valid_filenames], 'labels'
]
train_filenames, valid_filenames = train_test_split(
    train_valid_filenames,
    stratify=stratify_labels,
    test_size=0.1,
    random_state=10,
)
# Mirror each split into the matching mask paths.
mask_train_filenames = [f.replace('/train', '/masks') for f in train_filenames]
mask_valid_filenames = [f.replace('/train', '/masks') for f in valid_filenames]
In [13]:
# Materialize a fixed-size subset in memory: 1000 train / 100 validation
# grayscale 128x128 images, their masks, and a scalar lesion label per image.
train_x = np.zeros((1000, 128, 128))
valid_x = np.zeros((100, 128, 128))
train_mask_y = np.zeros((1000, 128, 128))
valid_mask_y = np.zeros((100, 128, 128))
train_y = np.zeros((1000))
valid_y = np.zeros((100))

# 1000 training images
for index, image in enumerate(train_filenames[:1000]):
    train_x[index] = np.array(Image.open(image))

# 100 validation images
for index, image in enumerate(valid_filenames[:100]):
    valid_x[index] = np.array(Image.open(image))

# 1000 training masks plus lesion label (mask_df is indexed by mask path)
for index, image in enumerate(mask_train_filenames[:1000]):
    train_mask_y[index] = np.array(Image.open(image))
    train_y[index] = mask_df.loc[image, 'labels']

# 100 validation masks plus lesion label
for index, image in enumerate(mask_valid_filenames[:100]):
    valid_mask_y[index] = np.array(Image.open(image))
    valid_y[index] = mask_df.loc[image, 'labels']
In [14]:
# Add the channel axis Keras expects: (N, H, W) -> (N, H, W, 1).
train_x = train_x.reshape(1000, 128, 128, 1)
valid_x = valid_x.reshape(100, 128, 128, 1)
# BUG FIX: the original reshaped train_x / valid_x into these names, silently
# replacing the mask targets with copies of the input images.
train_mask_y = train_mask_y.reshape(1000, 128, 128, 1)
valid_mask_y = valid_mask_y.reshape(100, 128, 128, 1)
In [ ]:
def contract_block(x, in_shape, out_shape):
    """One U-Net contracting step: strided 2x2 conv (halves H and W) -> ReLU -> BN.

    Parameters:
        x: input Keras tensor.
        in_shape: kept for interface compatibility; Conv2D infers input
            channels on its own, so it is unused here.
        out_shape: number of output filters for the convolution.

    Returns the transformed Keras tensor.
    """
    # BUG FIX: the original hard-coded filters=64 (ignoring out_shape) and had
    # no return statement, so callers always received None.
    x = Conv2D(filters=out_shape, kernel_size=(2, 2), strides=(2, 2),
               activation=None, padding="same")(x)
    x = ReLU()(x)
    x = BatchNormalization()(x)
    return x
In [105]:
def UNet(input_shape):
    """Build a small CNN that maps an image to a single linear output.

    NOTE(review): despite the name this is not a full U-Net — there are no
    skip connections, and the network ends in Flatten -> Dense(1, linear),
    producing one scalar per image rather than a mask. Per the original
    comment, this cell only checks that a very simple model trains at all.
    """
    inp = Input(shape=(input_shape))

    # --- Contracting stage 1 ---
    x = Conv2D(filters=64, kernel_size=3, strides=1, activation=None, padding="same")(inp)
    x = ReLU()(x)
    x = BatchNormalization()(x)
    x = Conv2D(filters=64, kernel_size=3, strides=1, activation=None, padding="same")(x)
    x = ReLU()(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D(2)(x)
    # 1x1 transposed conv used purely to widen the channel count to 128.
    x = Conv2DTranspose(filters=128, kernel_size=1)(x)

    # --- Contracting stage 2 ---
    x = Conv2D(filters=128, kernel_size=3, strides=1, activation=None, padding="same")(x)
    x = ReLU()(x)
    x = BatchNormalization()(x)
    x = Conv2D(filters=128, kernel_size=3, strides=1, activation=None, padding="same")(x)
    x = ReLU()(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D(2)(x)
    x = Conv2DTranspose(filters=256, kernel_size=1)(x)

    # --- Bottleneck ---
    x = Conv2D(filters=256, kernel_size=3, strides=1, activation=None, padding="same")(x)
    x = ReLU()(x)
    x = BatchNormalization()(x)

    # --- Expanding stage 1 ---
    x = Conv2D(filters=128, kernel_size=3, strides=1, activation=None, padding="same")(x)
    x = ReLU()(x)
    x = BatchNormalization()(x)
    x = UpSampling2D()(x)
    x = ReLU()(x)
    x = BatchNormalization()(x)

    # --- Regression head ---
    x = Flatten()(x)
    x = Dense(32, activation='relu')(x)
    x = Dense(16, activation='relu')(x)
    out = Dense(1, activation='linear')(x)

    model = Model(inputs=inp, outputs=[out])
    return model
In [106]:
# Instantiate the model for 128x128 single-channel inputs.
model = UNet(input_shape = (128, 128, 1))
# NOTE(review): train_y holds 0/1 lesion labels, but the head is linear and the
# loss is MAE — binary_crossentropy with a sigmoid output would be the usual
# choice for binary classification; confirm this is intentional.
model.compile(loss='mae', optimizer='adam')
In [107]:
# Print the layer-by-layer architecture and parameter counts.
model.summary()
In [79]:
# Train on the scalar lesion labels (not the masks) for 5 epochs.
# NOTE(review): train_mask_y / valid_mask_y are loaded earlier but never used
# by this call — the segmentation targets are currently unexercised.
history = model.fit(train_x, [train_y], validation_data=(valid_x, [valid_y]),epochs = 5, batch_size = 10, verbose = 1)
In [ ]: