Mask R-CNN fine-tuning example: Python cheat code

mask rcnn

A simple Mask R-CNN head fine-tuning example

In [ ]:
import os 
import sys
import random
import math
import numpy as np
import cv2
import matplotlib.pyplot as plt
from tqdm import tqdm
import pandas as pd 
import glob
from sklearn.model_selection import KFold
from PIL import Image
In [113]:
from mrcnn.config import Config
from mrcnn import utils
import mrcnn.model as modellib
from mrcnn import visualize
from mrcnn.model import log
In [114]:
import os
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np

Resizing the images

In [116]:
mask = os.listdir('../maskrcnn/mask/')
train = os.listdir('../maskrcnn/train/')
In [117]:
def save_image(from_path, target_path):
    images = os.listdir(from_path)
    for i in images:
        if i.lower().endswith(('.png', '.jpg', '.jpeg')):
            image = Image.open(os.path.join(from_path, i))
            # Image.ANTIALIAS was removed in Pillow 10; use Image.LANCZOS there
            image = image.resize((128, 128), Image.ANTIALIAS)
            # Appending '.jpg' keeps the original extension, so the resized
            # files end up named like '000b55559b0244d7.jpg.jpg'
            ext = '.jpg'
            image.save(os.path.join(target_path, i + ext))
In [118]:
save_image('../maskrcnn/mask/','../maskrcnn/resized/mask/' )
In [119]:
save_image('../maskrcnn/train/','../maskrcnn/resized/train/' )
In [153]:
mask_resized = os.listdir('../maskrcnn/resized/mask/')
train_resized = os.listdir('../maskrcnn/resized/train/')
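
A quick optional check: the sketch below assumes the resized masks share the training image filenames (an assumption, not from the original notebook) and counts training images that have no matching mask.

In [ ]:
# Hedged sanity check: assumes mask files carry the same names as the
# resized training images.
missing = set(train_resized) - set(mask_resized)
print(len(train_resized), 'train images,', len(mask_resized), 'masks,',
      len(missing), 'images without a matching mask')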

Building the dataset

In [172]:
class DetectorDataset(utils.Dataset):

    def load_dataset(self, image_filenames):
        # Register the classes (background is class 0 by default)
        self.add_class("openimage", 1, "hi")
        self.add_class("openimage", 2, "bi")

        # Register every resized training image with its path and size
        for i in image_filenames:
            print(i)
            self.add_image("openimage", image_id=i,
                           path='../maskrcnn/resized/train/' + i,
                           width=128, height=128)

    # load_mask is left commented out here; without it the base class returns
    # an empty mask. A possible implementation is sketched after the output below.
#     def load_mask(self, image_id):
#         return np.zeros((128,128,1))
#         return np.array(Image.open('../maskrcnn/resized/mask/' + image_id))

dataset = DetectorDataset()
dataset.load_dataset(train_resized)
dataset.prepare()
000b55559b0244d7.jpg.jpg
000a24efa1736f93.jpg.jpg
000b4796f7fb05df.jpg.jpg
0000fc29545be065.jpg.jpg
000a2cf2ac516a22.jpg.jpg
000b72e1446f8849.jpg.jpg
000aabd8888cfde7.jpg.jpg
000bb2f7132013dc.jpg.jpg
000b373fbda8eb47.jpg.jpg
000b9007a01f7405.jpg.jpg
...
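
Training with real masks requires implementing load_mask, which the class above leaves commented out. A minimal sketch, assuming each resized mask is a single-channel image under ../maskrcnn/resized/mask/ with the same filename as its training image and that every instance belongs to class 1 ('hi'); the subclass name is hypothetical and used only for illustration.

In [ ]:
class DetectorDatasetWithMasks(DetectorDataset):
    # Hypothetical subclass for illustration only.

    def load_mask(self, image_id):
        # image_id is the dataset's internal index; the filename was stored
        # as 'id' by add_image above.
        info = self.image_info[image_id]
        mask = np.array(Image.open('../maskrcnn/resized/mask/' + info['id']).convert('L'))
        # Mask R-CNN expects a boolean [height, width, instance_count] array
        # plus one class id per instance: here a single instance of class 1 ('hi').
        masks = (mask > 0)[:, :, np.newaxis]
        class_ids = np.array([1], dtype=np.int32)
        return masks, class_ids

With this in place, the dataset would be built with DetectorDatasetWithMasks() instead of DetectorDataset().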
In [170]:
ROOT_DIR = os.path.abspath("../../")

# Directory to save logs and trained model
MODEL_DIR = os.path.join(ROOT_DIR, "logs")

# Local path to trained weights file
COCO_MODEL_PATH = "mask_rcnn_coco.h5"
# Download COCO trained weights from Releases if needed
if not os.path.exists(COCO_MODEL_PATH):
    utils.download_trained_weights(COCO_MODEL_PATH)
In [145]:
class ShapesConfig(Config):
    """Configuration for training on the toy shapes dataset.
    Derives from the base Config class and overrides values specific
    to the toy shapes dataset.
    """
    # Give the configuration a recognizable name
    NAME = "shapes"

    # Train on 1 GPU and 8 images per GPU. We can put multiple images on each
    # GPU because the images are small. Batch size is 8 (GPUs * images/GPU).
    GPU_COUNT = 1
    IMAGES_PER_GPU = 8

    # Number of classes (including background). The dataset above registers
    # only 2 foreground classes ('hi', 'bi'), so 1 + 2 would match it exactly;
    # 1 + 3 is kept here from the original toy-shapes example.
    NUM_CLASSES = 1 + 3

    # Use small images for faster training. Set the limits of the small side
    # the large side, and that determines the image shape.
    IMAGE_MIN_DIM = 128
    IMAGE_MAX_DIM = 128

    # Use smaller anchors because our image and objects are small
    RPN_ANCHOR_SCALES = (8, 16, 32, 64, 128)  # anchor side in pixels

    # Reduce training ROIs per image because the images are small and have
    # few objects. Aim to allow ROI sampling to pick 33% positive ROIs.
    TRAIN_ROIS_PER_IMAGE = 32

    # Use a small epoch since the data is simple
    STEPS_PER_EPOCH = 100

    # use small validation steps since the epoch is small
    VALIDATION_STEPS = 5
    
config = ShapesConfig()
config.display()
Configurations:
BACKBONE                       resnet101
BACKBONE_STRIDES               [4, 8, 16, 32, 64]
BATCH_SIZE                     8
BBOX_STD_DEV                   [0.1 0.1 0.2 0.2]
COMPUTE_BACKBONE_SHAPE         None
DETECTION_MAX_INSTANCES        100
DETECTION_MIN_CONFIDENCE       0.7
DETECTION_NMS_THRESHOLD        0.3
FPN_CLASSIF_FC_LAYERS_SIZE     1024
GPU_COUNT                      1
GRADIENT_CLIP_NORM             5.0
IMAGES_PER_GPU                 8
IMAGE_MAX_DIM                  128
IMAGE_META_SIZE                16
IMAGE_MIN_DIM                  128
IMAGE_MIN_SCALE                0
IMAGE_RESIZE_MODE              square
IMAGE_SHAPE                    [128 128   3]
LEARNING_MOMENTUM              0.9
LEARNING_RATE                  0.001
LOSS_WEIGHTS                   {'rpn_class_loss': 1.0, 'rpn_bbox_loss': 1.0, 'mrcnn_class_loss': 1.0, 'mrcnn_bbox_loss': 1.0, 'mrcnn_mask_loss': 1.0}
MASK_POOL_SIZE                 14
MASK_SHAPE                     [28, 28]
MAX_GT_INSTANCES               100
MEAN_PIXEL                     [123.7 116.8 103.9]
MINI_MASK_SHAPE                (56, 56)
NAME                           shapes
NUM_CLASSES                    4
POOL_SIZE                      7
POST_NMS_ROIS_INFERENCE        1000
POST_NMS_ROIS_TRAINING         2000
ROI_POSITIVE_RATIO             0.33
RPN_ANCHOR_RATIOS              [0.5, 1, 2]
RPN_ANCHOR_SCALES              (8, 16, 32, 64, 128)
RPN_ANCHOR_STRIDE              1
RPN_BBOX_STD_DEV               [0.1 0.1 0.2 0.2]
RPN_NMS_THRESHOLD              0.7
RPN_TRAIN_ANCHORS_PER_IMAGE    256
STEPS_PER_EPOCH                100
TOP_DOWN_PYRAMID_SIZE          256
TRAIN_BN                       False
TRAIN_ROIS_PER_IMAGE           32
USE_MINI_MASK                  True
USE_RPN_ROIS                   True
VALIDATION_STEPS               5
WEIGHT_DECAY                   0.0001


In [146]:
# Directory to save logs and trained model (overrides MODEL_DIR set above)
MODEL_DIR = "./model/"
In [147]:
model = modellib.MaskRCNN(mode="training", config=config,model_dir=MODEL_DIR)
In [148]:
# Load the COCO weights but skip the head layers, whose shapes depend on
# NUM_CLASSES and therefore don't match this config
model.load_weights(COCO_MODEL_PATH, by_name=True,
                   exclude=["mrcnn_class_logits", "mrcnn_bbox_fc",
                            "mrcnn_bbox", "mrcnn_mask"])
In [ ]:
# layers='heads' trains only the randomly initialized head layers; the same
# dataset is reused here as the validation set for simplicity
model.train(dataset, dataset, learning_rate=config.LEARNING_RATE, epochs=1, layers='heads')
Starting at epoch 0. LR=0.001

Checkpoint Path: ./model/shapes20190907T1144/mask_rcnn_shapes_{epoch:04d}.h5
Selecting layers to train
fpn_c5p5               (Conv2D)
fpn_c4p4               (Conv2D)
fpn_c3p3               (Conv2D)
fpn_c2p2               (Conv2D)
fpn_p5                 (Conv2D)
fpn_p2                 (Conv2D)
fpn_p3                 (Conv2D)
fpn_p4                 (Conv2D)
In model:  rpn_model
    rpn_conv_shared        (Conv2D)
    rpn_class_raw          (Conv2D)
    rpn_bbox_pred          (Conv2D)
mrcnn_mask_conv1       (TimeDistributed)
mrcnn_mask_bn1         (TimeDistributed)
mrcnn_mask_conv2       (TimeDistributed)
mrcnn_mask_bn2         (TimeDistributed)
mrcnn_class_conv1      (TimeDistributed)
mrcnn_class_bn1        (TimeDistributed)
mrcnn_mask_conv3       (TimeDistributed)
mrcnn_mask_bn3         (TimeDistributed)
mrcnn_class_conv2      (TimeDistributed)
mrcnn_class_bn2        (TimeDistributed)
mrcnn_mask_conv4       (TimeDistributed)
mrcnn_mask_bn4         (TimeDistributed)
mrcnn_bbox_fc          (TimeDistributed)
mrcnn_mask_deconv      (TimeDistributed)
mrcnn_class_logits     (TimeDistributed)
mrcnn_mask             (TimeDistributed)
Epoch 1/1
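
After the head layers have converged, the matterport samples typically continue by fine-tuning all layers at a lower learning rate. A sketch of that follow-up step; the target epoch count here is only an illustrative choice.

In [ ]:
# Fine-tune the whole network at a reduced learning rate. 'epochs' is the
# final epoch number in this API, so epochs=2 runs one more epoch after the
# head-only training above.
model.train(dataset, dataset,
            learning_rate=config.LEARNING_RATE / 10,
            epochs=2, layers='all')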