|
- import os
- import numpy as np
- import argparse
- from collections import OrderedDict
-
# Encode which data-augmentation transforms are enabled as a short tag string.
# RRe: random_resize; RCo: random_color; HF: horizontal_flip; RA: random_affine; RE: random_elastic;
# RRo: random_rotation; RCr: random_crop; LE: label_encoding;
def get_transformString(trans_train):
    """Build a compact tag string describing the enabled augmentations.

    For each known transform, appends '_is<TAG>' when its full name occurs
    in *trans_train* (a list of names, or a string searched by substring),
    and '_no<TAG>' otherwise.  Tag order is fixed and matches the legend
    above.  'to_tensor' / 'normalize' are deliberately not encoded.

    :param trans_train: iterable/str of transform names to check.
    :return: concatenated tag string, e.g. '_isRRe_noRCo_..._isLE'.
    """
    # (full transform name, short tag) in the fixed output order.
    tag_table = (
        ('random_resize', 'RRe'),
        ('random_color', 'RCo'),
        ('horizontal_flip', 'HF'),
        ('random_affine', 'RA'),
        ('random_elastic', 'RE'),
        ('random_rotation', 'RRo'),
        ('random_crop', 'RCr'),
        ('label_encoding', 'LE'),
        # ('RandomGaussianBlur', 'RG') and ('Expand', 'EX') were once encoded
        # here as well but are intentionally disabled.
    )
    pieces = []
    for full_name, tag in tag_table:
        state = 'is' if full_name in trans_train else 'no'
        pieces.append('_' + state + tag)
    return ''.join(pieces)
-
-
class Options:
    """Configuration holder for model / training / testing / post-processing.

    Settings are grouped into plain dicts (``model``, ``train``, ``test``,
    ``post``, ``transform``).  ``parse()`` builds an argparse CLI whose
    defaults come from these values and writes the parsed values back.
    ``self.string`` accumulates short tags describing the configuration and
    feeds into the experiment directory name.
    """

    def __init__(self, isTrain):
        """Build the default configuration.

        :param isTrain: True -> parse() exposes the training CLI;
                        False -> parse() exposes the testing CLI.
        """
        self.dataset = 'ade20k'#'MultiOrgan' # MoNuSeg # 'MultiOrgan' # MultiOrgan_Val
        self.isTrain = isTrain
        # self.index = '1'
        # --- model hyper-parameters --- #
        self.model = dict()
        self.model['in_c'] = 3 # input channel
        self.model['out_c'] = 3 # output channel
        self.model['n_layers'] =6# 8 # number of layers in a block (original was 6)
        self.model['growth_rate'] = 24#24 # growth_rate
        self.model['drop_rate'] = 0.1
        self.model['compress_ratio'] = 0.5
        self.model['dilations'] = [1, 2, 4, 8, 16, 4,1]#[1, 2, 4, 8, 16,8, 4,2,1] # dilation factor for each block
        self.model['is_hybrid'] = True#
        self.model['layer_type'] = 'BottleneckLayer'#'basic'#
        self.model['mean_std'] = 'mean_std' # mean_std_imagenet mean_std_MoNuSeg_12 mean_std_MoNuSeg_300
        # Short tag for the mean/std file, used in the experiment name.
        # NOTE(review): self.string is only initialised for these three
        # values; any other 'mean_std' leaves it undefined and the string
        # concatenations below would raise AttributeError.
        if self.model['mean_std'] == 'mean_std':
            self.string = 'ms'
        elif self.model['mean_std'] == 'mean_std_MoNuSeg_12':
            self.string = 'ms12'
        elif self.model['mean_std'] == 'mean_std_MoNuSeg_300':
            self.string = 'ms3'
        self.model['add_weightMap'] = 1 # 1 == enabled (was True; changed to int)

        self.model['multi_class'] = True # whether to use multi-class labels (False/True)

        self.string = self.string + '_addWM' if self.model['add_weightMap'] == 1 else self.string + '_noWM'

        self.string = self.string + '_3c' if self.model['multi_class'] == True else self.string + '_2c'
        self.string = self.string + '_addLE2'

        # FullNet #FCN_pooling'FullNet_Unet'
        self.model['modelName'] = 'FullNet'#E'FullNet'#

        # --- training params --- #
        self.train = dict()
        self.train['num_epochs'] = 1000 if self.dataset == 'GlaS' else 300 # number of training epochs
        self.train['input_size'] = 208 # input size of the image (original = 208)
        self.train['batch_size'] =4 # batch size (original = 8)
        self.train['val_overlap'] = 80 # overlap size of patches for validation (original = 80)
        self.train['lr'] = 0.0005 if self.dataset == 'GlaS' else 0.001 # initial learning rate
        self.train['weight_decay'] = 1e-4 # weight decay
        self.train['log_interval'] = 50 # iterations to print training results
        self.train['workers'] = 4 # number of workers to load images
        self.train['gpu'] = [0] # select gpu devices
        self.train['alpha'] = 1#0.0 # weight for variance term
        self.train['optimizer'] = 'adam' # define optimizer
        self.string = self.string + '_' + self.train['optimizer']
        self.train['validation'] = 1 # doing validation
        self.string = self.string + '_isVal' if self.train['validation'] == 1 else self.string + '_noVal'

        self.train['checkpoint_freq'] = 100 # epoch to save checkpoints
        # --- resume training --- #
        self.train['start_epoch'] = 0 # start epoch
        self.train['checkpoint'] = '' # checkpoint to resume training or evaluation
        # available transform names:
        # 'random_resize' 'random_color' 'horizontal_flip' 'random_affine' 'random_elastic'
        # 'random_rotation' 'random_crop' 'label_encoding' 'to_tensor' 'normalize'
        self.train['trans_train'] = ['random_resize','random_color','random_affine','random_elastic','horizontal_flip', 'random_rotation', 'random_crop', 'label_encoding', 'to_tensor', 'normalize','RandomGaussianBlur']#,'Expand'] # controls which transforms are applied
        self.transform_string = str(self.train['trans_train'])
        self.transform_str = get_transformString(self.train['trans_train'])
        self.string = self.string + self.transform_str
        # Historical experiment-name suffixes:
        # _noAf _noAfnoElnoRs _a0_noAfnoRs _a0_noRs300 _a0_noAfnoEl300 _a0_noAfnoRs300 _a0_noAfnoRsnoEl300 _a0_isColor_noRs_noAf_noEl _a0_isColor_noRs_noAf _a0_isColor_noRs_noAf_noEl
        self.string = self.string + '_a0' if self.train['alpha'] == 0 else self.string + '_a' + str(
            int(self.train['alpha']))

        stringFirst = ''
        # stringFirst = 'Auto_'
        self.model['exp_filename'] = stringFirst + self.model['modelName']
        """'_' + 'is' + str(
        self.train['input_size']) + 'ol' + str(self.train['val_overlap']) \
        + 'bs' + str(self.train['batch_size']) + '_e' + str(
        self.train['num_epochs']) + '_' + self.string"""

        self.train['data_dir'] = './endoscope400/{:s}'.format(self.dataset) # path to data
        self.train['save_dir'] = './experiments/{:s}/{:s}'.format(self.dataset, self.model['exp_filename'])

        # --- data transform --- #
        self.transform = dict()
        # defined in parse function

        # --- post processing --- #
        self.post = dict()
        self.post['min_area'] = 100 if self.dataset == 'GlaS' else 20 # minimum area for an object
        self.post['radius'] = 4 if self.dataset == 'GlaS' else 1

        # --- test parameters --- #
        self.test = dict()
        self.test['filename'] = 'validation' # test #test2 # selects the test split folder
        self.test['epoch'] = 'best'
        self.test['gpu'] = [0]
        self.test[ 'groundtruth'] = 1
        # if groundtruth == 1 use ground truth; otherwise use label_img (simulation result)
        self.test['img_dir'] = './endoscope400/{:s}/images/{:s}'.format(self.dataset,
                                                                        self.test['filename']) # test_same
        self.test['label_dir'] = './endoscope400/{:s}/labels/{:s}'.format(self.dataset, self.test[
            'filename']) # labels_instance
        self.test['annotation_dir'] = './endoscope400/xml' # Annotations
        self.test['tta'] = True # True #False # test-time augmentation
        self.test['save_flag'] = True # True #False
        self.test['patch_size'] = 0 if self.dataset == 'GlaS' else 208# 224 # original = else 208
        self.test['overlap'] = 50 # original = 80
        self.test['savefilename'] = self.test['filename'] + '_' + self.test['epoch'] + '_minarea' + str(self.post['min_area']) + '_ra' + str(self.post['radius'])
        if (self.test['tta'] != True):
            self.test['savefilename'] = self.test['savefilename'] + '_notta'
        self.test['save_dir'] = './experiments/{:s}/{:s}/{:s}'.format(self.dataset, self.model['exp_filename'],
                                                                      self.test['savefilename'])
        self.test['model_path'] = './experiments/{:s}/{:s}/checkpoints/checkpoint_{:s}.pth.tar'.format(self.dataset,
                                                                                                       self.model[
                                                                                                           'exp_filename'],
                                                                                                       self.test[
                                                                                                           'epoch'])

    def parse(self):
        """ Parse the options, replace the default value if there is a new input """
        parser = argparse.ArgumentParser(description='')
        if self.isTrain:
            parser.add_argument('--dataset', type=str, default=self.dataset,
                                help='input data set')
            parser.add_argument('--model-name', type=str, default=self.model['modelName'],
                                help='input model Name for training')
            # NOTE(review): type=list applied to a CLI string splits it into
            # single characters; only safe when the default list is used.
            parser.add_argument('--gpu', type=list, default=self.train['gpu'], help='GPUs for training')
            parser.add_argument('--epochs', type=int, default=self.train['num_epochs'],
                                help='number of epochs to train')
            parser.add_argument('--input-size', type=int, default=self.train['input_size'],
                                help='input size for training')
            parser.add_argument('--val-overlap', type=int, default=self.train['val_overlap'],
                                help='validation overlap size for training')
            parser.add_argument('--batch-size', type=int, default=self.train['batch_size'],
                                help='input batch size for training')
            parser.add_argument('--weight-map', type=int, default=self.model['add_weightMap'],
                                help='if add the weight map')
            parser.add_argument('--alpha', type=float, default=self.train['alpha'],
                                help='The weight for the variance term in loss')

            parser.add_argument('--lr', type=float, default=self.train['lr'], help='learning rate')
            parser.add_argument('--log-interval', type=int, default=self.train['log_interval'],
                                help='how many batches to wait before logging training status')
            parser.add_argument('--data-dir', type=str, default=self.train['data_dir'],
                                help='directory of training data')
            parser.add_argument('--save-dir', type=str, default=self.train['save_dir'],
                                help='directory to save training results') # revised (replaced with exp_filename)
            parser.add_argument('--checkpoint-path', type=str, default=self.train['checkpoint'],
                                help='directory to load a checkpoint')
            parser.add_argument('--transform-train', type=str, default=self.transform_str,
                                help='control transform')
            parser.add_argument('--exp-filename', type=str, default=self.model['exp_filename'],
                                help='exp_filename')
            # NOTE(review): the help texts of --validation and --optimizer look
            # copy-pasted from --input-size (left unchanged: runtime strings).
            parser.add_argument('--validation', type=int, default=self.train['validation'],
                                help='input size for training')
            parser.add_argument('--optimizer', type=str, default=self.train['optimizer'],
                                help='input size for training')
            args = parser.parse_args()
            self.dataset = args.dataset
            self.model['modelName'] = args.model_name
            self.train['num_epochs'] = args.epochs
            self.train['input_size'] = args.input_size
            self.train['val_overlap'] = args.val_overlap
            self.train['batch_size'] = args.batch_size
            self.model['add_weightMap'] = args.weight_map
            self.train['alpha'] = args.alpha

            self.train['lr'] = args.lr
            self.train['log_interval'] = args.log_interval
            self.train['gpu'] = list(args.gpu) # without list() it would run on all GPUs in parallel
            self.train['checkpoint'] = args.checkpoint_path
            self.train['data_dir'] = './endoscope400/{:s}'.format(self.dataset) # args.data_dir
            self.train['img_dir'] = '{:s}/images'.format(self.train['data_dir'])
            self.train['label_dir'] = '{:s}/labels'.format(self.train['data_dir'])
            self.train['weight_map_dir'] = '{:s}/weight_maps'.format(self.train['data_dir'])
            # NOTE: from here on trans_train holds the encoded tag string
            # (e.g. '_isRRe_noRCo...'), NOT the original list of names; the
            # '_isXX' membership tests further down depend on this.
            self.train['trans_train'] = args.transform_train
            self.transform_string = str(self.train['trans_train'])
            self.transform_str = self.train['trans_train']
            self.train['validation'] = int(args.validation)
            self.train['optimizer'] = str(args.optimizer)
            self.model[
                'exp_filename'] = args.exp_filename # for automation (and brevity) this is replaced by the lines above (but the copy in the test branch cannot be replaced yet, since many parameters used to build exp_filename are not passed into test)
            # self.train['save_dir'] = args.save_dir  # for automation, replaced by the line below
            self.train['save_dir'] = './experiments/{:s}/{:s}'.format(self.dataset, self.model['exp_filename'])

            if not os.path.exists(self.train['save_dir']):
                os.makedirs(self.train['save_dir'], exist_ok=True)

            # define data transforms for training
            self.transform['train'] = OrderedDict()
            self.transform['val'] = OrderedDict()
            if self.dataset == 'GlaS':
                self.transform['train'] = {
                    'scale': 208 + 30,
                    'horizontal_flip': True,
                    'random_affine': 0.3,
                    'random_elastic': [6, 15],
                    'random_rotation': 90,
                    'random_crop': self.train['input_size'],
                    'label_encoding': 2,
                    'to_tensor': 1,
                    'normalize': np.load('{:s}/{:s}.npy'.format(self.train['data_dir'], self.model['mean_std']))
                }
                self.transform['val'] = {
                    'scale': 208,
                    'label_encoding': 2,
                    'to_tensor': 1,
                    'normalize': np.load('{:s}/{:s}.npy'.format(self.train['data_dir'], self.model['mean_std']))
                }
            else:
                '''
                self.transform['train'] = {
                    # 'random_resize': [0.8, 1.25],
                    'random_color': 1,
                    'horizontal_flip': True,
                    # 'random_affine': 0.3,
                    'random_elastic': [6, 15],  # [6, 15],
                    'random_rotation': 90,
                    'random_crop': self.train['input_size'],
                    'label_encoding': 1,
                    'to_tensor': 1,
                    'normalize': np.load('{:s}/{:s}.npy'.format(self.train['data_dir'], self.model['mean_std']))
                }
                '''
                # Enable each transform only when its tag is present in the
                # encoded trans_train string (see get_transformString).
                if '_isRRe' in self.train['trans_train']:
                    self.transform['train']['random_resize'] = [0.8, 1.25]
                if '_isRCo' in self.train['trans_train']:
                    self.transform['train']['random_color'] = 1
                if '_isHF' in self.train['trans_train']:
                    self.transform['train']['horizontal_flip'] = True
                # if '_isRG' in self.train['trans_train']:
                #     self.transform['train']['RandomGaussianBlur'] = True
                # if '_isEX' in self.train['trans_train']:
                #     self.transform['train']['Expand'] = True
                if '_isRA' in self.train['trans_train']:
                    self.transform['train']['random_affine'] = 0.3
                if '_isRE' in self.train['trans_train']:
                    self.transform['train']['random_elastic'] = [6, 15]
                if '_isRRo' in self.train['trans_train']:
                    self.transform['train']['random_rotation'] = 90
                if '_isRCr' in self.train['trans_train']:
                    self.transform['train']['random_crop'] = self.train['input_size']
                if '_isLE' in self.train['trans_train']:
                    self.transform['train']['label_encoding'] = 1



                # to_tensor / normalize are always applied regardless of tags.
                # if '_toTensor' in self.train['trans_train']:
                self.transform['train']['to_tensor'] = 1
                # if '_isNorm' in self.train['trans_train']:
                self.transform['train']['normalize'] =np.load('{:s}/{:s}.npy'.format(self.train['data_dir'],self.model['mean_std']))
                # np.load('/root/workspace/gland/fullnet_he/Nuclei_Segmentation/data/MultiOrgan/mean_std_MoNuSeg.npy')


                self.transform['val'] = {
                    'label_encoding': 1,
                    'to_tensor': 1,
                    'normalize': np.load('{:s}/{:s}.npy'.format(self.train['data_dir'], self.model['mean_std']))
                }

        else:
            parser.add_argument('--dataset', type=str, default=self.dataset,
                                help='input data set')
            parser.add_argument('--model-name', type=str, default=self.model['modelName'],
                                help='input model Name for training')
            parser.add_argument('--patch-size', type=int, default=self.test['patch_size'],
                                help='patch size for testing')
            parser.add_argument('--test-overlap', type=int, default=self.test['overlap'],
                                help='test overlap size for testing')
            parser.add_argument('--epoch', type=str, default=self.test['epoch'],
                                help='select the model used for testing')
            # NOTE(review): type=bool on argparse treats any non-empty string
            # (including 'False') as True; only the default is reliable.
            parser.add_argument('--save-flag', type=bool, default=self.test['save_flag'],
                                help='flag to save the network outputs and predictions')
            parser.add_argument('--gpu', type=list, default=self.test['gpu'], help='GPUs for training')
            parser.add_argument('--img-dir', type=str, default=self.test['img_dir'], help='directory of test images')
            parser.add_argument('--label-dir', type=str, default=self.test['label_dir'], help='directory of labels')
            parser.add_argument('--save-dir', type=str, default=self.test['save_dir'],
                                help='directory to save test results')
            parser.add_argument('--model-path', type=str, default=self.test['model_path'],
                                help='train model to be evaluated')

            parser.add_argument('--test-filename', type=str, default=self.test['filename'],
                                help='test filename')
            parser.add_argument('--exp-filename', type=str, default=self.model['exp_filename'],
                                help='exp_filename')

            parser.add_argument('--min-area', type=int, default=self.post['min_area'],
                                help='min area')
            parser.add_argument('--radius', type=int, default=self.post['radius'],
                                help='radius')
            # NOTE(review): help text 'radius' looks copy-pasted from --radius.
            parser.add_argument('--groundtruth', type=int, default=self.test['groundtruth'],
                                help='radius')

            args = parser.parse_args()
            self.dataset = args.dataset
            self.model['modelName'] = args.model_name
            self.test['patch_size'] = args.patch_size
            self.test['overlap'] = args.test_overlap
            self.test['epoch'] = args.epoch
            self.test['gpu'] = list(args.gpu)
            self.test['save_flag'] = args.save_flag

            self.test['filename'] = args.test_filename
            self.model['exp_filename'] = args.exp_filename
            self.post['min_area'] = args.min_area
            self.post['radius'] = args.radius
            self.test['groundtruth'] = int(args.groundtruth)

            self.save_testfilename = self.test['filename'] + '_gt' + str(self.test['groundtruth']) + '_' + self.test[
                'epoch'] + '_minarea' + \
                                     str(self.post['min_area']) + '_ra' + str(self.post['radius'])
            # self.test['img_dir'] = args.img_dir  # for automation, replaced by the line below
            self.test['img_dir'] = './endoscope400/{:s}/images/{:s}'.format(self.dataset, self.test['filename'])
            # self.test['label_dir'] = args.label_dir  # for automation, replaced by the line below
            self.test['label_dir'] = './endoscope400/{:s}/labels/{:s}'.format(self.dataset, self.test['filename'])
            self.test['annotation_dir'] = './endoscope400/xml' # Annotations

            # self.test['save_dir'] = args.save_dir  # for automation, replaced by the line below
            self.test['save_dir'] = './experiments/{:s}/{:s}/{:s}'.format(self.dataset, self.model['exp_filename'],
                                                                          self.save_testfilename)

            # self.test['model_path'] = args.model_path  # for automation, replaced by the line below
            self.test['model_path'] = './experiments/{:s}/{:s}/checkpoints/checkpoint_{:s}.pth.tar'.format(self.dataset,
                                                                                                           self.model[
                                                                                                               'exp_filename'],
                                                                                                           self.test[
                                                                                                               'epoch'])

            if not os.path.exists(self.test['save_dir']):
                os.makedirs(self.test['save_dir'], exist_ok=True)

            self.transform['test'] = OrderedDict()
            if self.dataset == 'GlaS':
                self.transform['test'] = {
                    'scale': 208,
                    'to_tensor': 1,
                    'normalize': np.load('{:s}/{:s}.npy'.format(self.train['data_dir'], self.model['mean_std']))
                }
            else:
                self.transform['test'] = {
                    'to_tensor': 1,
                    'normalize': np.load('{:s}/{:s}.npy'.format(self.train['data_dir'], self.model['mean_std']))
                }

    def print_options(self, logger=None):
        """Print (or log, when a logger is given) the formatted option summary."""
        message = '\n'
        message += self._generate_message_from_options()
        if not logger:
            print(message)
        else:
            logger.info(message)

    def save_options(self):
        """Write the option summary to train_options.txt / test_options.txt
        inside the corresponding save directory."""
        if self.isTrain:
            filename = '{:s}/train_options.txt'.format(self.train['save_dir'])
        else:
            filename = '{:s}/test_options.txt'.format(self.test['save_dir'])
        message = self._generate_message_from_options()
        file = open(filename, 'w')
        file.write(message)
        file.close()

    def _generate_message_from_options(self):
        """Format all option groups into a human-readable summary string.

        Groups relevant to the current mode (train vs. test) are expanded;
        scalar attributes outside any group are printed one per line.
        """
        message = ''
        message += '# {str:s} Options {str:s} #\n'.format(str='-' * 25)
        train_groups = ['model', 'train', 'transform']
        test_groups = ['model', 'test', 'post', 'transform']
        cur_group = train_groups if self.isTrain else test_groups

        for group, options in self.__dict__.items():
            if group not in train_groups + test_groups:
                message += '{:>20}: {:<35}\n'.format(group, str(options))
            elif group in cur_group:
                message += '\n{:s} {:s} {:s}\n'.format('*' * 15, group, '*' * 15)
                if group == 'transform':
                    # Only show the transform sets that match the current mode.
                    for name, val in options.items():
                        if (self.isTrain and name != 'test') or (not self.isTrain and name == 'test'):
                            message += '{:s}:\n'.format(name)
                            for t_name, t_val in val.items():
                                t_val = str(t_val).replace('\n', ',\n{:22}'.format(''))
                                message += '{:>20}: {:<35}\n'.format(t_name, str(t_val))
                else:
                    for name, val in options.items():
                        message += '{:>20}: {:<35}\n'.format(name, str(val))
        message += '# {str:s} End {str:s} #\n'.format(str='-' * 26)
        return message
|