2017-11-14 15:10:51 +08:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
from __future__ import division
|
|
|
|
|
from __future__ import print_function
|
|
|
|
|
|
|
|
|
|
import os
|
|
|
|
|
import sys
|
|
|
|
|
import math
|
|
|
|
|
import random
|
|
|
|
|
import logging
|
2017-12-05 19:59:25 +08:00
|
|
|
import pickle
|
2017-11-14 15:10:51 +08:00
|
|
|
import numpy as np
|
2018-03-14 11:45:36 +08:00
|
|
|
from image_iter import FaceImageIter
|
|
|
|
|
from image_iter import FaceImageIterList
|
2017-11-14 15:10:51 +08:00
|
|
|
import mxnet as mx
|
|
|
|
|
from mxnet import ndarray as nd
|
|
|
|
|
import argparse
|
|
|
|
|
import mxnet.optimizer as optimizer
|
2017-12-08 13:54:45 +08:00
|
|
|
sys.path.append(os.path.join(os.path.dirname(__file__), 'common'))
|
|
|
|
|
import face_image
|
2017-11-29 17:04:05 +08:00
|
|
|
sys.path.append(os.path.join(os.path.dirname(__file__), 'eval'))
|
2017-11-29 17:12:26 +08:00
|
|
|
sys.path.append(os.path.join(os.path.dirname(__file__), 'symbols'))
|
2017-11-30 19:02:04 +08:00
|
|
|
import fresnet
|
|
|
|
|
import finception_resnet_v2
|
2017-12-06 19:46:33 +08:00
|
|
|
import fmobilenet
|
2018-01-19 21:12:17 +08:00
|
|
|
import fmobilenetv2
|
2018-04-23 12:21:10 +08:00
|
|
|
import fmobilefacenet
|
2017-12-07 15:09:34 +08:00
|
|
|
import fxception
|
2017-12-07 16:43:28 +08:00
|
|
|
import fdensenet
|
2017-12-11 13:03:18 +08:00
|
|
|
import fdpn
|
2017-12-23 19:08:22 +08:00
|
|
|
import fnasnet
|
2018-02-02 21:45:53 +08:00
|
|
|
import spherenet
|
2017-12-05 19:59:25 +08:00
|
|
|
import verification
|
2017-11-14 15:10:51 +08:00
|
|
|
import sklearn
|
2018-03-14 11:45:36 +08:00
|
|
|
#sys.path.append(os.path.join(os.path.dirname(__file__), 'losses'))
|
|
|
|
|
#import center_loss
|
2017-11-14 15:10:51 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level logger: emit training progress at INFO level and above.
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# Parsed command-line options; populated in main() and read by train_net().
args = None
|
2017-11-14 15:10:51 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class AccMetric(mx.metric.EvalMetric):
    """Classification accuracy computed on the softmax branch of the model.

    The training symbol groups its outputs as [embedding, softmax]; this
    metric looks only at the softmax scores (``preds[1]``).
    """

    def __init__(self):
        # Class scores live along axis 1 of the prediction tensor.
        self.axis = 1
        super(AccMetric, self).__init__(
            'acc', axis=self.axis,
            output_names=None, label_names=None)
        self.losses = []
        self.count = 0

    def update(self, labels, preds):
        """Accumulate correct-prediction counts from one mini-batch."""
        self.count += 1
        # Keep only the softmax output; the first output is the embedding.
        softmax_outputs = [preds[1]]
        for truth, scores in zip(labels, softmax_outputs):
            # Collapse per-class scores into a predicted class index when
            # the score tensor is not already label-shaped.
            if scores.shape != truth.shape:
                scores = mx.ndarray.argmax(scores, axis=self.axis)
            predicted = scores.asnumpy().astype('int32').flatten()
            expected = truth.asnumpy()
            # Some datasets ship 2-D labels; the class id is in column 0.
            if expected.ndim == 2:
                expected = expected[:, 0]
            expected = expected.astype('int32').flatten()
            assert expected.shape == predicted.shape
            self.sum_metric += (predicted.flat == expected.flat).sum()
            self.num_inst += len(predicted.flat)
|
2017-12-13 22:07:07 +08:00
|
|
|
class LossValueMetric(mx.metric.EvalMetric):
  """Report the raw loss value emitted as the last output of the symbol.

  Used when the loss type routes an explicit scalar loss through the
  output group instead of relying on softmax accuracy.
  """

  def __init__(self):
    self.axis = 1
    super(LossValueMetric, self).__init__(
        'lossvalue', axis=self.axis,
        output_names=None, label_names=None)
    # Unused accumulator kept for parity with AccMetric.
    self.losses = []

  def update(self, labels, preds):
    """Accumulate the scalar loss from one mini-batch.

    Assumes preds[-1] is a length-1 loss array and preds[-2] carries the
    ground-truth labels -- TODO confirm against the symbol's output order.
    """
    loss = preds[-1].asnumpy()[0]
    self.sum_metric += loss
    self.num_inst += 1.0
    # Fetched for debugging only (forces a device sync); see print below.
    gt_label = preds[-2].asnumpy()
    #print(gt_label)
|
2017-12-13 22:07:07 +08:00
|
|
|
|
2017-11-14 15:10:51 +08:00
|
|
|
def parse_args(argv=None):
  """Build the CLI parser and parse training options.

  Parameters
  ----------
  argv : list of str, optional
      Argument list to parse. When None (the default), argparse falls back
      to ``sys.argv[1:]``, which preserves the original ``parse_args()``
      call behavior while making the function testable and reusable.

  Returns
  -------
  argparse.Namespace
      The parsed training configuration.
  """
  parser = argparse.ArgumentParser(description='Train face network')
  # general
  parser.add_argument('--data-dir', default='', help='training set directory')
  parser.add_argument('--prefix', default='../model/model', help='directory to save model.')
  parser.add_argument('--pretrained', default='', help='pretrained model to load')
  parser.add_argument('--ckpt', type=int, default=1, help='checkpoint saving option. 0: discard saving. 1: save when necessary. 2: always save')
  parser.add_argument('--loss-type', type=int, default=4, help='loss type')
  parser.add_argument('--verbose', type=int, default=2000, help='do verification testing and model saving every verbose batches')
  parser.add_argument('--max-steps', type=int, default=0, help='max training batches')
  parser.add_argument('--end-epoch', type=int, default=100000, help='training epoch size.')
  # network architecture options
  parser.add_argument('--network', default='r50', help='specify network')
  parser.add_argument('--version-se', type=int, default=0, help='whether to use se in network')
  parser.add_argument('--version-input', type=int, default=1, help='network input config')
  parser.add_argument('--version-output', type=str, default='E', help='network embedding output config')
  parser.add_argument('--version-unit', type=int, default=3, help='resnet unit config')
  parser.add_argument('--version-act', type=str, default='prelu', help='network activation config')
  parser.add_argument('--use-deformable', type=int, default=0, help='use deformable cnn in network')
  # optimizer options
  parser.add_argument('--lr', type=float, default=0.1, help='start learning rate')
  parser.add_argument('--lr-steps', type=str, default='', help='steps of lr changing')
  parser.add_argument('--wd', type=float, default=0.0005, help='weight decay')
  parser.add_argument('--fc7-wd-mult', type=float, default=1.0, help='weight decay mult for fc7')
  parser.add_argument('--fc7-lr-mult', type=float, default=1.0, help='lr mult for fc7')
  parser.add_argument('--bn-mom', type=float, default=0.9, help='bn mom')
  parser.add_argument('--mom', type=float, default=0.9, help='momentum')
  parser.add_argument('--emb-size', type=int, default=512, help='embedding length')
  parser.add_argument('--per-batch-size', type=int, default=128, help='batch size in each context')
  # margin-loss options (ArcFace / CosineFace / combined)
  parser.add_argument('--margin-m', type=float, default=0.5, help='margin for loss')
  parser.add_argument('--margin-s', type=float, default=64.0, help='scale for feature')
  parser.add_argument('--margin-a', type=float, default=1.0, help='')
  parser.add_argument('--margin-b', type=float, default=0.0, help='')
  parser.add_argument('--easy-margin', type=int, default=0, help='')
  # SphereFace options
  parser.add_argument('--margin', type=int, default=4, help='margin for sphere')
  parser.add_argument('--beta', type=float, default=1000., help='param for sphere')
  parser.add_argument('--beta-min', type=float, default=5., help='param for sphere')
  parser.add_argument('--beta-freeze', type=int, default=0, help='param for sphere')
  parser.add_argument('--gamma', type=float, default=0.12, help='param for sphere')
  parser.add_argument('--power', type=float, default=1.0, help='param for sphere')
  parser.add_argument('--scale', type=float, default=0.9993, help='param for sphere')
  # data augmentation / evaluation options
  parser.add_argument('--rand-mirror', type=int, default=1, help='if do random mirror in training')
  parser.add_argument('--cutoff', type=int, default=0, help='cut off aug')
  parser.add_argument('--target', type=str, default='lfw,cfp_fp,agedb_30', help='verification targets')
  args = parser.parse_args(argv)
  return args
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_symbol(args, arg_params, aux_params):
  """Build the training symbol: backbone embedding + margin fc7 + softmax.

  The backbone is chosen by the first letter of ``args.network``; the
  classification head (fc7) is then shaped by ``args.loss_type``:
  0=softmax, 1=SphereFace, 2=CosineFace(AM-softmax), 4=ArcFace,
  5=combined margin.  Returns (grouped_symbol, arg_params, aux_params);
  the grouped symbol contains the gradient-blocked embedding followed by
  the softmax output.
  """
  data_shape = (args.image_channel,args.image_h,args.image_w)
  image_shape = ",".join([str(x) for x in data_shape])
  margin_symbols = []
  # Backbone selection by network-name prefix letter.
  if args.network[0]=='d':
    # DenseNet backbone.
    embedding = fdensenet.get_symbol(args.emb_size, args.num_layers,
        version_se=args.version_se, version_input=args.version_input,
        version_output=args.version_output, version_unit=args.version_unit)
  elif args.network[0]=='m':
    print('init mobilenet', args.num_layers)
    # num_layers==1 selects MobileNet v1, anything else MobileNet v2.
    if args.num_layers==1:
      embedding = fmobilenet.get_symbol(args.emb_size,
          version_se=args.version_se, version_input=args.version_input,
          version_output=args.version_output, version_unit=args.version_unit)
    else:
      embedding = fmobilenetv2.get_symbol(args.emb_size)
  elif args.network[0]=='i':
    print('init inception-resnet-v2', args.num_layers)
    embedding = finception_resnet_v2.get_symbol(args.emb_size,
        version_se=args.version_se, version_input=args.version_input,
        version_output=args.version_output, version_unit=args.version_unit)
  elif args.network[0]=='x':
    print('init xception', args.num_layers)
    embedding = fxception.get_symbol(args.emb_size,
        version_se=args.version_se, version_input=args.version_input,
        version_output=args.version_output, version_unit=args.version_unit)
  elif args.network[0]=='p':
    print('init dpn', args.num_layers)
    embedding = fdpn.get_symbol(args.emb_size, args.num_layers,
        version_se=args.version_se, version_input=args.version_input,
        version_output=args.version_output, version_unit=args.version_unit)
  elif args.network[0]=='n':
    print('init nasnet', args.num_layers)
    embedding = fnasnet.get_symbol(args.emb_size)
  elif args.network[0]=='s':
    print('init spherenet', args.num_layers)
    embedding = spherenet.get_symbol(args.emb_size, args.num_layers)
  elif args.network[0]=='y':
    print('init mobilefacenet', args.num_layers)
    embedding = fmobilefacenet.get_symbol(args.emb_size, bn_mom = args.bn_mom, version_output=args.version_output)
  else:
    # Default: ResNet family.
    print('init resnet', args.num_layers)
    embedding = fresnet.get_symbol(args.emb_size, args.num_layers,
        version_se=args.version_se, version_input=args.version_input,
        version_output=args.version_output, version_unit=args.version_unit,
        version_act=args.version_act)
  all_label = mx.symbol.Variable('softmax_label')
  gt_label = all_label
  extra_loss = None
  # fc7 classification weight; lr/wd multipliers are CLI-configurable.
  _weight = mx.symbol.Variable("fc7_weight", shape=(args.num_classes, args.emb_size), lr_mult=args.fc7_lr_mult, wd_mult=args.fc7_wd_mult)
  if args.loss_type==0: #softmax
    # Plain fully-connected softmax classifier (with bias).
    _bias = mx.symbol.Variable('fc7_bias', lr_mult=2.0, wd_mult=0.0)
    fc7 = mx.sym.FullyConnected(data=embedding, weight = _weight, bias = _bias, num_hidden=args.num_classes, name='fc7')
  elif args.loss_type==1: #sphere
    # SphereFace: angular-margin LSoftmax with L2-normalized weights.
    _weight = mx.symbol.L2Normalization(_weight, mode='instance')
    fc7 = mx.sym.LSoftmax(data=embedding, label=gt_label, num_hidden=args.num_classes,
                          weight = _weight,
                          beta=args.beta, margin=args.margin, scale=args.scale,
                          beta_min=args.beta_min, verbose=1000, name='fc7')
  elif args.loss_type==2:
    # CosineFace / AM-softmax: subtract s*m from the target-class logit.
    s = args.margin_s
    m = args.margin_m
    assert(s>0.0)
    assert(m>0.0)
    _weight = mx.symbol.L2Normalization(_weight, mode='instance')
    # Normalized embedding scaled by s -> logits are s*cos(theta).
    nembedding = mx.symbol.L2Normalization(embedding, mode='instance', name='fc1n')*s
    fc7 = mx.sym.FullyConnected(data=nembedding, weight = _weight, no_bias = True, num_hidden=args.num_classes, name='fc7')
    s_m = s*m
    gt_one_hot = mx.sym.one_hot(gt_label, depth = args.num_classes, on_value = s_m, off_value = 0.0)
    fc7 = fc7-gt_one_hot
  elif args.loss_type==4:
    # ArcFace: additive angular margin -- target logit becomes
    # s*cos(theta + m) instead of s*cos(theta).
    s = args.margin_s
    m = args.margin_m
    assert s>0.0
    assert m>=0.0
    assert m<(math.pi/2)
    _weight = mx.symbol.L2Normalization(_weight, mode='instance')
    nembedding = mx.symbol.L2Normalization(embedding, mode='instance', name='fc1n')*s
    fc7 = mx.sym.FullyConnected(data=nembedding, weight = _weight, no_bias = True, num_hidden=args.num_classes, name='fc7')
    # zy is the target-class logit s*cos(theta); recover cos(theta).
    zy = mx.sym.pick(fc7, gt_label, axis=1)
    cos_t = zy/s
    cos_m = math.cos(m)
    sin_m = math.sin(m)
    # mm = sin(pi-m)*m, the linear fallback slope used when theta+m would
    # exceed pi (monotonicity fix from the ArcFace paper).
    mm = math.sin(math.pi-m)*m
    #threshold = 0.0
    threshold = math.cos(math.pi-m)
    if args.easy_margin:
      # Easy margin: apply the margin only when cos(theta) > 0.
      cond = mx.symbol.Activation(data=cos_t, act_type='relu')
    else:
      # Apply the margin only while theta + m stays below pi.
      cond_v = cos_t - threshold
      cond = mx.symbol.Activation(data=cond_v, act_type='relu')
    # cos(theta+m) = cos(theta)cos(m) - sin(theta)sin(m)
    body = cos_t*cos_t
    body = 1.0-body
    sin_t = mx.sym.sqrt(body)
    new_zy = cos_t*cos_m
    b = sin_t*sin_m
    new_zy = new_zy - b
    new_zy = new_zy*s
    if args.easy_margin:
      zy_keep = zy
    else:
      # Fallback branch: s*(cos(theta) - mm) keeps the logit monotonic.
      zy_keep = zy - s*mm
    new_zy = mx.sym.where(cond, new_zy, zy_keep)
    # Write the adjusted target logit back via a one-hot mask.
    diff = new_zy - zy
    diff = mx.sym.expand_dims(diff, 1)
    gt_one_hot = mx.sym.one_hot(gt_label, depth = args.num_classes, on_value = 1.0, off_value = 0.0)
    body = mx.sym.broadcast_mul(gt_one_hot, diff)
    fc7 = fc7+body
  elif args.loss_type==5:
    # Combined margin: target logit s*(cos(a*theta + m) - b), covering
    # SphereFace (a), ArcFace (m) and CosineFace (b) in one formula.
    s = args.margin_s
    m = args.margin_m
    assert s>0.0
    _weight = mx.symbol.L2Normalization(_weight, mode='instance')
    nembedding = mx.symbol.L2Normalization(embedding, mode='instance', name='fc1n')*s
    fc7 = mx.sym.FullyConnected(data=nembedding, weight = _weight, no_bias = True, num_hidden=args.num_classes, name='fc7')
    if args.margin_a!=1.0 or args.margin_m!=0.0 or args.margin_b!=0.0:
      if args.margin_a==1.0 and args.margin_m==0.0:
        # Only b active -> pure CosineFace shortcut.
        s_m = s*args.margin_b
        gt_one_hot = mx.sym.one_hot(gt_label, depth = args.num_classes, on_value = s_m, off_value = 0.0)
        fc7 = fc7-gt_one_hot
      else:
        # General case: work in angle space via arccos.
        zy = mx.sym.pick(fc7, gt_label, axis=1)
        cos_t = zy/s
        t = mx.sym.arccos(cos_t)
        if args.margin_a!=1.0:
          t = t*args.margin_a
        if args.margin_m>0.0:
          t = t+args.margin_m
        body = mx.sym.cos(t)
        if args.margin_b>0.0:
          body = body - args.margin_b
        new_zy = body*s
        diff = new_zy - zy
        diff = mx.sym.expand_dims(diff, 1)
        gt_one_hot = mx.sym.one_hot(gt_label, depth = args.num_classes, on_value = 1.0, off_value = 0.0)
        body = mx.sym.broadcast_mul(gt_one_hot, diff)
        fc7 = fc7+body
  # Output group: embedding (no gradient) + softmax over fc7.
  out_list = [mx.symbol.BlockGrad(embedding)]
  softmax = mx.symbol.SoftmaxOutput(data=fc7, label = gt_label, name='softmax', normalization='valid')
  out_list.append(softmax)
  out = mx.symbol.Group(out_list)
  return (out, arg_params, aux_params)
|
2017-11-14 15:10:51 +08:00
|
|
|
|
|
|
|
|
def train_net(args):
  """Run the full training loop for the configured face-recognition network.

  Side effects: mutates ``args`` with derived fields (ctx_num, batch_size,
  num_classes, image_h/w, ...), sets the BETA environment variable (read by
  the SphereFace LSoftmax operator -- TODO confirm), saves checkpoints to
  ``args.prefix``, and may call sys.exit(0) when max-steps is reached.
  """
  # Build the device list from CUDA_VISIBLE_DEVICES; fall back to CPU.
  ctx = []
  cvd = os.environ['CUDA_VISIBLE_DEVICES'].strip()
  if len(cvd)>0:
    for i in xrange(len(cvd.split(','))):
      ctx.append(mx.gpu(i))
  if len(ctx)==0:
    ctx = [mx.cpu()]
    print('use cpu')
  else:
    print('gpu num:', len(ctx))
  prefix = args.prefix
  prefix_dir = os.path.dirname(prefix)
  if not os.path.exists(prefix_dir):
    os.makedirs(prefix_dir)
  end_epoch = args.end_epoch
  # Derive runtime configuration onto args (read later by get_symbol).
  args.ctx_num = len(ctx)
  args.num_layers = int(args.network[1:])
  print('num_layers', args.num_layers)
  if args.per_batch_size==0:
    args.per_batch_size = 128
  args.batch_size = args.per_batch_size*args.ctx_num
  args.rescale_threshold = 0
  args.image_channel = 3

  # Initial beta for the SphereFace annealing schedule (see _batch_callback).
  os.environ['BETA'] = str(args.beta)
  data_dir_list = args.data_dir.split(',')
  assert len(data_dir_list)==1
  data_dir = data_dir_list[0]
  path_imgrec = None
  path_imglist = None
  # Dataset property file supplies class count and input image size.
  prop = face_image.load_property(data_dir)
  args.num_classes = prop.num_classes
  image_size = prop.image_size
  args.image_h = image_size[0]
  args.image_w = image_size[1]
  print('image_size', image_size)
  assert(args.num_classes>0)
  print('num_classes', args.num_classes)
  path_imgrec = os.path.join(data_dir, "train.rec")

  # Large SphereFace problems: freeze beta longer and anneal slower.
  if args.loss_type==1 and args.num_classes>20000:
    args.beta_freeze = 5000
    args.gamma = 0.06

  print('Called with argument:', args)
  data_shape = (args.image_channel,image_size[0],image_size[1])
  mean = None

  begin_epoch = 0
  base_lr = args.lr
  base_wd = args.wd
  base_mom = args.mom
  # Build the symbol, optionally warm-starting from a saved checkpoint
  # given as "prefix,epoch".
  if len(args.pretrained)==0:
    arg_params = None
    aux_params = None
    sym, arg_params, aux_params = get_symbol(args, arg_params, aux_params)
  else:
    vec = args.pretrained.split(',')
    print('loading', vec)
    _, arg_params, aux_params = mx.model.load_checkpoint(vec[0], int(vec[1]))
    sym, arg_params, aux_params = get_symbol(args, arg_params, aux_params)
  if args.network[0]=='s':
    # SphereNet uses its own custom weight initialization.
    data_shape_dict = {'data' : (args.per_batch_size,)+data_shape}
    spherenet.init_weights(sym, data_shape_dict, args.num_layers)

  #label_name = 'softmax_label'
  #label_shape = (args.batch_size,)
  model = mx.mod.Module(
      context       = ctx,
      symbol        = sym,
  )
  val_dataiter = None

  train_dataiter = FaceImageIter(
      batch_size           = args.batch_size,
      data_shape           = data_shape,
      path_imgrec          = path_imgrec,
      shuffle              = True,
      rand_mirror          = args.rand_mirror,
      mean                 = mean,
      cutoff               = args.cutoff,
  )

  # loss_type < 10: accuracy on softmax output; otherwise raw loss value.
  if args.loss_type<10:
    _metric = AccMetric()
  else:
    _metric = LossValueMetric()
  eval_metrics = [mx.metric.create(_metric)]

  # Initializer family chosen per backbone convention.
  if args.network[0]=='r' or args.network[0]=='y':
    initializer = mx.init.Xavier(rnd_type='gaussian', factor_type="out", magnitude=2) #resnet style
  elif args.network[0]=='i' or args.network[0]=='x':
    initializer = mx.init.Xavier(rnd_type='gaussian', factor_type="in", magnitude=2) #inception
  else:
    initializer = mx.init.Xavier(rnd_type='uniform', factor_type="in", magnitude=2)
  # Rescale gradients by device count so lr is batch-size independent.
  _rescale = 1.0/args.ctx_num
  opt = optimizer.SGD(learning_rate=base_lr, momentum=base_mom, wd=base_wd, rescale_grad=_rescale)
  som = 20
  _cb = mx.callback.Speedometer(args.batch_size, som)

  # Load verification sets (e.g. lfw.bin) that exist under the data dir.
  ver_list = []
  ver_name_list = []
  for name in args.target.split(','):
    path = os.path.join(data_dir,name+".bin")
    if os.path.exists(path):
      data_set = verification.load_bin(path, image_size)
      ver_list.append(data_set)
      ver_name_list.append(name)
      print('ver', name)

  def ver_test(nbatch):
    # Evaluate every verification set; return flip-augmented accuracies.
    results = []
    for i in xrange(len(ver_list)):
      acc1, std1, acc2, std2, xnorm, embeddings_list = verification.test(ver_list[i], model, args.batch_size, 10, None, None)
      print('[%s][%d]XNorm: %f' % (ver_name_list[i], nbatch, xnorm))
      #print('[%s][%d]Accuracy: %1.5f+-%1.5f' % (ver_name_list[i], nbatch, acc1, std1))
      print('[%s][%d]Accuracy-Flip: %1.5f+-%1.5f' % (ver_name_list[i], nbatch, acc2, std2))
      results.append(acc2)
    return results

  highest_acc = [0.0, 0.0]  #lfw and target
  #for i in xrange(len(ver_list)):
  #  highest_acc.append(0.0)
  # Single-element lists act as mutable cells shared with the closure below.
  global_step = [0]
  save_step = [0]
  if len(args.lr_steps)==0:
    # Default schedules, scaled so they assume an effective batch of 512.
    lr_steps = [40000, 60000, 80000]
    if args.loss_type>=1 and args.loss_type<=7:
      lr_steps = [100000, 140000, 160000]
    p = 512.0/args.batch_size
    for l in xrange(len(lr_steps)):
      lr_steps[l] = int(lr_steps[l]*p)
  else:
    lr_steps = [int(x) for x in args.lr_steps.split(',')]
  print('lr_steps', lr_steps)

  def _batch_callback(param):
    # Per-batch hook: lr decay, logging, periodic verification/checkpoint,
    # SphereFace beta annealing, and max-steps termination.
    #global global_step
    global_step[0]+=1
    mbatch = global_step[0]
    # Step the learning rate down 10x at each (beta_freeze-shifted) step.
    for _lr in lr_steps:
      if mbatch==args.beta_freeze+_lr:
        opt.lr *= 0.1
        print('lr change to', opt.lr)
        break

    _cb(param)
    if mbatch%1000==0:
      print('lr-batch-epoch:',opt.lr,param.nbatch,param.epoch)

    if mbatch>=0 and mbatch%args.verbose==0:
      acc_list = ver_test(mbatch)
      save_step[0]+=1
      msave = save_step[0]
      do_save = False
      if len(acc_list)>0:
        # acc_list[0] is the first target (lfw by default).
        lfw_score = acc_list[0]
        if lfw_score>highest_acc[0]:
          highest_acc[0] = lfw_score
          if lfw_score>=0.998:
            do_save = True
        if acc_list[-1]>=highest_acc[-1]:
          highest_acc[-1] = acc_list[-1]
          if lfw_score>=0.99:
            do_save = True
      # ckpt policy overrides the accuracy heuristic.
      if args.ckpt==0:
        do_save = False
      elif args.ckpt>1:
        do_save = True
      if do_save:
        print('saving', msave)
        arg, aux = model.get_params()
        mx.model.save_checkpoint(prefix, msave, model.symbol, arg, aux)
      print('[%d]Accuracy-Highest: %1.5f'%(mbatch, highest_acc[-1]))
    # Anneal beta (SphereFace) after the freeze period; the operator reads
    # it back from the BETA environment variable.
    if mbatch<=args.beta_freeze:
      _beta = args.beta
    else:
      move = max(0, mbatch-args.beta_freeze)
      _beta = max(args.beta_min, args.beta*math.pow(1+args.gamma*move, -1.0*args.power))
    #print('beta', _beta)
    os.environ['BETA'] = str(_beta)
    if args.max_steps>0 and mbatch>args.max_steps:
      sys.exit(0)

  epoch_cb = None
  # Prefetch batches on a background thread to overlap I/O with compute.
  train_dataiter = mx.io.PrefetchingIter(train_dataiter)

  model.fit(train_dataiter,
      begin_epoch        = begin_epoch,
      num_epoch          = end_epoch,
      eval_data          = val_dataiter,
      eval_metric        = eval_metrics,
      kvstore            = 'device',
      optimizer          = opt,
      #optimizer_params   = optimizer_params,
      initializer        = initializer,
      arg_params         = arg_params,
      aux_params         = aux_params,
      allow_missing      = True,
      batch_end_callback = _batch_callback,
      epoch_end_callback = epoch_cb )
|
|
|
|
|
|
|
|
|
|
def main():
    """Parse command-line options into the module-global ``args`` and train."""
    # (A delayed start was once used here: time.sleep(3600*6.5).)
    global args
    args = parse_args()
    train_net(args)
|
|
|
|
|
|
|
|
|
|
# Script entry point: run training when executed directly.
if __name__ == '__main__':
  main()
|
|
|
|
|
|