From 8e7927d29da04ee8e9ce5e0a904ffddcd52609b7 Mon Sep 17 00:00:00 2001
From: Jia Guo
Date: Mon, 5 Feb 2018 22:22:39 +0800
Subject: [PATCH] tiny

---
 src/common/face_preprocess.py |  2 +-
 src/data/dataset_clean.py     |  3 ++-
 src/data/dataset_merge.py     | 19 ++++++++++++++++++-
 3 files changed, 21 insertions(+), 3 deletions(-)

diff --git a/src/common/face_preprocess.py b/src/common/face_preprocess.py
index 1c9525b..476c591 100644
--- a/src/common/face_preprocess.py
+++ b/src/common/face_preprocess.py
@@ -7,7 +7,7 @@ def parse_lst_line(line):
   vec = line.strip().split("\t")
   assert len(vec)>=3
   aligned = False
-  if int(vec[0])==1:
+  if int(vec[0])>0:
     aligned = True
   image_path = vec[1]
   label = int(vec[2])
diff --git a/src/data/dataset_clean.py b/src/data/dataset_clean.py
index d6d9403..83e8131 100644
--- a/src/data/dataset_clean.py
+++ b/src/data/dataset_clean.py
@@ -12,10 +12,11 @@ import time
 import sklearn
 from sklearn.decomposition import PCA
 from easydict import EasyDict as edict
-import face_image
 from sklearn.cluster import DBSCAN
 import numpy as np
+sys.path.append(os.path.join(os.path.dirname(__file__),'..', 'common'))
+import face_image



 def do_clean(args):
diff --git a/src/data/dataset_merge.py b/src/data/dataset_merge.py
index a21c83d..5b5efa7 100644
--- a/src/data/dataset_merge.py
+++ b/src/data/dataset_merge.py
@@ -18,6 +18,9 @@ import numpy as np
 sys.path.append(os.path.join(os.path.dirname(__file__),'..', 'common'))
 import face_image
+sys.path.append(os.path.join(os.path.dirname(__file__),'..', 'eval'))
+import verification
+

 def ch_dev(arg_params, aux_params, ctx):
   new_args = dict()
   new_auxs = dict()
@@ -177,6 +180,20 @@ def main(args):
         _id_list.append( (_ds_id, identity, embedding) )
         if test_limit>0 and pp>=test_limit:
           break
+  else:
+    _id_list = []
+    data_set = verification.load_bin(args.exclude, image_size)[0][0]
+    print(data_set.shape)
+    data = nd.zeros( (1,3,image_size[0], image_size[1]))
+    for i in xrange(data_set.shape[0]):
+      data[0] = data_set[i]
+      db = mx.io.DataBatch(data=(data,))
+      model.forward(db, is_train=False)
+      net_out = model.get_outputs()
+      embedding = net_out[0].asnumpy().flatten()
+      _norm=np.linalg.norm(embedding)
+      embedding /= _norm
+      _id_list.append( (i, i, embedding) )

   #X = []
   #for id_item in all_id_list:
@@ -259,7 +276,7 @@ if __name__ == '__main__':
   parser.add_argument('--model', default='../model/softmax,50', help='path to load model.')
   parser.add_argument('--batch-size', default=32, type=int, help='')
   parser.add_argument('--param1', default=0.3, type=float, help='')
-  parser.add_argument('--param2', default=0.45, type=float, help='')
+  parser.add_argument('--param2', default=0.4, type=float, help='')
   parser.add_argument('--mode', default=1, type=int, help='')
   parser.add_argument('--test', default=0, type=int, help='')
   args = parser.parse_args()