diff --git a/.gitignore b/.gitignore
index 7bbc71c..4f61b8c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -99,3 +99,5 @@ ENV/
# mypy
.mypy_cache/
+
+.DS_Store
diff --git a/README.md b/README.md
index 89751e1..8930133 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,10 @@
# InsightFace: 2D and 3D Face Analysis Project
+
-
+
+
+In this module, we provide datasets and training/inference pipelines for face alignment.
-
-
-
+Supported methods:
+
+- [x] [SDUNets (BMVC'2018)](alignment/heatmap)
+- [x] [SimpleRegression](alignment/coordinate_reg)
+
+
+[SDUNets](alignment/heatmap) is a heatmap-based method which was accepted at [BMVC 2018](http://bmvc2018.org/contents/papers/0051.pdf).
+
+[SimpleRegression](alignment/coordinate_reg) provides very lightweight facial landmark models with fast coordinate regression. The input of these models is a loosely cropped face image, while the output is the direct landmark coordinates.
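+
+Below is a minimal inference sketch for the coordinate regression models using the `insightface` python package. This is a hedged example, not part of this commit: it mirrors the `FaceAnalysis` usage in `attribute/gender_age/test.py` and assumes the installed model pack bundles a 106-point landmark model exposed under the `landmark_2d_106` task name:
+
+```python
+from insightface.app import FaceAnalysis
+from insightface.data import get_image as ins_get_image
+
+# Run detection first, then 2D-106 landmark regression on each detected face.
+# 'landmark_2d_106' is the assumed task name of the bundled landmark model.
+app = FaceAnalysis(allowed_modules=['detection', 'landmark_2d_106'])
+app.prepare(ctx_id=0, det_size=(640, 640))
+
+img = ins_get_image('t1')  # sample image shipped with the package
+for face in app.get(img):
+    print(face.bbox)
+    print(face.landmark_2d_106)  # (106, 2) array of (x, y) coordinates
+```
+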
## Citation
@@ -312,11 +138,34 @@ For single cropped face image(112x112), total inference time is only 17ms on our
If you find *InsightFace* useful in your research, please consider to cite the following related papers:
```
-@inproceedings{deng2019retinaface,
-title={RetinaFace: Single-stage Dense Face Localisation in the Wild},
-author={Deng, Jiankang and Guo, Jia and Yuxiang, Zhou and Jinke Yu and Irene Kotsia and Zafeiriou, Stefanos},
-booktitle={arxiv},
-year={2019}
+
+@article{guo2021sample,
+ title={Sample and Computation Redistribution for Efficient Face Detection},
+ author={Guo, Jia and Deng, Jiankang and Lattas, Alexandros and Zafeiriou, Stefanos},
+ journal={arXiv preprint arXiv:2105.04714},
+ year={2021}
+}
+
+@inproceedings{an2020partical_fc,
+ title={Partial FC: Training 10 Million Identities on a Single Machine},
+ author={An, Xiang and Zhu, Xuhan and Xiao, Yang and Wu, Lan and Zhang, Ming and Gao, Yuan and Qin, Bin and
+         Zhang, Debing and Fu, Ying},
+  booktitle={arXiv preprint arXiv:2010.05222},
+ year={2020}
+}
+
+@inproceedings{deng2020subcenter,
+ title={Sub-center ArcFace: Boosting Face Recognition by Large-scale Noisy Web Faces},
+ author={Deng, Jiankang and Guo, Jia and Liu, Tongliang and Gong, Mingming and Zafeiriou, Stefanos},
+  booktitle={Proceedings of the European Conference on Computer Vision},
+ year={2020}
+}
+
+@inproceedings{Deng2020CVPR,
+  title={RetinaFace: Single-Shot Multi-Level Face Localisation in the Wild},
+  author={Deng, Jiankang and Guo, Jia and Ververas, Evangelos and Kotsia, Irene and Zafeiriou, Stefanos},
+  booktitle={CVPR},
+  year={2020}
}
@inproceedings{guo2018stacked,
diff --git a/alignment/README.md b/alignment/README.md
index 9656d13..7b3706b 100644
--- a/alignment/README.md
+++ b/alignment/README.md
@@ -1,4 +1,42 @@
-You can now find heatmap based approaches under ``heatmapReg`` directory.
+## Face Alignment
+
+
+
+
+
+
+
+## Introduction
+
+These are the face alignment methods of [InsightFace](https://insightface.ai).
+
+
+
+
+
+
+
+### Datasets
+
+Please refer to the [datasets](_datasets_) page for details of the face alignment datasets used for training and evaluation.
+
+### Evaluation
+
+Please refer to the [evaluation](_evaluation_) page for details of face alignment evaluation.
+
+
+## Methods
+
+
+Supported methods:
+
+- [x] [SDUNets (BMVC'2018)](heatmap)
+- [x] [SimpleRegression](coordinate_reg)
+
+
+
+## Contributing
+
+We appreciate all contributions to improve the face alignment model zoo of InsightFace.
-You can now find coordinate regression approaches under ``coordinateReg`` directory.
diff --git a/alignment/coordinateReg/README.md b/alignment/coordinate_reg/README.md
similarity index 100%
rename from alignment/coordinateReg/README.md
rename to alignment/coordinate_reg/README.md
diff --git a/alignment/coordinateReg/image_infer.py b/alignment/coordinate_reg/image_infer.py
similarity index 100%
rename from alignment/coordinateReg/image_infer.py
rename to alignment/coordinate_reg/image_infer.py
diff --git a/alignment/heatmapReg/README.md b/alignment/heatmap/README.md
similarity index 100%
rename from alignment/heatmapReg/README.md
rename to alignment/heatmap/README.md
diff --git a/alignment/heatmapReg/data.py b/alignment/heatmap/data.py
similarity index 100%
rename from alignment/heatmapReg/data.py
rename to alignment/heatmap/data.py
diff --git a/alignment/heatmapReg/img_helper.py b/alignment/heatmap/img_helper.py
similarity index 100%
rename from alignment/heatmapReg/img_helper.py
rename to alignment/heatmap/img_helper.py
diff --git a/alignment/heatmapReg/metric.py b/alignment/heatmap/metric.py
similarity index 100%
rename from alignment/heatmapReg/metric.py
rename to alignment/heatmap/metric.py
diff --git a/alignment/heatmapReg/optimizer.py b/alignment/heatmap/optimizer.py
similarity index 100%
rename from alignment/heatmapReg/optimizer.py
rename to alignment/heatmap/optimizer.py
diff --git a/alignment/heatmapReg/sample_config.py b/alignment/heatmap/sample_config.py
similarity index 100%
rename from alignment/heatmapReg/sample_config.py
rename to alignment/heatmap/sample_config.py
diff --git a/alignment/heatmapReg/symbol/sym_heatmap.py b/alignment/heatmap/symbol/sym_heatmap.py
similarity index 100%
rename from alignment/heatmapReg/symbol/sym_heatmap.py
rename to alignment/heatmap/symbol/sym_heatmap.py
diff --git a/alignment/heatmapReg/test.py b/alignment/heatmap/test.py
similarity index 100%
rename from alignment/heatmapReg/test.py
rename to alignment/heatmap/test.py
diff --git a/alignment/heatmapReg/test_rec_nme.py b/alignment/heatmap/test_rec_nme.py
similarity index 100%
rename from alignment/heatmapReg/test_rec_nme.py
rename to alignment/heatmap/test_rec_nme.py
diff --git a/alignment/heatmapReg/train.py b/alignment/heatmap/train.py
similarity index 100%
rename from alignment/heatmapReg/train.py
rename to alignment/heatmap/train.py
diff --git a/attribute/README.md b/attribute/README.md
new file mode 100644
index 0000000..1a8379c
--- /dev/null
+++ b/attribute/README.md
@@ -0,0 +1,41 @@
+## Face Attribute
+
+
+
+
+
+
+
+## Introduction
+
+These are the face attribute methods of [InsightFace](https://insightface.ai).
+
+
+
+
+
+
+
+### Datasets
+
+Please refer to the [datasets](_datasets_) page for details of the face attribute datasets used for training and evaluation.
+
+### Evaluation
+
+Please refer to the [evaluation](_evaluation_) page for details of face attribute evaluation.
+
+
+## Methods
+
+
+Supported methods:
+
+- [x] [Gender_Age](gender_age)
+
+
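+A condensed usage sketch (it mirrors `gender_age/test.py` added in this commit; `t1` is a sample image shipped with the python package):
+
+```python
+from insightface.app import FaceAnalysis
+from insightface.data import get_image as ins_get_image
+
+# Restrict the pipeline to face detection plus the gender/age module.
+app = FaceAnalysis(allowed_modules=['detection', 'genderage'])
+app.prepare(ctx_id=0, det_size=(640, 640))  # ctx_id<0 means CPU
+
+img = ins_get_image('t1')
+for face in app.get(img):
+    print(face.bbox, face.sex, face.age)
+```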
+
+## Contributing
+
+We appreciate all contributions to improve the face attribute model zoo of InsightFace.
+
+
diff --git a/deploy/test.py b/attribute/gender_age/test.py
similarity index 53%
rename from deploy/test.py
rename to attribute/gender_age/test.py
index f5ecdf2..a92b216 100644
--- a/deploy/test.py
+++ b/attribute/gender_age/test.py
@@ -4,25 +4,21 @@ import sys
import numpy as np
import insightface
from insightface.app import FaceAnalysis
+from insightface.data import get_image as ins_get_image
-assert insightface.__version__>='0.2'
-parser = argparse.ArgumentParser(description='insightface test')
+parser = argparse.ArgumentParser(description='insightface gender-age test')
# general
parser.add_argument('--ctx', default=0, type=int, help='ctx id, <0 means using cpu')
args = parser.parse_args()
-app = FaceAnalysis(name='antelope')
+app = FaceAnalysis(allowed_modules=['detection', 'genderage'])
app.prepare(ctx_id=args.ctx, det_size=(640,640))
-img = cv2.imread('../sample-images/t1.jpg')
+img = ins_get_image('t1')
faces = app.get(img)
assert len(faces)==6
-rimg = app.draw_on(img, faces)
-cv2.imwrite("./t1_output.jpg", rimg)
-print(len(faces))
for face in faces:
print(face.bbox)
- print(face.kps)
- print(face.embedding.shape)
+ print(face.sex, face.age)
diff --git a/challenges/README.md b/challenges/README.md
new file mode 100644
index 0000000..b2422cc
--- /dev/null
+++ b/challenges/README.md
@@ -0,0 +1,31 @@
+## Challenges
+
+
+
+
+
+
+
+## Introduction
+
+These are the challenges held by [InsightFace](https://insightface.ai).
+
+
+
+
+
+
+
+
+## List
+
+
+Challenges:
+
+- [LFR19 (ICCVW'2019)](iccv19-lfr)
+- [MFR21 (ICCVW'2021)](iccv21-mfr)
+- [IFRT](ifrt)
+
+
+
+
diff --git a/challenges/iccv19-lfr/README.md b/challenges/iccv19-lfr/README.md
index f9ae0e0..12aa535 100644
--- a/challenges/iccv19-lfr/README.md
+++ b/challenges/iccv19-lfr/README.md
@@ -31,7 +31,7 @@ insightface.challenge@gmail.com
*For Chinese:*
-
+
*For English:*
diff --git a/deploy/README.md b/deploy/README.md
deleted file mode 100644
index e65643d..0000000
--- a/deploy/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-InsightFace deployment README
----
-
-For insightface pip-package <= 0.1.5, we use MXNet as inference backend, please download all models from [onedrive](https://1drv.ms/u/s!AswpsDO2toNKrUy0VktHTWgIQ0bn?e=UEF7C4), and put them all under `~/.insightface/models/` directory.
-
-Starting from insightface>=0.2, we use onnxruntime as inference backend, please download our **antelope** model release from [onedrive](https://1drv.ms/u/s!AswpsDO2toNKrU0ydGgDkrHPdJ3m?e=iVgZox), and put it under `~/.insightface/models/`, so there're onnx models at `~/.insightface/models/antelope/*.onnx`.
-
-The **antelope** model release contains `ResNet100@Glint360K recognition model` and `SCRFD-10GF detection model`. Please check `test.py` for detail.
diff --git a/deploy/convert_onnx.py b/deploy/convert_onnx.py
deleted file mode 100644
index 3ed583d..0000000
--- a/deploy/convert_onnx.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import sys
-import os
-import argparse
-import onnx
-import mxnet as mx
-
-print('mxnet version:', mx.__version__)
-print('onnx version:', onnx.__version__)
-#make sure to install onnx-1.2.1
-#pip uninstall onnx
-#pip install onnx==1.2.1
-assert onnx.__version__ == '1.2.1'
-import numpy as np
-from mxnet.contrib import onnx as onnx_mxnet
-
-parser = argparse.ArgumentParser(
- description='convert insightface models to onnx')
-# general
-parser.add_argument('--prefix',
- default='./r100-arcface/model',
- help='prefix to load model.')
-parser.add_argument('--epoch',
- default=0,
- type=int,
- help='epoch number to load model.')
-parser.add_argument('--input-shape', default='3,112,112', help='input shape.')
-parser.add_argument('--output-onnx',
- default='./r100.onnx',
- help='path to write onnx model.')
-args = parser.parse_args()
-input_shape = (1, ) + tuple([int(x) for x in args.input_shape.split(',')])
-print('input-shape:', input_shape)
-
-sym_file = "%s-symbol.json" % args.prefix
-params_file = "%s-%04d.params" % (args.prefix, args.epoch)
-assert os.path.exists(sym_file)
-assert os.path.exists(params_file)
-converted_model_path = onnx_mxnet.export_model(sym_file, params_file,
- [input_shape], np.float32,
- args.output_onnx)
diff --git a/deploy/face_model.py b/deploy/face_model.py
deleted file mode 100644
index 2e4a361..0000000
--- a/deploy/face_model.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import sys
-import os
-import argparse
-import numpy as np
-import mxnet as mx
-import cv2
-import insightface
-from insightface.utils import face_align
-
-
-def do_flip(data):
- for idx in range(data.shape[0]):
- data[idx, :, :] = np.fliplr(data[idx, :, :])
-
-
-def get_model(ctx, image_size, prefix, epoch, layer):
- print('loading', prefix, epoch)
- sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)
- all_layers = sym.get_internals()
- sym = all_layers[layer + '_output']
- model = mx.mod.Module(symbol=sym, context=ctx, label_names=None)
- #model.bind(data_shapes=[('data', (args.batch_size, 3, image_size[0], image_size[1]))], label_shapes=[('softmax_label', (args.batch_size,))])
- model.bind(data_shapes=[('data', (1, 3, image_size[0], image_size[1]))])
- model.set_params(arg_params, aux_params)
- return model
-
-
-class FaceModel:
- def __init__(self, ctx_id, model_prefix, model_epoch, use_large_detector=False):
- if use_large_detector:
- self.detector = insightface.model_zoo.get_model('retinaface_r50_v1')
- else:
- self.detector = insightface.model_zoo.get_model('retinaface_mnet025_v2')
- self.detector.prepare(ctx_id=ctx_id)
- if ctx_id>=0:
- ctx = mx.gpu(ctx_id)
- else:
- ctx = mx.cpu()
- image_size = (112,112)
- self.model = get_model(ctx, image_size, model_prefix, model_epoch, 'fc1')
- self.image_size = image_size
-
- def get_input(self, face_img):
- bbox, pts5 = self.detector.detect(face_img, threshold=0.8)
- if bbox.shape[0]==0:
- return None
- bbox = bbox[0, 0:4]
- pts5 = pts5[0, :]
- nimg = face_align.norm_crop(face_img, pts5)
- return nimg
-
- def get_feature(self, aligned):
- a = cv2.cvtColor(aligned, cv2.COLOR_BGR2RGB)
- a = np.transpose(a, (2, 0, 1))
- input_blob = np.expand_dims(a, axis=0)
- data = mx.nd.array(input_blob)
- db = mx.io.DataBatch(data=(data, ))
- self.model.forward(db, is_train=False)
- emb = self.model.get_outputs()[0].asnumpy()[0]
- norm = np.sqrt(np.sum(emb*emb)+0.00001)
- emb /= norm
- return emb
-
diff --git a/deploy/model_slim.py b/deploy/model_slim.py
deleted file mode 100644
index 421b0cd..0000000
--- a/deploy/model_slim.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import sys
-import os
-import argparse
-import numpy as np
-import mxnet as mx
-
-parser = argparse.ArgumentParser(description='face model slim')
-# general
-parser.add_argument('--model',
- default='../models/model-r34-amf/model,60',
- help='path to load model.')
-args = parser.parse_args()
-
-_vec = args.model.split(',')
-assert len(_vec) == 2
-prefix = _vec[0]
-epoch = int(_vec[1])
-print('loading', prefix, epoch)
-sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)
-all_layers = sym.get_internals()
-sym = all_layers['fc1_output']
-dellist = []
-for k, v in arg_params.iteritems():
- if k.startswith('fc7'):
- dellist.append(k)
-for d in dellist:
- del arg_params[d]
-mx.model.save_checkpoint(prefix + "s", 0, sym, arg_params, aux_params)
diff --git a/deploy/mtcnn-model/det1-0001.params b/deploy/mtcnn-model/det1-0001.params
deleted file mode 100644
index e4b04aa..0000000
Binary files a/deploy/mtcnn-model/det1-0001.params and /dev/null differ
diff --git a/deploy/mtcnn-model/det1-symbol.json b/deploy/mtcnn-model/det1-symbol.json
deleted file mode 100644
index bd9b772..0000000
--- a/deploy/mtcnn-model/det1-symbol.json
+++ /dev/null
@@ -1,266 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "param": {},
- "name": "data",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "10",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1",
- "inputs": [[0, 0], [1, 0], [2, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1",
- "inputs": [[3, 0], [4, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(2,2)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1",
- "inputs": [[5, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "16",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2",
- "inputs": [[6, 0], [7, 0], [8, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2",
- "inputs": [[9, 0], [10, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "32",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3",
- "inputs": [[11, 0], [12, 0], [13, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3",
- "inputs": [[14, 0], [15, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(1,1)",
- "no_bias": "False",
- "num_filter": "4",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv4_2",
- "inputs": [[16, 0], [17, 0], [18, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(1,1)",
- "no_bias": "False",
- "num_filter": "2",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv4_1",
- "inputs": [[16, 0], [20, 0], [21, 0]],
- "backward_source_id": -1
- },
- {
- "op": "SoftmaxActivation",
- "param": {"mode": "channel"},
- "name": "prob1",
- "inputs": [[22, 0]],
- "backward_source_id": -1
- }
- ],
- "arg_nodes": [
- 0,
- 1,
- 2,
- 4,
- 7,
- 8,
- 10,
- 12,
- 13,
- 15,
- 17,
- 18,
- 20,
- 21
- ],
- "heads": [[19, 0], [23, 0]]
-}
\ No newline at end of file
diff --git a/deploy/mtcnn-model/det1.caffemodel b/deploy/mtcnn-model/det1.caffemodel
deleted file mode 100644
index 79e93b4..0000000
Binary files a/deploy/mtcnn-model/det1.caffemodel and /dev/null differ
diff --git a/deploy/mtcnn-model/det1.prototxt b/deploy/mtcnn-model/det1.prototxt
deleted file mode 100644
index c5c1657..0000000
--- a/deploy/mtcnn-model/det1.prototxt
+++ /dev/null
@@ -1,177 +0,0 @@
-name: "PNet"
-input: "data"
-input_dim: 1
-input_dim: 3
-input_dim: 12
-input_dim: 12
-
-layer {
- name: "conv1"
- type: "Convolution"
- bottom: "data"
- top: "conv1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 10
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "PReLU1"
- type: "PReLU"
- bottom: "conv1"
- top: "conv1"
-}
-layer {
- name: "pool1"
- type: "Pooling"
- bottom: "conv1"
- top: "pool1"
- pooling_param {
- pool: MAX
- kernel_size: 2
- stride: 2
- }
-}
-
-layer {
- name: "conv2"
- type: "Convolution"
- bottom: "pool1"
- top: "conv2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 16
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "PReLU2"
- type: "PReLU"
- bottom: "conv2"
- top: "conv2"
-}
-
-layer {
- name: "conv3"
- type: "Convolution"
- bottom: "conv2"
- top: "conv3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 32
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "PReLU3"
- type: "PReLU"
- bottom: "conv3"
- top: "conv3"
-}
-
-
-layer {
- name: "conv4-1"
- type: "Convolution"
- bottom: "conv3"
- top: "conv4-1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 2
- kernel_size: 1
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-layer {
- name: "conv4-2"
- type: "Convolution"
- bottom: "conv3"
- top: "conv4-2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 4
- kernel_size: 1
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prob1"
- type: "Softmax"
- bottom: "conv4-1"
- top: "prob1"
-}
diff --git a/deploy/mtcnn-model/det2-0001.params b/deploy/mtcnn-model/det2-0001.params
deleted file mode 100644
index a14a478..0000000
Binary files a/deploy/mtcnn-model/det2-0001.params and /dev/null differ
diff --git a/deploy/mtcnn-model/det2-symbol.json b/deploy/mtcnn-model/det2-symbol.json
deleted file mode 100644
index a13246a..0000000
--- a/deploy/mtcnn-model/det2-symbol.json
+++ /dev/null
@@ -1,324 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "param": {},
- "name": "data",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1",
- "inputs": [[0, 0], [1, 0], [2, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1",
- "inputs": [[3, 0], [4, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1",
- "inputs": [[5, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2",
- "inputs": [[6, 0], [7, 0], [8, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2",
- "inputs": [[9, 0], [10, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2",
- "inputs": [[11, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3",
- "inputs": [[12, 0], [13, 0], [14, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3",
- "inputs": [[15, 0], [16, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "128"
- },
- "name": "conv4",
- "inputs": [[17, 0], [18, 0], [19, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4",
- "inputs": [[20, 0], [21, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "4"
- },
- "name": "conv5_2",
- "inputs": [[22, 0], [23, 0], [24, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "conv5_1",
- "inputs": [[22, 0], [26, 0], [27, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prob1_label",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "SoftmaxOutput",
- "param": {
- "grad_scale": "1",
- "ignore_label": "-1",
- "multi_output": "False",
- "normalization": "null",
- "use_ignore": "False"
- },
- "name": "prob1",
- "inputs": [[28, 0], [29, 0]],
- "backward_source_id": -1
- }
- ],
- "arg_nodes": [
- 0,
- 1,
- 2,
- 4,
- 7,
- 8,
- 10,
- 13,
- 14,
- 16,
- 18,
- 19,
- 21,
- 23,
- 24,
- 26,
- 27,
- 29
- ],
- "heads": [[25, 0], [30, 0]]
-}
\ No newline at end of file
diff --git a/deploy/mtcnn-model/det2.caffemodel b/deploy/mtcnn-model/det2.caffemodel
deleted file mode 100644
index a5a540c..0000000
Binary files a/deploy/mtcnn-model/det2.caffemodel and /dev/null differ
diff --git a/deploy/mtcnn-model/det2.prototxt b/deploy/mtcnn-model/det2.prototxt
deleted file mode 100644
index 51093e6..0000000
--- a/deploy/mtcnn-model/det2.prototxt
+++ /dev/null
@@ -1,228 +0,0 @@
-name: "RNet"
-input: "data"
-input_dim: 1
-input_dim: 3
-input_dim: 24
-input_dim: 24
-
-
-##########################
-######################
-layer {
- name: "conv1"
- type: "Convolution"
- bottom: "data"
- top: "conv1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu1"
- type: "PReLU"
- bottom: "conv1"
- top: "conv1"
- propagate_down: true
-}
-layer {
- name: "pool1"
- type: "Pooling"
- bottom: "conv1"
- top: "pool1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2"
- type: "Convolution"
- bottom: "pool1"
- top: "conv2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu2"
- type: "PReLU"
- bottom: "conv2"
- top: "conv2"
- propagate_down: true
-}
-layer {
- name: "pool2"
- type: "Pooling"
- bottom: "conv2"
- top: "pool2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-####################################
-
-##################################
-layer {
- name: "conv3"
- type: "Convolution"
- bottom: "pool2"
- top: "conv3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu3"
- type: "PReLU"
- bottom: "conv3"
- top: "conv3"
- propagate_down: true
-}
-###############################
-
-###############################
-
-layer {
- name: "conv4"
- type: "InnerProduct"
- bottom: "conv3"
- top: "conv4"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- inner_product_param {
- num_output: 128
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu4"
- type: "PReLU"
- bottom: "conv4"
- top: "conv4"
-}
-
-layer {
- name: "conv5-1"
- type: "InnerProduct"
- bottom: "conv4"
- top: "conv5-1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- inner_product_param {
- num_output: 2
- #kernel_size: 1
- #stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "conv5-2"
- type: "InnerProduct"
- bottom: "conv4"
- top: "conv5-2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 4
- #kernel_size: 1
- #stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prob1"
- type: "Softmax"
- bottom: "conv5-1"
- top: "prob1"
-}
\ No newline at end of file
diff --git a/deploy/mtcnn-model/det3-0001.params b/deploy/mtcnn-model/det3-0001.params
deleted file mode 100644
index cae898b..0000000
Binary files a/deploy/mtcnn-model/det3-0001.params and /dev/null differ
diff --git a/deploy/mtcnn-model/det3-symbol.json b/deploy/mtcnn-model/det3-symbol.json
deleted file mode 100644
index 00061ed..0000000
--- a/deploy/mtcnn-model/det3-symbol.json
+++ /dev/null
@@ -1,418 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "param": {},
- "name": "data",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "32",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1",
- "inputs": [[0, 0], [1, 0], [2, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1",
- "inputs": [[3, 0], [4, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1",
- "inputs": [[5, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2",
- "inputs": [[6, 0], [7, 0], [8, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2",
- "inputs": [[9, 0], [10, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2",
- "inputs": [[11, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3",
- "inputs": [[12, 0], [13, 0], [14, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3",
- "inputs": [[15, 0], [16, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(2,2)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool3",
- "inputs": [[17, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv4",
- "inputs": [[18, 0], [19, 0], [20, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4",
- "inputs": [[21, 0], [22, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "256"
- },
- "name": "conv5",
- "inputs": [[23, 0], [24, 0], [25, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu5",
- "inputs": [[26, 0], [27, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "10"
- },
- "name": "conv6_3",
- "inputs": [[28, 0], [29, 0], [30, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "4"
- },
- "name": "conv6_2",
- "inputs": [[28, 0], [32, 0], [33, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "conv6_1",
- "inputs": [[28, 0], [35, 0], [36, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prob1_label",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "SoftmaxOutput",
- "param": {
- "grad_scale": "1",
- "ignore_label": "-1",
- "multi_output": "False",
- "normalization": "null",
- "use_ignore": "False"
- },
- "name": "prob1",
- "inputs": [[37, 0], [38, 0]],
- "backward_source_id": -1
- }
- ],
- "arg_nodes": [
- 0,
- 1,
- 2,
- 4,
- 7,
- 8,
- 10,
- 13,
- 14,
- 16,
- 19,
- 20,
- 22,
- 24,
- 25,
- 27,
- 29,
- 30,
- 32,
- 33,
- 35,
- 36,
- 38
- ],
- "heads": [[31, 0], [34, 0], [39, 0]]
-}
\ No newline at end of file
diff --git a/deploy/mtcnn-model/det3.caffemodel b/deploy/mtcnn-model/det3.caffemodel
deleted file mode 100644
index 7b4b8a4..0000000
Binary files a/deploy/mtcnn-model/det3.caffemodel and /dev/null differ
diff --git a/deploy/mtcnn-model/det3.prototxt b/deploy/mtcnn-model/det3.prototxt
deleted file mode 100644
index a192307..0000000
--- a/deploy/mtcnn-model/det3.prototxt
+++ /dev/null
@@ -1,294 +0,0 @@
-name: "ONet"
-input: "data"
-input_dim: 1
-input_dim: 3
-input_dim: 48
-input_dim: 48
-##################################
-layer {
- name: "conv1"
- type: "Convolution"
- bottom: "data"
- top: "conv1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 32
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu1"
- type: "PReLU"
- bottom: "conv1"
- top: "conv1"
-}
-layer {
- name: "pool1"
- type: "Pooling"
- bottom: "conv1"
- top: "pool1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-layer {
- name: "conv2"
- type: "Convolution"
- bottom: "pool1"
- top: "conv2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-layer {
- name: "prelu2"
- type: "PReLU"
- bottom: "conv2"
- top: "conv2"
-}
-layer {
- name: "pool2"
- type: "Pooling"
- bottom: "conv2"
- top: "pool2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv3"
- type: "Convolution"
- bottom: "pool2"
- top: "conv3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 3
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu3"
- type: "PReLU"
- bottom: "conv3"
- top: "conv3"
-}
-layer {
- name: "pool3"
- type: "Pooling"
- bottom: "conv3"
- top: "pool3"
- pooling_param {
- pool: MAX
- kernel_size: 2
- stride: 2
- }
-}
-layer {
- name: "conv4"
- type: "Convolution"
- bottom: "pool3"
- top: "conv4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 128
- kernel_size: 2
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu4"
- type: "PReLU"
- bottom: "conv4"
- top: "conv4"
-}
-
-
-layer {
- name: "conv5"
- type: "InnerProduct"
- bottom: "conv4"
- top: "conv5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- #kernel_size: 3
- num_output: 256
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-layer {
- name: "drop5"
- type: "Dropout"
- bottom: "conv5"
- top: "conv5"
- dropout_param {
- dropout_ratio: 0.25
- }
-}
-layer {
- name: "prelu5"
- type: "PReLU"
- bottom: "conv5"
- top: "conv5"
-}
-
-
-layer {
- name: "conv6-1"
- type: "InnerProduct"
- bottom: "conv5"
- top: "conv6-1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- #kernel_size: 1
- num_output: 2
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "conv6-2"
- type: "InnerProduct"
- bottom: "conv5"
- top: "conv6-2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- #kernel_size: 1
- num_output: 4
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "conv6-3"
- type: "InnerProduct"
- bottom: "conv5"
- top: "conv6-3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- #kernel_size: 1
- num_output: 10
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prob1"
- type: "Softmax"
- bottom: "conv6-1"
- top: "prob1"
-}
diff --git a/deploy/mtcnn-model/det4-0001.params b/deploy/mtcnn-model/det4-0001.params
deleted file mode 100644
index efca9a9..0000000
Binary files a/deploy/mtcnn-model/det4-0001.params and /dev/null differ
diff --git a/deploy/mtcnn-model/det4-symbol.json b/deploy/mtcnn-model/det4-symbol.json
deleted file mode 100644
index aa90e2a..0000000
--- a/deploy/mtcnn-model/det4-symbol.json
+++ /dev/null
@@ -1,1392 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "param": {},
- "name": "data",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "SliceChannel",
- "param": {
- "axis": "1",
- "num_outputs": "5",
- "squeeze_axis": "False"
- },
- "name": "slice",
- "inputs": [[0, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_1",
- "inputs": [[1, 0], [2, 0], [3, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_1",
- "inputs": [[4, 0], [5, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_1",
- "inputs": [[6, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_1",
- "inputs": [[7, 0], [8, 0], [9, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_1",
- "inputs": [[10, 0], [11, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_1",
- "inputs": [[12, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_1",
- "inputs": [[13, 0], [14, 0], [15, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_1",
- "inputs": [[16, 0], [17, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_2",
- "inputs": [[1, 1], [19, 0], [20, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_2",
- "inputs": [[21, 0], [22, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_2",
- "inputs": [[23, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_2",
- "inputs": [[24, 0], [25, 0], [26, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_2",
- "inputs": [[27, 0], [28, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_2",
- "inputs": [[29, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_2",
- "inputs": [[30, 0], [31, 0], [32, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_2",
- "inputs": [[33, 0], [34, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_3",
- "inputs": [[1, 2], [36, 0], [37, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_3",
- "inputs": [[38, 0], [39, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_3",
- "inputs": [[40, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_3",
- "inputs": [[41, 0], [42, 0], [43, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_3",
- "inputs": [[44, 0], [45, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_3",
- "inputs": [[46, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_3",
- "inputs": [[47, 0], [48, 0], [49, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_3",
- "inputs": [[50, 0], [51, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_4",
- "inputs": [[1, 3], [53, 0], [54, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_4",
- "inputs": [[55, 0], [56, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_4",
- "inputs": [[57, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_4",
- "inputs": [[58, 0], [59, 0], [60, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_4",
- "inputs": [[61, 0], [62, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_4",
- "inputs": [[63, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_4",
- "inputs": [[64, 0], [65, 0], [66, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_4",
- "inputs": [[67, 0], [68, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_5",
- "inputs": [[1, 4], [70, 0], [71, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_5",
- "inputs": [[72, 0], [73, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_5",
- "inputs": [[74, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_5",
- "inputs": [[75, 0], [76, 0], [77, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_5",
- "inputs": [[78, 0], [79, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_5",
- "inputs": [[80, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_5",
- "inputs": [[81, 0], [82, 0], [83, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_5",
- "inputs": [[84, 0], [85, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Concat",
- "param": {
- "dim": "1",
- "num_args": "5"
- },
- "name": "concat",
- "inputs": [[18, 0], [35, 0], [52, 0], [69, 0], [86, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "256"
- },
- "name": "fc4",
- "inputs": [[87, 0], [88, 0], [89, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4",
- "inputs": [[90, 0], [91, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_1",
- "inputs": [[92, 0], [93, 0], [94, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_1",
- "inputs": [[95, 0], [96, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_1",
- "inputs": [[97, 0], [98, 0], [99, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_2",
- "inputs": [[92, 0], [101, 0], [102, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_2",
- "inputs": [[103, 0], [104, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_2",
- "inputs": [[105, 0], [106, 0], [107, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_3",
- "inputs": [[92, 0], [109, 0], [110, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_3",
- "inputs": [[111, 0], [112, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_3",
- "inputs": [[113, 0], [114, 0], [115, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_4",
- "inputs": [[92, 0], [117, 0], [118, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_4",
- "inputs": [[119, 0], [120, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_4",
- "inputs": [[121, 0], [122, 0], [123, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_5",
- "inputs": [[92, 0], [125, 0], [126, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_5",
- "inputs": [[127, 0], [128, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_5",
- "inputs": [[129, 0], [130, 0], [131, 0]],
- "backward_source_id": -1
- }
- ],
- "arg_nodes": [
- 0,
- 2,
- 3,
- 5,
- 8,
- 9,
- 11,
- 14,
- 15,
- 17,
- 19,
- 20,
- 22,
- 25,
- 26,
- 28,
- 31,
- 32,
- 34,
- 36,
- 37,
- 39,
- 42,
- 43,
- 45,
- 48,
- 49,
- 51,
- 53,
- 54,
- 56,
- 59,
- 60,
- 62,
- 65,
- 66,
- 68,
- 70,
- 71,
- 73,
- 76,
- 77,
- 79,
- 82,
- 83,
- 85,
- 88,
- 89,
- 91,
- 93,
- 94,
- 96,
- 98,
- 99,
- 101,
- 102,
- 104,
- 106,
- 107,
- 109,
- 110,
- 112,
- 114,
- 115,
- 117,
- 118,
- 120,
- 122,
- 123,
- 125,
- 126,
- 128,
- 130,
- 131
- ],
- "heads": [[100, 0], [108, 0], [116, 0], [124, 0], [132, 0]]
-}
\ No newline at end of file
diff --git a/deploy/mtcnn-model/det4.caffemodel b/deploy/mtcnn-model/det4.caffemodel
deleted file mode 100644
index 38353c4..0000000
Binary files a/deploy/mtcnn-model/det4.caffemodel and /dev/null differ
diff --git a/deploy/mtcnn-model/det4.prototxt b/deploy/mtcnn-model/det4.prototxt
deleted file mode 100644
index 4cdc329..0000000
--- a/deploy/mtcnn-model/det4.prototxt
+++ /dev/null
@@ -1,995 +0,0 @@
-name: "LNet"
-input: "data"
-input_dim: 1
-input_dim: 15
-input_dim: 24
-input_dim: 24
-
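-# The 15-channel input stacks five 3-channel 24x24 patches, one per facial
-# landmark; the Slice layer below splits them back into five separate blobs.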
-layer {
- name: "slicer_data"
- type: "Slice"
- bottom: "data"
- top: "data241"
- top: "data242"
- top: "data243"
- top: "data244"
- top: "data245"
- slice_param {
- axis: 1
- slice_point: 3
- slice_point: 6
- slice_point: 9
- slice_point: 12
- }
-}
-layer {
- name: "conv1_1"
- type: "Convolution"
- bottom: "data241"
- top: "conv1_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_1"
- type: "PReLU"
- bottom: "conv1_1"
- top: "conv1_1"
-
-}
-layer {
- name: "pool1_1"
- type: "Pooling"
- bottom: "conv1_1"
- top: "pool1_1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_1"
- type: "Convolution"
- bottom: "pool1_1"
- top: "conv2_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_1"
- type: "PReLU"
- bottom: "conv2_1"
- top: "conv2_1"
-}
-layer {
- name: "pool2_1"
- type: "Pooling"
- bottom: "conv2_1"
- top: "pool2_1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_1"
- type: "Convolution"
- bottom: "pool2_1"
- top: "conv3_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_1"
- type: "PReLU"
- bottom: "conv3_1"
- top: "conv3_1"
-}
-##########################
-layer {
- name: "conv1_2"
- type: "Convolution"
- bottom: "data242"
- top: "conv1_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_2"
- type: "PReLU"
- bottom: "conv1_2"
- top: "conv1_2"
-
-}
-layer {
- name: "pool1_2"
- type: "Pooling"
- bottom: "conv1_2"
- top: "pool1_2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_2"
- type: "Convolution"
- bottom: "pool1_2"
- top: "conv2_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_2"
- type: "PReLU"
- bottom: "conv2_2"
- top: "conv2_2"
-}
-layer {
- name: "pool2_2"
- type: "Pooling"
- bottom: "conv2_2"
- top: "pool2_2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_2"
- type: "Convolution"
- bottom: "pool2_2"
- top: "conv3_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_2"
- type: "PReLU"
- bottom: "conv3_2"
- top: "conv3_2"
-}
-##########################
-##########################
-layer {
- name: "conv1_3"
- type: "Convolution"
- bottom: "data243"
- top: "conv1_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_3"
- type: "PReLU"
- bottom: "conv1_3"
- top: "conv1_3"
-
-}
-layer {
- name: "pool1_3"
- type: "Pooling"
- bottom: "conv1_3"
- top: "pool1_3"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_3"
- type: "Convolution"
- bottom: "pool1_3"
- top: "conv2_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_3"
- type: "PReLU"
- bottom: "conv2_3"
- top: "conv2_3"
-}
-layer {
- name: "pool2_3"
- type: "Pooling"
- bottom: "conv2_3"
- top: "pool2_3"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_3"
- type: "Convolution"
- bottom: "pool2_3"
- top: "conv3_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_3"
- type: "PReLU"
- bottom: "conv3_3"
- top: "conv3_3"
-}
-##########################
-##########################
-layer {
- name: "conv1_4"
- type: "Convolution"
- bottom: "data244"
- top: "conv1_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_4"
- type: "PReLU"
- bottom: "conv1_4"
- top: "conv1_4"
-
-}
-layer {
- name: "pool1_4"
- type: "Pooling"
- bottom: "conv1_4"
- top: "pool1_4"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_4"
- type: "Convolution"
- bottom: "pool1_4"
- top: "conv2_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_4"
- type: "PReLU"
- bottom: "conv2_4"
- top: "conv2_4"
-}
-layer {
- name: "pool2_4"
- type: "Pooling"
- bottom: "conv2_4"
- top: "pool2_4"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_4"
- type: "Convolution"
- bottom: "pool2_4"
- top: "conv3_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_4"
- type: "PReLU"
- bottom: "conv3_4"
- top: "conv3_4"
-}
-##########################
-##########################
-layer {
- name: "conv1_5"
- type: "Convolution"
- bottom: "data245"
- top: "conv1_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_5"
- type: "PReLU"
- bottom: "conv1_5"
- top: "conv1_5"
-
-}
-layer {
- name: "pool1_5"
- type: "Pooling"
- bottom: "conv1_5"
- top: "pool1_5"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_5"
- type: "Convolution"
- bottom: "pool1_5"
- top: "conv2_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_5"
- type: "PReLU"
- bottom: "conv2_5"
- top: "conv2_5"
-}
-layer {
- name: "pool2_5"
- type: "Pooling"
- bottom: "conv2_5"
- top: "pool2_5"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_5"
- type: "Convolution"
- bottom: "pool2_5"
- top: "conv3_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_5"
- type: "PReLU"
- bottom: "conv3_5"
- top: "conv3_5"
-}
-##########################
-layer {
- name: "concat"
- bottom: "conv3_1"
- bottom: "conv3_2"
- bottom: "conv3_3"
- bottom: "conv3_4"
- bottom: "conv3_5"
- top: "conv3"
- type: "Concat"
- concat_param {
- axis: 1
- }
-}
-##########################
-layer {
- name: "fc4"
- type: "InnerProduct"
- bottom: "conv3"
- top: "fc4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 256
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4"
- type: "PReLU"
- bottom: "fc4"
- top: "fc4"
-}
-############################
-layer {
- name: "fc4_1"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_1"
- type: "PReLU"
- bottom: "fc4_1"
- top: "fc4_1"
-}
-layer {
- name: "fc5_1"
- type: "InnerProduct"
- bottom: "fc4_1"
- top: "fc5_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-
-#########################
-layer {
- name: "fc4_2"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_2"
- type: "PReLU"
- bottom: "fc4_2"
- top: "fc4_2"
-}
-layer {
- name: "fc5_2"
- type: "InnerProduct"
- bottom: "fc4_2"
- top: "fc5_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-#########################
-layer {
- name: "fc4_3"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_3"
- type: "PReLU"
- bottom: "fc4_3"
- top: "fc4_3"
-}
-layer {
- name: "fc5_3"
- type: "InnerProduct"
- bottom: "fc4_3"
- top: "fc5_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-#########################
-layer {
- name: "fc4_4"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_4"
- type: "PReLU"
- bottom: "fc4_4"
- top: "fc4_4"
-}
-layer {
- name: "fc5_4"
- type: "InnerProduct"
- bottom: "fc4_4"
- top: "fc5_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-#########################
-layer {
- name: "fc4_5"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_5"
- type: "PReLU"
- bottom: "fc4_5"
- top: "fc4_5"
-}
-layer {
- name: "fc5_5"
- type: "InnerProduct"
- bottom: "fc4_5"
- top: "fc5_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-#########################
-
diff --git a/deploy/mtcnn_detector.py b/deploy/mtcnn_detector.py
deleted file mode 100644
index 8fa22f2..0000000
--- a/deploy/mtcnn_detector.py
+++ /dev/null
@@ -1,864 +0,0 @@
-# coding: utf-8
-import os
-import mxnet as mx
-import numpy as np
-import math
-import cv2
-from multiprocessing import Pool
-from itertools import repeat
-try:
- from itertools import izip
-except ImportError:
- izip = zip
-
-def nms(boxes, overlap_threshold, mode='Union'):
- """
- non max suppression
-
- Parameters:
- ----------
-    boxes: numpy array n x 5
- input bbox array
- overlap_threshold: float number
- threshold of overlap
-    mode: string
-        how to compute the overlap ratio, 'Union' or 'Min'
- Returns:
- -------
- index array of the selected bbox
- """
- # if there are no boxes, return an empty list
- if len(boxes) == 0:
- return []
-
-    # if the bounding boxes are integers, convert them to floats
- if boxes.dtype.kind == "i":
- boxes = boxes.astype("float")
-
- # initialize the list of picked indexes
- pick = []
-
- # grab the coordinates of the bounding boxes
- x1, y1, x2, y2, score = [boxes[:, i] for i in range(5)]
-
- area = (x2 - x1 + 1) * (y2 - y1 + 1)
- idxs = np.argsort(score)
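-    # scores are sorted ascending, so the last index is always the
-    # highest-scoring box still under consideration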
-
- # keep looping while some indexes still remain in the indexes list
- while len(idxs) > 0:
- # grab the last index in the indexes list and add the index value to the list of picked indexes
- last = len(idxs) - 1
- i = idxs[last]
- pick.append(i)
-
- xx1 = np.maximum(x1[i], x1[idxs[:last]])
- yy1 = np.maximum(y1[i], y1[idxs[:last]])
- xx2 = np.minimum(x2[i], x2[idxs[:last]])
- yy2 = np.minimum(y2[i], y2[idxs[:last]])
-
- # compute the width and height of the bounding box
- w = np.maximum(0, xx2 - xx1 + 1)
- h = np.maximum(0, yy2 - yy1 + 1)
-
- inter = w * h
- if mode == 'Min':
- overlap = inter / np.minimum(area[i], area[idxs[:last]])
- else:
- overlap = inter / (area[i] + area[idxs[:last]] - inter)
-
-        # drop the current index and every index whose overlap exceeds the threshold
- idxs = np.delete(
- idxs,
- np.concatenate(([last], np.where(overlap > overlap_threshold)[0])))
-
- return pick
-
-
-def adjust_input(in_data):
- """
-    adjust the input from (h, w, c) to (1, c, h, w) for network input
-
- Parameters:
- ----------
- in_data: numpy array of shape (h, w, c)
- input data
- Returns:
- -------
- out_data: numpy array of shape (1, c, h, w)
- reshaped array
- """
- if in_data.dtype is not np.dtype('float32'):
- out_data = in_data.astype(np.float32)
- else:
- out_data = in_data
-
- out_data = out_data.transpose((2, 0, 1))
- out_data = np.expand_dims(out_data, 0)
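-    # 0.0078125 == 1/128: map pixel values from [0, 255] into roughly [-1, 1]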
- out_data = (out_data - 127.5) * 0.0078125
- return out_data
-
-
-def generate_bbox(map, reg, scale, threshold):
- """
- generate bbox from feature map
- Parameters:
- ----------
- map: numpy array , n x m x 1
- detect score for each position
- reg: numpy array , n x m x 4
- bbox
- scale: float number
- scale of this detection
- threshold: float number
- detect threshold
- Returns:
- -------
- bbox array
- """
- stride = 2
- cellsize = 12
-
- t_index = np.where(map > threshold)
-
- # find nothing
- if t_index[0].size == 0:
- return np.array([])
-
- dx1, dy1, dx2, dy2 = [reg[0, i, t_index[0], t_index[1]] for i in range(4)]
-
- reg = np.array([dx1, dy1, dx2, dy2])
- score = map[t_index[0], t_index[1]]
- boundingbox = np.vstack([
- np.round((stride * t_index[1] + 1) / scale),
- np.round((stride * t_index[0] + 1) / scale),
- np.round((stride * t_index[1] + 1 + cellsize) / scale),
- np.round((stride * t_index[0] + 1 + cellsize) / scale), score, reg
- ])
-
- return boundingbox.T
-
-
-def detect_first_stage(img, net, scale, threshold):
- """
- run PNet for first stage
-
- Parameters:
- ----------
-    img: numpy array, bgr order
-        input image
-    net: PNet
-        worker network
-    scale: float number
-        scale factor applied to the input image
-    threshold: float number
-        detection threshold for this stage
- Returns:
- -------
-    total_boxes : numpy array, n x 9 candidate bboxes after NMS
- """
- height, width, _ = img.shape
- hs = int(math.ceil(height * scale))
- ws = int(math.ceil(width * scale))
-
- im_data = cv2.resize(img, (ws, hs))
-
- # adjust for the network input
- input_buf = adjust_input(im_data)
- output = net.predict(input_buf)
- boxes = generate_bbox(output[1][0, 1, :, :], output[0], scale, threshold)
-
- if boxes.size == 0:
- return None
-
- # nms
- pick = nms(boxes[:, 0:5], 0.5, mode='Union')
- boxes = boxes[pick]
- return boxes
-
-
-def detect_first_stage_warpper(args):
- return detect_first_stage(*args)
-
-class MtcnnDetector(object):
- """
- Joint Face Detection and Alignment using Multi-task Cascaded Convolutional Neural Networks
- see https://github.com/kpzhang93/MTCNN_face_detection_alignment
- this is a mxnet version
- """
- def __init__(self,
- model_folder='.',
- minsize=20,
- threshold=[0.6, 0.7, 0.8],
- factor=0.709,
- num_worker=1,
- accurate_landmark=False,
- ctx=mx.cpu()):
- """
- Initialize the detector
-
- Parameters:
- ----------
- model_folder : string
- path for the models
-        minsize : float number
-            minimal face size to detect, in pixels
-        threshold : list of float
-            detection thresholds for the 3 cascade stages
- factor: float number
- scale factor for image pyramid
- num_worker: int number
- number of processes we use for first stage
- accurate_landmark: bool
- use accurate landmark localization or not
-
- """
- self.num_worker = num_worker
- self.accurate_landmark = accurate_landmark
-
- # load 4 models from folder
- models = ['det1', 'det2', 'det3', 'det4']
- models = [os.path.join(model_folder, f) for f in models]
-
- self.PNets = []
- for i in range(num_worker):
-            worker_net = mx.model.FeedForward.load(models[0], 1, ctx=ctx)
-            self.PNets.append(worker_net)
-
- #self.Pool = Pool(num_worker)
-
- self.RNet = mx.model.FeedForward.load(models[1], 1, ctx=ctx)
- self.ONet = mx.model.FeedForward.load(models[2], 1, ctx=ctx)
- self.LNet = mx.model.FeedForward.load(models[3], 1, ctx=ctx)
-
- self.minsize = float(minsize)
- self.factor = float(factor)
- self.threshold = threshold
-
- def convert_to_square(self, bbox):
- """
- convert bbox to square
-
- Parameters:
- ----------
- bbox: numpy array , shape n x 5
- input bbox
-
- Returns:
- -------
- square bbox
- """
- square_bbox = bbox.copy()
-
- h = bbox[:, 3] - bbox[:, 1] + 1
- w = bbox[:, 2] - bbox[:, 0] + 1
- max_side = np.maximum(h, w)
- square_bbox[:, 0] = bbox[:, 0] + w * 0.5 - max_side * 0.5
- square_bbox[:, 1] = bbox[:, 1] + h * 0.5 - max_side * 0.5
- square_bbox[:, 2] = square_bbox[:, 0] + max_side - 1
- square_bbox[:, 3] = square_bbox[:, 1] + max_side - 1
- return square_bbox
-
- def calibrate_box(self, bbox, reg):
- """
- calibrate bboxes
-
- Parameters:
- ----------
- bbox: numpy array, shape n x 5
- input bboxes
- reg: numpy array, shape n x 4
-            bbox adjustments
-
- Returns:
- -------
- bboxes after refinement
-
- """
- w = bbox[:, 2] - bbox[:, 0] + 1
- w = np.expand_dims(w, 1)
- h = bbox[:, 3] - bbox[:, 1] + 1
- h = np.expand_dims(h, 1)
- reg_m = np.hstack([w, h, w, h])
- aug = reg_m * reg
- bbox[:, 0:4] = bbox[:, 0:4] + aug
- return bbox
-
- def pad(self, bboxes, w, h):
- """
-        pad the bboxes, and also restrict their size
-
- Parameters:
- ----------
- bboxes: numpy array, n x 5
- input bboxes
- w: float number
- width of the input image
- h: float number
- height of the input image
-        Returns:
-        -------
- dy, dx : numpy array, n x 1
- start point of the bbox in target image
- edy, edx : numpy array, n x 1
- end point of the bbox in target image
- y, x : numpy array, n x 1
- start point of the bbox in original image
-        ey, ex : numpy array, n x 1
- end point of the bbox in original image
- tmph, tmpw: numpy array, n x 1
- height and width of the bbox
-
- """
-        tmpw = bboxes[:, 2] - bboxes[:, 0] + 1
-        tmph = bboxes[:, 3] - bboxes[:, 1] + 1
- num_box = bboxes.shape[0]
-
- dx, dy = np.zeros((num_box, )), np.zeros((num_box, ))
- edx, edy = tmpw.copy() - 1, tmph.copy() - 1
-
- x, y, ex, ey = bboxes[:, 0], bboxes[:, 1], bboxes[:, 2], bboxes[:, 3]
-
- tmp_index = np.where(ex > w - 1)
- edx[tmp_index] = tmpw[tmp_index] + w - 2 - ex[tmp_index]
- ex[tmp_index] = w - 1
-
- tmp_index = np.where(ey > h - 1)
- edy[tmp_index] = tmph[tmp_index] + h - 2 - ey[tmp_index]
- ey[tmp_index] = h - 1
-
- tmp_index = np.where(x < 0)
- dx[tmp_index] = 0 - x[tmp_index]
- x[tmp_index] = 0
-
- tmp_index = np.where(y < 0)
- dy[tmp_index] = 0 - y[tmp_index]
- y[tmp_index] = 0
-
- return_list = [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph]
- return_list = [item.astype(np.int32) for item in return_list]
-
- return return_list
-
- def slice_index(self, number):
- """
-        split the indices 0..number-1 into chunks of size self.num_worker
-        (the last chunk may be smaller)
-        Parameters:
-        ----------
-        number: int
-            total number of indices to split
- """
- def chunks(l, n):
- """Yield successive n-sized chunks from l."""
- for i in range(0, len(l), n):
- yield l[i:i + n]
-
- num_list = range(number)
- return list(chunks(num_list, self.num_worker))
-
- def detect_face_limited(self, img, det_type=2):
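-        # det_type >= 2 first refines the whole-image box with RNet;
-        # both paths then score the (refined) box with ONet below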
- height, width, _ = img.shape
- if det_type >= 2:
- total_boxes = np.array(
- [[0.0, 0.0, img.shape[1], img.shape[0], 0.9]],
- dtype=np.float32)
- num_box = total_boxes.shape[0]
-
- # pad the bbox
- [dy, edy, dx, edx, y, ey, x, ex, tmpw,
- tmph] = self.pad(total_boxes, width, height)
- # (3, 24, 24) is the input shape for RNet
- input_buf = np.zeros((num_box, 3, 24, 24), dtype=np.float32)
-
- for i in range(num_box):
- tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.uint8)
- tmp[dy[i]:edy[i] + 1,
- dx[i]:edx[i] + 1, :] = img[y[i]:ey[i] + 1,
- x[i]:ex[i] + 1, :]
- input_buf[i, :, :, :] = adjust_input(cv2.resize(tmp, (24, 24)))
-
- output = self.RNet.predict(input_buf)
-
- # filter the total_boxes with threshold
- passed = np.where(output[1][:, 1] > self.threshold[1])
- total_boxes = total_boxes[passed]
-
- if total_boxes.size == 0:
- return None
-
- total_boxes[:, 4] = output[1][passed, 1].reshape((-1, ))
- reg = output[0][passed]
-
- # nms
- pick = nms(total_boxes, 0.7, 'Union')
- total_boxes = total_boxes[pick]
- total_boxes = self.calibrate_box(total_boxes, reg[pick])
- total_boxes = self.convert_to_square(total_boxes)
- total_boxes[:, 0:4] = np.round(total_boxes[:, 0:4])
- else:
- total_boxes = np.array(
- [[0.0, 0.0, img.shape[1], img.shape[0], 0.9]],
- dtype=np.float32)
- num_box = total_boxes.shape[0]
- [dy, edy, dx, edx, y, ey, x, ex, tmpw,
- tmph] = self.pad(total_boxes, width, height)
- # (3, 48, 48) is the input shape for ONet
- input_buf = np.zeros((num_box, 3, 48, 48), dtype=np.float32)
-
- for i in range(num_box):
- tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.float32)
- tmp[dy[i]:edy[i] + 1, dx[i]:edx[i] + 1, :] = img[y[i]:ey[i] + 1,
- x[i]:ex[i] + 1, :]
- input_buf[i, :, :, :] = adjust_input(cv2.resize(tmp, (48, 48)))
-
- output = self.ONet.predict(input_buf)
- #print(output[2])
-
- # filter the total_boxes with threshold
- passed = np.where(output[2][:, 1] > self.threshold[2])
- total_boxes = total_boxes[passed]
-
- if total_boxes.size == 0:
- return None
-
- total_boxes[:, 4] = output[2][passed, 1].reshape((-1, ))
- reg = output[1][passed]
- points = output[0][passed]
-
- # compute landmark points
- bbw = total_boxes[:, 2] - total_boxes[:, 0] + 1
- bbh = total_boxes[:, 3] - total_boxes[:, 1] + 1
- points[:, 0:5] = np.expand_dims(
- total_boxes[:, 0], 1) + np.expand_dims(bbw, 1) * points[:, 0:5]
- points[:, 5:10] = np.expand_dims(
- total_boxes[:, 1], 1) + np.expand_dims(bbh, 1) * points[:, 5:10]
-
- # nms
- total_boxes = self.calibrate_box(total_boxes, reg)
- pick = nms(total_boxes, 0.7, 'Min')
- total_boxes = total_boxes[pick]
- points = points[pick]
-
- if not self.accurate_landmark:
- return total_boxes, points
-
- #############################################
- # extended stage
- #############################################
- num_box = total_boxes.shape[0]
- patchw = np.maximum(total_boxes[:, 2] - total_boxes[:, 0] + 1,
- total_boxes[:, 3] - total_boxes[:, 1] + 1)
- patchw = np.round(patchw * 0.25)
-
- # make it even
- patchw[np.where(np.mod(patchw, 2) == 1)] += 1
-
- input_buf = np.zeros((num_box, 15, 24, 24), dtype=np.float32)
- for i in range(5):
- x, y = points[:, i], points[:, i + 5]
- x, y = np.round(x - 0.5 * patchw), np.round(y - 0.5 * patchw)
- [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph] = self.pad(
- np.vstack([x, y, x + patchw - 1, y + patchw - 1]).T, width,
- height)
- for j in range(num_box):
- tmpim = np.zeros((tmpw[j], tmpw[j], 3), dtype=np.float32)
- tmpim[dy[j]:edy[j] + 1,
- dx[j]:edx[j] + 1, :] = img[y[j]:ey[j] + 1,
- x[j]:ex[j] + 1, :]
- input_buf[j, i * 3:i * 3 + 3, :, :] = adjust_input(
- cv2.resize(tmpim, (24, 24)))
-
- output = self.LNet.predict(input_buf)
-
- pointx = np.zeros((num_box, 5))
- pointy = np.zeros((num_box, 5))
-
- for k in range(5):
- # do not make a large movement
- tmp_index = np.where(np.abs(output[k] - 0.5) > 0.35)
- output[k][tmp_index[0]] = 0.5
-
- pointx[:, k] = np.round(points[:, k] -
- 0.5 * patchw) + output[k][:, 0] * patchw
- pointy[:, k] = np.round(points[:, k + 5] -
- 0.5 * patchw) + output[k][:, 1] * patchw
-
- points = np.hstack([pointx, pointy])
- points = points.astype(np.int32)
-
- return total_boxes, points
-
- def detect_face(self, img, det_type=0):
- """
- detect face over img
- Parameters:
- ----------
-            img: numpy array, bgr order, shape (h, w, 3)
-            input image
-        Returns:
-        -------
-            bboxes: numpy array, n x 5 (x1, y1, x2, y2, score)
-            bboxes
-            points: numpy array, n x 10 (x1, x2 ... x5, y1, y2 ... y5)
- landmarks
- """
-
-        # check input before touching img.shape
-        if img is None:
-            return None
-        # only works for color images
-        if len(img.shape) != 3:
-            return None
-        height, width, _ = img.shape
-        if det_type == 0:
-            MIN_DET_SIZE = 12
-
- # detected boxes
- total_boxes = []
-
- minl = min(height, width)
-
- # get all the valid scales
- scales = []
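-            # rescale so that self.minsize maps onto PNet's 12-px window, then
-            # keep shrinking by self.factor until the shorter side drops below 12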
- m = MIN_DET_SIZE / self.minsize
- minl *= m
- factor_count = 0
- while minl > MIN_DET_SIZE:
- scales.append(m * self.factor**factor_count)
- minl *= self.factor
- factor_count += 1
-
- #############################################
- # first stage
- #############################################
- #for scale in scales:
- # return_boxes = self.detect_first_stage(img, scale, 0)
- # if return_boxes is not None:
- # total_boxes.append(return_boxes)
-
- sliced_index = self.slice_index(len(scales))
- total_boxes = []
- for batch in sliced_index:
- #local_boxes = self.Pool.map( detect_first_stage_warpper, \
- # izip(repeat(img), self.PNets[:len(batch)], [scales[i] for i in batch], repeat(self.threshold[0])) )
- local_boxes = map( detect_first_stage_warpper, \
- izip(repeat(img), self.PNets[:len(batch)], [scales[i] for i in batch], repeat(self.threshold[0])) )
- total_boxes.extend(local_boxes)
-
- # remove the Nones
- total_boxes = [i for i in total_boxes if i is not None]
-
- if len(total_boxes) == 0:
- return None
-
- total_boxes = np.vstack(total_boxes)
-
- if total_boxes.size == 0:
- return None
-
- # merge the detection from first stage
- pick = nms(total_boxes[:, 0:5], 0.7, 'Union')
- total_boxes = total_boxes[pick]
-
- bbw = total_boxes[:, 2] - total_boxes[:, 0] + 1
- bbh = total_boxes[:, 3] - total_boxes[:, 1] + 1
-
- # refine the bboxes
- total_boxes = np.vstack([
- total_boxes[:, 0] + total_boxes[:, 5] * bbw,
- total_boxes[:, 1] + total_boxes[:, 6] * bbh,
- total_boxes[:, 2] + total_boxes[:, 7] * bbw,
- total_boxes[:, 3] + total_boxes[:, 8] * bbh, total_boxes[:, 4]
- ])
-
- total_boxes = total_boxes.T
- total_boxes = self.convert_to_square(total_boxes)
- total_boxes[:, 0:4] = np.round(total_boxes[:, 0:4])
- else:
- total_boxes = np.array(
- [[0.0, 0.0, img.shape[1], img.shape[0], 0.9]],
- dtype=np.float32)
-
- #############################################
- # second stage
- #############################################
- num_box = total_boxes.shape[0]
-
- # pad the bbox
- [dy, edy, dx, edx, y, ey, x, ex, tmpw,
- tmph] = self.pad(total_boxes, width, height)
- # (3, 24, 24) is the input shape for RNet
- input_buf = np.zeros((num_box, 3, 24, 24), dtype=np.float32)
-
- for i in range(num_box):
- tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.uint8)
- tmp[dy[i]:edy[i] + 1, dx[i]:edx[i] + 1, :] = img[y[i]:ey[i] + 1,
- x[i]:ex[i] + 1, :]
- input_buf[i, :, :, :] = adjust_input(cv2.resize(tmp, (24, 24)))
-
- output = self.RNet.predict(input_buf)
-
- # filter the total_boxes with threshold
- passed = np.where(output[1][:, 1] > self.threshold[1])
- total_boxes = total_boxes[passed]
-
- if total_boxes.size == 0:
- return None
-
- total_boxes[:, 4] = output[1][passed, 1].reshape((-1, ))
- reg = output[0][passed]
-
- # nms
- pick = nms(total_boxes, 0.7, 'Union')
- total_boxes = total_boxes[pick]
- total_boxes = self.calibrate_box(total_boxes, reg[pick])
- total_boxes = self.convert_to_square(total_boxes)
- total_boxes[:, 0:4] = np.round(total_boxes[:, 0:4])
-
- #############################################
- # third stage
- #############################################
- num_box = total_boxes.shape[0]
-
- # pad the bbox
- [dy, edy, dx, edx, y, ey, x, ex, tmpw,
- tmph] = self.pad(total_boxes, width, height)
- # (3, 48, 48) is the input shape for ONet
- input_buf = np.zeros((num_box, 3, 48, 48), dtype=np.float32)
-
- for i in range(num_box):
- tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.float32)
- tmp[dy[i]:edy[i] + 1, dx[i]:edx[i] + 1, :] = img[y[i]:ey[i] + 1,
- x[i]:ex[i] + 1, :]
- input_buf[i, :, :, :] = adjust_input(cv2.resize(tmp, (48, 48)))
-
- output = self.ONet.predict(input_buf)
-
- # filter the total_boxes with threshold
- passed = np.where(output[2][:, 1] > self.threshold[2])
- total_boxes = total_boxes[passed]
-
- if total_boxes.size == 0:
- return None
-
- total_boxes[:, 4] = output[2][passed, 1].reshape((-1, ))
- reg = output[1][passed]
- points = output[0][passed]
-
- # compute landmark points
- bbw = total_boxes[:, 2] - total_boxes[:, 0] + 1
- bbh = total_boxes[:, 3] - total_boxes[:, 1] + 1
- points[:, 0:5] = np.expand_dims(
- total_boxes[:, 0], 1) + np.expand_dims(bbw, 1) * points[:, 0:5]
- points[:, 5:10] = np.expand_dims(
- total_boxes[:, 1], 1) + np.expand_dims(bbh, 1) * points[:, 5:10]
-
- # nms
- total_boxes = self.calibrate_box(total_boxes, reg)
- pick = nms(total_boxes, 0.7, 'Min')
- total_boxes = total_boxes[pick]
- points = points[pick]
-
- if not self.accurate_landmark:
- return total_boxes, points
-
- #############################################
- # extended stage
- #############################################
- num_box = total_boxes.shape[0]
- patchw = np.maximum(total_boxes[:, 2] - total_boxes[:, 0] + 1,
- total_boxes[:, 3] - total_boxes[:, 1] + 1)
- patchw = np.round(patchw * 0.25)
-
- # make it even
- patchw[np.where(np.mod(patchw, 2) == 1)] += 1
-
- input_buf = np.zeros((num_box, 15, 24, 24), dtype=np.float32)
- for i in range(5):
- x, y = points[:, i], points[:, i + 5]
- x, y = np.round(x - 0.5 * patchw), np.round(y - 0.5 * patchw)
- [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph] = self.pad(
- np.vstack([x, y, x + patchw - 1, y + patchw - 1]).T, width,
- height)
- for j in range(num_box):
- tmpim = np.zeros((tmpw[j], tmpw[j], 3), dtype=np.float32)
- tmpim[dy[j]:edy[j] + 1,
- dx[j]:edx[j] + 1, :] = img[y[j]:ey[j] + 1,
- x[j]:ex[j] + 1, :]
- input_buf[j, i * 3:i * 3 + 3, :, :] = adjust_input(
- cv2.resize(tmpim, (24, 24)))
-
- output = self.LNet.predict(input_buf)
-
- pointx = np.zeros((num_box, 5))
- pointy = np.zeros((num_box, 5))
-
- for k in range(5):
- # do not make a large movement
- tmp_index = np.where(np.abs(output[k] - 0.5) > 0.35)
- output[k][tmp_index[0]] = 0.5
-
- pointx[:, k] = np.round(points[:, k] -
- 0.5 * patchw) + output[k][:, 0] * patchw
- pointy[:, k] = np.round(points[:, k + 5] -
- 0.5 * patchw) + output[k][:, 1] * patchw
-
- points = np.hstack([pointx, pointy])
- points = points.astype(np.int32)
-
- return total_boxes, points
-
- def list2colmatrix(self, pts_list):
- """
- convert list to column matrix
- Parameters:
- ----------
-        pts_list: list of (x, y) points
-            input list
-        Returns:
-        -------
-        colMat: numpy matrix, 2n x 1
-            points flattened into a single column
-
- """
- assert len(pts_list) > 0
- colMat = []
- for i in range(len(pts_list)):
- colMat.append(pts_list[i][0])
- colMat.append(pts_list[i][1])
- colMat = np.matrix(colMat).transpose()
- return colMat
-
- def find_tfrom_between_shapes(self, from_shape, to_shape):
- """
- find transform between shapes
- Parameters:
- ----------
-        from_shape: column matrix, 2n x 1
-        to_shape: column matrix, 2n x 1
-        Returns:
-        -------
-        tran_m: 2 x 2 rotation-and-scale matrix
-        tran_b: 2 x 1 translation vector
- """
- assert from_shape.shape[0] == to_shape.shape[
- 0] and from_shape.shape[0] % 2 == 0
-
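-        # least-squares similarity transform (Umeyama-style): rotation and
-        # uniform scale from the SVD of the cross-covariance, plus translation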
- sigma_from = 0.0
- sigma_to = 0.0
- cov = np.matrix([[0.0, 0.0], [0.0, 0.0]])
-
- # compute the mean and cov
-        from_shape_points = from_shape.reshape(from_shape.shape[0] // 2, 2)
-        to_shape_points = to_shape.reshape(to_shape.shape[0] // 2, 2)
- mean_from = from_shape_points.mean(axis=0)
- mean_to = to_shape_points.mean(axis=0)
-
- for i in range(from_shape_points.shape[0]):
- temp_dis = np.linalg.norm(from_shape_points[i] - mean_from)
- sigma_from += temp_dis * temp_dis
- temp_dis = np.linalg.norm(to_shape_points[i] - mean_to)
- sigma_to += temp_dis * temp_dis
- cov += (to_shape_points[i].transpose() -
- mean_to.transpose()) * (from_shape_points[i] - mean_from)
-
- sigma_from = sigma_from / to_shape_points.shape[0]
- sigma_to = sigma_to / to_shape_points.shape[0]
- cov = cov / to_shape_points.shape[0]
-
- # compute the affine matrix
- s = np.matrix([[1.0, 0.0], [0.0, 1.0]])
- u, d, vt = np.linalg.svd(cov)
-
- if np.linalg.det(cov) < 0:
- if d[1] < d[0]:
- s[1, 1] = -1
- else:
- s[0, 0] = -1
- r = u * s * vt
- c = 1.0
- if sigma_from != 0:
- c = 1.0 / sigma_from * np.trace(np.diag(d) * s)
-
- tran_b = mean_to.transpose() - c * r * mean_from.transpose()
- tran_m = c * r
-
- return tran_m, tran_b
-
- def extract_image_chips(self, img, points, desired_size=256, padding=0):
- """
- crop and align face
- Parameters:
- ----------
-        img: numpy array, bgr order, shape (h, w, 3)
- input image
- points: numpy array, n x 10 (x1, x2 ... x5, y1, y2 ..y5)
- desired_size: default 256
- padding: default 0
-        Returns:
- -------
- crop_imgs: list, n
- cropped and aligned faces
- """
- crop_imgs = []
- for p in points:
- shape = []
-            for k in range(len(p) // 2):
- shape.append(p[k])
- shape.append(p[k + 5])
-
-            padding = max(padding, 0)
- # average positions of face points
- mean_face_shape_x = [
- 0.224152, 0.75610125, 0.490127, 0.254149, 0.726104
- ]
- mean_face_shape_y = [
- 0.2119465, 0.2119465, 0.628106, 0.780233, 0.780233
- ]
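-            # canonical 5-point template in normalized [0, 1] coordinates:
-            # left eye, right eye, nose, left and right mouth corners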
-
- from_points = []
- to_points = []
-
-            for i in range(len(shape) // 2):
- x = (padding + mean_face_shape_x[i]) / (2 * padding +
- 1) * desired_size
- y = (padding + mean_face_shape_y[i]) / (2 * padding +
- 1) * desired_size
- to_points.append([x, y])
- from_points.append([shape[2 * i], shape[2 * i + 1]])
-
- # convert the points to Mat
- from_mat = self.list2colmatrix(from_points)
- to_mat = self.list2colmatrix(to_points)
-
-            # compute the similarity transform
- tran_m, tran_b = self.find_tfrom_between_shapes(from_mat, to_mat)
-
- probe_vec = np.matrix([1.0, 0.0]).transpose()
- probe_vec = tran_m * probe_vec
-
- scale = np.linalg.norm(probe_vec)
-            angle = 180.0 / math.pi * math.atan2(probe_vec[1, 0], probe_vec[0, 0])
-
- from_center = [(shape[0] + shape[2]) / 2.0,
- (shape[1] + shape[3]) / 2.0]
- to_center = [0, 0]
- to_center[1] = desired_size * 0.4
- to_center[0] = desired_size * 0.5
-
- ex = to_center[0] - from_center[0]
- ey = to_center[1] - from_center[1]
-
- rot_mat = cv2.getRotationMatrix2D((from_center[0], from_center[1]),
- -1 * angle, scale)
- rot_mat[0][2] += ex
- rot_mat[1][2] += ey
-
- chips = cv2.warpAffine(img, rot_mat, (desired_size, desired_size))
- crop_imgs.append(chips)
-
- return crop_imgs
-
diff --git a/detection/README.md b/detection/README.md
new file mode 100644
index 0000000..229814e
--- /dev/null
+++ b/detection/README.md
@@ -0,0 +1,42 @@
+## Face Detection
+
+
+
+
+
+
+
+## Introduction
+
+These are the face detection methods of [InsightFace](https://insightface.ai)
+
+
+
+
+
+
+
+### Datasets
+
+ Please refer to [datasets](_datasets_) page for the details of face detection datasets used for training and evaluation.
+
+### Evaluation
+
+ Please refer to [evaluation](_evaluation_) page for the details of face detection evaluation.
+
+
+## Methods
+
+
+Supported methods:
+
+- [x] [RetinaFace (CVPR'2020)](retinaface)
+- [x] [SCRFD (Arxiv'2021)](scrfd)
+
+
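+For a quick start, here is a minimal inference sketch (illustrated with the
+`RetinaFace` wrapper from [RetinaFace](RetinaFace); the model prefix and the
+image path below are placeholders):
+
+```python
+import cv2
+from retinaface import RetinaFace
+
+# load a downloaded MXNet checkpoint (prefix and epoch are placeholders)
+detector = RetinaFace('./model/R50', 0, ctx_id=0, network='net3')
+img = cv2.imread('t1.jpg')
+# faces: n x 5 array (x1, y1, x2, y2, score); landmarks: n x 5 x 2
+faces, landmarks = detector.detect(img, threshold=0.8, scales=[1.0], do_flip=False)
+```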
+
+## Contributing
+
+We appreciate all contributions to improve the face detection model zoo of InsightFace.
+
+
diff --git a/detection/RetinaFace/README.md b/detection/RetinaFace/README.md
index 1e1f07f..521d8d9 100644
--- a/detection/RetinaFace/README.md
+++ b/detection/RetinaFace/README.md
@@ -4,9 +4,9 @@
 RetinaFace is a practical single-stage [SOTA](http://shuoyang1213.me/WIDERFACE/WiderFace_Results.html) face detector which was initially introduced in the [arXiv technical report](https://arxiv.org/abs/1905.00641) and then accepted by [CVPR 2020](https://openaccess.thecvf.com/content_CVPR_2020/html/Deng_RetinaFace_Single-Shot_Multi-Level_Face_Localisation_in_the_Wild_CVPR_2020_paper.html).
-
+
-
+
## Data
diff --git a/detection/RetinaFaceAntiCov/README.md b/detection/retinaface_anticov/README.md
similarity index 90%
rename from detection/RetinaFaceAntiCov/README.md
rename to detection/retinaface_anticov/README.md
index ae6c222..97b3b24 100644
--- a/detection/RetinaFaceAntiCov/README.md
+++ b/detection/retinaface_anticov/README.md
@@ -4,7 +4,7 @@
 RetinaFace-Anti-Cov is a customized one-stage face detector that helps people protect themselves from COVID-19.
-
+
## Testing
diff --git a/detection/RetinaFaceAntiCov/rcnn/processing/__init__.py b/detection/retinaface_anticov/rcnn/processing/__init__.py
similarity index 100%
rename from detection/RetinaFaceAntiCov/rcnn/processing/__init__.py
rename to detection/retinaface_anticov/rcnn/processing/__init__.py
diff --git a/detection/RetinaFaceAntiCov/rcnn/processing/assign_levels.py b/detection/retinaface_anticov/rcnn/processing/assign_levels.py
similarity index 100%
rename from detection/RetinaFaceAntiCov/rcnn/processing/assign_levels.py
rename to detection/retinaface_anticov/rcnn/processing/assign_levels.py
diff --git a/detection/RetinaFaceAntiCov/rcnn/processing/bbox_regression.py b/detection/retinaface_anticov/rcnn/processing/bbox_regression.py
similarity index 100%
rename from detection/RetinaFaceAntiCov/rcnn/processing/bbox_regression.py
rename to detection/retinaface_anticov/rcnn/processing/bbox_regression.py
diff --git a/detection/RetinaFaceAntiCov/rcnn/processing/bbox_transform.py b/detection/retinaface_anticov/rcnn/processing/bbox_transform.py
similarity index 100%
rename from detection/RetinaFaceAntiCov/rcnn/processing/bbox_transform.py
rename to detection/retinaface_anticov/rcnn/processing/bbox_transform.py
diff --git a/detection/RetinaFaceAntiCov/rcnn/processing/bbox_transform.py.orig b/detection/retinaface_anticov/rcnn/processing/bbox_transform.py.orig
similarity index 100%
rename from detection/RetinaFaceAntiCov/rcnn/processing/bbox_transform.py.orig
rename to detection/retinaface_anticov/rcnn/processing/bbox_transform.py.orig
diff --git a/detection/RetinaFaceAntiCov/rcnn/processing/generate_anchor.py b/detection/retinaface_anticov/rcnn/processing/generate_anchor.py
similarity index 100%
rename from detection/RetinaFaceAntiCov/rcnn/processing/generate_anchor.py
rename to detection/retinaface_anticov/rcnn/processing/generate_anchor.py
diff --git a/detection/RetinaFaceAntiCov/rcnn/processing/nms.py b/detection/retinaface_anticov/rcnn/processing/nms.py
similarity index 100%
rename from detection/RetinaFaceAntiCov/rcnn/processing/nms.py
rename to detection/retinaface_anticov/rcnn/processing/nms.py
diff --git a/detection/RetinaFaceAntiCov/retinaface_cov.py b/detection/retinaface_anticov/retinaface_cov.py
similarity index 100%
rename from detection/RetinaFaceAntiCov/retinaface_cov.py
rename to detection/retinaface_anticov/retinaface_cov.py
diff --git a/detection/RetinaFaceAntiCov/test.py b/detection/retinaface_anticov/test.py
similarity index 100%
rename from detection/RetinaFaceAntiCov/test.py
rename to detection/retinaface_anticov/test.py
diff --git a/evaluation/IJB/IJBB_Evaluation_MS1MV2.ipynb b/evaluation/IJB/IJBB_Evaluation_MS1MV2.ipynb
deleted file mode 100644
index c18234e..0000000
--- a/evaluation/IJB/IJBB_Evaluation_MS1MV2.ipynb
+++ /dev/null
@@ -1,520 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/home/jd4615/miniconda3/envs/insightface/lib/python2.7/site-packages/sklearn/utils/fixes.py:313: FutureWarning: numpy not_equal will not check object identity in the future. The comparison did not return the same result as suggested by the identity (`is`)) and will change.\n",
- " _nan_object_mask = _nan_object_array != _nan_object_array\n"
- ]
- }
- ],
- "source": [
- "import os\n",
- "import numpy as np\n",
- "import cPickle\n",
- "from sklearn.metrics import roc_curve, auc\n",
- "import matplotlib.pyplot as plt\n",
- "import timeit\n",
- "import sklearn\n",
- "import cv2\n",
- "import sys\n",
- "import glob\n",
- "sys.path.append('./recognition')\n",
- "from embedding import Embedding\n",
- "from menpo.visualize import print_progress\n",
- "from menpo.visualize.viewmatplotlib import sample_colours_from_colourmap\n",
- "from prettytable import PrettyTable\n",
- "from pathlib import Path\n",
- "import warnings \n",
- "warnings.filterwarnings(\"ignore\") "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_template_media_list(path):\n",
- " ijb_meta = np.loadtxt(path, dtype=str)\n",
- " templates = ijb_meta[:,1].astype(np.int)\n",
- " medias = ijb_meta[:,2].astype(np.int)\n",
- " return templates, medias"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_template_pair_list(path):\n",
- " pairs = np.loadtxt(path, dtype=str)\n",
- " t1 = pairs[:,0].astype(np.int)\n",
- " t2 = pairs[:,1].astype(np.int)\n",
- " label = pairs[:,2].astype(np.int)\n",
- " return t1, t2, label"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_image_feature(path):\n",
- " with open(path, 'rb') as fid:\n",
- " img_feats = cPickle.load(fid)\n",
- " return img_feats"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [],
- "source": [
- "def get_image_feature(img_path, img_list_path, model_path, gpu_id):\n",
- " img_list = open(img_list_path)\n",
- " embedding = Embedding(model_path, 0, gpu_id)\n",
- " files = img_list.readlines()\n",
- " img_feats = []\n",
- " faceness_scores = []\n",
- " for img_index, each_line in enumerate(print_progress(files)):\n",
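-    "        # each line: image_name x1 y1 ... x5 y5 faceness_score\n",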
- " name_lmk_score = each_line.strip().split(' ')\n",
- " img_name = os.path.join(img_path, name_lmk_score[0])\n",
- " img = cv2.imread(img_name)\n",
- " lmk = np.array([float(x) for x in name_lmk_score[1:-1]], dtype=np.float32)\n",
- " lmk = lmk.reshape( (5,2) )\n",
- " img_feats.append(embedding.get(img,lmk))\n",
- " faceness_scores.append(name_lmk_score[-1])\n",
- " img_feats = np.array(img_feats).astype(np.float32)\n",
- " faceness_scores = np.array(faceness_scores).astype(np.float32)\n",
- " return img_feats, faceness_scores"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [],
- "source": [
- "def image2template_feature(img_feats = None, templates = None, medias = None):\n",
- " # ==========================================================\n",
- " # 1. face image feature l2 normalization. img_feats:[number_image x feats_dim]\n",
- " # 2. compute media feature.\n",
- " # 3. compute template feature.\n",
- " # ========================================================== \n",
- " unique_templates = np.unique(templates)\n",
- " template_feats = np.zeros((len(unique_templates), img_feats.shape[1]))\n",
- "\n",
- " for count_template, uqt in enumerate(unique_templates):\n",
- " (ind_t,) = np.where(templates == uqt)\n",
- " face_norm_feats = img_feats[ind_t]\n",
- " face_medias = medias[ind_t]\n",
- " unique_medias, unique_media_counts = np.unique(face_medias, return_counts=True)\n",
- " media_norm_feats = []\n",
- " for u,ct in zip(unique_medias, unique_media_counts):\n",
- " (ind_m,) = np.where(face_medias == u)\n",
- " if ct == 1:\n",
- " media_norm_feats += [face_norm_feats[ind_m]]\n",
- " else: # image features from the same video will be aggregated into one feature\n",
- " media_norm_feats += [np.mean(face_norm_feats[ind_m], 0, keepdims=True)]\n",
- " media_norm_feats = np.array(media_norm_feats)\n",
- " # media_norm_feats = media_norm_feats / np.sqrt(np.sum(media_norm_feats ** 2, -1, keepdims=True))\n",
- " template_feats[count_template] = np.sum(media_norm_feats, 0)\n",
- " if count_template % 2000 == 0: \n",
- " print('Finish Calculating {} template features.'.format(count_template))\n",
- " template_norm_feats = template_feats / np.sqrt(np.sum(template_feats ** 2, -1, keepdims=True))\n",
- " return template_norm_feats, unique_templates"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [],
- "source": [
- "def verification(template_norm_feats = None, unique_templates = None, p1 = None, p2 = None):\n",
- " # ==========================================================\n",
- " # Compute set-to-set Similarity Score.\n",
- " # ==========================================================\n",
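-    "    # map each raw template id to its row index in template_norm_feats\n",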
- " template2id = np.zeros((max(unique_templates)+1,1),dtype=int)\n",
- " for count_template, uqt in enumerate(unique_templates):\n",
- " template2id[uqt] = count_template\n",
- " \n",
- " score = np.zeros((len(p1),)) # save cosine distance between pairs \n",
- "\n",
- " total_pairs = np.array(range(len(p1)))\n",
-    "    batchsize = 100000 # small batchsize instead of all pairs in one batch due to the memory limitation\n",
- " sublists = [total_pairs[i:i + batchsize] for i in range(0, len(p1), batchsize)]\n",
- " total_sublists = len(sublists)\n",
- " for c, s in enumerate(sublists):\n",
- " feat1 = template_norm_feats[template2id[p1[s]]]\n",
- " feat2 = template_norm_feats[template2id[p2[s]]]\n",
- " similarity_score = np.sum(feat1 * feat2, -1)\n",
- " score[s] = similarity_score.flatten()\n",
- " if c % 10 == 0:\n",
- " print('Finish {}/{} pairs.'.format(c, total_sublists))\n",
- " return score"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_score(path):\n",
- " with open(path, 'rb') as fid:\n",
- " img_feats = cPickle.load(fid)\n",
- " return img_feats"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step1: Load Meta Data"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Time: 0.83 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load image and template relationships for template feature embedding\n",
- "# tid --> template id, mid --> media id \n",
- "# format:\n",
- "# image_name tid mid\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "templates, medias = read_template_media_list(os.path.join('IJBB/meta', 'ijbb_face_tid_mid.txt'))\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 10,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Time: 31.88 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load template pairs for template-to-template verification\n",
- "# tid : template id, label : 1/0\n",
- "# format:\n",
- "# tid_1 tid_2 label\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "p1, p2, label = read_template_pair_list(os.path.join('IJBB/meta', 'ijbb_template_pair_label.txt'))\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 2: Get Image Features"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 12,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "('loading', './pretrained_models/MS1MV2-ResNet100-Arcface/model', 0)\n",
- "[====================] 100% (227630/227630) - done. \n",
- "Time: 3279.69 s. \n",
- "Feature Shape: (227630 , 1024) .\n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load image features \n",
- "# format:\n",
- "# img_feats: [image_num x feats_dim] (227630, 512)\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "#img_feats = read_image_feature('./MS1MV2/IJBB_MS1MV2_r100_arcface.pkl')\n",
- "img_path = './IJBB/loose_crop'\n",
- "img_list_path = './IJBB/meta/ijbb_name_5pts_score.txt'\n",
- "model_path = './pretrained_models/MS1MV2-ResNet100-Arcface/model'\n",
- "gpu_id = 1\n",
- "img_feats, faceness_scores = get_image_feature(img_path, img_list_path, model_path, gpu_id)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))\n",
- "print('Feature Shape: ({} , {}) .'.format(img_feats.shape[0], img_feats.shape[1]))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step3: Get Template Features"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 45,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Finish Calculating 0 template features.\n",
- "Finish Calculating 2000 template features.\n",
- "Finish Calculating 4000 template features.\n",
- "Finish Calculating 6000 template features.\n",
- "Finish Calculating 8000 template features.\n",
- "Finish Calculating 10000 template features.\n",
- "Finish Calculating 12000 template features.\n",
- "Time: 3.65 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# compute template features from image features.\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "# ========================================================== \n",
- "# Norm feature before aggregation into template feature?\n",
- "# Feature norm from embedding network and faceness score are able to decrease weights for noise samples (not face).\n",
- "# ========================================================== \n",
- "# 1. FaceScore (Feature Norm)\n",
- "# 2. FaceScore (Detector)\n",
- "\n",
- "use_norm_score = True # if True, TestMode(N1) \n",
- "use_detector_score = True # if True, TestMode(D1)\n",
- "use_flip_test = True # if True, TestMode(F2)\n",
- "\n",
- "if use_flip_test:\n",
- " # concat --- F1\n",
- " # img_input_feats = img_feats \n",
- " # add --- F2\n",
- " img_input_feats = img_feats[:,0:img_feats.shape[1]/2] + img_feats[:,img_feats.shape[1]/2:]\n",
- "else:\n",
- " img_input_feats = img_feats[:,0:img_feats.shape[1]/2]\n",
- " \n",
- "if use_norm_score:\n",
- " img_input_feats = img_input_feats\n",
- "else:\n",
- " # normalise features to remove norm information\n",
- " img_input_feats = img_input_feats / np.sqrt(np.sum(img_input_feats ** 2, -1, keepdims=True)) \n",
- " \n",
- "if use_detector_score:\n",
- " img_input_feats = img_input_feats * np.matlib.repmat(faceness_scores[:,np.newaxis], 1, img_input_feats.shape[1])\n",
- "else:\n",
- " img_input_feats = img_input_feats\n",
- "\n",
- "template_norm_feats, unique_templates = image2template_feature(img_input_feats, templates, medias)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 4: Get Template Similarity Scores"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 46,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Finish 0/81 pairs.\n",
- "Finish 10/81 pairs.\n",
- "Finish 20/81 pairs.\n",
- "Finish 30/81 pairs.\n",
- "Finish 40/81 pairs.\n",
- "Finish 50/81 pairs.\n",
- "Finish 60/81 pairs.\n",
- "Finish 70/81 pairs.\n",
- "Finish 80/81 pairs.\n",
- "Time: 77.30 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# compute verification scores between template pairs.\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "score = verification(template_norm_feats, unique_templates, p1, p2)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 47,
- "metadata": {},
- "outputs": [],
- "source": [
- "score_save_name = './IJBB/result/MS1MV2-ResNet100-ArcFace-TestMode(N1D1F2).npy'\n",
- "np.save(score_save_name, score)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 5: Get ROC Curves and TPR@FPR Table"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 48,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEaCAYAAAAG87ApAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzsnXl4VdW5/z/vPmPmQIAgCYOgzJAgCCijA4iighWn22qlem3rrbXa2p/etmhtb1t7W22l9larrVotiBNYq4haQdGiIASVSRGDjAEyT2fc7++Pk5yTkIEAOTkJrM/znCfZa6+91ru+55z9nr2Gd4mqYjAYDAZDW7ESbYDBYDAYuhbGcRgMBoPhqDCOw2AwGAxHhXEcBoPBYDgqjOMwGAwGw1FhHIfBYDAYjgrjOAwGg8FwVBjHYeiSiEihiNSKSJWI7BeRx0Uk9bA8Z4vIv0SkUkTKReQfIjL8sDzpIvI7EfmyrqzP6457xNn+6SKyu8Hx4yLy87r/B4iI1tlTJSJFIvJHEXG1Ut49IhJscM0WEbk8nm0wnLwYx2HoylyiqqlAPjAGuKv+hIicBawAlgF9gFOBjcC7IjKwLo8beBMYAcwC0oGzgGJgfMc1o0Uy69o3iohd/3WE/M+oamrdNd8DnhKR7HgbaTj5MI7D0OVR1f3Aa0QcSD2/Bp5U1d+raqWqlqjqj4E1wD11ea4D+gGXqepmVbVV9YCq/kxVX2murrqnmLV1TzBrReTsBudWisjPROTduqecFe3x5KKqB4DXgeFHytvgmteASmDQ8dZvMByOcRyGLo+I5AIXAtvrjpOBs4Fnm8m+BJhR9//5wHJVrWpjPd2BfwIPAlnA/cA/RSSrQbb/AOYDvQA38IOjbU8z9fYBLiDi9NqSX0Rkdl39m4+3foPhcIzjMHRllopIJbALOADcXZfenchne18z1+wD6p8CslrI0xKzgc9U9W+qGlLVRcBW4JIGef6qqp+qai0RJ5XfXEFt5JCIlAF7gGrguSPkv7IufxXwEvALVS07jvoNhmYxjsPQlZmrqmnAdGAoMYdQCtjAKc1ccwpwqO7/4hbytEQfYOdhaTuBnAbH+xv8XwOkcuz0UNVMIBl4l0h3HCLy1QaD4K82yL9EVTNVNYVIF9V1IvLN46jfYGgW4zgMXR5VXQU8Dvym7rga+DdwRTPZryQyIA7wBnCBiKS0saq9QP/D0voReSKIG3VPL48DE0Wkh6o+XT8IrqoXtnBNIfAqjZ+GDIZ2wTgOw4nC74AZIpJXd3wn8HUR+a6IpIlIt7rprmcBP63L8zci3VzPi8hQEbFEJEtE/ltELmqmjleAwSLyHyLiFJGriAxYvxzPhomIB7iWyNNMcRuvySUyU2xTHE0znKQYx2E4IVDVg8CTwIK649VEBpS/QmQcYyeRKbuTVfWzujx+IgPkW4nMWqoAPiDS5fV+M3UUAxcD3ydyA/8hcLGqHjo877E247DjMhGpAoqIOLxLtfUNdK6q78IC1hLp3vppK/kNhmNCzEZOBkPiEZEXgLdV9XeJtsVgOBLmicNgSDAikgNMBtYl2haDoS3EzXGIyF9E5ICIfNLCeRGRB0Vku4h8JCJnxMsWg6GzIiI3AxuITONdnWh7DIa2ELeuKhGZSmQ++ZOqOrKZ8xcBtwAXAROA36vqhLgYYzAYDIZ2I25PHKr6NlDSSpY5RJyKquoaIFNEjmZOvcFgMBgSQCLHOHKITIWsZzeNF1IZDAaDoRPiTLQBbUFEbgJuAkhOTh57yimxB5O0tDQAKisro2lerxev10tFRQW2bQPgcDhIS0ujpqaGQCAQzZuenk44HKa6ujqalpycjNvtpqwsFq3B5XKRkpJCdXU1wWAwmp6ZmUkgEKCmpiaalpKSgsPhoKKiIprmdrtJTk6msrKScDgMgGVZpKen4/P58Pl8pk2mTaZNpk1xa9OWLVsOqWpP2oFEOo49QN8Gx7m0sAJXVR8BHgEYM2aMbtiwIf7WdQHKysrIzMxMtBmdAqNFDKNFDKNFDBE5PFzOMZPIrqqXiMTSERGZCJSr6hEDzjX0sCc7q1atSrQJnQajRQyjRQyjRXyI2xOHiCwiEnyuR91OZ3cDLgBV/ROR8A0XEQmFXUMkFLXBYDAYDsO2bYLBMMFAkGAwQCgYIOirprashAMVIUK+aqqrqvBoNamU4nD78XqrSXLX4HQoDqt9nxHi5jhU9ZojnFeOvKOZwWAwdGpUlVDYJhgOEwiHqfQFCIQDhAJBwqEggVo/QZ+PyvJq7GAAu7gay7YRtfE4q3C4ahCnH48riEgIr7MWy+nDIYrLGQIsnI4woi4QGws3FiDhdFIkwKlOB5riRpPdQAixfICNWD6CYhMWP367xV2Hj4kuMTjeEK/Xm2gTOg1DhgxJtAmdBqNFDKNFjOa0sG0lGApjK6it2KoEgkGqa2upKqukpqSS2tJKkP2EglU4XBW4xQ8SRrCxJIzTFcSSMA5LcUkIyxlCcCAAlo1DnThEcYoNWDhwgscNrhQ0KXYTd1i1iFWLSBhbHSA+EMUGQupABMI4CIgDS7zYDificGO5euHyZmF5U3E4vThcqThdqTjdmXgcaYhIM2r8b7vp2uViVY0bN07XrTORGQyGkxVVRcMhgrU+/DW1hHx+AgeKqD30BSIHCYX92I4gTnclYQUcQQQQUSxRLKcPLMHCjcMKgyaBJkXyqBvb9hD7TR1ExEbVArER8YHYoIKiYIUARcQGsairCCywcIGliAjqSEKcqbhTs3Gn9sXp6Y3L3QOx3B2mm4h8qKrj2qOsLvfE0XD62snO8uXLmTVrVqLN6BQYLWJ0Ni1sW/EHIzf62uoaav0+goEgIZ+P6soqgv5SnP59uAO1pHrKcFoBkjzlOFw+EAsVBw5HGBUFLFTdIIriQQBb3YidhCc9FVDclh+VEOBBARULVEA0En/YSkJQVJzYlhOnx4O403Cn9sHl7YXT3QOnpxcOZ0oLv9wNXc5x1M9PNoDf70+0CZ0Go0WMY9Ei0k+v1PqChEIh/LUBfLU1VJaX46/14Q8EqK7yE1Kb2qCgIT+qNv4wqA2WFSYJH24rSLLLR5KzlmRPFU6x8bhqcLsCOB2K5QiQaoVBPYCDHu4kglYPSAIIgSRjW4KfHtgIYZSgw0dYwigWtuXAEg/qEHA4wOXF481G3KkkJ2WT4u1Nqrs3Dol0By1btow5c+a0q76GLug4DAZD64Rti6Liany1fqrKK6kqLafG56Oyyk/IDhH0h/HZDmwNAdAjqYRunirSHNW4HX6SXJV43D6ScJAkDsQKo54kxGsDTlRddbd0C7Dqum2ckScBwpE0qxIVUGzClo3iJCAC4qZWPITxIs4UxJ2BIymL5B79SPf2wWl5scSJJY4EKmg4El3OcTgc5gNVT0ZGRqJN6DScKFqoKsGQTaCmlqrqWvwVlfh8tZRXVBEIBamq9KMawGHV4rXKsTSAx1WDkxCIAxUlr3spgc1rcAl0E5vuApLiwEq1sDUZEGzbS6R
D3oFlVSFWLWEJYFs+gljUYhEWsCWI5XBHBmmtJNTlRqxuuJN6I24XliNykxfLgVhuPK4MUlw9cVoenFbiJ7KcKJ+LzoYZHDcY4ozPH6K0ohZ/jZ+DB8soLa2msrKGal+YsIapCbmwAbGCdE8qJtt7kCx3OR5LSXH7cToVhxUEPKjtxVYvkbW7YZAgSAjEhy2CbdUQinTkE7QCgGBLuG6sIIxNJmFXEkmefjg8PXCmJdM9eSBJzu44LU8iZTLEmZN6cLxhvJeTnYKCAvLz8xNtRqcg3lqoKv5gmGDQxraVsB2ZwllRXkXJoUPYvmqqKmup8dUQCIWwJIjD8lGjbpwOSJEqMh1lJFsh+rtrcacrdA/jdYMl3khXj+1BcYFVjjqqCFp+whIiDAQsi4CmEvK6cDu7kZxyCk5PCkneblhONx5HKg5x47S8fPzRZsbkj4mbFl0J8x2JD13OcTQMPnays3PnTvOlqKM1Lern7QdDNrX+ED5/iPJqHyWVPgLhEHaohlCwBAcVhIPVOP21eOxqnFqLoKS4S7HViddRS5KzGrfYuF21uHDSQ1z0EBeSApIK4XAyYTsZW73YdmrUBpFawEYlhFph1FFFCKHSChLSSgLOTNw6AG/PnqSln0K3lFOPuavny51fGsdRh/mOxIcu5zgMhsMJBMP4w062Fhazq6Scsqoaqquq0FAFqbofDwG6e4pIcVbilBDdvQfp6yynn7pwJdeC20XAn004nIbtdBL2pGDjBnWgCNALcKEoghAWPzWq2BKi1lFJwPJTbQWxxYnD5UWckemiTheo04nHmUGqpxcuZzKWy02qqyeprt44rPZdzWswdBTGcRg6PcFQmEOltRQV1xAMVKP+zyivKifT2kKWtRcXYc7tU4X7wJP0cwew05MJJGUDFuFwCnYoCRUHtvYBcQCnUhWyUZRQdRCfI0S5o5Qabw1OhwOP0yLsCCFOweVKRRG87u6oQIq7J15HBh5nBl5HOpa4cFlJZnzAcFLR5QbHzzjjDF2/fn2izegU1NbWkpSUlGgzjhvb1kgXks9HadkuKiuLqK05QIq/gDStJtXjw+upiazAVQ9hTSbky8ImmbAmRVbqAmqHsS2bgISpcvnwuXxUWiWEJIjTnYrbmY7tjMwA8rq64RAXGe6+ZHhySXJ2J9nZ44R4CjhRPhftgdEixkk9OF6/wYkBysvLu8SXIrJyOIzPH6K4uIii4t2ofzsp+jnJgWqyUvbh9YDLTqGnOukeTiYY7oVavbAtN2F/mKraEAFLCFk2fm8Yf1oAn6uaYvmMWqnGEjdpnmxSXL1IdfUmzdWbbo5khrr7kubujSVd7qN+zHSVz0VHYLSID13u29RwF66Tnffff79TrIoNBMNU1QSo8PupqCilunIfh4rLcVJMSvgAfZI+I8NdQkpSEf2cDnoyiJCVRTDYA3X1IBA8lWCNjeXzYCcpZakV+FIDhLxKhbWfA/YOXFYSWd7BOCw3Kc4epLh60svdm2HOHqS6e/PKP1ZwQSfQojPQWT4XnQGjRXzoco7D0PGEbZuS8lq+PFhBUVk1RdVlUFuGK1iKx/bRI3kP/dO3kJP6Bf3dFQQc2QTTcghpKuFgBkEdRMg3nLKwhQQdWOqKOIWeAcq81ZToXkLqoyZ0CIAMd19y08aT6TqFvs7uTPGeituRegQrDQZDR2Ech6ERqkpRRRXvb9/Nzh1V2LbS3dpDn7TPyU79nPO7F+DOrEVti3AwjdqqwYTsTMKBQZQcmIh6I7HEnO7ukJRM0B0m7HZQyn6KQtup0CKCdi1Jzu5kJ48gzT2QXPcUUl298Toz8DoyTWA5g6GT0+UcR3JycqJN6DTk5eUd87XBUJjiMh/bDx5i18FKSsqrSQnv4vTUj+mZvJuxzhqmn16OGx9uTy3+qn74KwdRuudqwkl1H5u6+7skZRJOT6HaLqYyyU+JvZti30oAkh1ZpDlOIdnZk37p0+mbNp5kZ492dw7Ho8WJhtEihtEiPnS5WVUm5Mixs+nAPt5evwdfuY36LBSbgb3fY2yv1fRL2Q5ATUkevsqBBMNZ2EmgzsgNXmwPVmoGflcN/mQHxc4DfFHzbxBId+eQUrc2weNII9nVg26eAWR6+p1Ug9IGQ2fmpJ5VVVZWlmgTOg1tCRld6q/h8YIPKN9tk16VjkOCjDptM+MzVpLm/JhQOJPq4nEcODCFUJITHODUAMHuTgJJFsXpfsoooiKwm4BdTYqzJ72TR5Ps7MPkbj8gN/VMRNp3P+NjwYTPjmG0iGG0iA9dznEYjoyq8sa27Xy4YydjHeu4Kv0TMgYWk+QtxsZLrW8A/rJB7NMJIIIlToK9nJQ7yzmYVsF+/yYsHJySOoZkZxYDPeeQ4elLN88AHB24Y5nBYOicGMdxglDir6bg0C4+3L4f+TKZUUlbuXP0/QBU75pIbcU4ylPrFrfZYKenctBbwgHXXkqChTjETU7qWLI9YxjZ62p6JA0xg9QGg6FZupzjcLm6/sre9iI7O5vKgI/fvruKYAVklWWR5HBwac5i+uX+C391LiXlM1GvA7VDVHqq2Ji6HltseiWNwGm5GZAynbNS8klz9enSjiI7OzvRJnQajBYxjBbxIa6D4yIyC/g94AAeVdVfHXa+P/AXoCdQAnxNVXe3VqYZHI9sn/uvrTv4Yn8ZRQdr6KPljO22j+E9n8dKLiMY7EVl6SSCjjRwuCjI3Ei5HKBn0jAGZZxH//TJZoc1g+Eko0sMjouIA3gImAHsBtaKyEuqurlBtt8AT6rqEyJyLvBL4NrWyj2ZV47vrCjhvfV72bOrBndImd7tPYacvhxPahHBmp6UVk4nVJsJCJKRSWHS5+zUj0h353BRnwfI8OQmuglxY82aNUycODHRZnQKjBYxjBbxIZ5dVeOB7aq6A0BEFgNzgIaOYzhwe93/bwFLj1RoMBhsZzM7N0E7zIrdm3lp58dklmeSc7AnX+v3DL37vgmA7+BgikpmEk7yggWpp03hn2U/AyDD1Zdzs+8hO3lEIpvQIRQVFSXahE6D0SKG0SI+xNNx5AC7GhzvBiYclmcj8BUi3VmXAWkikqWqxXG0q8vw8OZ3WF+8C2/AwWUlKUw/7Xd4Bx8AoLJoDpVWLwCsNC+ZQ85ni/9NPq1zGlec/pQJ9W0wGOJCogfHfwD8QUSuB94G9gBNwt+KyE3ATQA9e/Zk2bJl0XPTpk0DYNWqVdG0IUOGMHToUJYvX47f7wcim9ZPnz6dgoICdu7cGc07c+ZMysvLef/996NpeXl5DBgwoFE92dnZTJw4kTVr1jT6FTNnzhwKCwvZuHFjNG3ChAlkZGSwYsWKaFr//v3Jz89n5cqVlJeXA+DxeJg1axZbt25l27Ztjdq0dM8mPtm/n+H7+/If6cvpmfdPglWnUL5yDr6BaYSTkvCF/RQNdFBU8Qm1RcsBcB0czfn53+TQgdJO16Z4v0/Lli074dp0LO9TvRYnUpuO532qX/t1IrXpWN6n9iRug+MichZwj6peUHd8F4Cq/rKF/KnAVlVttSP+RB8cP1BbyR82rYQvk+
lelcJXXAX0HfNnQl9MhE8DHBo3BiwnO04p4Uv/h5yaPo3s5FF0855Kpqdfos03GAydlC4xOA6sBU4XkVOJPElcDfxHwwwi0gMoUVUbuIvIDKtWOdH3HP9VwWu4wlV8s88SBnb/GIDgFwOoSR9P9bgqQPmw+4dU+Us4zXk1Z55yeWIN7iQUFhYyYMCARJvRKTBaxDBaxIe4xYpQ1RDwHeA1YAuwRFU3ici9InJpXbbpwDYR+RTIBv7nSOXW1NTEyeLE88qXn6BB4Q73ywzs/jEl679ByfY7OJg6g2qqSOo1lM9yivF4u3PpwD+y95NE9zR2Hhp2A5zsGC1iGC3iQ1zvPKr6CvDKYWkLGvz/HPBcPG3oSqza9xljyjLIGvI5B3Z+g1BfB1iVJPccTmV3F8sP/h78MC33v0lx9Uy0uQaD4STF/GTtRJxyMMSktI/Yd+hGSIZuPfPYnx1mXfEzVB88yGkZMxiXfWOnCCpoMBhOXrqc40hJSUm0CXGhcN9z/NfAhzhYegXJgXJqzryQLTUb2bb/n/RLO5spfX5IN++ARtfUz6AxGC0aYrSIYbSID13OcTgcJ16ojH1Fe+lZ/HsOlc2DoBsdfDWr9vw/srynk9/zOoZ1v6TZ6zIyMjrY0s6L0SKG0SKG0SI+dLk+j4qKikSb0C4Ul9Xy4uqt3LdkFe59N5DirkRtF8mjpvFa1V24rCTO6/fTFp0G0GgO+MmO0SKG0SKG0SI+dLknjhOBTdsP8dq7hQQdQa4c9jBZyfso+mI+pMAbB+/Bcri4cMBvcYiJBGwwGDofxnEkgDc/2EllShnzs5+mr+szDm28gXAfi7WZ7xN2wFdO+zMuKynRZhoMBkOzdDnH4XZ38R3oVBnS403O6b8Ut9RQtukqAt50gl4/Nc4aLh/0eJudRv/+/eNsbNfBaBHDaBHDaBEf4rofRzzoyiFHVJXynQ+TGfgTNbvOpGrzxViTbXx2CR95/s3APnMY2sqYhsFgMBwr7RlypMsNjldWVibahGNmb9F2MgN/onLTHAIFQ/Cen0mgZi+fuD8gu+fZR+00Vq5cGR9DuyBGixhGixhGi/jQ5bqqwuEmwXO7DKmlv0XVhXxpEZyYQbDkE3yOALnZ53Na95lHXV59BE2D0aIhRosYRov40OUcR1cmXT5g30c34BruJ0iA9Rkfkp4xmKm9rki0aQaDwdBmupzjsKwu17sWpapqDOSC3/KyOeUjJCWDcb3/85jL83jMRk31GC1iGC1iGC3igxkc70D2vruQvY5iPsvczMge8xjV48pEm2QwGE4STurBcZ/Pl2gTjomqLyPObm/6Fqb3/REjs46/e2rr1q3HXcaJgtEihtEihtEiPhjH0QEcKq2hcOsmkpPXE1APp6TkISLHXW7DrSRPdowWMYwWMYwW8aHLOY6uSNHmLWSkfQpWLcPSb060OQaDwXBcGMcRZ2p9IfZ9WYhbvJTWDGV4rgnzbDAYujZdblZVWlpaok04Kqprg/TpthtbLar7nt4uXVT1TJs2rd3K6uoYLWIYLWIYLeKDeeKIM8v+9RkOdyVhZznJadmJNsdgMBiOmy7nOLpayJEk2Udu1kbCtpOclLHtWvaqVavatbyujNEihtEihtEiPnQ5x9HVmDPwN/j8mfisHnic6Yk2x2AwGI4b4zjijNsKsMXtxOXpWmMzBoPB0BJxdRwiMktEtonIdhG5s5nz/UTkLRHZICIfichFRyrT6/XGx9g4oSpk+XvTzTug3cseMmRIu5fZVTFaxDBaxDBaxIe4OQ4RcQAPARcCw4FrRGT4Ydl+DCxR1THA1cAfj1RuV3IcpRU+bPWQGs4k5ZTR7V7+0KFD273MrorRIobRIobRIj7E84ljPLBdVXeoagBYDMw5LI8C9R3/GcDeIxVaUVHRrkbGkxdWbwUswhLGHYcZVcuXL2/3MrsqRosYRosYRov4EM91HDnArgbHu4HDV7/dA6wQkVuAFOD8IxVq23Z72Rd3KoM+ymunErQCcSnf7/fHpdyuiNEihtEihtEiPiR6AeA1wOOq+lsROQv4m4iMVNVG3kFEbgJuAujZsyfLli2Lnqtf4NNw2t2QIUMYOnQoy5cvj35wMjIymD59OgUFBezcuTOad+bMmZSXl/P+++9H0/Ly8hgwYECjerKzs5k4cSJr1qyhqKgomj5nzhwKCwvZuHFjNG3ChAlkZGQwNuMllEw+9W6iuqCA/Px8Vq5cGd1cxuPxMGvWLLZu3doopk5b21RPR7ZpxYoV0bT+/fu3e5uO531atmzZCdemY3mf6rU4kdp0PO9TWVnZCdemY3mf2pO4hVWvcwT3qOoFdcd3AajqLxvk2QTMUtVddcc7gImqeqClcocMGaJdIXBZWaWf4Ef/iU/y+CyrmHOH/KLd61i5ciXTp09v93K7IkaLGEaLGEaLGB0eVl1E3CJy2lGWvRY4XUROFRE3kcHvlw7L8yVwXl0dwwAvcLC1QrtKyJHNnx/CabsBizP7fDMudZgvRAyjRQyjRQyjRXw4ouMQkdnAx8Drdcf5IvLika5T1RDwHeA1YAuR2VObROReEbm0Ltv3gf8UkY3AIuB6PcIjUE1NzZGq7hRU+TYTcmYTsmqwXPGZCdbej59dGaNFDKNFDKNFfGjLE8e9RAa1ywBUtQBo09OHqr6iqoNVdZCq/k9d2gJVfanu/82qOklV81Q1X1VXtF4iBALxGWhub/rZq8B2ste7H3HEZyipYT/syY7RIobRIobRIj60xXEEVbXssLSutd9sBxMIhkmuDlErFrWWjcNyJ9okg8FgaDfa4ji2iMiVgFU3XvEAsCbOdnVpdmzbiW07sfAyJPMiPI6uMS5jMBgMbaEtjuM7wFjABl4A/MCt8TSqNdLTO3+gwEDJJpJSBdsKIakZcatn5syZcSu7q2G0iGG0iGG0iA9tcRwXqOr/U9Uxda87iYQRSQjhcDhRVbcJOxwinQJsMjngLQFX/Lqp6ud6G4wWDTFaxDBaxIe2OI4fN5P2o/Y2pK1UV1cnquo2sWfnu2QkHyAglexPD5Dm7hO3uhouSDrZMVrEMFrEMFrEhxan+4jIBcAsIEdE7m9wKp1It5XhMDRUQVb5g5T7J7Mj5Qvye15Lhic30WYZDAZDu9LaPNEDwCeAD9jUIL0SaBIi3QD2oV1YhKgJOXD3Gkhu2vhEm2QwGAztTouOQ1U3ABtE5GlV9XWgTa2SnJycaBNaJBgIE8ZJ0FNDd++IuNeXl5cX9zq6CkaLGEaLGEaL+NCWlWk5IvI/RPbUiC6BVtXBcbOqFdzuzrkmQtXm0O61WHou1a5yxna/JO51DhgwIO51dBWMFjGMFjGMFvGhLYPjjwN/BYTIbKolwDNxtKlV6iNddia0toqaD1eC7SMr9QU+TtvdIfU2jMx5smO0iGG0iGG0iA9tcRzJqvoagKp+rqo/JoHTcTsj9mP/j5c/LwIJE/aWk+YyC/4MBsOJS1sch19ELOBzEfmWiFwCmDtjA55OvpTLRz+AhWLvH8fcgT9PtEkGg8EQN9oyxnEbkd35vgv8D5EtX
r8RT6Naw+VyJarqFimxMrHEJoygzrtwWB1jY3Z2+29H21UxWsQwWsQwWsSHIzoOVa1fQVMJXAsgIjnxNKo1UlJSElV1s9hvPAkMIRjqAUDapP4dVvfEiRM7rK7OjtEihtEihtEiPrTaVSUiZ4rIXBHpUXc8QkSeBBK2HLOzrRzXj1Zhi1BcPptiT3GH1r1mjYk1WY/RIobRIobRIj606DhE5JfA08BXgeUicg/wFrARSMhUXIBgMJioqltkVPfdgMVnaZ92aL0N9zU+2TFaxDBaxDBaxIfWuqrmAHmqWisi3YFdwChV3dExpnUN/OJmaLf9ZKS9yeDMyxNtjsFgMMSd1rqqfKpaC6CqJcCnxmk05R85cxFsvO5CRmZdkWhzDAaDIe5IS1t8i0gZ8K/6Q+CcBseo6lfibl0zjBs3TtetW5eIqptlz+qH2Ovdx9jUfyKnr43bNrEGg8FwPIjIh6pVkB51AAAgAElEQVQ6rj3Kau0ud3i/yx/ao8LjpbPtOS4CxSkbEen4ugsLC01IhTqMFjGMFjGMFvGhtSCHb3akIW2lpqYm0SY04TzZjW/vGLyntWU9ZfuxceNG86Wow2gRw2gRw2gRHzr2TneC4rAd2Bl3Iw4jp8FgOPGJ651ORGaJyDYR2S4iTfbwEJEHRKSg7vVp3bhKl0NtJ8mjTkm0GQaDwdAhtHkkV0Q8quo/ivwO4CFgBrAbWCsiL6nq5vo8qnpbg/y3AGOOVG5nWzkOULrlT2QN7/hQKBMmTOjwOjsrRosYRosYRov4cMQnDhEZLyIfA5/VHeeJyMI2lD0e2K6qO1Q1ACwmsjakJa4BFh2pUIfD0YaqO5b0S4chVsePjmdkZHR4nZ0Vo0UMo0UMo0V8aEtX1YPAxUAxgKpuJDI190jkEFk0WM/uurQmiEh/4FQaTPdtiYqKijZUfXKwYsWKRJvQaTBaxDBaxDBaxIe2dFVZqrpTGs83DbezHVcDz6lqs+WKyE3ATQA9e/ZstDnLtGnTAFi1alU0bciQIQwdOpTly5fj90d61zIyMpg+fToFBQXs3LkzmnfmzJmUl5fz/vux8Ft5eXkMGDCgUT3Z2dlMnDiRNWvWNApjcGYP2FVYyCdbt0bTJkyYQEZGRqMPbf/+/cnPz2flypWUl5cD4PF4mDVrFlu3bmXbtm1H3aZ62rtNc+bMobCwkI0bN3Z4m47nfVq2bNkJ16ZjeZ/qtTiR2nQ871P95m8nUpuO5X1qV1S11RfwPJFup/WAA/ge8GwbrjsLeK3B8V3AXS3k3QCcfaQyVZVBgwZpZ2LP6j+o3+dLSN1Lly5NSL2dEaNFDKNFDKNFDGCdtuEe25ZXW7qqvg3cDvQDioCJdWlHYi1wuoicKiJuIk8VLx2eSUSGAt2Af7ehzE6753gi6N+/40K4d3aMFjGMFjGMFvGhLV1VIVW9+mgLVtWQiHwHeI3Ik8pfVHWTiNxLxPPVO5GrgcV1HvGIJCcnH60pJyz5+fmJNqHTYLSIYbSIYbSID2154lgrIq+IyNdF5Ki2jFXVV1R1sKoOUtX/qUtb0MBpoKr3qGqTNR4tUVlZeTQmnNCsXLky0SZ0GowWMYwWMYwW8eGIjkNVBwE/B8YCH4vIUhE56ieQ9iIcbu9x+a5L/QCawWjREKNFDKNFfGjTynFVfU9VvwucAVQQ2eDJYDAYDCchbVkAmCoiXxWRfwAfAAeBs+NuWQtYVueIB6Vqs2fjP4E2Dc3EBY/Hk7C6OxtGixhGixhGi/jQ4n4c0QwihcA/gCWq+k5HGNUanWU/Dp+vlgNr/0p292VYp/8Dl5ntZTAYOjHtuR9HW36+D1TVWzqD0wDw+XyJNgEAW4OIFcTjKkISsRkHsLXBosOTHaNFDKNFDKNFfGjRcYjIb+v+fV5EXjj81UH2NaGzOI5gsBKn2PgPDk6Y42i4kvRkx2gRw2gRw2gRH1pbx/FM3d9OsfNfZyNYHZkWXLL1SrLP6hzjLu1FMBhk9+7dncZJt4Xc3Fy2bNmSaDM6BUaLGCejFl6vl9zcXFyu+EXsbm0HwA/q/h2mqo2cR93Cvk65Q2BHUFm6ndQ936aEOfS5YV6izWl3du/eTVpaGgMGDEjY09TRUlZWRmZmZqLN6BQYLWKcbFqoKsXFxezevZtTTz01bvW05afyN5pJu6G9DWkraWlHtQYxLlTv/xLL4SSsib2p1gc6a298Ph9ZWVldxmkApKamJtqEToPRIsbJpoWIkJWVFffeghafOETkKiLhQE49bEwjDeiSO/W1F0G/ja0OtAvdWI+WruQ0DAZDjI747rY2xvEBkT04cons5FdPJZFotgmhU4Qc0RBlvnNQ7ISasWrVKubMaW1vrJOHqqqqk6pLojWMFjGMFvGhtTGOL4AvgDc6zpwugtqAsL77RkzsTYPBcLLR2nTcVXV/S0WkpMGrVERKOs7EzoUvEGJ/ZTkqIZK9vRJtzglJYWEhSUlJ5OfnU1xcTH5+Pvn5+fTu3ZucnJzocSAQOKpy//KXv7B///7o8eTJk5sMIF588cVH/Qv1a1/7GkuXLj1ivltuuYX33nsvWnfD/bDXrFnD+eefD8CBAweYPn06KSkpfO9732tURm5uLqNGjWLkyJGMGDGCBQsWRDftsW2bCy64gP79+zN37txG102ePJkhQ4ZEtXvxxRcB+PrXv07Pnj2bRJG97bbbePvtt9vUFoCioiKcTiePPvpoNC0UCjXR8tFHH23Upscff5yRI0cyatQozjjjDB544IEW62wr999/PyNGjGDEiBE88sgj0fQNGzYwceJERo0axZw5c6iqqmpybXV1NePHjyc/P5/hw4dz7733Rs+pKnfeeSeDBw9m2LBhPPRQpCPmV7/6VVTXESNG4HQ6ozGy6t+v/Pz8Fvc/f+CBBxg5ciQXX3wxwWAQiARnvOOOO6J59u/fz0UXXRQ9fuuttxg+fHjiov+2tFEHkZ3/IBISvcmrvTYEOdrXiBEjjnb/knbl812l+upLj+u+936RUDtUVbds2RKXcjdv3hyXctvKF198oc29z3fffbf+7//+b7PX1NTUHLHcSZMm6YYNGxodjxo1Sv/973+rqmpxcbGOGzdOMzIyjsrer371q/riiy+2mufAgQN69tlnN6q7b9++umLFClVV/fe//63nnXeeqqpWVlbq6tWrdeHChXrrrbc2KicnJ0dLS0tVVbW8vFyvvPJK/cY3vqGqqrZt6xtvvKGLFy/WOXPmtNr2elauXKnvv/++5uXlNUrfvn27zpo1q01tUVV98MEHdfLkyXruuedG04LBYBMt//znP0fb9I9//EPHjh2r+/btU1XV2tpa/fOf/9xsnW1lw4YNOnr0aK2pqdFAIKBTp07VHTt2qKpqfn6+rl69WlVVH374Yb3nnnuaXB8Oh7WqqkpVVQOBgI4dO1bXrl2rqqqPPPKIzp8/X23bVlXVoqKiJte/8MILOmPGjOhxw/erJSZMmKDhcFjvvvtufeWVVzQcDuuMGTOaXPe1r31N16xZEz3+7LPPmrxv9TT3HaYjNnJS1foO/L51jiJM
ZFe/bwIpcfNkR8Dr9Saq6gihIMmOUGJtqGPo0KGJNiHhPPHEE4wfP56zzjqLm2++Gdu2CYVCXHvttdFf5g8++CDPPPMMBQUFXHXVVY2eVq6++moWL14MwHPPPce8ebHp1bZtc/vtt0d/ET/33HPR9JtvvpmhQ4cyY8YMDh06FL1m7dq1TJs2jbFjx3LhhRdGtxB99tlnufDCCxvZfscdd/Dzn/+8SZtSU1OZNGnSET/r6enpPPLIIyxZsoTy8nJEhPPOO4+srKw26zdt2jS6d+/eJH3QoEHs27ePgwcPNjnXXFsWLVrE7373O3bs2MG+ffvaVPcvfvEL7r//fnr37g1Evts33nhjm21vji1btjBx4kSSkpJwuVxMnz49+nT1+eefM2nSJABmzJjB888/3+R6y7JISYnc3gKBAMFgMDrY/H//938sWLAgetyrV9Meh0WLFnHNNdcclc2qSigUoqamBpfLxRNPPMGll17a5Glt7ty5PP1054gv25aNnJYCZ4rIIOCvwMvA34GL42lYS1RUVCSiWgBKy31s2lxIpq86YTY0ZPny5cyaNSvu9Xzznb+3e5kPT/mP4y7jk08+4cUXX+S9996jurqaO+64g8WLFzNo0CAOHTrExx9/DMTm8i9cuJA//OEPjR7vZ8yYwQ033IBt2zzzzDM89thj/PKXvwQiN8gtW7awceNGDh48yJlnnsnUqVNZuXIlX3zxBZs3b2bv3r0MHz6cb33rW/j9fm699VZeeuklevTowdNPP81PfvITHnnkEd59912+9rWvNbJ/ypQpPPfcc6xevRqnsy1fxaZkZGTQv39/tm/fztixY4FId0tzXHXVVSQlJQGRrpAjdcmNGTOG9957r8kEjMPbUlhYSElJCWPHjuWKK65gyZIl3HrrrUe0fdOmTVGbW+PJJ5/k/vvvb5I+ZMgQnnnmmUZpo0aN4qc//SklJSV4PB5efvllpk6dCkR+aL388stcfPHFPPvss+zatavZ+gKBAOPHj2f79u3ceuutURu/+OILnnrqKZYuXUqvXr1YuHAhgwYNil5XVVXFG2+8wZ///Odomohw7rnnIiLcfPPN3HBD05UM3/72t5kwYQKjR49m/Pjx/PKXv+S1115rkm/cuHHN/tBIBG35tNqqGhSRrwALVfVBEUnYrCrbTtxMpq0bt1F6sIyc9HLaJl18qe/bjjftcZOPB2+88QZr165l3LhxhMNhAoEAffv25YILLmDbtm1897vfZfbs2cycObPFMlwuFxMnTmTx4sWEw2Fyc3Oj51avXs0111yDw+Ggd+/eTJ48mXXr1vH2229zzTXXYFkWubm5TJ8+HYj82t20aVN0rKJhefv27aNnz55N6v/Rj37Ez372M376058esw56WKDSw4/reeaZZ46qT7xXr17s3bu3SfrhbVm8eDFXXXUVEHmCu/nmm7n11ltbnBZ6tNNFr7vuOq677ro25R05ciS33347559/PqmpqYwaNQqHwwFExlNuvfVW7r77bubMmdPiymq3201BQQGlpaVcdtllbNmyhWHDhuHz+UhNTWXdunUsWbKEG2+8kbfeeit63bJly5g2bRoZGRnRtDVr1pCTk8P+/fuZMWMGw4YN4+yzGwcXv/7667n++usBWLBgAbfddhsvv/wyTz/9NP369eM3v/kNItLi+5EI2rIAMCQiVwDXEnnaAIjfWvZOynPvf8KHhaUk2/sJ92n6aG/oeFSVb3zjGxQUFPDOO++wbds2fvKTn5CVlcVHH33ElClTeOihh/jmN7/ZajlXX301t9xyS/Tmdzz2jB49moKCAgoKCvj444959dVXAUhKSmp2UdbMmTMpKyvjWCM+l5eXs2vXLk4//fTjsr05fD5f9AmlIYe3ZdGiRTz66KMMGDCAr3zlK6xfv54dO3bgcDiwLItQKNa1W1JSQo8ePQAYPnw4H3744RHtePLJJ6ODzw1fLb1fN910E+vXr+ftt98mPT2dwYMHR+t7/fXX+fDDD5k3bx6nnXZaq/V269aNqVOnRn/95+TkcPnllwNw+eWXU1BQ0Cj/4sWLm3RT5eTkANC7d2/mzJnDBx98QEvs3r2bgoICLr74Yu6//36eeeYZkpOTo7sYtvR+JIK2rhw/B/i1qu4QkVOBRfE1q2Xqfz10FKrKX1/8mC+31TLQfYhzBx0gP21JgldwRGj4y+Zk5Pzzz2fJkiUcOnQIh8NBcXExX375JQcPHkRVueKKK7j33ntZv349EIk60Nw6oOnTp3PnnXc2uRFNmTKFxYsXY9s2RUVFvPvuu4wbN46pU6fyzDPPYNs2e/bsYdWqVUDkxrRnz57ozSEQCLBp0yYAhg0bxvbt25ttx49+9CN+/etfH3X7Kysr+fa3v80VV1xBenp6NL299qz59NNPGTlyZJP0hm3ZvHkzoVCIPXv2UFhYSGFhYbTLEGDq1Kn8/e+Rrs6amhqeffZZzjnnHADuuusufvCDH0THgfx+P4899liT+q677rqoM274Orybqp4DBw4AkS605cuXc/XVVzdKt22bn//853zrW99q9tr6GVE1NTW88cYb0bHEuXPnRp8w3nrrrUZjjKWlpbz33ntccskl0bSqqqrozK3q6mpef/31ZvWs58c//nG0K8rn8yEiWJZFTU0N0PL7kQjasnXsJ8B3gXUiMhTYpXX7hyeCjgw5or4a7K0fUFrh56bivzKj7FXSUg5RXTYKH4nfIKa+i+RkZdSoUdx9992cf/75TJo0iZkzZ1JUVMSuXbuYOnUq+fn5zJ8/n1/84hcAzJ8/nxtvvLHJVF7LsrjjjjuaDBLPmzePoUOHMnr0aM4//3zuv/9+evXqxbx58+jXrx/Dhw9n/vz5nHXWWUBk06DnnnuO22+/ndGjRzNmzBjef/99AGbPnt3i/teXXHIJ3bp1a5SWm5vLD3/4Qx577DFyc3MbRXmdMmUKo0aNYuLEiQwaNIg//vGP0XNnnXUWN9xwA6+99hq5ubm8+WbrIeWuuOIKpkyZwubNm8nNzeXxxx8HIjfxwsJCxowZ0+Sahm1ZtGgRl112WaPzl19+OYsWRX5bLly4kMWLF5Ofn8/EiRP56le/Gu2qufTSS/nmN7/Jueeey4gRIxg7dmyzU2SPlrlz5zJ8+HDmzp3LI488EnWqf/vb3xgyZAhDhw7l1FNP5dprrwVg165dXHrppQDs3buXadOmkZeXx/jx45k9e3Z0HPG///u/WbRoEaNGjWLBggWNpvo+//zzXHjhhY2eCPbt28ekSZOiZV122WXRbszDWbt2LW63m9GjRwOR8aiRI0eydu1aZsyYAUSc1ezZs49bn3bhSNOugClAIfAu8B6wA5jUXtO6jvY1bNiwZqefxYPw5n9r8JEf6G8f/0DfefdnWrn1Oq1e/zXd886DumH9vR1mR0s0N72yPeis03Fbo7q6Ok7WtA+2bevZZ5+t5eXlca+rPbRYsmRJs9NVVTu2LcdLZ/9ctBXbtnXSpElaVlYWTeuU03Eb8ABwkapOUtWzgdnA7+PixdrA0S76Oh5CYWVfr3wUGOj
aiBzwUll6LgGvh90ZTacpdjQ7d+5MtAlxweFwUF5eflQDuR35uTgWRITf/OY3fPnll3Gvqz20UFVuu+22Zs91ZFuOl87+uWgrBw4c4Ic//GG0e/qtt97isssui44XdTRtmRrkVtXN9QequkVE2rRPqojMIuJkHMCjqvqrZvJcCdxDZPPujaraKabw6MFdbC88xMryQZC+D294COXWKWhKkN3eHQzKPC/RJp6w9O3bt8Wpkl2Z+i6trsCVV17Z6vmu1JYTgezs7Gh3GsA555wTnW6eCNriONaLyJ+Ap+qOv0obghyKiINIcMQZwG5grYi81NAJicjpwF1Eur5KRaTTxPCw3/gbYe1Dz7R0vjLsp5SWzcaV0ZMvsooJhdMZ1t0EFzQYDCcnbXEc3yIyOP7DuuN3gIVtuG48sF1VdwCIyGJgDrC5QZ7/BB5S1VIAVT1wpEIbzh6JJ/vtdD7rdjp+PURR8TdQlAJWgC+N0T2Ob9pme9Ha+oSTjY76XHQFjBYxjBbxoVXHISKjgEHAi6p6tPMFc4CG/Q27gcOjfA2uq+ddIt1Z96jq8tYKDYfDR2nGsfF2eAjdKCcvo4gkz2ds7NmDMVn/RZ/UprNMEkV5eXmnmdedaMLhcLtNQ+3qGC1iGC3iQ2sbOf03kZ3+1hMJOXKvqv4lDvWfDkwnsu/H2yIySlUbbRQlIjcBNwH07NmTZcuWRc/V74JXP5ceiE65W758eXR1dUZGBtOnT6egoKDRoPLMmTMpLy+PTpsEyMvLQxFcupsSq4KBaSvZXzuUkY4rWbNmTXTeOcCcOXMoLCxk48aN0bQJEyaQkZHBihUromn9+/cnPz+flStXRueJezweZs2axdatWxtNt2xrm+rrb2ubBgwY0Ei77OxsJk6c2KRNgwcPxu/3U1tbG01LSUnB4XA0CvnidrtJTk6msrIy6tBFhIyMDGpraxvZWb8TW8Pplh6Ph6SkJMrLy6OrnR0OB2lpadTU1DQa2ExPTyccDjcKp5GUlITH46GsLPZxcTqdpKamUlVV1WjhWWZm5knTpsNDjpwIbToR36d4tqmmpoZly5Y1uu+1Ky1NtwI2ASl1//cE1h7NdC0iARFfa3B8F3DXYXn+BMxvcPwmcGZr5Q4aNKjZ6Wfthb1vh4buv0GffvQfuu69H+uhtd/Tyo3T4lrnsbJ06dK4lNsZpuN6vV7Ny8vTQ4cOaV5enubl5Wl2drb26dMneuz3+6PXHCkCqarqY489Fo3EqhqJGDtgwIBGeWbPnh2X6Liqqt/5znf03XffjdY9fvz46LmG0XFVVX/2s5/poEGDdMiQIfr666+raiTarGVZmpeXp8OGDdO8vDx94IEHNBwOq2okWuu0adM0OTm52ai6I0eOjGq3Zs0aXbdunU6YMEFHjBiho0aN0meffTaaf968efr555+32Ja5c+dqYWFh9Hjt2rUKRG1VbX666I9+9CN94IEHVDUyxfS+++7TwYMHa15eno4bN06feuqpI+p4JL7//e/riBEjdMSIEfr4449H019//XUdM2aMjhgxQufPn6/BYLDJtTt27NAxY8ZoXl6ejhgxQh955BFVjXy+6rXLy8vT7t276/e//31VjUT87dGjR/TcX/7yl2ZtaahvQ26//XYdNWqUXn/99dG0v/71r7pw4cLo8YYNG6JRkFVVn3rqKR00aFCTKMj1xHs6bms3/vWHHX94VAVHniZ2AKcCbmAjMOKwPLOAJ+r+70GkayurtXLj6Tj+uu3f+ugrT+pTf1mqDz79pn7+/o90z5q7tOqD3XGr83g4kR3H0YZVb4vj6Cph1Tdu3KhjxoxRv9+v27dv19NOO03D4XCTMOX79+/X6dOn6733RtYU1Ydjv++++1oNx17P1q1bdfv27aqqumvXLs3OztaKigpVVX3jjTf0W9/6VrNtKSgo0Hnz5jVKu/3223Xy5MmNbm5HchwLFy7UWbNmRessKyvTJ554okUN28LSpUv1ggsu0FAopJWVlZqXl6eVlZUaCoU0Jycn2t677rqrkVOpx+fzqc/nU9VI2Pq+ffs2Gz599OjR0R8BDUPFt2bLGWecoZWVlY3yHDp0KBq+/utf/7pu3rxZq6qq9Nxzz23i2KZPn667d8fuRa+//nrCHEdrnX8DReSFuteLwKAGxy+0cl39k0wI+A7wGrAFWKKqm0TkXhGpn1f2GlAsIpuBt4A7VLW4tXKTk5OPVPUxU/XlTkYTZPYZD3HLuNvp5dlOGDcpZ+bErc7jIS8vL9EmJJz6sOrTpk07YcKqL1u2jGuuuQa3282gQYPo169fszGdsrOzefjhh1m4MDJXpT4ce1ujKwwZMiQa3TU3N5esrKxoW6ZPn87y5cubHVN8+umnG0XMtW2b559/nieeeIJXX321zWsnfvGLX/CnP/0pam9GRkabgxm2xObNm5k2bRoOh4PU1FRGjhzJihUrOHDgACkpKdH2thRW3ePx4PFEokL4/f6GP3KjbNmyhfLy8iNOSW7JloY4HI5oPfVh1X/9619z2223NYmYfPHFF7cYZqWjaW1w/PLDjv9wtIWr6ivAK4elLWjwvwK3173ahNvdpiUkx0TSriz86WUk16awb8930Aw/ld5g3Oo7XgYMGNAh9YTvbxoK+nhx3N40JtHR0jCsutPp5Kabbjohwqrv2bOnUTiZ3Nxc9uzZ02z4j8GDB1NbW0txcXF0H46Wor5OmTIFh8NBcnJyo937gOhx/WfK4XAwYMAAPvnkkyY/UN59913mz58fPX7nnXcYMmQIAwcOZPLkybz66qtNQrEfTklJCcFgkP79j7z58q9+9auoc2/IOeec02THwLy8PH71q1/xve99j6qqKlavXs2ZZ57J3Llzqa2tZcOGDeTn5/P888+3uFaosLCQSy+9lO3bt3P//feTnZ3d6PyiRYu4+uqrG0X5XbJkCf/6178YOnQoDzzwADk5OU1sWbVqFWeccUajsjIzM5kxYwZjxoxh5syZeL1eNmzY0Gy05HHjxvG73/2O229v8+0ybrS253jrQW4SRMPBqPZAQ0GorYTivaAwMCeZKkYhPSz8fQdSpp+1a33tybJly474BW0P2uMmHw9MWPUIh/8irg+KdzjvvPNOs3tw7Nmzh+uvv56nn3660c2wPoz34Y7j8LbU30gh8gS3aNEi5syZ025h1e+8807uvPPONuW96KKLWLduHWeddRa9evVi7Nix0Si9f//737nlllsIBALMmDGjxYCpAwYM4KOPPmLPnj1cdtllzJs3r9EK7cWLF/Pss89Gj+fOncu1116Lx+PhoYceYv78+axYsaKJLWeddVazdd51113cddddQCSe2s9//nMefvhh3nzzTcaMGRM919XCqp+w6N7PsRd+G/vPd2C/9ldClgMcFkE7jX3pZXxQ/iRuR/y6xgzHh+qJGVY9Jyen0a/h3bt3R8NzH86nn35KcnLyUe3615Dy8nJmz57Nfffdx5lnntnoXFvCqgeDQV544QUWLFjAgAED+N73vscrr7xCdXU1WVlZlJaWNrq2Pqx69+7dcblcbQ
pb0nBP74avlkKiLFiwgIKCAlasWIFt29Gw6pMnT2b16tV88MEHTJo0KZreEjk5OQwdOpTVq1dH0z788EOcTmcjZ9qjR49o99ZNN93E2rVrm7UlHA63Wue6detwuVzR2Y9Llixhy5YtfPHFF0DXC6t+4hL0Qd9hVNzwEBun/pCMdD+2XYM6qvnCuYnp/X7C2F7fSLSVhhZoGFYdOGHCql966aUsWrSIQCDA559/zs6dO5vdKe/AgQN8+9vf5pZbbjla6YBIH/6cOXO48cYbm0S4Bfjss88YMWJEk/SGbXn99dc588wz2bVrF4WFhXz55ZdccsklLFu2jMzMTLp16xbVp7i4mBUrVkS3b73zzju5+eabo+9JRUUFf/vb35rUd+eddzYbVv3wbiqAUChESUkJABs2bGDbtm2cd14kPFB9WHWfz8evf/3rZsOq7969O+oUi4uLee+99xrd7JvbGrbhVrlLly6Nana4LVu2bIna0hwLFizg3nvvJRAIRDesE5FOGVa9zdvYiYhHVTtmy7lWaKn/9njYsX0blH/OxF5FqMNGXJ+S3/MmsrynIdJ5fevhfa8nGw3DqodCITweD3/6059wOBzccMMNqCoiwn333QfEwqonJSU12lCnPqw60Gj+/bx581izZg2jR49GRBqFVX/rrbcYPnw4/fr1axJW/bvf/S4VFRWEw2G+//3vM2LECGbPns0TTzwR3emtIZdccgk/+clPosd5eXnMnQ9NpRYAACAASURBVDuXYcOG4XQ6+eMf/4hlWdi2TWVlJfn5+QSDQVwuF1//+tcbbdOam5tLTc3/Z+/Mw5q4uj/+vYRFQIuKWwURBAHZElxRqIJLpXVBLXXDfat7Xapv+1qtWlutW63aVutuRVxQi/Xn8qoVN9QqFa27KEERFQXZl0Byfn+EDAlJIGjCOp/nyaNz596ZOyfD3My953xPNvLz8xEeHo7Tp0/DxcVFo/3CwsIQFRWF1NRUbN68GYBcetzT0xOJiYmwsrLSOL2mkFX39/fXKqu+bds2DB06FLt27cKUKVO42KUlS5Zw6yjTpk1DVlYW2rRpA1NTU5iYmGDu3LnFT1cm8vLy4OfnB0C+2L5161Zuemjp0qU4fvw4ZDIZpk6dyqWUvXLlCrZt24YNGzbg1q1bmDNnDoyMjEBE+Oqrr+Dm5gZA/kapWMtQZvXq1Th27BgEAgEaNGjA5RQp3pfQ0FCt02Ph4eHo1KkTl3/d1dUVnp6e8Pb25gaiM2fOlMvUtC6w4vOjahUYaw9gCwArIrJjjAkBjCOit/uZ8460bduW3jZbWnEo/jZkV4/jvp0QprI03DZ5gEaWd+AsTYS1e6Vc4ikXFKkyKwqxWIzevXvj1q1bFdYHfUNE3MJxVZDBWLFiBRo1aoSRI0eq7cvOzka3bt1w4cKFck+sVlPJyclBQEAALl68yNn81KlTWL9+Pf744w+1+pr+hhlj0UTUVh/90eXn9FoAvQEkAwAR3YA8I2CFUDwqVh8UsDdINXmKDMt/0ZB5wTKvYqSKy8rly5crugsG4W1k1fWRAMiQlKcUuT5sYW1treYFpsDCwgILFixQmaKprFT2+0JXnjx5guXLl3ODRmhoKKZPn66WAKy80GWqyoiI4ot5QpSPYJQG8vP15x6bLSFESxwgkCSCmeagRQGDncnfYLJaejuHIVGWCalOvI2suvIUU2WlvKTI9WGLMWNKXtsrHpNSWakK94UuuLi4qEw5hoSEICQkpML6o8vA8bRwuooKpdKnAXhg2G6VD68yZXhY0BDtTe6gnuAlmho/QOa9wZAxIdSdFnl4eHh4AN0GjkmQT1fZAXgJ4FRhWZUmp0CC5NwsmMjSUYsyYCwxxZuYqajdtS9MGteu6O7x8PDwVFpKHThIniNjcDn0RSc0BTCVBZmMkJiWjrAj92Btmoxe7ltgImiMbGlDNAoJgZGZzo5mFU5l8bCoDLzrfVGd4G1RBG8Lw1DqU5IxtgnytK4qENEEg/SoFN4lh3BGlgRbDvwLAoFqSTG0ZSoE+QV4km4LmWW9KjVoAHLvo/KSHans5OXlcUFYNR3eFkXwtjAMunhVnYJc7vw0gIsAGgGosHgObXIKupCXL0UtCwHadzXC8JY/wFSwC1RQC9S4KVidqhchrpwDpDohFothbm4OkUiE5ORkLlK4SZMmsLGx4baVf0Qo5zrQxtatW/HixQtu28/PDw4ODip1evfuXeZfqcOGDdPoElmcadOmcZpQfn5+6NChKK/Z5cuXOamSpKQk+Pv7w9LSEjNmzFA5hq2tLSfe6O7ujgULFnB5GmQyGXr27IlGjRqhX79+Ku38/Pzg4uLC2e7QoUOIj4+Hv78/3Nzc4O7ujvXri+ToZs6ciXPnzul0LYDcUcPY2JiLBwHkC9PFbbl582aVa9q+fTsnItm6dWuNQX1lZfXq1XB3d4e7uzt++uknrvz69evw8fGBp6cngoKCNHpcZWVloX379hCJRHBzc8PixYu5fZ06deLs9/7773OCmMqR7e7u7jA2NubiVkaOHImGDRuW6CH4448/wsPDA7179+acfyIjI7nYIgB48eIFPv74Y25bEUdUFs9DvVJWOV3IB5sofcnzlvXzLrLqD14k0Yrfz9P5k+uoIKYzvdnxC6Uf2EcP726hh3e3vfVxKwpeVr2I6iSrrpBHX7duXYny6GlpaTRw4EBOylwmk9GpU6do165danLbxa+diOjZs2dcWVpaGrVo0YLu379PRESxsbGc3Hdp10JEtHbtWvLz86OuXbtyZcVl4IlUJcj//PNPatOmDZcjJScnhzZt2qTxnLpy/fp18vLyouzsbJJIJOTn50ePHz8mIiKRSEQXLlwgIqKNGzfSwoUL1dpLpVLKzMwkIiKJREJt2rShq1evqtXr27cvhYaGqpUfPHiQevTowW1HRkbSlStX1OTllenQoQNJpVL65ptv6OjRoySVSqlHjx5q9/SwYcPo8uXL3LYm2XoFFSmrrg0HAFUqZJnevITsyAbUPrUL9SW5aJ+cBJKZoM6g4SjwrA/TjLef/uKpWBSy6h988EG1kVVXyKPXqlWyW/h7772H3377Dfv27UNaWhoYY+jWrRssLS11sl3Tpk25X6zvvfceXF1d8ezZMwCAo6Mjnj9/jlevXqm103QtYWFhWLNmDR4/fqxzfMf333+P1atXc9HStWrVwrhx43Rqq427d+/Cx8cH5ubmMDExQadOnXDo0CEAwKNHjzi5E22y6kZGRpz9JBIJ8vPz1UQZU1NTce7cOY1rjMUlSbp06YL69euX2GciQkFBAServmPHDvTt21ftba1fv34IDQ3VwQqGR5c1jjcoWuMwApACQDepSgOg6x+FCinPQalJyHP2R9pDAYy6DgOeXUZ+zhvkZ7xETn1zFFhUvXlQ5akOQ7J6h34i9ZWZNfLdA1iVZdWJCFOmTKkWsuplwcrKCs2bN0dsbCynZ6VtwBk0aBAnkhcZGanyYHr8+DFu3bqlInTo7e2NqKgotQdk8WsRi8VISUlBmzZt8Omnn2Lfvn0qMijauH37t
kYNruLs3LkTq1evVit3cXFRy0/h6emJRYsWISUlBWZmZvjrr7842Q9XV1ccOXIEvXv3xv79+7XGCkkkErRv3x6xsbH4/PPP1fp48OBB9OzZU+1ZlJmZiVOnTmHTpk2lXpMykyZNQocOHeDl5YX27dtj6dKlOHHihFq9tm3bavyhURGUeLcy+VArBPCssEhW+MpTYZRF4oAyUoDkRFBSPFC7LiR2LnBMuozM12nIze4OPDqDbJaJOIEYNibtSj9gJcPKyqpczqOPh7whUJZVB+TrHLysuvxXsyb27t2rcU48PT0dn3zyCdatW8flsga0y3gXv5Y9e/ZwApGDBw/G5MmT8fnnn+tNVn3EiBE6J3jy8PDArFmz0L17d9SuXRve3t7cM2P79u34/PPP8c033yAoKEir7p2pqSliYmLw5s0b9O/fX02+IywsDFOnTlVrFxERgS5dupT573LUqFGchtmCBQswc+ZMHDlyBKGhobCzs8PKlSvBGKs6suqFg8RRIpIWfip00ACgkty9NOjiIcjO7gUlxoLZtQKTZMLL6hkgS0Mt4wTImrXAw/cewanuh3Cw6mLAXhuG4tnEahqkJKseGRlZbWTVy0JaWhqePn2Kli1bcmVlcSCRSCQYMGAARo8ejb59+6rs00VWHZA/SDdv3gx7e3sMGDAA//zzDx4/fszlwVCO3lbIqgNyNWFNmQ2Ls3PnTo2y6tq+rwkTJuCff/7BuXPnUKtWLU7d1s3NDSdPnkR0dDSCg4Ph5ORU4nnr1auHzp07q/z6f/nyJa5fv64xcn7Pnj1qyrllISEhATExMejduzdWr16NvXv3wsLCApGRkQCqnqx6DGNMPfVYVYAIrN1HQGAQMkxjUPd1KMxN3sDMaCPS34vB3xk7UauWNZzqdkdtk0YV3VueMlJdZdV1JSMjA5MmTcKnn376VsKJRIRRo0ZBJBJh+vTpavu1yXgrX8udO3dQUFCAZ8+eQSwWQywWY86cOdy6UefOnbF7924A8gFt//79CAiQS9199dVX+OKLL7h1oLy8PE5ZVpkRI0ZolFXXlkZVIZ8uFotx/PhxLsmUolwmk2HJkiUaZdWTkpI4j6js7GycOnUKrq6u3P79+/cjKChILRPpmzdvEBUVhT59+mjsky58/fXX3FRUbm4uGGMwMjKqlLLqWgcOxphiGssbwFXG2H3G2D+MseuMsX/Kp3tlh7LTId21CNIdC0CPYwAjAXKeXEduuhTSV6Z4ahaHmJwgvJaOQtdmC9Hx/dLnYnkqJ8qy6r6+vvjwww/x8uVLPH36FJ07d4ZIJMLo0aPx/fffAyiSVS/uyquQVS++iBkcHAxXV1d4eXmhe/fuKrLqdnZ2cHNzw+jRo9Vk1WfNmgUvLy94e3vjypUrAIqkyDXRp08fNbE6W1tbzJ07F1u2bIGtrS3u37/P7fvggw/g6ekJHx8fODo64pdffuH2dezYEePGjcOJEydga2uL06e1qzyfPXsWYWFhOHnyJPcrXvHrOi8vD2KxWGO6WuVr0SarHhYWBgBYt24d9uzZA5FIBB8fH4SEhKBTp04A5HlHPvvsM3Tt2hXu7u5o06aNXkQJ+/XrBzc3N/Tr1w+rV6/mBtXff/8dLi4ucHV1hYODA4YPHw4AePr0Kfe2lZiYiC5dukAoFKJ9+/bo1asXAgMDuWNre6s4cOAAPvroI7U3gk8//RQffPAB7ty5A1tbW2zfvl1jn69evQpTU1N4eXkBkK9HeXh44OrVq+jRowcAuQtur1693s04+kKbuxWAfwr/ddT00ZdbV1k/rVq10uh+pkD2+hkVbJ5LsqSn8k9BPr2M2UyJl7+mvTe/pB0x40tsX5Uo7l6pLyqrO25JZGVlGag3+kEmk1GnTp0oLS3N4OfShy327dun0V2VqHyv5V2p7PeFrshkMvL19aXU1FSurLK647LCgeWRpo9BR7MSsLDQIVBPYALW0Fb+ERgjKz8dBBnM833wIs3f4H0sLyos+MfAvI2suk73RQVSnrLq+rAFEWlNzVqe1/KuVPb7QleSkpIwd+5cbuH9zJkz6N+/v0ou9PKkJK+qhoyxWdp2EpG6f1w5oGmOWhvp8VeQ/eIWTAtMAGMZajVyRqZM8zxzVUSRha268Tay6hkZGahTp46BeqQfyktWXR+2GDhwYIn7y+ta3pWqcF/oQuPGjVWcFwICAjh384qgpDcOAYDaAOpo+VQIUqnuqUAK8tJh0UyEZNNUwDwa8Zkp6NDIofSGVQTFIh5P2e6L6g5viyJ4WxiGkt44nhPR4hL2lwpjLBDAT5APQpuJaFmx/aMArEBRnMh6ItoMPZGSG4unufcgqP0YjSVGGOfqq69D8/Dw8NRYSho4yhalU7yxPOnTzwB6AEiA3DPrMBHdKVZ1LxGpR9NoQVtwkyZkJEULqwDkJraGGZ3SuV1VgVf9LKKsQWXVGd4WRfC2MAwlPYW7veOx2wOIJaLHRCQBsAfAOyeQeBt/9eqKsptgTae8ouirArwtiuBtYRi0vnEQUco7HtsGgPIKZwIATeJKnzDGOkOejnYmEamtijLGJgCYAMgXiSIiIrh9XbrII74VQVi1JeloT1KY56RCkp2L56nxMM2QoVFdedB7TEwM4uPjufYffvgh0tLSOH97ABAKhbC3t1c5T+PGjeHj44PLly+r5PoOCgqCWCxWkTjv0KEDrKysVCK7mzdvDpFIhMjISG5twszMDIGBgbh3756Kn37xawLA+Z8fP36ck9FWtNf3NTk7OyMvL09FqtzS0hICgUAlct/U1BQWFhbIyMjg5pIZY7CyskJOTg7XTwCclIWyn76ZmRnMzc2RlpbGyWYIBAIkJyejVatWcHJyQkREBIKCgiAQCPDixQsYGRnB2toaABAVFYU6deogNTWVO6axsTFq166NzMxMlYjlunXrYuPGjfD390fjxnKNzl69eiExMRHXr1/n6g0ZMgSXLl3C06dPdb6moUOHIigoCL169dJ6TXXq1MGkSZPQr18/dOjQAYGBgSAiXLhwAVlZWbh69Sq+++47nDhxAmlpaRgwYACuX7+OESNGYMWKFdw1ubi4wMrKSu4SaWSEoKAgfP7559zb5759+7B8+XIQEebOnYtBgwbB1NQUzs7OsLKy4t7Y16xZg27duuHXX3/FypUrAQBz587F2LFjAQDdunXDzp07YWVlpXZNMpkM/fr1w9GjR2FkZASJRIKIiAiMGjUK9+/fh4ODA7KyshAZGYlNmzbh4MGDMDMzQ2pqKiZMmICgoCAEBQXBzMwMc+bMweHDh1GnTh2YmZnh22+/hb+//1vfe7Vq1cLYsWPxzz//QCAQYNmyZejZsycAYNu2bfjxxx9BROjduzdWrFih9j3dunULEybIUw0REf773/9i8ODBSE9PR7du3Tjhw4EDB2Lx4sVITU0FEWHRokU4cuQITExMMH78eIwZMwZ79uzB2rVrYWxsDEtLS6xcuRLu7u4q1/Ty5UuEhITg+fPnmDx5MqZNm4aMjAxMnjwZn332GTw9PWFlZYUVK1agTp06XDDjN998g717
9+Lzzz/HpEmT1P6esrOzERERASsrK/j7+yMmJgZ6RV9+vcU/AIIhX9dQbA+HfA1DuY41ALPC/38G4K/SjluarLok8T49O7+OXlz7neIur6NrsXvp+tlN9ObCsBLbVUV4WfUieFl1olevXpGDgwOJxWJ6/fo12dvbc37/yu0UKOq/efNGrf7mzZtp2bJlGq/ljz/+oC+++EKlbMCAAeTn50eLFy/myk6ePKkm765sq9mzZ9Po0aMpLy+PiIieP39O+/fvL9GOpbFmzRoaN24cdzyRSEQymYxevnxJdnZ29Pr1a5LJZDR06FCKjIxUa5+VlUX5+flEJJedb9SoEUml0hLl1n/77TcaPXo0yWQyIiJ6+fIlERFduHCBs/nhw4fVpOiJiA4cOEBLly6lgoIC8vHxISKi6OhoGj9eNd4sIyODWrdurVI2b948+vHHHzXaoTLKquvKMwDNlLZtUbQIrhi0kolI8RNuM4DSpTJLQSrLh0wmQVqLBrhY9x7OZj5HRn7uuy3Y8FRaeFn1Iln1Y8eO4aOPPoKVlRWsra3RtWvXEvXMFPXr1q2rVj8oKIiTCilOaGioimJueno6rly5gk2bNnG2LI2MjAxs374da9eu5eQ7mjRpomL/t+HOnTvo2rUrdzwLCwtcv34djx49gqurK6ytrcEYQ/fu3TXKqltYWHBKxYq3Hip8u9Mmt/7rr79iwYIF3HajRnL5Il9fX06B2MfHBwkJCWrnMzExQXZ2tsrb8YIFC1QSSAHye6Jp06acfE5FY8hcqVcBtGSMOUA+YAwGMFS5AmPsfSJSiPf3BXD3XU+aLXuDXJMCpEsSIEMTtG3YEc758TCS8EPH2/JixQW9H7PJHL93PoayrHpmZibmzp1bo2XVnz17hmbNin6r2dracvk1FOcTCASwsLBAVFRUifUbNGiAjIwMznbKREVFqUhnHDp0CL169YKrqyssLS1x48YNCIXCEvv98OFDODg4qKjxamP69OkasxGGhISoZMkD5FOyERERGDhwIMRiMf799188ffoUvr6+uH37Np48eYL3338fERERWhfOo6KiMH78eMTHx2P37t2cuq42ufW4uDjs2rULf/zxBxo1aoR169bB0dFR5ZhbtmzRKIwYGBiI0NBQ+Pj44Msvv8TBgwfh4+PD5ShRpm3btjh//jxat25dqs0MjcEGDiIqYIxNBXACcnfcrUR0mzG2GPJXpsMApjPG+gIogDzPx6jSjltaMI9UKgPICJlPeiI9JREJZuaoJ8mGfen3Z5VDsRZiaPTxkDcEyrLqRITc3FxeVh3Q+jA+f/58mdLiNmzYEM+fP1drk56erhKRHRYWhv/85z8A5G9wYWFhEAqFepNVX7t2rc51x48fj/v376NNmzZwcHBAx44dIRAI0KBBA/z8888IDg6GsbExfHx8tEa+d+rUCbdv38bt27cxZswYBAYGwtTUVKvcem5uLmrXro1r165h3759GDduHM6cOcMd79SpU/j9999x4YL6DzATExPuLU0ikSAwMBCHDx/GjBkzkJCQgNGjR3P6VI0aNYJYLC6D5QyHId84QERHARwtVrZA6f9fAfhKn+fMypOBwFDbwgRGWYRa5gJYmZnBpAx5PHiqBkRyWfVvv/0WBQUFKr/ab968iWPHjuHnn3/GgQMH8Ntvv2k9zuDBg/Hpp5++c5IcIrms+vnz59X2lSSrPn/+fL3Iqt+5cweXL1/m9iUkJJSopmpjY1NifW0y3sou8a9evcLZs2dx9+5dMMZQUFAAExMTLF26FNbW1njz5o1KW4WsesuWLREXF4fMzMxS3zrK8sZhYmKikme8Xbt2nKy6YlEeAH755ZdSpwLd3d1hZmaGO3fuqLylKsutt2rVCjY2Nvjkk08AyAUelWX8Y2Ji8Nlnn+HEiRNqQpbFWbduHcaMGYPz58+jYcOGWLVqFbp168YNHFVNVr1SoavkSFv3Jqj1vgzv29dCkwaWMDKqflNVyl5XNRFlWfXMzMwaL6seGBiIY8eO4dmzZ0hOTsbp06dLfNtS1E9NTVWrL5VK8fr1a9jZ2am1c3Jy4n757t+/H2PGjEF8fDzEYjESEhLQtGlTXLp0Ca6uroiPj8eDBw8AyKd0bt++DS8vL9SpUwcjRozAjBkzkJ+fD0Cux6RYR1Jm7dq1GmXViw8aAJCVlcXJkB87dgzm5ubcwKGQVU9JScGGDRs0pqmNi4vjvLTi4uLw8OFDNG/evES59X79+nFvGGfOnOHKxWIxgoODsXv37lJzfyQnJ+PEiRMICQlBdnY2Nzgre5dVJll1g75x8PAYEmVZ9fz8fNSqVQsbNmyAQCDA2LFjQURgjOGHH34AUCSrbm5uzj3cgSJZdQAqi5TBwcG4fPkyvLy8wBhTkVU/c+YM3NzcYGdnpyarPn36dKSnp0MqlWL27Nlwd3dHr169sGPHDi7TmzJ9+vTB/PnzVcpsbW2RnZ2N/Px8hIeH4/Tp03BxcQEgH9AA+SL9gAED8PXXXwOQTy199dVXCAgIgJGRERYvXlxiHIOiviKDonL9q1evws/PT2PArUJWfdSoUQgLC8M333yjsl8hq96pUyfs3LkTw4cPR15eHkxNTbF161ZuunnZsmX473//i1atWsHc3ByWlpb49ttvtfZXF168eIGPP/6Ym0ZUlpyfMmUKN5AvXLgQLVq0ACBfo/n333+xYMECnD17FitWrICJiQkEAgE2btyIevXqISYmBqNGjQIRQSqVYsiQIVwc1X//+1+EhIRwLrOKt9uFCxciJSWFewMxMzNTcZFXZuHChdwC+0cffYRff/0Vu3btwpQpU7g6ly5d4lIEVDj6cs8qr09p7rhx987Qg4sriYho672LFPXiEeU9OEx5V0eX2K4qUtPccUtCF3fciqQ8pcj1YYvJkydrdFclInr69Cn17Nnznc9RHlT2+0JX/v77bxo1apRKWXV1xzUIpc1L1iQUv0CrG28jq17Z5VfKU4pcH7bw9vbW6nxha2uLUaNG6SXpkqGp7PeFrqSkpKg4UMycORN79uzhXITLG0ZU4WnEy0Tbtm1J00IivRADORmIfXkLkCVii8ARmQV5GOXsA6/Um0DaIZi23Vr+Ha6CKLxFeHh4qiaa/oYZY9FE1FYfx69ybxzKsgPKyA6sgiz6BPBKHmQzV9QDPzTPQOvUL2Bc8DNA1c+r6vjx4xXdhUoDLzFfBG+LInhbGIYqtzguk8k0lhMRUrsPx+uku7B+HY36ZpaQvLyJnJRWKMhuC2MbR5hqbFl1UdZNqulUtTdnQ8LbogjeFoahyg0c2siV5uOXO+dRHznoqSgkgomNMyzd+pbUlIeHh4enDFS5qSpBCYF8Xwi7o09zz3fMJFJ14CWjiyjpvqhp8LYogreFYahyA0d1yB+sL6pjvnFAHjhlbm4OkUiE5ORkiEQiiEQiNGnSBDY2Nty2QqgQ0O2+2Lp1K168eMFt+/n5wcFBNZVw7969yyTLAQDDhg3DH3/8UWq9adOmISoqijt3hw5FWQYuX77MSZUAwJIlS+Dk5ARXV1ecOiVPQlZ
QUACBQACRSAQ3NzeIRCKsWbNGZfp2yZIl8Pb21tpO8VHkdNd0ntzcXHTu3Flr2tWsrCz4+/urnHflypWczLmCzZs3Y8aMGSpt/fz8OInv9PR0jB8/Ho6OjmjTpg0CAgJw9erVUu1YEikpKejbty+8vLzQoUMHldz1q1evhru7O9zd3bFu3TqN7Q8ePAgvLy+IRCK0a9eO+76io6Ph4+MDDw8PeHl5qQQqPnr0CO3bt4eTkxOGDh3KBTSeOXMG3t7eMDY21np/vHz5Er6+vvDw8MCff/7Jlffp00flXp05c6ZK9PygQYNQv359ne47g6Avv97y+rRq1Uqj33LsXxso4fx6enZhPd2J+oGIiPKuTqW8279rrF8dUJYI1yeVNY6jJFn1rKysUo9bVWTVb9y4Qd7e3pSXl0exsbHk5OREUqmU8vPzVfr24sUL8vf356TMFe3evHlTYjsF2s5DRPT111/Tnj17NF7LmjVraP369SplrVu3Jj8/P9q5cydXtmnTJjVZeOXv4JNPPqGvv/6akyOPjY2lo0ePlmjH0pgxYwYtWbKEiIhu3bpFAQEBRCT/W/Hy8qLs7GySSCTk7+9Pjx8/VmufkZHB9Sc6Opq7D+/du0exsbFEJI9jady4MaWnpxMRUf/+/Tk5+LFjx9Jvv/1GRESPHz+mmzdv0pAhQ7TeH6tWraKwsDDKzMzk+nrw4EH69ttvVerFxsZSYGCgSllJ9x0fx1EM5V+ZyggEMmTbdkaegxMc6+0F3W8PkzoXQFQxfs7lgXLyppqKQla9Q4cO1UZWPSIiAkOGDIGpqSkcHR1hZ2eH6OhotXqNGzfGxo0buV/PinYASmyny3n69euH0NBQje2Ky6o/ePAABQUFWLhwIcLCwrSeT5n79+8jJiYGixYt4kQPHR0dNSrIlgVlWXV3d3c8fPgQycnJuHv3Lnx8fGBubg4TExN07twZhw4dUmtfu3Ztrj9ZWVnc/11cXDjFW1tbW1hbW+P169eQSqU4d+4c+vfvTBK7gwAAIABJREFUDwAYOXIk9xbg4OAAT0/PEtNdK2TVc3NzIRAIIJFIsG7dOsyePVulnqOjI54/f45Xr169k330RbVZHGcCCe7krUdtSTYayWpBkrodAGDuoa61w1M2wu5/qvdjDnHZ/87HqK6y6s+ePVOZhlTInXt7e6vZwNnZGTk5OUhOTi6xXUZGBnfdTk5OCA8P11q/Xbt2EAqFKgKICnJzc5GQkKCiIhwWFobBgwfD398fo0ePxuvXr9GgQYMSv7vbt2/D29u7xIeqguDgYI06X3PmzEFISIhKmVAoxMGDB9GxY0dcunQJiYmJSEhIgKenJxYtWoSUlBSYmZnh2LFj8PX11Xi+8PBwzJs3D69fv8bRo0fV9iumr+zt7fHy5Us0aNCAW0spLmVfGsOGDUNISAh++eUXrFy5EuvXr8eYMWM0ihl6e3sjKipKZdCuKKrNwAEAPvX+A2vKAZ5/AeNOLSu6O9UGfTzkDYGyrLpUKoVEIuFl1bVQp06dMqUPNTY2BmMMOTk5Kg+xpKQk1K9fX6Xunj17cPToUQgEAvTr1w/h4eGYOHGi3mTVNQkfamPevHmYPn06RCIRhEIhPDw8IBAI4OHhgVmzZqF79+6oXbs2vL29tS6cBwcHc3pk8+fPV0mG9ezZM4waNQqhoaFlvg5N1KtXjxuckpOTsXTpUoSHh2PcuHFITU3F3Llz0b59ewByWfXExMR3Pqc+qHIDx3vvvVfRXag0lPRArAkQFcmqy2QylV+vVVlW3cbGRmVRNyEhATY2NhrP+eDBA1hYWMDa2pprp/gbKamdLueRSCRqkh3Fr+P69et4/PgxAgICAMhji5ydnTFx4sQSZdVr1aqFmJgYte9NE2V547CyssKOHTsAyKcU7e3tOQeICRMmcPnE586dW6pibUBAAEaOHMm9saalpaFXr1744Ycf0K5dOwByoUjFlJVAICjV5iWxaNEizJ8/H7t27UJAQAD69u2LgQMH4tixYwB4WfV3IjcvH4+epOLRk1QcPfcY/4sS439R4oruVoVQ06NilWXVpVJptZFV79u3L8LCwiCRSPDo0SPEx8dz2eaUSUpKwqRJkzBt2jSVdjk5OSW20+U8L1++hI2NjdpDvWHDhsjJyeHWiMLCwrBkyRKIxWKIxWIkJiYiLi4OCQkJ6NChA86dO8fJmV+5cgVEhKZNm8LFxQWenp5YvHgx98YUFxfHPSSVCQ8P1yirXnzQAOTTkgqvpo0bN6Jr166cnpOiH2KxGIcPH8bgwYPV2sfGxnL9uXbtGogIdevWRV5eHoKCgjBu3DhuPQOQu/t+8MEH3HrJjh073moq6d69e3j16hX8/Pw4WXXFG5+CyiSrXuFeUmX9NLN3pUOnHtChUw9oz9G7dON+Et24n0TxF9ZQSspjyn9xi/Kvq3ofVFdqmjquJq+q0NBQEgqF5ObmRq1bt6a///6boqOjSSQSkVAoJJFIRCdOnCAior1795KzszMJhULKy8tT87IiIhUPJKlUSjNnziR3d3fy8PDgPGekUilNnDiRXFxcqEePHtSzZ0/OuyU6Oprz1nJzc6MtW7YQEdFff/1FI0eO5M6jfG6ZTEZeXl6cVxUR0aJFi6hFixbk7OzM9T8/P5+MjIy46xUKhbR69WrOE0rRzt7eXq2dNk8xTechIgoLC6O5c+dqbDNixAg6c+YMyWQysrOzo4cPH6rsnzZtGq1cKVeoPnDgAPdd+Pn5qdg7NTWVxowZQy1atCB3d3fy9/ena9euaTynrpw7d46cnJzI2dmZPvnkExKLxdy+jh07UqtWrUgoFNKZM2e48vXr19OmTZuIiOi7777jbNuxY0e6ePEiERFt27aNTExMSCgUcp+bN28SEdHDhw+pbdu25OjoSIMGDaK8vDwiIoqKiiIbGxuysLAga2tr8vT01NrvAQMG0KNHj4iIKDExkXx8fMjNzY27r3Jzc8nV1ZUKCgq4NhXpVVXhA0FZPy1aOHGGkBXkk0ySRzJJHsVfWENv3sTxA4ceqKwDR0lUdvnsqiar3rdvX879tDiaJL4rK5X9vtCVffv20cKFC1XKeHfcMsBY0UKg7NfPIftlOmS/TJcXGFW5JRseDbyNrHplpzxl1d+VvLw8BAcHc+6nxWnXrh38/Py06sbx6B8iwsyZM7ntQYMG4eLFixWWZqLKPWktLCyKNiS5EMzaAgCgqJ8AgUkF9apiEAqFFd0Fg9CsWTOVRVtdqCyLhiWhyBRoaN7VFmZmZhg+fHiJdcaOHftO5ygvqsJ9oQsDBw5U2d67d28F9UROlXvjMDWtbhq3b4+9vX1Fd6HSUF0S9ugD3hZF8LYwDFVu4Eh7k4S7147i7jW57/OOB5ex44F6oFJNICIioqK7UGlITU2t6C5UGnhbFMHbwjAYdKqKMRYI4CcAAgCbiWiZlnqfAAgH0I6I1NP7KWEqzUet+/IqSUIZAkz/AgAYwxyWiXNhRHmQUZUbD3l4eHiqDAYbOBhjAgA/A+gBIAHAVcbYYSK6U6xeHQCfA7iiy3HzjC
3hELJAvvFAhOy45gCAN2bmSP3XDyZkDqP3bMCHCfLw8PAYBkP+NG8PIJaIHhORBMAeAJoiY74F8AMA9bBaDZiYqC6AGzvPgrHzLEhhhDq9PkHdT4bhvR4B79j1qkHjxo0rugsG4W1k1ZW1nrRRVWTVk5KS4O/vD0tLSzVZcltbW0680d3dHQsWLOAyQcpkMvTs2RP29vbo16+fSjs/Pz+4uLhwtlMErB09ehQuLi5wcnLCihUruPqffvopHj9+rPVa+vfvryKyee3aNTDGOGl2QB5MV9wz7uuvv8aaNWsAyD2Fli9fzvWrXbt2WoUVy8IXX3wBDw8PeHh44MiRI1z5qVOn0Lp1a3h4eGDMmDEoKChQaxsXF4fWrVtDJBLBw8MDmzZtUqvz8ccfa/T4++GHH8AY46bHdu7cCU9PT3h5ecHX15fTTlMmJycHH374ITw8PLBx40aufOzYsbh58ya3vWbNGuzcuZPbnjlzJpo0acLZsrwx5FSVDQBl15gEAB2UKzDGWgNoRkT/xxibo+1AjLEJACYAQNOmTbm5/aBWQLZlkUvgyZMnIZPWgouLC1xdXXH8+HHuj8rKygr+/v6IiYlRueE//PBDpKWl4cqVohceoVAIe3t7lTWExo0bw8fHB5cvX+YUTwEgKCgIYrEYN27c4Mo6dOgAKysrFY2b5s2bQyQSITIykov4NjMzQ2BgIO7du4f79+9zdbt06QIAXEQyAK3XBEDv1+Ts7Iy8vDyVqFVLS0sIBAKVnO+mpqZcDgZF7gbGGKysrJCTk6OS2rZ27doAgMzMTK7MzMwM5ubmSEtLkwcVoSjxjoODAyIjIwEAkZGReO+99/DNN9/A1NSUi5RWtFH8oaampsLY2Bi1a9dGZmamyoOhbt262Lx5M1q2bMm5MBIR6tSpg5MnT6Jdu3Z48+YNnj9/DgBluiaJRIKsrCykpqZqvabc3Fxcu3YN3377LVJTU1FQUIDnz5/j2LFj6NixIzIyMlBQUIC8vDxYWFjgyy+/xM2bNxEXF4fMzEzumogIR44cgZWVFYyMjDB27FiMGzcO69atAxFh1qxZyMrKwpYtWzi7KBxKtm7dCnd3d+6a8vPzMWXKFERERKBx48YICAhAr1694OzsjGHDhmHJkiVYvXq12jUpHoDNmzdHdnY2JBIJtm/fDh8fH+zevRtdunRBVlYW0tPTIZVKkZeXBzMzM6SmpiI3Nxc5OTnIzMzE9u3bcerUKZw6dQp16tRBWloazp49+073XmRkJG7cuIGzZ88iJycHvXv3Ru/evWFubo6RI0fiyJEjcHBwwHfffYfQ0FD069dP5Xtq2rQpTp8+DcYY0tPT0alTJ/Tp0wfW1tbIysrCoUOHYGFhoXbvPXnyBH/99RdsbGy4e7xRo0Y4cuQImjdvjoMHD2LcuHE4ceKEyjWFh4ejY8eOmD59Oj7++GN89tlnOH/+PKRSKezs7JCWlgYrKyuEhISgR48e6NtXns10xYoVMDc3R05ODtcH5e8pOzsbERERKs89vaKvgJDiHwDBkK9rKLaHA1ivtG0EIBKAfeF2JIC2pR3Xo5UtUWoEUeohovtCLrgl/uIayslN1hgMU11R5JHQN5U1AFBT5Pj27dupXbt25OnpSZMmTeLyTwwbNow8PDzI3d2dfvrpJ9qzZw9ZWlqqRY5/9913XM6IjRs30rJly3SKHJ80aRK5uLhQ9+7dVSLH//77b+rcuTO1bt2aAgMD6cWLF0RE9PPPP6vkWPD19aW1a9dS586diUg1H4cCTfksbGxsVILaUlNTqXbt2pSamsqVHT58mIKCglTaaYqSP3fuHH388cfc9uLFi2n58uVERFRQUED29vYqkcoK5syZQ7//XpTnRiqVUvPmzenRo0f0/vvvc5HTDx8+JKFQqNJ23rx59OOPPxIR0fvvv68S2a0Pvv/+e/r++++57SFDhtCBAwcoMTGRnJ2dufK//vqL+vTpU+KxkpKSyNbWlvsO09LSyNfXl27evKl2Xf369aN///1X7ftRPpadnZ1a+eHDh2n+/PmUm5tLPj4+RETUq1cvev78uVrd3r17U3R0NLetbMviVOUAwGcAmilt2xaWKagDwANAJGNMDMAHwGHGWNuSDiqQ5SE77hKy4/7G8xejkBSzF0kxeyEgAcBq1qK48luCQXkg0v9HDyjLqp87dw4FBQXYs2cPoqOjOVn1W7duYcSIEVweDkVeDsWv8B49euCvv/7iZNWV9aqUZdVPnjyJmTNnIikpCeHh4Zys+rZt27jpJ4Ws+oEDBxAdHY1hw4Zh/vz5AICLFy+q6UZ98MEHAOQqvG+LlZUVmjdvrqKDpS1zn8IGIpEIqampePbsGZo1K/oTVZYEFwgEsLe3x61bt9SOU/xazp8/DxcXF7Ro0QJ+fn4a9aaKk5KSgvz8fDRv3rzUusuWLVPJXqj4KAfEKRAKhTh27BhycnLw6tUrXLhwAU+fPkXjxo2Rk5OD69evg4hw4MABrbFCYrEYXl5eaN68OebNm8dNCc+bNw//+c9/1GJDDhw4gBYtWpSoI7VlyxaNuUYCAwPx4MED+Pj4YMaMGTh48CB8fHzQpEkTtbpt27bVKKBZERhyquoqgJaMMQfIB4zBAIYqdhJRGgBOtJ8xFgngCyrFq0pG5kjPkL9uQ1CA9xw7AwBOJXyDD2tYAGC54azn11w9wcuqyyEqXVYdkAeNlSUaXyHjXTzQtPi1KPJxAHKl4bCwMAQFBelNVv3LL7/El19+qVPdjz/+GNeuXUPHjh3RqFEjtGvXDgKBAEZGRti9ezemTZsGiUSCHj16aJVVt7e3x82bN/Hs2TP0798fwcHBiI+PR0JCAvr06aMySGdmZmL58uUqazvFOXXqFH7//XeNPxBMTEy4RGISiQSBgYE4fPgwZsyYgYSEBIwePRq9evUCIP8+xGKxTnYwNAYbOIiogDE2FcAJyN1xtxLRbcbYYshfmQ6/zXELAJi0lS+VnEn4FtLn8jlDY2MzGLEqFwjP8w4QFcmqK6SvFVRlWfWykJaWhqdPn6Jly7LnnylNVl2bjLfyteTn5+PgwYP4v//7PyxatAgymQypqanIysrSKqveqlUr1K9fHyYmJnjy5Ans7EpOtrZs2TLu4apMQEAAfvzxR7XyBQsWYMECuedl//794ezsDEDuIKB4eB89ehRxcXElntfGxgaurq64cOECEhIScOXKFdjb26OgoABJSUno1q0bVq1ahbi4OHh6egIAXrx4AS8vL0RHR6Nhw4aIiYnBZ599hhMnTqBevXolnm/dunUYM2YMzp8/j4YNG2LVqlXo1q0bN3DUGFl1IjpKRM5E5EhE3xWWLdA0aBCRf2lvGwBAgnxcS9qCa0lbYF3LGYOcwzDIOQyftNwOY6OaFSVaGTKBVSTKsup169atNrLqupKRkYFJkybh008/VclTo1i0Lw0fHx/cuXMH8fHxyMvLw759+7jFVwB4+PAht5iujPK1KBwLnj59CrFYjCdPnqBPnz6IiIhA3bp1Ua9ePc4+ycnJ+N///sdl3vvyyy8xefJk7
jtJT0/H77//rna+L7/8UqOsuqZBo6CgACkpKQDkuUJiY2PRrVs3AEWy6rm5uVi+fDkmTpyo1j4hIYEbFJOTkxEVFQVnZ2dMnToViYmJEIvFiIyMhJubG06fPg2RSISkpCROVr5Jkya4efMmGjZsCLFYjODgYOzevbvU3B/Jyck4ceIEQkJCOFl1AJVWVr3K/USXGQnQs7nGOMIah1gsrtGyI56envjmm2/QvXt3SKVSmJqaYsOGDRAIBBg7diyICIwx/PDDDwCA0aNHY9y4cTA3N+ce7gBgZGSEOXPkTn3KnljBwcG4fPkyvLy8wBjD6tWr0ahRIy47nJubG+zs7DgNKjMzM4SHh2P69OmcR9Hs2bPh7u6OXr16YceOHRg1apTadfTp04dbC1Fga2uL7Oxs5OfnIzw8HKdPn4aLiwuAorURmUyGAQMG4Ouvv+badezYEbGxscjMzIStrS127NjBPTiLY2JigrVr16JHjx6QSqWYMGECd47ExERYWVlpnF7r1asXIiMj4e/vj7CwMJX8FADwySefYNu2bRg6dCh27dqFKVOmcJ6ES5Ys4e7ZadOmISsrC23atIGpqSlMTEwwd+5cjX3Vlby8PPj5+QGQr/9s27aNm5JaunQpjh8/DplMhqlTp6JzZ/k095UrV7Bt2zZs2LABt27dwpw5c2BkZAQiwldffQU3N7e36svChQuRkpKCzz77DID8/lD2dCxed8GCBWCM4aOPPsKvv/7K2U7BpUuX8P33379VX/SOvlbZy+vTwsVGoxdBTYSXVS+isstnVzVZ9eXLl9P27ds17svKyiIfHx+NHleVjcp+X+iKJin76upVZRBYQcXICPOUH7ysesVjbW2NYcOGadxnYWGBBQsWcDEvPIYnJSVFxYFi5syZ2LNnD5fdsLxhpKNHRmXBycmJtM0V1zQiIiIMss5x9+5dtGrVSu/HNSTFF8drMrwtiqipttD0N8wYiyaiEsMddKXKvXFU1AhbGVGWrKjp8PdFEbwtiuBtYRiq3MChzfe6JqKQHOHh7wtleFsUwdvCMFS5gUNZr6amo6yFVdPh74sieFsUwdvCMFS5gYOHh4eHp2LhBw6eSsfbyKrrQlWRVQfk8Q5OTk5wdXXl5CwKCgogEAggEong5uYGkUiENWvWQCaTK0Qr5NhtbGy0yrErbKeIJ9iyZQtatmyJli1bYteuXVz9bt26cbEXxZHJZAgICFBROg4PDwdjTCXI8dSpU2ry7sq2ys/Px9y5c+Hk5ITWrVujU6dOnHrs25KXl4eRI0dy13rx4kVu3+7du+Hp6Ql3d3d89dVXGttfunQJQqEQIpEIQqFQLctmQUEBvLy8VK5r5MiREAqF8PT0xMCBA5GVlQUAWL58OVq1agWhUIgePXpo1MZ6+fIlfH194eHhgT///JMr79Onj8q9OnPmTJw7d47bHjRoEOrXr6/TfWcQ9OXXW16fVq1aafRbrokUVzvVF5U1jkOTOq6CrKysUo9bXCHW19eXPD09OZXh5ORkatu2LaeOqyshISGcOq42kpKSqFOnTirnbtasGf3vf/8jIlV13Bs3bpC3tzfl5eVRbGwsOTk5caq/yn178eIF+fv70+LFi4mIKCMjgy5cuECrVq0qVVWXiOjVq1fk4OBAb968odevX5O9vT2nsrt582ZatmyZxmv5448/6IsvvlApGzBgAPn5+XF9ISI6efKkmkqvsq1mz55No0eP5tR0nz9/zikQvy1r1qyhcePGccfz9vYmmUxGL1++JDs7O3r9+jXJZDIaOnQoRUZGqrXPysqi/Px8IiJ69uwZNWrUiKRSKbf/hx9+oCFDhqhcl3JszrRp07h79PTp05SdnU1ERGvXrqWhQ4eqnW/VqlUUFhZGmZmZFBAQQEREBw8eVFFSJiKKjY2lwMBAlbKS7js+jqMYFhYWFd2FSkN1inN4W3bs2IH27dujU6dOmDx5MmQyGQoKCjB8+HAu4dHatWs5VVyFQqzibWXw4MGcDlJ4eDiCg4O5Y8tkMsyaNQseHh7w9PREeHg4Vz558mS4urqiR48eeP36Ndfm6tWr6NKlC9q0aYOPPvqIUzDev3+/mjrqnDlzNOpjRUREYMiQITA1NYWjoyPs7OwQHR2tVq9x48bYuHEj1q1bB0AuNeLr66siP1ISx44dw0cffYS6devC2toaXbt25dbNgoKCsHv3bo3tQkNDVdzA09PTceXKFWzatEmjppQmMjIysH37dqxdu5ZTKm7SpImK/d+GO3fuoGvXrtzx6tSpg+vXr+PRo0dwdXWFtbU1GGPo3r07Dhw4oNbewsKCSwqmkPugwpCF+Ph4nDx5EqNHj1Zpo7C3TCZDbm4uJ+LYtWtXTlvKx8cHCQkJauczMTFBdnY2cnNzIRAIIJFIsG7dOsyePVulnqOjI54/f45Xr169tW30SZWTHNGkNVRTUcg+GJrEiz/r/ZhNfaeUXqkUlGXVc3JyMHv2bOzZsweOjo6crDpQ5Mu/bt06rF+/XmXA7dGjB8aOHcvJqm/ZsgVLly4FoCqr/urVK7Rr1w6dO3dGZGQkJ6uemJgINzc3TJw4kZNVP3z4MBo0aIDQ0FDMnz8fv/32Gy5evKgWUPfBBx8gPDwcFy5cUMlg+OzZM5XvVSF37u3trWYDZ2dn5OTkIDk5GdbW1gCgUUxRcT6BQAALCwtERUWVKKveoEEDZGRkaIyDiIqKwvbt27ntQ4cOoVevXnB1dYWlpSVu3LihpqhbnIcPH8LBwUEnXa3p06erTNMoCAkJ4aRiFCimlwYOHAixWIzr16/j6dOn8PX1xe3bt/HkyRO8//77iIiI0KrSGxUVhfHjxyM+Ph67d+/mPLNmzJiBFStWqPxQUDBixAgcP34cXl5e+Omnn9T2a5NVHzZsGEJCQvDLL79g5cqVWL9+PcaMGaNRzNDb2xtRUVGVQqOuyg0c2nIN1ES0zUHrG3085A0BL6suR/GLWIFizaM458+fL9P6TcOGDfH8+XO1Nunp6Spv/mFhYfjPf/4DoEhWXSgU6k1Wfe3atTrXHT9+PO7fv482bdrAwcEB7du3h0AgQIMGDfDzzz8jODgYxsbG8PHx0RrF36lTJ9y+fRu3b9/GmDFjEBgYiKNHj6JZs2YQiUQaJdR37twJqVSKyZMnIzw8HMOHD+f2bd++Hf/++6/G66hXrx6OHj0KQC50uHTpUoSHh2PcuHFITU3F3Llz0b59ewBFMveVgSo3cPDwKCCqnrLqpcmdK/PgwQNYWFhwbxtlwcbGBpcvX1Y5j7L6qjYZb4VyKwC8evUKZ8+exd27d8EYQ0FBAUxMTLB06VKtsuoNGjRAy5YtVdLilkRZ3jhMTExUfvG3adOGk1UPCgrifq3/8ssvXAphbbi7u8PMzAx37txBVFQUDh48iMOHDyM3Nxfp6ekYOXIkduzYwdUXCAQYNGgQ1q5dyw0cx48fx4oVK3D27FluSk4bixYtwvz587Fr1y4EBASgb9++GDhwIJcYq8bIqhsC5Zu2pmNmVrNk5IujLKvOGKs2sup9+/ZFWFgYJBIJ
Hj16hPj4eLXsgYDci2rSpElcDnYFuv6iDwwMxLFjx5Camork5GScPn2aezuTSqV4/fq1xlwZTk5OXEKh/fv3Y8yYMYiPj4dYLEZCQgKaNm2KS5cuwdXVFfHx8Xjw4AEAIC4uDrdv34aXlxfq1KmDESNGYMaMGcjPz+euR7GOpMzatWs1yqoXHzQAICsrC9nZ2QDkaziWlpbcwKGQVU9JScGGDRswbtw4tfZxcXHcrEZcXBwePnyI5s2bY/ny5UhISIBYLMauXbvw4YcfYseOHZDJZHj8+DEA+Q+Hw4cPw9XVFQBw7do1TJkyhZu6LIl79+7h1atX8PPz42TVGWOVVla9wr2kyvpp06aNRi8CHv1RlbyqQkNDSSgUkqenJ7Vu3Zr+/vtvio6OJpFIREKhkEQiEZ04cYKIiPbu3auWc7y4Z5qy51JJOccnTpxILi4u1KNHD5Wc49HR0Zy3lpubG23ZsoWI5DmuR44cyZ1H+dwymYy8vLxUco4vWrSIWrRoQc7Ozlz/8/PzycjIiIRCIbm5uZFQKKTVq1ereP3Y2NhQvXr1qHbt2mRjY0P37t3jyjUpxf7222/k6OhIjo6OtGPHDq780qVLNHDgQI3fz4IFC2jbtm1EROTn50cnT55U2b9q1SqaOnUqERGdPXuW2rdvT0KhkNq1a0enTp3i6uXl5dHs2bPJ0dGRPDw8qEOHDpyX2dsSGxtLzs7O5OrqSt27d6cnT55w+4KDg6lVq1bUqlUr2rt3L1d+8OBBWrRoERERbdu2jbNt69atKSIiQu0cyt5iEomEOnbsSB4eHuTh4UHDhw+n9PR0IiLq0qULNW7cmIRCIQmFQurXr5/Wfg8YMIAePXpERESJiYnk4+NDbm5u3H2Vm5tLrq6uKorEFelVVeEDQVk/ZZXbrs7cvXvXIMetrANHSSjcHisr5Smrrg9bTJ48WaO7KhHR06dPqWfPnu98jvKgst8XurJv3z5auHChShnvjlsGtHmM1ETu379f0V0wCG8jq56Xl2fAHr075Smrrg9beHt7o0uXLhr32draYtSoUSoBgJWVyn5f6AoRYebMmdz2oEGDcPHixVLXaQwFvzjOoxEiKrP3i75o1qyZxijbqo4iU2BVQNP8vzKDBw8up57wAMDAgQNVtvfu3au1rvzlwrBUuTcOHsNTq1YtJCcnl8sNyMPDoz+ICMnJyQZ/E6lyiZy8vb3p+vXrFd2NSoGhktTk5+cjISGhSk0LymQy3uOuEN4WRdREW9SqVQu2trYwMTFRKddnIid+qopHDRPr6bXDAAAI/0lEQVQTEzXxv8pOTc30pgneFkXwtjAMBh2KGWOBjLH7jLFYxtiXGvZPZIz9yxiLYYxdYIy5lXZMXnKkCEX8AA9vC2V4WxTB28IwGGzgYIwJAPwM4CMAbgCGaBgYdhORJxGJACwHsNpQ/eHh4eHh0Q+GfONoDyCWiB4TkQTAHgAq6lxEpJyeyxJA1Vpw4eHh4amBGHKNwwaAsk9lAoAOxSsxxqYAmAXAFEBXTQdijE0AMKFwM48xdkuP/bQCUBa1wNLql7Rf0z5dypS3lf/fAIC6VOfbw9ui5L68S31926Iku/C24G2haZ9LaZ3VGX1FEhb/AAgGsFlpeziA9SXUHwpghw7H1Vv0Y+HxftNn/ZL2a9qnS5nydrH/87aoobYoxS68LXhbGNQWhpyqegagmdK2bWGZNvYA6FfCfkPxZ+lVylS/pP2a9ulS9mcJ+/QJb4u3P3Z526Iku+gb3hZvf+xqaQuDxXEwxowBPADQDfIB4yqAoUR0W6lOSyJ6WPj/PgC+oVL8jBlj10qrU1PgbVEEb4sieFsUwduiCH3awmBrHERUwBibCuAEAAGArUR0mzG2GPJXpsMApjLGugPIB/AGwEgdDq09sULNg7dFEbwtiuBtUQRviyL0ZosqFznOw8PDw1Ox1KxYfB4eHh6ed4YfOHh4eHh4ygQ/cPDw8PDwlIlqNXAwxowYY98xxtYxxnRZaK+2MMb8GWPnGWMbGGP+Fd2fioYxZskYu8YY613RfalIGGOtCu+JcMbYpIruT0XCGOvHGNvEGNvLGPuwovtTkTDGWjDGtjDG1JO+a6DSDByMsa2MsaTiUeGlCSUWIwjyeJF8yCPVqyR6sgUByARQC7wtAOA/APYZppflgz5sQUR3iWgigIEAfA3ZX0OiJ1v8QUTjAUwEMMiQ/TUkerLFYyIaq/M5K4tXFWOsM+QPup1E5FFYJoA8FqQH5A+/qwCGQO7eu7TYIcYUft4Q0UbGWDgRBZdX//WJnmzxmohkjLHGAFYTUUh59V+f6MkWQgDWkA+ir4noSPn0Xr/owxZElMQY6wtgEoDfiWh3efVfn+jLFoXtVgEIJaJ/yqn7ekXPttDpuVlp8nEQ0TnGmH2xYk4oEQAYY3sABBHRUgBqUw6MsQQAksJNqeF6a1j0YQsl3gAwM0Q/ywM93Rf+kItougHIYYwdJSKZIfttCPR1XxTGUB1mjP0fgCo5cOjpvmAAlgE4VlUHDUDvzwudqDQDhxZ0EkpU4iCAdYyxDwCcM2THKoAy2YIxNgBATwB1Aaw3bNfKnTLZgojmAQBjbBQK38QM2rvypaz3hT+AAZD/mDhq0J6VP2V9XkwD0B2AFWPMiYg2GLJz5UxZ7wtrAN8B8GaMfVU4wGilsg8cZYKIsgHoPE9XnSGig5APpDyFENH2iu5DRUNEkQAiK7gblQIiWgtgbUX3ozJARMmQr/XoRKVZHNdCWYUSqzO8LYrgbVEEb4sieFsUYVBbVPaB4yqAlowxB8aYKYDBAA5XcJ8qCt4WRfC2KIK3RRG8LYowqC0qzcDBGAsDcAmAC2MsgTE2logKACiEEu8C2Kesrltd4W1RBG+LInhbFMHbooiKsEWlccfl4eHh4akaVJo3Dh4eHh6eqgE/cPDw8PDwlAl+4ODh4eHhKRP8wMHDw8PDUyb4gYOHh4eHp0zwAwcPDw8PT5ngBw6eSgdjTMoYi1H62JdQ1764nPRbnjOyUIL6BmPsImPM5S2OMZExNqLw/6MYY02V9m1mjLnpuZ9XGWMiHdrMYIxZvOu5eXgU8AMHT2Ukh4hESh9xOZ03hIiEAHYAWFHWxkS0gYh2Fm6OAtBUad84Irqjl14W9fMX6NbPGQD4gYNHb/ADB0+VoPDN4jxj7J/CTycNddwZY38XvqXcZIy1LCwfplS+sTBXQUmcA+BU2LYbY+w6Y+zfwoQ5ZoXlyxhjdwrPs7KwbCFj7AvGWDCAtgBCC89pXvim0LbwrYR72Be+max/y35eglwFVXGsX5k8y+FtxtiiwrLpkA9gZxhjZwrLPmSMXSq0437GWO1SzsPDowI/cPBURsyVpqkOFZYlAehBRK0hz9amSdV0IoCfiEgE+YM7gTHWqrC+b2G5FEBpSa36APiXMVYLwHYAg4jIE3I16UmFEtT9AbgTkReAJcqNiSgcwDXI3wxERJSjtPtAYVsFgwDsect+BgL4Q2l7HhG1BeAFoAtjzKtQATYRQAARBTDGGgD4GkD3QlteAzCrlPPw8Kh
QrWTVeaoNOYUPT2VMAKwvnNOXAnDW0O4SgHmMMVsAB4noIWOsG4A2AK4yxgDAHPJBSBOhjLEcAGLIczW4AIgjogeF+3cAmAJ5fpNcAFsYY0cA6JxRkIheMcYeM8Z8ADwE4ArgYuFxy9JPUwC1ASjbaSBjbALkf9fvQ5646maxtj6F5RcLz2MKud14eHSGHzh4qgozAbyEPA2sEeQPbhWIaDdj7AqAXgCOMsY+A8AA7CCir3Q4RwgRXVNsMMbqa6pERAWMsfYAugEIhlxMrmsZrmUP5Dm/7wE4RETE5E9xnfsJIBry9Y11AAYwxhwAfAGgHRG9YYxthzxVbnEYgJNENKQM/eXhUYGfquKpKlgBeF6YvW845LmTVWCMtQDwuHB6JgLyKZvTAIIZY40K69RnjDXX8Zz3AdgzxpwKt4cDOFu4JmBFREchH9CEGtpmAKij5biHAARBngP6/9u7e5QIgiiKwufGglswNXABgitwA6biIsx0CaYyGImBCqYGooHJgGKio+7CQAwGjHwG1ZPIDFqhcL6waKqrO+hL/fD6fGjrGme16qT7wHqSVWAZmAIfaf+Z31wwlntgY/ZMSZaSzJu9SQsZHPovDoHtJBPa8s50zjVbwGuSJ2ANOBlOMu0B10megRvaMs6vquoT2AEukrwAX8CI9hG+HPobM3+P4BgYzTbHf/T7Tit1vVJVD0Nb9ziHvZMDYLeqJsAjbRZzSlv+mjkCrpLcVtUb7cTX2XCfO9r7lP7MsuqSpC7OOCRJXQwOSVIXg0OS1MXgkCR1MTgkSV0MDklSF4NDktTF4JAkdfkG2BsGavF6/QMAAAAASUVORK5CYII=\n",
- "text/plain": [
- "
"
- ]
- },
- "metadata": {
- "needs_background": "light"
- },
- "output_type": "display_data"
- }
- ],
- "source": [
- "score_save_path = './IJBB/result'\n",
- "files = glob.glob(score_save_path + '/MS1MV2*.npy') \n",
- "methods = []\n",
- "scores = []\n",
- "for file in files:\n",
- " methods.append(Path(file).stem)\n",
- " scores.append(np.load(file)) \n",
- "methods = np.array(methods)\n",
- "scores = dict(zip(methods,scores))\n",
- "colours = dict(zip(methods, sample_colours_from_colourmap(methods.shape[0], 'Set2')))\n",
- "#x_labels = [1/(10**x) for x in np.linspace(6, 0, 6)]\n",
- "x_labels = [10**-6, 10**-5, 10**-4,10**-3, 10**-2, 10**-1]\n",
- "tpr_fpr_table = PrettyTable(['Methods'] + map(str, x_labels))\n",
- "fig = plt.figure()\n",
- "for method in methods:\n",
- " fpr, tpr, _ = roc_curve(label, scores[method])\n",
- " roc_auc = auc(fpr, tpr)\n",
- " fpr = np.flipud(fpr)\n",
- " tpr = np.flipud(tpr) # select largest tpr at same fpr\n",
- " plt.plot(fpr, tpr, color=colours[method], lw=1, label=('[%s (AUC = %0.4f %%)]' % (method.split('-')[-1], roc_auc*100)))\n",
- " tpr_fpr_row = []\n",
- " tpr_fpr_row.append(method)\n",
- " for fpr_iter in np.arange(len(x_labels)):\n",
- " _, min_index = min(list(zip(abs(fpr-x_labels[fpr_iter]), range(len(fpr)))))\n",
- " tpr_fpr_row.append('%.4f' % tpr[min_index])\n",
- " tpr_fpr_table.add_row(tpr_fpr_row)\n",
- "plt.xlim([10**-6, 0.1])\n",
- "plt.ylim([0.3, 1.0])\n",
- "plt.grid(linestyle='--', linewidth=1)\n",
- "plt.xticks(x_labels) \n",
- "plt.yticks(np.linspace(0.3, 1.0, 8, endpoint=True)) \n",
- "plt.xscale('log')\n",
- "plt.xlabel('False Positive Rate')\n",
- "plt.ylabel('True Positive Rate')\n",
- "plt.title('ROC on IJB-B')\n",
- "plt.legend(loc=\"lower right\")\n",
- "plt.show()\n",
- "#fig.savefig('IJB-B.pdf')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 49,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "+-------------------------------------------+--------+--------+--------+--------+--------+--------+\n",
- "| Methods | 1e-06 | 1e-05 | 0.0001 | 0.001 | 0.01 | 0.1 |\n",
- "+-------------------------------------------+--------+--------+--------+--------+--------+--------+\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N1D1F1) | 0.4091 | 0.9081 | 0.9477 | 0.9636 | 0.9755 | 0.9863 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N0D1F2) | 0.4089 | 0.8995 | 0.9463 | 0.9642 | 0.9761 | 0.9867 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N1D1F2) | 0.4281 | 0.9082 | 0.9490 | 0.9647 | 0.9767 | 0.9866 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N1D0F0) | 0.3900 | 0.9042 | 0.9467 | 0.9620 | 0.9761 | 0.9860 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N0D0F0) | 0.3828 | 0.8933 | 0.9425 | 0.9615 | 0.9751 | 0.9856 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N1D1F0) | 0.3930 | 0.9039 | 0.9476 | 0.9630 | 0.9758 | 0.9861 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N0D1F0) | 0.3892 | 0.8984 | 0.9456 | 0.9626 | 0.9753 | 0.9861 |\n",
- "+-------------------------------------------+--------+--------+--------+--------+--------+--------+\n"
- ]
- }
- ],
- "source": [
- "print(tpr_fpr_table)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# setting N1D1F2 is the best"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "language": "python",
- "name": "python2"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 2
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.15"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
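Aside: both notebooks derive their TPR@FPR rows the same way, by picking the TPR at the ROC operating point whose FPR is closest to each target. A minimal Python 3 sketch of that lookup (the function name and defaults are illustrative, not part of the original notebooks):

```python
# Nearest-point TPR@FPR lookup, mirroring the PrettyTable rows built above.
import numpy as np
from sklearn.metrics import roc_curve

def tpr_at_fpr(labels, scores, targets=(1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1)):
    fpr, tpr, _ = roc_curve(labels, scores)
    fpr, tpr = np.flipud(fpr), np.flipud(tpr)  # keep the largest TPR on FPR ties
    return {t: tpr[np.argmin(np.abs(fpr - t))] for t in targets}
```

For example, `tpr_at_fpr(label, scores[method])` reproduces one row of the printed table.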
diff --git a/evaluation/IJB/IJBB_Evaluation_VGG2.ipynb b/evaluation/IJB/IJBB_Evaluation_VGG2.ipynb
deleted file mode 100644
index 0ed5fa7..0000000
--- a/evaluation/IJB/IJBB_Evaluation_VGG2.ipynb
+++ /dev/null
@@ -1,535 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/home/jd4615/miniconda3/envs/insightface/lib/python2.7/site-packages/sklearn/utils/fixes.py:313: FutureWarning: numpy not_equal will not check object identity in the future. The comparison did not return the same result as suggested by the identity (`is`)) and will change.\n",
- " _nan_object_mask = _nan_object_array != _nan_object_array\n"
- ]
- }
- ],
- "source": [
- "import os\n",
- "import numpy as np\n",
- "import cPickle\n",
- "from sklearn.metrics import roc_curve, auc\n",
- "import matplotlib.pyplot as plt\n",
- "import timeit\n",
- "import sklearn\n",
- "import cv2\n",
- "import sys\n",
- "import glob\n",
- "sys.path.append('./recognition')\n",
- "from embedding import Embedding\n",
- "from menpo.visualize import print_progress\n",
- "from menpo.visualize.viewmatplotlib import sample_colours_from_colourmap\n",
- "from prettytable import PrettyTable\n",
- "from pathlib import Path\n",
- "import warnings \n",
- "warnings.filterwarnings(\"ignore\") "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_template_media_list(path):\n",
- " ijb_meta = np.loadtxt(path, dtype=str)\n",
- " templates = ijb_meta[:,1].astype(np.int)\n",
- " medias = ijb_meta[:,2].astype(np.int)\n",
- " return templates, medias"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_template_pair_list(path):\n",
- " pairs = np.loadtxt(path, dtype=str)\n",
- " t1 = pairs[:,0].astype(np.int)\n",
- " t2 = pairs[:,1].astype(np.int)\n",
- " label = pairs[:,2].astype(np.int)\n",
- " return t1, t2, label"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_image_feature(path):\n",
- " with open(path, 'rb') as fid:\n",
- " img_feats = cPickle.load(fid)\n",
- " return img_feats"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [],
- "source": [
- "def get_image_feature(img_path, img_list_path, model_path, gpu_id):\n",
- " img_list = open(img_list_path)\n",
- " embedding = Embedding(model_path, 0, gpu_id)\n",
- " files = img_list.readlines()\n",
- " img_feats = []\n",
- " faceness_scores = []\n",
- " for img_index, each_line in enumerate(print_progress(files)):\n",
- " name_lmk_score = each_line.strip().split(' ')\n",
- " img_name = os.path.join(img_path, name_lmk_score[0])\n",
- " img = cv2.imread(img_name)\n",
- " lmk = np.array([float(x) for x in name_lmk_score[1:-1]], dtype=np.float32)\n",
- " lmk = lmk.reshape( (5,2) )\n",
- " img_feats.append(embedding.get(img,lmk))\n",
- " faceness_scores.append(name_lmk_score[-1])\n",
- " img_feats = np.array(img_feats).astype(np.float32)\n",
- " faceness_scores = np.array(faceness_scores).astype(np.float32)\n",
- " return img_feats, faceness_scores"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [],
- "source": [
- "def image2template_feature(img_feats = None, templates = None, medias = None):\n",
- " # ==========================================================\n",
- " # 1. face image feature l2 normalization. img_feats:[number_image x feats_dim]\n",
- " # 2. compute media feature.\n",
- " # 3. compute template feature.\n",
- " # ========================================================== \n",
- " unique_templates = np.unique(templates)\n",
- " template_feats = np.zeros((len(unique_templates), img_feats.shape[1]))\n",
- "\n",
- " for count_template, uqt in enumerate(unique_templates):\n",
- " (ind_t,) = np.where(templates == uqt)\n",
- " face_norm_feats = img_feats[ind_t]\n",
- " face_medias = medias[ind_t]\n",
- " unique_medias, unique_media_counts = np.unique(face_medias, return_counts=True)\n",
- " media_norm_feats = []\n",
- " for u,ct in zip(unique_medias, unique_media_counts):\n",
- " (ind_m,) = np.where(face_medias == u)\n",
- " if ct == 1:\n",
- " media_norm_feats += [face_norm_feats[ind_m]]\n",
- " else: # image features from the same video will be aggregated into one feature\n",
- " media_norm_feats += [np.mean(face_norm_feats[ind_m], 0, keepdims=True)]\n",
- " media_norm_feats = np.array(media_norm_feats)\n",
- " # media_norm_feats = media_norm_feats / np.sqrt(np.sum(media_norm_feats ** 2, -1, keepdims=True))\n",
- " template_feats[count_template] = np.sum(media_norm_feats, 0)\n",
- " if count_template % 2000 == 0: \n",
- " print('Finish Calculating {} template features.'.format(count_template))\n",
- " template_norm_feats = template_feats / np.sqrt(np.sum(template_feats ** 2, -1, keepdims=True))\n",
- " return template_norm_feats, unique_templates"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [],
- "source": [
- "def verification(template_norm_feats = None, unique_templates = None, p1 = None, p2 = None):\n",
- " # ==========================================================\n",
- " # Compute set-to-set Similarity Score.\n",
- " # ==========================================================\n",
- " template2id = np.zeros((max(unique_templates)+1,1),dtype=int)\n",
- " for count_template, uqt in enumerate(unique_templates):\n",
- " template2id[uqt] = count_template\n",
- " \n",
- " score = np.zeros((len(p1),)) # save cosine distance between pairs \n",
- "\n",
- " total_pairs = np.array(range(len(p1)))\n",
- " batchsize = 100000 # small batchsize instead of all pairs in one batch due to the memory limiation\n",
- " sublists = [total_pairs[i:i + batchsize] for i in range(0, len(p1), batchsize)]\n",
- " total_sublists = len(sublists)\n",
- " for c, s in enumerate(sublists):\n",
- " feat1 = template_norm_feats[template2id[p1[s]]]\n",
- " feat2 = template_norm_feats[template2id[p2[s]]]\n",
- " similarity_score = np.sum(feat1 * feat2, -1)\n",
- " score[s] = similarity_score.flatten()\n",
- " if c % 10 == 0:\n",
- " print('Finish {}/{} pairs.'.format(c, total_sublists))\n",
- " return score"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_score(path):\n",
- " with open(path, 'rb') as fid:\n",
- " img_feats = cPickle.load(fid)\n",
- " return img_feats"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step1: Load Meta Data"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Time: 0.83 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load image and template relationships for template feature embedding\n",
- "# tid --> template id, mid --> media id \n",
- "# format:\n",
- "# image_name tid mid\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "templates, medias = read_template_media_list(os.path.join('IJBB/meta', 'ijbb_face_tid_mid.txt'))\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Time: 31.75 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load template pairs for template-to-template verification\n",
- "# tid : template id, label : 1/0\n",
- "# format:\n",
- "# tid_1 tid_2 label\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "p1, p2, label = read_template_pair_list(os.path.join('IJBB/meta', 'ijbb_template_pair_label.txt'))\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 2: Get Image Features"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 11,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "('loading', './pretrained_models/VGG2-ResNet50-Arcface/model', 0)\n",
- "[====================] 100% (227630/227630) - done. \n",
- "Time: 2386.28 s. \n",
- "Feature Shape: (227630 , 1024) .\n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load image features \n",
- "# format:\n",
- "# img_feats: [image_num x feats_dim] (227630, 512)\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "#img_feats = read_image_feature('./MS1MV2/IJBB_MS1MV2_r100_arcface.pkl')\n",
- "img_path = './IJBB/loose_crop'\n",
- "img_list_path = './IJBB/meta/ijbb_name_5pts_score.txt'\n",
- "model_path = './pretrained_models/VGG2-ResNet50-Arcface/model'\n",
- "gpu_id = 0\n",
- "img_feats, faceness_scores = get_image_feature(img_path, img_list_path, model_path, gpu_id)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))\n",
- "print('Feature Shape: ({} , {}) .'.format(img_feats.shape[0], img_feats.shape[1]))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step3: Get Template Features"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 12,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Finish Calculating 0 template features.\n",
- "Finish Calculating 2000 template features.\n",
- "Finish Calculating 4000 template features.\n",
- "Finish Calculating 6000 template features.\n",
- "Finish Calculating 8000 template features.\n",
- "Finish Calculating 10000 template features.\n",
- "Finish Calculating 12000 template features.\n",
- "Time: 3.41 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# compute template features from image features.\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "# ========================================================== \n",
- "# Norm feature before aggregation into template feature?\n",
- "# Feature norm from embedding network and faceness score are able to decrease weights for noise samples (not face).\n",
- "# ========================================================== \n",
- "# 1. FaceScore (Feature Norm)\n",
- "# 2. FaceScore (Detector)\n",
- "\n",
- "use_norm_score = False # if True, TestMode(N1) \n",
- "use_detector_score = True # if True, TestMode(D1)\n",
- "use_flip_test = True # if True, TestMode(F1)\n",
- "\n",
- "if use_flip_test:\n",
- " # concat --- F1\n",
- " #img_input_feats = img_feats \n",
- " # add --- F2\n",
- " img_input_feats = img_feats[:,0:img_feats.shape[1]/2] + img_feats[:,img_feats.shape[1]/2:]\n",
- "else:\n",
- " img_input_feats = img_feats[:,0:img_feats.shape[1]/2]\n",
- " \n",
- "if use_norm_score:\n",
- " img_input_feats = img_input_feats\n",
- "else:\n",
- " # normalise features to remove norm information\n",
- " img_input_feats = img_input_feats / np.sqrt(np.sum(img_input_feats ** 2, -1, keepdims=True)) \n",
- " \n",
- "if use_detector_score:\n",
- " img_input_feats = img_input_feats * np.matlib.repmat(faceness_scores[:,np.newaxis], 1, img_input_feats.shape[1])\n",
- "else:\n",
- " img_input_feats = img_input_feats\n",
- "\n",
- "template_norm_feats, unique_templates = image2template_feature(img_input_feats, templates, medias)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 4: Get Template Similarity Scores"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 13,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Finish 0/81 pairs.\n",
- "Finish 10/81 pairs.\n",
- "Finish 20/81 pairs.\n",
- "Finish 30/81 pairs.\n",
- "Finish 40/81 pairs.\n",
- "Finish 50/81 pairs.\n",
- "Finish 60/81 pairs.\n",
- "Finish 70/81 pairs.\n",
- "Finish 80/81 pairs.\n",
- "Time: 38.38 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# compute verification scores between template pairs.\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "score = verification(template_norm_feats, unique_templates, p1, p2)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 14,
- "metadata": {},
- "outputs": [],
- "source": [
- "score_save_name = './IJBB/result/VGG2-ResNet50-ArcFace-TestMode(N0D1F2).npy'\n",
- "np.save(score_save_name, score)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 5: Get ROC Curves and TPR@FPR Table"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEaCAYAAAAG87ApAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzsvXl8lNXZ//8+syeTDRIIQiBh0YRASFiUTQGVzaJicZdqi1q7unb56eNjtdZWf7ZVW9s+rdW69LHiVsX6WMSq4MomENn3AImQfZJJZp+5vn9MMpNACAEymQyc9+s1r8x95tznXOdz35lr7rNcR4kIGo1Go9F0FUO8DdBoNBpNYqEdh0aj0WiOC+04NBqNRnNcaMeh0Wg0muNCOw6NRqPRHBfacWg0Go3muNCOQ6PRaDTHhXYcmoREKVWmlHIrpZqUUoeUUs8ppVIOyzNFKfWBUsqplGpQSv1LKVV4WJ40pdQTSqn9LWXtbjnOirH9M5RS5W2On1NKPdTyPk8pJS32NCmlKpVSf1JKmTsp7wGllL/NOVuVUpfHsg2a0xftODSJzCUikgKUAGOBe1o/UEpNBpYBS4CBwFCgFPhUKTWsJY8FeB8YBcwF0oDJQC1wTs8146hktLSviLBdPzhG/pdFJKXlnDuA/1VKZcfaSM3ph3YcmoRHRA4B7xJ2IK08CrwgIr8TEaeI1InIfwMrgQda8twADAG+LiJbRCQkIlUi8gsReaejulqeYta0PMGsUUpNafPZcqXUL5RSn7Y85SzrjicXEakC3gMKj5W3zTnvAk5g+MnWr9EcjnYcmoRHKZUDXATsajlOBqYAr3aQ/RVgVsv7mcBSEWnqYj19gf8Dfg9kAo8B/6eUymyT7TpgEdAfsAA/Pt72dFDvQGAOYafXlfxKKTWvpf4tJ1u/RnM42nFoEpk3lVJO4ABQBdzfkt6X8L19sINzDgKtTwGZR8lzNOYBO0Xk7yISEJGXgG3AJW3yPCsiO0TETdhJlXRUUBepUUo5gAqgGXjtGPmvasnfBLwF/EpEHCdRv0bTIdpxaBKZy0QkFZgBFBB1CPVACDijg3POAGpa3tceJc/RGAjsOyxtHzCozfGhNu9dQAonTpaIZADJwKeEu+NQSi1sMwj+7zb5XxGRDBGxE+6iukEp9Z2TqF+j6RDtODQJj4isAJ4DftNy3Ax8DlzZQfarCA+IA/wHmKOUsnexqq+A3MPShhB+IogZLU8vzwGTlFJZIvJi6yC4iFx0lHPKgH/T/mlIo+kWtOPQnCo8AcxSShW3HN8NfFMpdZtSKlUp1adluutk4Octef5OuJvrdaVUgVLKoJTKVEr9l1Lqax3U8Q5wllLqOqWUSSl1NeEB67dj2TCllBW4nvDTTG0Xz8khPFNscwxN05ymaMehOSUQkWrgBeBnLcefEB5QXkB4HGMf4Sm754rIzpY8XsID5NsIz1pqBFYT7vJa1UEdtcDFwI8If4H/FLhYRGoOz3uizTjs2KGUagIqCTu8S6XzDXSubu3CAtYQ7t76eSf5NZoTQumNnDSa+KOU+ifwkYg8EW9bNJpjoZ84NJo4o5QaBJwLrI23LRpNV4iZ41BK/U0pVaWU2nSUz5VS6vdKqV1KqS+VUuNiZYtG01tRSn0fWE94Gu8n8bZHo+kKMeuqUkpNIzyf/AURGd3B518DbgW+BkwEficiE2NijEaj0Wi6jZg9cYjIR0BdJ1nmE3YqIiIrgQyl1PHMqddoNBpNHIjnGMcgwlMhWymn/UIqjUaj0fRCTPE2oCsopW4BbgFITk4ef8YZ0QeT1NRUAJxOZyTNZrNhs9lobGwkFAoBYDQaSU1NxeVy4fP5InnT0tIIBoM0NzdH0pKTk7FYLDgc0WgNZrMZu91Oc3Mzfr8/kp6RkYHP58PlckXS7HY7RqORxsbGSJrFYiE5ORmn00kwGATAYDCQlpaGx+PB4/HoNuk26TbpNsWsTVu3bq0RkX50A/F0HBXA4DbHORxlBa6IPAU8BTB27FhZv3597K1LABwOBxkZGfE2o1egtYiitYhyumghoQCNjdU4XZW4mssRTx0Gby0S8GIIBjAZAgyd8vPDw+WcMPF0HG8BP1RKLSY8ON4gIscMONfWw57urFixgvnz58fbjF6B1iKK1iJKb9YiFPQjQR+hYIAmr5MmdzNOZx2hgJuAv5lg0IXN3wwhN2ZcGCSIUQSz8oMCIwYkZCccBDmEweDCTIh0FUCUFxCCygCmAAr/Maw5PmLmOJRSLxEOPpfVstPZ/YAZQET+TDh8w9cIh8J2EQ5FrdFoNAmBiCBBP6GgH5ffjT8UxOf10Ohqxu1qIOhrwuKvRPwubCE/JvFhUEFMKAzYwoUoL4oASoFV+UlSHgzGZkIIIQQMJsTkRgQCYkAIIiHBHzQRCloJ+v2YTenYjIPpkzESY2pfDOnJKFsSymw9zOLfdlvbY+Y4ROTaY3wuHHtHM41Go+kxQgEfPm8Tze46Gh0HCLr2E/A6Mft9GAmgEIxiRaFQrV+fyosiCCqEAjJUkEyDC6V8KOUhYHMhImBswh80ICED3pDC57MTkBSsKh2DspKk8kjOGI7ZnoXRYsdgT0XZbGAwopSKpyxHkBCD422x2WzxNqHXkJ+fH28Teg1aiyhaizDBgI/BwwZyqL4Ct6cRR1Mdyl2FMeDEEPBiDLix4scsBhSCIvwL3aA8KIOXFOVBGRvxqyBibSIUMhIkSDBgJBhShAJGBDMmg4FgKAOzIROzMQllSMVi7Udy0iBMqXaU3Y5K7dPrvvxPhoSLVTVhwgRZu1ZHZtBoTlckFMTXXEudq54mVx1ORyWGgANjsIHkkB8LYJSkltwBDAY3SvkxGp0YlIegwU9IQBmb8SkvgaAR8ZkJ+G2ILxVTII+01OGkpg7A3L8fqk8mypLUmUkJgVLqCxGZ0B1lJdwTR9vpa6c7S5cuZe7cufE2o1egtYiSyFqICL7mKpqde3G76vC4qwl6GzD6g5hDQYxiQWHEYHBiNjSTgdBHhQgZ3BgsTgISJBiEgF+Bz0DAa8dCBhYGYzEXY03ugzEjDYPVAhYjqu8ZkJRySj0N9AQJ5zha5ydrwOv1xtuEXoPWIkpv1EJCQYK+ZvzeJhyeJoK+JoLuQ/jcezH5GjH7zRjEBJhR+DEYmzAZXKQbnfgNzYgFAhLC5bXgDhgINqdi9GdyRrAfSan9MWf0x5jZD5Wehkrri1Lhtc1LlizptbOqEpmEcxwajab3IcEAAW8Dfk8TNVXb8TZXokJOzP4ABkIYxIRBuTEoPwYVxGJoBhXAa/ARNHjwmoSAz4DX0R/VmI3NMIAUWzoZGXaMGakYM7MgvR+kZeqng15AwjkOo9EYbxN6Denp6fE2odegtYgSCy187kaammvwuupxNdcgzYcw+5oxSggIYcAIyofR6CDV2EBfUzV+sweXVQj6TXi9VjxN/Qk2ZmPy9cEimaQYM+jbx4Z5yCAM6VmQnAapfVDG7vta0vdFbNCD4xqNBgBfy9OCr6mKkKc
B5WsiEApil0D4c7yYjE0km2owGRsImmsIKA8unx28SbidA6HJTsiTSV9XNvbkNCyD0zFm98cwYAAqJTXShaTpeU7rwfG28V5OdzZs2EBJSUm8zegVaC2idKaFhAK4HOUcclTQ5G5E+d2EfC4sXicZhhAuvBhNVSQbHaTZD2AxORH8OJTgx0JN00CoKCTzYBGp2LH3sZA6aBCWUWeh0tK79WmhO9D3RWzoXVe5C7QNPna6s2/fPv1P0YLWIkqrFhIK4HZWUu+sxuU4gLmpCkvQg1d5UYZmsoxOzMY6Um11GO2N1BtdWJXCFUyi0XkG++qHohrtZFWnkRZIJjvZTk7+UAznjQ7PRkoA9H0RGxLOcWg0miORUBB3UzWHavaQllLHllXPkxFowiNBjEYn6ZYK0lPKUKZKarDh8mTidvWhvrEP9fXDMTSbyPQlk5aWgjnTjiFnMCo5Hfpmgz1DD0hr2qEdh0aTIIgIQU8j7qZKauvL8bjqCXob6RMId98GRQjhY5j9IJlJu7CYK6gzGGnyZuCqzqem6iL61GeQak8ma2gGpqGDURlZkJSKMpnj3DpNIpFwg+Pjxo2TdevWxduMXoHb7SYpKfFXtHYHp5oWoYAPv6sGf1M1Llc99XX7SfWHI0PX4MNicGAx1WEzNtDftg9laMShLLhDNpzePnhqziJzdz4Zlr5YhvbBMqYAQ98BcW5Vz3Oq3Rcnw2k9ON66wYkGGhoa9D9FC4msRdDXjN9ZhbPxIE5HOQZvI+agFxeCDzcGYyMp1v0MSNuKUj4s2GgSK25XFn5nFlX7Lia1OpM+Xht9+phISTOQVjwadWH+ad/FlMj3RW8m4RxH2124TndWrVqlV8W2kAhaiAghn4uGur0cqt2H0efE4mvAFAzgUh5QDmymOgak7MZirsSJCb+YqPf0p97dl6pDc1GNZ9DHYSIrdQCmTBumwYMxTsjBYE+J1LNkyRLm5xbEsaW9h0S4LxKRhHMcGk0iEPQ6cdfvp6apBm9TDUZPA8lBN0EEF17SzV/R17YXZW3AY2gmaDDTJFYCrj58UVVE+oGr6NeQisVqJjvFhDU3BdOIDNQZuZCcjjLo9RCa+KEdh0ZzEoSCfgKuWpzOahwNFQRdDoy+JiwhL46QjyRTA8nmamy2ajJtZfhVkGpJpsYzkH21w0iqPBN7k510p5E+djOmM/pgGzkQw7QResBa02tJOMeRnJwcbxN6DcXFxfE2odfQU1o0N3xF1YEvCDVVkhQMBxP0i6LeZwGDk1TrITLt20i3lmPBSo3YCTqzaTo0nIqGadia+pFZZ2Bo3xCW7GQs44eghpyFsnZfP7y+L6JoLWJDws2q0iFHND1JIBRkT205zRXrsLhrSA/62BTIIBhKJuAvJzf9C0r6biAkigqSqQtlYm4eiKdiKOlfFZDh82PKSMY6rB/GrFRMAzMhLeu0H7TW9Dyn9awqh8MRbxN6DTpkdJTu0kJEqG08xL7afTTW7iXHW0+KEppDyThwQMoaJtgO0sfQRAM2GkOZfLlvPpb9I0l19CMnVIPJHsAyNBPrVWMxZGZ3Q+uOD31fRNFaxIaEcxwaTXcR9DXjaa6lqqmW6sZKfE3VZPud2JWQipBprMGSupUzbDvpj+DChtczEM+hCVRsvgCLO4VUQy39+9ZiHRrCUDgMsi/QTxOaUx7tODSnDSKCy3GAmq++xOzYRwjw4Aejh8HGeozmRuz2PRhMdShlpknsOJsH07jrOqxl+SS5+mA0N5PRz4N1UhamkYUouw7brTn9SDjHYTbrmSatZGf3fDdIb+VwLUQECXhprtmJs+ErGpvrsHjrEfGDuYr+GauwmWqoCaZRYzSwN5CB35uFqX4CyZWDyCofQYbfh9lSjynLjml0GpYx+Rj69ItTC7uOvi+iaC1iQ0wHx5VSc4HfAUbgaRF55LDPc4G/Af2AOuAbIlLeWZl6cFxzOK0xnLyOfTTU7iPgrsPkawKgHj9WYw39bXuxmKs4KNBoDlEVsiM1I8k8NIR+VYNIcTVhVA0YzQFMZ6RhLhqJYchI/UShOWVIiMFxpZQR+CMwCygH1iil3hKRLW2y/QZ4QUSeV0pdADwMXN9ZuXrleJSVK1cyadKkeJvR40goSMDtwFO7m6aa3Yi7DjcKjwrgoZ6c5O30SdmLxejGHkxinyGJj0khuXwKmYdy6V+XzTDvDsyqClOOC8O5Gaj8WSibPd5N6xZO1/uiI7QWsSGWXVXnALtEZA+AUmoxMB9o6zgKgbta3n8IvHmsQv1+fzebmbhUVlbG24QeI+BxUlu9g8aaXSS7aggRRIw1mKy7yc7cjZ8QDVhoUgYOhOxs8g9DagfR52AuAw4OYzAN2EI7sOQLKs+HKrwWMrJPyYHs0+m+OBZai9gQS8cxCDjQ5rgcmHhYnlJgAeHurK8DqUqpTBGpjaFdmgQgFPDia6qm+tBmjLW7APAZGuiftJWkjIO4DC7KDEnUBbPY5p6OeX8/+jkHklI1gFzlxRrchdHsw5JWh3HGaFS/fOh/Ra/boU6jSUTi/V/0Y+APSqlvAR8BFcAR4W+VUrcAtwD069ePJUuWRD6bPn06ACtWrIik5efnU1BQwNKlS/F6w6t709PTmTFjBhs2bGDfvn2RvLNnz6ahoYFVq1ZF0oqLi8nLy2tXT3Z2NpMmTWLlypXtfsXMnz+fsrIySktLI2kTJ04kPT2dZcuWRdJyc3MpKSlh+fLlNDQ0AGC1Wpk7dy7btm1j+/btx92mVk6FNuFvoq/VTVKSIt3qx+R34VU+TKZDpNgPkJq0nUNY2eTKpaZ+LP0rh3DGVyPI8QUwioOAsRGfpYLAoCayzp7EmoMpHKypC1e018H8MdPjdp16+t4D2p1/KrTpZK5T69qvU6lNJ3KdupOYDY4rpSYDD4jInJbjewBE5OGj5E8BtolITmfl6sHxUwOREN66fbidlTTW7MLsbcBt8pJhLSXZ2EDQXI1DQX0whUZfDslfjSVz51kkB3yY5BAWcw2WgkEY88fCGcNQVh2KRqPpjIQYHAfWAGcqpYYSfpK4BriubQalVBZQJyIh4B7CM6w6Re85HqWsrIy8vLx4m9FlfM5KHIc24XNWYXDXEUCxW3nItZVxRt/VBA1+dpHO9vrR9HPMIa/sLPo5rQyyVGPyV2DLLsV0zgzU0Jkoi61d2YmmRSzRWkTRWsSGmDkOEQkopX4IvEt4Ou7fRGSzUupBYK2IvAXMAB5WSgnhrqofHKtcl8sVK5MTjtLS0l7/T+H3NbOjfCOm6i3YA252hMz4xMPZff6PgdZKssRIBXY+dEwnrXYseVtz6G8MkGzcgTHwOdaJhRgGDYMh81Bmy1HrSQQtegqtRRStRWyI6RiHiLwDvHNY2s/avH8NeC2WNmh6npDfQ92hzVRX7SDdU0dTyIRbsqltquPrw5/EoITdoT7sbbqejPJC0valUOxRmCnHnvIR5sIzMeROgUFndeosNBpNfIj34LjmFCLoc1F14Avk0JccEiMHDZk01U+g0eng0jF/JHvAfg4ZR+CtvZHkj1JJIYiN7V
gy3FhGZWOYNA+VnBbvZmg0mmOQcI7Dbj81Fml1B60zaOJJwNNAU0UpDY79mD0NbAuZcTQPwtFgpmhgKecN+wtpphoOSjKbvD8ma2keSUY3KeoDbEW5qHMWoDJOPoxHb9Cit6C1iKK1iA0J5ziMRmO8Teg1pKfHJxyGiFBf9hnOqh2YAy4qxMhGbwoDQzbGZi4hO3s/AHViZYs6A4IzGbCqhH61ZpKTNmKfPAw14geo9KxusyleWvRGtBZRtBaxIeEcR2NjY7xN6DUsW7asR/cacFXvwFWzG1/dHgDWig1vUhHJjUnMyfgVg1L3Uqn681loAE3uIkZtvYi8cjMmmwurdyu2QX5MV9+FMnS/8+9pLXozWosoWovYkHCOQ9OziITwOQ9Ru+VtCPrZKSYqDEHG9jnEdP8W0izlWFIDHJIkPvJ+jYy9+QzZNZjkJDdW9xosQ5MxZmahhs2GISNPyRAfGs3phnYcmg5x7P2E5oMbURIC4JAYWaOMfD39XaZb91Dj60OVFUoZQrr7AnJWlXBWA6SlrMYsqzGk9MPwzR+hklPj3BKNRtPdJJzjsFj09MxWcnNzu7W8oM+F17Gfyj2fYA16+XfIRlK6hbMNn3GmYR/jTF9RGejDe5JDjSmZvG0TKdxxDiYcWK07sBcEUDnFqPybe3x2VHdrkchoLaJoLWJDTPfjiAU65Ej3467ZhWPnB0jIjwsjLjzY7Ic4K3kpAId8mZSZjRzEjq1pKDlbismqHkaqqRRboBTDhQsxFM+IbyM0Gk2nJErIkZjgdDrjbUKvYfny5cyYMeOEzg24HbiqtuOu2UXQ42C/8pCa/AVZRicjbAeo8fXlM/8w9pkM2FQGZ2wtZOKu8RiNkMx6kuQ51KDRGOY8ikrp070NOwFORotTDa1FFK1FbEg4xxEMHhE897SlNYLm8RD0NlG1fjES9OIyJ9HXuJbMvhtJDthoCiSxXfrwhSgazFYG7xrH1F3jSRuQhqn+S2zq76g+A1AjJ6OKbupV4xcnosWpitYiitYiNiSc49CcGKGgj4bdK3BX70AZgvTv+zImY3g3xSWN5+NKLcdgMpPq7MugzdMpPjicPqOCWAIvoyqCqFHnoqb8GpUa/6cLjUYTXxLOcRgMhnib0GuwWq3HzOM+uApnxRcEvOGxrNTktWxRAT6tuAJP/1KM1mZILeesL2cw3DIDo68CS8NKjHyO2m5CXXgNqvj8Xj+NtitanC5oLaJoLWKDHhw/BfHUleGp34+3bitBX4Dk5DIMfXP4zHWAGp8Do7URlGD0WyhZtYDswWOwB1ehtn4IthTUmeNRE+eh0jLj3RSNRtNNnNaD4x6PJ94m9Bq2bdtGQUFB5FhCAQ6u/CtICI89m/7mj6lJ8vJm5Xn0M6wjYGsmt6KEdHcRuUOLsA+0Y8h7Fyl9BAB17gIM58yLV3NOisO1OJ3RWkTRWsSGhOv30Y4jStutJAHc1TtBQnySVcIfmhykJG/iczP0yfmY1NrB8MU3mDj1DkYPCJHy8SOof9yD7N2EmnIZxrueSVinAUdqcTqjtYiitYgNCffEoekYf3Mtjl0fsMuYQqXjAx7r/zrNEr68po9ux5fXl6sHbUSeuQ1RBtS0K1DFF6BM5jhbrtFoEg3tOE4BXFXbcOx8n2oxsMWpuDX3dWr86awwZZL3+Q/IHd5I9oZnkfQs1ORLURMvQelJBhqN5gRJuMHxsWPHyvr16+NtRq/A4XBg8X6FY9eHbCdEof0DspP30iBJvEMOo9Z8jcKxJah3/4AqnIJh7k3xNjlmOBwOMjIy4m1Gr0BrEUVrEeW0HhzXhAl4nbh3vY3L28A+g4vzM1+kKpDGv2UI0jyUSZ/PJue681HP3Q4Dhp7STkOj0fQsCddfoUOOgHP/aqrWvkDA08Be6zYm9X2R7a6hvG/MJnfTdYx7/0oG5AQw/PMXICEM874Tb5NjzooVK+JtQq9BaxFFaxEb9BNHghH0u3EeWIMv80w+rP+YRWkfs0r6syfJxIiN08iqHEy6fSmm3V+h5t6EGnQmKv3kt2bVaDSaVrTjSCDczip27l5BGkKG3M+iTDfbJQN35Rymr55AxjgL1v1Pw6AzMVz8GMqut83UaDTdT0wdh1JqLvA7wAg8LSKPHPb5EOB5IKMlz90i8k5nZdpsthhZ23sRCVG35VW8jhoGmGpISi0laAixrPZiCj8/n4KgFXvSZizrV6MKJ6Hm3tzrQ4R0N/n5+fE2odegtYiitYgNMZtVpZQyAjuAWUA5sAa4VkS2tMnzFLBeRP5HKVUIvCMieZ2VezqGHHFX/Jv6sj24kzewObmaZsxMaPo16WsqCTn9ZBpexKB8GL75C1TmwHibq9FoeiHdOasqloPj5wC7RGSPiPiAxcDhu8YL0LpVXDrw1bEKbWxs7FYjezuuimXUl+0B8wFWJ9dj+aqQc/99C6kflKOcVWQkv49xznUY7nz6tHYaS5cujbcJvQatRRStRWyIZVfVIOBAm+NyYOJheR4AlimlbgXswMxjFRoKhbrLvt6Nv4Ka9X/C5x+Ism3j45SvyNs+icKmESQF38LQJ5n3+o7nogX3x9vSXoHX6423Cb0GrUUUrUVsiPfg+LXAcyLyW6XUZODvSqnRItLOOyilbgFuAejXrx9LliyJfDZ9+nSg/bS7/Px8CgoKWLp0aeTGSU9PZ8aMGWzYsIF9+/ZF8s6ePZuGhgZWrVoVSSsuLiYvL69dPdnZ2UyaNImVK1dSWVkZSZ8/fz5lZWWUlpZG0iZOnEh6ejrLli2LpOXm5lJSUsLy5csjm8tYrVbmzp3Ltm3b2sXUOX/qYCh/Bp+/kPUZq0ndfyaj18xhRPMy6lMdVPQdzM6+0b7bRGhTT1ynJUuWnHJtOpHr1KrFqdSmk7lODofjlGvTiVyn7iSWYxyTgQdEZE7L8T0AIvJwmzybgbkicqDleA8wSUSqjlZufn6+nNKBywLV+HdcRXX9FTT666n0BumzdSYFwb9jtJkxfPdxlDHs7/W2mFG0FlG0FlG0FlG6c4yjS45DKWUBhojIri4XrJSJ8OD4hUAF4cHx60Rkc5s8/wZeFpHnlFIjgfeBQdKJUaf04Li/Et+O66lxzAePmY8GfEBD+TSGVA5kToEFNW6WDkqo0WhOiB4dHFdKzQM2Au+1HJcopd441nkiEgB+CLwLbAVeEZHNSqkHlVKXtmT7EfBtpVQp8BLwrc6cBoDL5TpW1QlJ0NNI7ZdPU+OYjxI7teWpiCmIoSaHOZdMwXDO145wGt39+JnIaC2iaC2iaC1iQ1dmVT1IeFDbASAiG4ARXSlcRN4RkbNEZLiI/LIl7Wci8lbL+y0iMlVEikWkRESWdV4i+Hy+rlSdcLj3/RWvJ42M/nmUH3KyadI/8TUN4DLHp6j0rA7PadsPe7qjtYiitYiitYgNXRkc94uI47AFZYkVUre3E2zE13iIpPRR7Fj7FWWj1tJYdj4Few1kFvfTIdA1Gk2voiuOY6tS6irAoJQaCtwGrIytWacPntq9uMr/hceXR3ljObvHleKpLMRVVcJY4zuoy
TfH20SNRqNpxzEHx5VSduBnwOyWpHeBn4uIO8a2dci4ceNk3bp18ai62/HU7aZu61KslkMYHWkszVmLY89s7NX9ON9ezaDLF3X6tOF2u0lKSupBi3svWosoWosoWosoPb1yfI6I/H8iMrbldTdwUXdUfiIEg8F4Vd19SIhA1Ss07vgHVss+SNlJYG8OQX8yGY1JXCGrGDRt5jG7qFrnemu0Fm3RWkTRWsSGrjiO/+4g7d7uNqSrNDc3x6vq7kH8sO9ymss/IBDqQyjnm6xbdS3/Oe9NjNZmLq15D9vUy1DZuccsqu2CpNMdrUUUrUUUrUVsOOoYh1JqDjAXGKSUeqzNR2nAaRL3IwbUPQu+vZD2bZItfaj4z1ZqJr+MLZDJRXUzMA+pRp11dryt1Gg0mqPS2eB4FbAJ8ACb26Q7gbtjadQpjXiRvt/Hv9OAf/chdo/4hEBzP+Z97McoH6AuvD55hIbVAAAgAElEQVTeFmo0Gk2nHNVxiMh6YL1S6kUR8fSgTZ2SnJwcbxNOnK9+DE3/IWi5E59rPxUpdpoyDlK4eTjGYCWG7/0OlZTS5eKKi4tjaGxiobWIorWIorWIDV0Z4xiklFqslPpSKbWj9RVzy46CxWKJV9UnT9NyGPQ/VBw0gc3PvkHrUMpAwcGDGL796+NyGgB5eXkxMTMR0VpE0VpE0VrEhq44jueAZwFFeDbVK8DLMbSpU1ojXSYiYsxg/7qdmEMO1qevw230MzX1u9iS+6NS+x53eW0jc57uaC2iaC2iaC1iQ1ccR7KIvAsgIrtF5L+J43TcRCbgs2MyODlgrGP7F7fQvO0Gsj9eBS5nvE3TaDSaLtOVleNepZQB2K2U+i7hSLepsTXr1MNdtYlmZzFNpgZWl4/hkrSdDNv7IapgEmrS1+Jtnkaj0XSZrjiOOwnvzncb8EvCW7zeGEujOsNsTqyw4hIK4tj1Ae7q7SQnOTngb6Skqi/D3GswXHYbatiJD95lZ2d3o6WJjdYiitYiitYiNpzQRk5KqUEiUhEDe45Jou3HEfQ6qVr3D5JTVrHDVMtBRx5nfJlL7hVX0z9zULzN02g0pwk9FnJEKXW2UuoypVRWy/EopdQLQNyWYybayvG65m2EaCBg3YJl63nkrb2M8gw35pQ+J132ypU61mQrWosoWosoWovYcFTHoZR6GHgRWAgsVUo9AHwIlAJn9Yh1HeD3++NV9XHTUPYx/q2rsRqclFZdT9/dE6m272Z81hD6WE9+PUrbfY1Pd7QWUbQWUbQWsaGzMY75QLGIuJVSfYEDQJGI7OkZ0xKb5oNf0lxRij3tQ16ou5S564ewz6aYYG8m7cwp8TZPo9FoTpjOHIenNXS6iNQppXZop9E1fI2HcB5YizFlJ9tMxVy5ejhOo1Db14X9699DJfIiRo1Gc9pz1MFxpZQD+KD1EDi/zTEisiDm1nVAIgyOuyq/xH3wI+zJi9m/ezqW7VP53B7ksplnkTQ0P97maTSa05DuHBzv7Inj8sOO/9AdFZ4sCbHnuHcrhmAFVYFsfPVD+Dg1lYF5md3uNMrKynRIhRa0FlG0FlG0FrGhsyCH7/ekIV3F5XLF24TO8e4Cx6v4fWdTGszGY03C6PUybcqZ3V5VaWmp/qdoQWsRRWsRRWsRG7oSckRzPITcCGfQ4E3CmGrlq0A/5vuXY09KrIWLGo1GczRi6jiUUnOVUtuVUruUUkfs4aGUelwptaHltaNlXCVxCTUTqPgNjU0jwGxkcL8pGD0poLR/1mg0pw5dCTkCgFLKKiLe48hvBP4IzALKgTVKqbdEZEtrHhG5s03+W4GxxyrXbrd31YSeJ9hEyN+EQWXgkCb2NRzCFOqP97zDh4u6h4kTJ8ak3EREaxFFaxFFaxEbjvlTWCl1jlJqI7Cz5bhYKfVkF8o+B9glIntExAcsJrw25GhcC7x0rEKNRmMXqo4fIkaCDSGU1YRp/yG8yoq935CY1JWenh6TchMRrUUUrUUUrUVs6Eofyu+Bi4FaABEpJTw191gMIrxosJXylrQjUErlAkNpM933aDQ2Nnah6jjh2QSAMhuozBKagjbOzA6Q0jcjJtUtW7YsJuUmIlqLKFqLKFqL2NCVriqDiOxTSrVNC3azHdcAr4lIh+UqpW4BbgHo169fu81Zpk+fDsCKFSsiafn5+RQUFLB06VK83nDvWnp6OjNmzGDDhg3s27cvknf27Nk0NDSwalU0/FZxcTF5eXnt6snOzmbSpEmsXLmyXRiD+fPnU1ZWxsYv13Jx/i+prL0QA0EONe/FJilI7QHeeustcnNzKSkpYfny5TQ0NABgtVqZO3cu27ZtY/v27cfdplZi1abS0tJI2sSJE0lPT2/3jxiLNp3MdVqyZMkp16YTuU6tWpxKbTqZ69S6+dup1KYTuU7dyTGj4yqlXgf+f+DPwNnArcBUEbnyGOdNBh4QkTktx/cAiMjDHeRdD/xARD47lsEjRoyQXbt2HStbzxPyENp1PjU1P8XlO8SnmSup33kJl+YNZ8i4Yw7dnBBLlixh/vzOev9OH7QWUbQWUbQWUXosOm4L3wPuAoYAlcCklrRjsQY4Uyk1VCllIfxU8dbhmZRSBUAf4POuGNyb9xwPBW0E/JVUigFf42AmGpIZXDImZvXl5ubGrOxEQ2sRRWsRRWsRG7rSVRUQkWuOt2ARCSilfgi8CxiBv4nIZqXUg8BaEWl1ItcAi6WLG4MkJ598VNmY4FiMiAGfycOerE9Jqihk/IWTUYbYDeaXlJTErOxEQ2sRRWsRRWsRG7ryxLFGKfWOUuqbSqnj2jJWRN4RkbNEZLiI/LIl7WdtnAYi8oCIHLHG42g4nb1zf26p/gPVjgW4jW6yDl4JFRMwpJ78nhudsXz58piWn0hoLaJoLaJoLWLDMR2HiAwHHgLGAxuVUm8qpY77CaS7CAa7e1y+u1CAgS1JO6jebWs5ji2tA2garUVbtBZRtBaxoUtLmkXkMxG5DRgHNBLe4EkDEHLDgVuAEAKIgJ1mJk/vljEojUaj6XV0ZQFgilJqoVLqX8BqoBqI205EBkMvC9+x+wJwr6bZ9jC0jNLMse4gd8iAmFdttVpjXkeioLWIorWIorWIDV0ZHN8E/At4VEQ+jrE9xyQtLS3eJrRH3DiTnsG5fyVbmm1gF8wxHBBvy9y5c3uknkRAaxFFaxFFaxEbuvLzfZiI3NobnAaAx+OJtwnh/ijvLvBsBcB5YDUrawdRM+Rd+lf33EZN27Zt67G6ejtaiyhaiyhai9hwVMehlPpty9vXlVL/PPzVQ/YdQa9wHP4K2H8tVP4cksMrdcsdmRhCBqY22+mJgXGg3UrS0x2tRRStRRStRWzorKvq5Za/vWLnv95FEExnQO7i8OH+/2GALzw4TsVODJffFU/jThq/3095eXnvcNJdJCcnh61bt8bbjF6B1iLK6aiFzWYjJycHszl2ewB1tgPg6pa3I0WknfNoWdjXK3cI7Gmq1r9ESAIYJzxLCDOYragBQ+Nt1klRXl5OamoqeXl5HBajrNficDjIyIhNMMlEQ2sR
5XTTQkSora2lvLycoUNj9z3UlTGOGztIu6m7DekqqanHtQYx5gRcdazM+JzyLQv5muOSHq27NdBZd+PxeMjMzEwYpwGQkpISbxN6DVqLKKebFkopMjMzY95bcNQnDqXU1YTDgQw9bEwjFUjsnfq6CX8wvP/5uI+vpcKahmXze6hxs+JsVfeQSE5Do9FE6Yn/3c7GOFYT3oMjh/BOfq04gfWxNKozekvIEUH4564bOY/zsDlTwQos+hUGU5c3VTxpVqxYoSN/ttDU1HRadUl0htYiitYiNnQ2xrEX2Av8p+fMSSwMKjz4ZFPbCBnGY+yh9RsajUYTTzqbjrui5W+9UqquzateKVXXcyb2fpIMa0F37XQbZWVlJCUlUVJSQm1tLSUlJZSUlDBgwAAGDRoUOfb5fMdV7t/+9jcOHToUOT733HOPGEC8+OKLj/sX6je+8Q3efPPNY+a79dZb+eyzzyJ1t90Pe+XKlcycOROAqqoqZsyYgd1u54477mhXRk5ODkVFRYwePZpRo0bxs5/9LLJpTygUYs6cOeTm5nLZZZe1O+/cc88lPz8/ot0bb7zBvn37mDFjBoWFhYwaNYo//CE6B+bOO+/ko48+6lJbACorKzGZTDz99NORtEAgcISWTz/9dLs2Pffcc4wePZqioiLGjRvH448/3rmIXeCxxx5j1KhRjBo1iqeeeiqSvn79eiZNmkRRURHz58+nqampw/Pr6upYsGABBQUFjBw5ktWrw/OE/uu//osxY8ZQXFzMnDlzIvdSXV0dl156KWPGjGHixIls2bIlUtZvfvMbRo0axejRo1m4cOERm7AB/OhHP2LMmDEsWrSonS5tr8eGDRu46abo0PKLL77IiBEjjrjOPYaIdPgivPMfhEOiH/E62nmxfo0aNUrijrdMgrsvkc2rfikVH/9Blv/pz/Lb59ZIMBjqUTO2bt0ak3K3bNkSk3K7yt69e6Wj63z//ffLr3/96w7Pcblcxyx36tSpsn79+nbHRUVF8vnnn4uISG1trUyYMEHS09OPy96FCxfKG2+80WmeqqoqmTJlSru6Bw8eLMuWLRMRkc8//1wuvPBCERFxOp3yySefyJNPPim33357u3IGDRok9fX1IiLS0NAgV111ldx4440iIhIKheQ///mPLF68WObPn99p20VEKioqImkNDQ0ybNgw2b59u4iI7Nq1S+bOndultoiI/P73v5dzzz1XLrjggkia3+8/Qsu//vWvkTb961//kvHjx8vBgwdFRMTtdstf//rXDuvsKuvXr5cxY8aIy+USn88n06ZNkz179oiISElJiXzyySciIvKXv/xFHnjggQ7LuO666+TZZ58VERGv1ysOh0NEwhq18tvf/lZ+8IMfiIjIHXfcIQ899JCIiGzatElmzpwpIiJlZWUyfPhwcbvdEgqFZMGCBfL3v/+9XV01NTURnb/5zW/Kli1bpKmpSS644ALx+/3t8s6YMUPKy8sjx++9994R17mVjv6HCW9n0S3fw0d94hCRUMvbwS2OIghMBr4D2GPmyY6BzWaLV9XtEDGQ6k9Bbchnva2YOZNzMBh69qmjoKCgR+vrjTz//POcc845TJ48me9///uEQiECgQDXX3995Jf573//e15++WU2bNjA1Vdf3e5p5ZprrmHx4vB6nNdee40rrrgiUnYoFOKuu+6K/CJ+7bXXIunf//73KSgoYNasWdTU1ETOWbNmDdOnT2f8+PFcdNFFkS1EX331VS666KJ2tv/kJz/hoYceOqJNKSkpTJ069Zj3elpaGk899RSvvPIKDQ0NKKW48MILyczM7JJ2AwcOjOxXkZaWRkFBARUVFQAMHz6cgwcPUl1dfcR5HbXlpZde4oknnmDPnj0cPHiwS/X/6le/4rHHHmPAgHBcN5vNxs0339ylc4/G1q1bmTRpEklJSZjNZmbMmMEbb7wBwO7du5k6dSoAs2bN4vXXXz/i/Lq6OlatWsW3vvUtILxxXHp6OtA+3JHL5YoMQm/ZsoULLrgAgFGjRrFjxw5qa2uB8Jooj8dDIBDA5XIxcODAdvUZjUa8Xi8igsvlwmw28+ijj3LnnXdiOmy89OKLL+bll1+mN9CVkdw3gbOVUsOBZ4G3gX8AF8fSsKPR2NgYj2rb4Qs24wvUA6DcNsSuGD6452NoLV26tEdi8Xzn4390e5l/Oe+6ky5j06ZNvPHGG3z22Wc0Nzfzk5/8hMWLFzN8+HBqamrYuHEjEJ3L/+STT/KHP/yh3eY+s2bN4qabbiIUCvHyyy/zzDPP8PDD4d2NX331VbZu3UppaSnV1dWcffbZTJs2jeXLl7N37162bNnCV199RWFhId/97nfxer3cfvvtvPXWW2RlZfHiiy9y33338dRTT/Hpp5/yjW98o5395513Hq+99hqffPLJEV8SXSU9PZ3c3Fx27drF+PHjAWhubu4w79VXX01SUhIQ3qeibTfSnj172LRpE2effXYkbezYsXz22WdHTMA4vC1lZWXU1dUxfvx4rrzySl555RVuv/32Y9q+efPmiM2d8cILL/DYY48dkZ6fn3/EF2lRURE///nPqaurw2q18vbbbzNt2jQg/EPr7bff5uKLL+bVV1/lwIEDR5S5Z88e+vXrxw033MDGjRs5++yzeeKJJyIbyN1999387//+L3379uXDDz8EwvuP//Of/2Ty5Ml8/vnnlJeXU15eTnFxMbfffjuDBw/GarUyb968iINpJSMjg1mzZjF27Fhmz56NzWZj/fr1/PznPz/CtgkTJvDEE09w113xX2Dclbs1JCJ+pdQC4EkR+X3LHuFxIRQKHTtTLHGvx3zgRpySjiFo4tM0MyN9OzAYYrdF7NHoqL80FnTHl3ws+M9//sOaNWuYMGECwWAQn8/H4MGDmTNnDtu3b+e2225j3rx5zJ49+6hlmM1mJk2axOLFiwkGg+Tk5EQ+++STT7j22msxGo0MGDCAc889l7Vr1/LRRx9x7bXXYjAYyMnJYcaMGUD41+7mzZsjYxVtyzt48CD9+vU7ov57772XX/ziFx1+UXQVOWzzzMOPW3n55Zc73BGvsbGRyy+/nCeffLLduof+/fvz1VdfHZH/8LYsXryYq6++Ggg/wX3/+9/n9ttvP+q00OOdLnrDDTdwww03dCnv6NGjueuuu5g5cyYpKSkUFRVhNIYnrTz33HPcfvvt3H///cyfP7/DldWBQIC1a9fy5JNPMn78eG699VZ+/etfc//99wPwyCOP8Mgjj/CLX/yCP/3pT9x3333ce++93HbbbZSUlFBcXExxcTFGo5Ha2lrefvtt9u7dS1paGpdffjmLFy/mmmvab2d0zz33cM899wCwaNEiHnroIf7yl7/w/vvvM3bs2MhnR7se8aArCwADSqkrgesJP20AxG4te28n2ITfcjYVzbNQKPbZjMxwrdDrHuKAiHDjjTeyYcMGPv74Y7Zv3859991HZmYmX375Jeeddx5//OMf+c53vtNpOddccw233npr5MvvZOwZM2YMGzZsYMOGDWzcuJF///vfACQlJXW4KGv27Nk
4HA7Wrl17QnU2NDRw4MABzjzzzBM63+fzsWDBAhYtWsSll17a7jOPxxN5QmnL4W156aWXePrpp8nLy2PBggWsW7eOPXv2YDQaMRgMBAKBSN66ujqysrIAKCws5IsvvjimjS+88EJkUL/t62jX65ZbbmHdunV89NFHpKWlcdZZZ0Xqe++99/jiiy+44oorGDFixBHn5uTkMGTIECZMmIBSissvv5x169YdkW/hwoWRrq709HSef/55NmzYwLPPPktNTQ1Dhw5l2bJlnHnmmWRlZWGxWPj617/ebkLB4axduxaz2UxeXh5LlizhlVdeYevWrezduxc4+vWIB11dOX4+4bDqe5RSQ4GXYmvW0Wn99RAvJBTC05RBusdO3d480odGtuHocVr7Xk9XZs6cySuvvEJNTU3kF97+/fuprq5GRLjyyit58MEHI//4qampHa4DmjFjBnffffcRX0TnnXceixcvJhQKUVlZyaeffsqECROYNm0aL7/8MqFQiIqKClasWAGEv5gqKiois3B8Ph+bN28GYOTIkezatavDdtx77708+uijx91+p9PJ9773Pa688sp2/e9d3bNGRPjWt75FSUkJt9122xGf79ixg9GjRx+R3rYtW7ZsIRAIUFFRQVlZGWVlZZEuQ4Bp06bxj3+EuzpdLhevvvoq559/PhD+pf3jH/84Mg7k9Xp55plnjqjvhhtuiDjjtq+j9fdXVVUB4S60pUuXRn7ht6aHQiEeeughvvvd7x5xbk5ODtnZ2ZH2vf/++xQWFgKwc+fOSL4lS5ZExhgdDgd+vx+Av/zlL8ycORO73c6QIUP4/PPPcbvdiAjvv/8+I0eO7NBmgJ/97Gc8+OCD+Hy+SM+KUgqXK7zQ+GjXIx50ZevYTcBtwFqlVAFwQFr2D48H8Qw54nUcoHLbNpoacqky1bFUZbJa7cCkDJjisIajtYvkdKWoqIj777+fmTNnMnXqVGbPnk1lZSUHDhxg2rRplJSUsGjRIn71q18B4W6Am2+++YipvAaDgZ/85Cf07du3XflXXHEFBQUFjBkzhpkzZ/LYY4/Rv39/rrjiCoYMGUJhYSGLFi1i8uTJQHjToNdee4277rqLMWPGMHbsWFatWgXAvHnzjrr/9SWXXEKfPu33p8/JyeGnP/0pzzzzDDk5Oe2ivJ533nkUFRUxadIkhg8fzp/+9KfIZ5MnT+amm27i3XffJScnh/ffP3pIuRUrVvDSSy/x3nvvRX7Fv/vuu0D4S7ysrIyxY8cecV7btrz00kt8/etfb/f55ZdfzksvhX9bPvnkkyxevJiSkhImTZrEwoULmTIlvA/cpZdeyne+8x0uuOACRo0axfjx4486RfZ4uOyyyygsLOSyyy7jqaeeijjVv//97+Tn51NQUMDQoUO5/vrrAThw4EC7p60nn3ySq6++mjFjxrB582buvvtuIDyZYfTo0YwZM4bly5dHxl02btxIYWEh+fn5vP/++5H0qVOncumllzJ27FiKioowmUztptS25bXXXmPKlCkMGDCArKwsCgoKKCoqap1JCsCHH37IvHnzTlqfbuFY066A84Ay4FPgM2APMLW7pnUd72vkyJEdTj/rCRr3r5GDpX+Wis1z5N8f3iP3LQlPpQw8cYuE/L4et+fw6ZXdRW+djtsZzc3NMbKmewiFQjJlypR2UzpjRXdo8corrxx1umpPtuVk6e33RVdxuVwyceJECQQCkbReOR23DY8DXxORqSIyBZgH/C4mXqwLHO+ir+5FUAYvZgkydN3XCJ7hJrR9NcRpwH7fvn1xqTfWGI1GGhoaOhzIPRrxvS+OjVKK3/zmN+zfvz/mdXWHFiLCnXfe2eFnPdmWk6W33xddZf/+/Tz66KORrvoXX3yR22677Ygn1Z6iK7OqLCISWQopIluVUpauFK6UmkvYyRiBp0XkkQ7yXAU8QHiooFREeucUHgB3KTbfZuqNSazLMPDLsy8l+NhNqLMvAmPPxag61Rk8eHCHUyUTndYurUTgqquu6vTzRGrLqUB+fj75+dHdRRcuXMjChQvjZk9Xvu3WKaX+DPxvy/FCuhDkUCllJBwccRZQDqxRSr3V1gkppc4E7iHc9VWvlOp/vA3oSSTgp9k3kNJAGhXJiuCz/wVGE+rcy/WsKo1Gc9rQla6q7xIe1/hpy2sP4dXjx+IcYJeI7BERH7AYODyU67eBP4pIPYCIVB2r0LazR3qS5oMbcTsCqEAIjwlumlsAznoM33ksbk6js/UJpxvxui96I1qLKFqL2NDpE4dSqggYDrwhIsc7X3AQ0La/oRyYeFies1rq+ZRwd9YDIrK0s0KDweBxmtE9eOrKEOXFqSpxm9PISLYRUgpli1v0FRoaGnrNvO54EwwGuzwN9VRHaxFFaxEbOtvI6b8I7/S3jnDIkQdF5G8xqP9MYAbhfT8+UkoViUi7jaKUUrcAtwD069ePJUuWRD5r3QWvdS49EJlyt3Tp0sjq6vT0dGbMmMGGDRvaDSrPnj2bhoaGyLRJCIcQaF2E08rorEbMxkbEXoXBl8L//d87zAoEMBKeL15aWhrJO3HiRNLT01m2bFkkLTc3l5KSEpYvX05DQwMQnr45d+5ctm3b1m66ZVfbBDB//vwTblN2djaTJk1i5cqVkbn0AGeddRZerxe32x1Js9vtGI3GdiFfLBYLycnJOJ3OiENXSpGeno7b7W5nZ+uK5LbTLa1WK0lJSTQ0NERWOxuNRlJTU3G5XO0GNtPS0ggGg+3CaSQlJWG1WnE4oreLyWQiJSWFpqamdgvPMjIyTps2HR5y5FRo06l4nWLZJpfLxZIlS9p973UrR5tuBWwG7C3v+wFrjme6FuGAiO+2Ob4HuOewPH8GFrU5fh84u7Nyhw8f3uH0s1izd9XD0lg6XzZvuFTeXr1UQl6XBH7/vbjY0sqbb74Zk3J7w3Rcm80mxcXFUlNTI8XFxVJcXCzZ2dkycODAyLHX642c0xoxtjOeeeaZSCRWkXDE2Ly8vHZ55s2bF5PouCIiP/zhD+XTTz+N1H3OOedEPmsbHbeyslKmT58uycnJHUbHHT16tIwaNUoKCwvlvvvuE4/HE/n86aeflmHDhsmIESPaRWJtPa9Vu5UrV0byjxgx4oj8F1xwQSQq7OEEg0GZMWOGOJ3OSNqrr74qgOzcuTOS1tF00bZa+Xw++clPfiLDhw+XsWPHyuTJk2Xp0qXH1LEzPB6P3HDDDZG2vv3225HPXnzxRRk9erQUFhbK3XfffdQy1q9fLxMnTpTCwkIZPXq0+HzhqfarV6+WUaNGyfDhw+WOO+6I5L/nnnukqKhIxowZI7Nnz47cY7W1tXLJJZdIUVGRnHPOObJ58+Yj6nK5XDJr1iwZNWqU/PnPf46k33jjjVJaWho5fvzxx+X555+PHN9xxx2SnZ0tjz/+eIdtiPV03M6++NcddvzFcRUcfprYAwwFLEApMOqwPHOB51veZxHu2srsrNx4OY7ylf8tG1d/V9a887qE3M0SeP0xCTz5g7jY0sqp7DiON6x6VxzHqR5Wvbq6WoYOHSplZW
VSU1MjeXl5kS//tue10pq/vr7+iPxPP/20PPLIIx225c0335Qf//jH7dIWLFgg5557rjz44IORtGM5jh/96EeyaNGiyA+AgwcPyquvvtqpjsfiiSeekJtvvjlSXklJiYRCIamsrJQhQ4ZITU2NhEIhue6662T58uVHnO/z+WT06NHy5ZdfikhYo2AwKCIi48aNk9WrV0soFJJZs2ZFrt3xhltvy+uvvy4PP/ywBAIBmTRpkoiIfPHFF/Ltb3+7XT6n0ynjxo1rl3bvvffGzXF01vk3TCn1z5bXG8DwNsf/7OS81ieZAPBD4F1gK/CKiGxWSj2olPp/7J13XFPX+8c/h7BkiBZXBTcKBEjCUhRU3FgHtGJrq3W1tWrVOqrVbxVba6vVurU/bbV1o1Ws2FacFUURJ7hAFDUKKqIgG4Ekz++PmEtCEoYmDMn79crrxd3nPPdyzz3nPM/nUYRpHgaQzhiLB3ACwEwiSi/rvAqVyqqGAaj/tCU61HOGLHw1kP4IRsFfVUtZFAiFwmq9fk1AIavevXv3Oi+rHhERgf79+6NZs2awtbVFz549VYZLS6PYv0GDBmr7BwYGclIhpdmxY4eKYm52djbOnTuH3377jbNleeTk5GDz5s1YvXo1TE3l3v3NmjVTsf+roCxx3qxZM1hZWSE2NhZ37tyBk5MTbG1twRhD7969NcqqR0REwNPTE25ubgCARo0awcjICMnJyXjx4gW8vb3BGMPHH/cumU4AACAASURBVH/MJe96Fbl1BSYmJsjPz1cZ2lJIjyhjZWWF5s2ba9TNqg7KmhwfUmp5rca9yoCIDgI4WGpdiNLfBGD6y1+FUDxk1UGerRlaNC6G7GExjN79EqyRffkH6ZHWrVtXyXWkyzXLJLwOvOnqmkSVRVlW3djYGOPGjavTsuoPHz7kJLwBuWyJIr+G4no8Hg8WFhaIjo7m9legvH+jRo2Qk5PD2U6Z6OhobN68mVv+66+/MGDAADg5OcHS0hJXrlwp96Pm9u3baNOmjYoarzamTJmiMRvh8OHDMXPmTJV1QqEQ4eHheP/997m5x+TkZPj6+uLGjRt48OAB3n77bYSHh2v0hrx16xaICH379sWzZ88wfPhwzJgxo0xbAZWTW1fOlxIQEIAdO3bAx8cHs2fPxr59++Dj48PlKFHGy8sLUVFR8PDwKNdm+qasnOPaRW6qEeXJqKricd4VqEgZ1rOu9kYDkAutlc6VoA908ZLXBwZZdTlEqjKbml72ABAVFVWptLiNGzfG48eP1Y7Jzs5W6fmHhobi66+/BiDvwYWGhkIoFOpMVn316tUV3vezzz5DYmIiPD090aZNG3Ts2BE8Hg+NGjXCunXrEBwcDGNjY/j4+GiMfJdIJDhz5gzOnTsHc3Nz9OjRA15eXlxjrI3KyK0rY2JiwvXSioqKEBAQgAMHDmDq1KlISUnBmDFjOH2qJk2aQCwWV9gW+sQQ7lwGUpIhLT8LkSkL4U/OMEL1DJMZ0AyRXFb9+++/V3tZXr16FREREVi3bh3CwsJUck+XZtiwYRg6dKjGYaPKlkcgECAqKkptW1my6vPmzdOJrHp8fDxiYmK4bSkpKWWqqdrZ2ZW5vzYZb2X31qdPn+LkyZNISEgAYwwSiQQmJiZYtGgRbG1t8fz5c5VjFbLq7du3x71795Cbm1tur6MyPQ4TExOsWlWiiOTp6cnJqgcGBnIfWr/88ovGoUB7e3t0796d6xX0798fly9fRnBwsIqaQUpKCuzs7DSW6b333sO8efM4uXVAPrzZunVrtRz3yqxZswZjx45FVFQUGjdujGXLlqFXr15cw1HbZNXrLLHPkrEo7hDc85qAyazR8GEKZPtXgdUrv3ttQP8oy6oDqPOy6gEBAYiIiEBWVhbS09Nx/PjxMntbiv0zMzPV9pdKpXj27BlatmypdpyDgwP35btnzx6MHTsW9+/fh1gsRkpKCpo3b46zZ8/CyckJ9+/fx61btwAA9+7dw40bNyAQCGBtbY2RI0di6tSpnCR5WloaN4+kzOrVqzXKqpduNAB59kOFDHlERAQsLS25hkMhq56RkYH169drTFPbv39/xMXFoaCgABKJBKdOnQKfz+eGAC9cuAAiwrZt27hGqLJy65pIT0/H4cOHMXz4cOTn53ONs7ILb02SVa+Ml5SZrmbkX+fn5OSk0YtAH5x9cpd+u3GSHp5eS+nH5lPemWMkk8mq7PrlofAG0jW1yatqx44dJBQKycXFhfN6uXTpEolEIhIKhSQSiejw4cNERLR7927q0KED58pb2suKiKi4uJjzqpJKpTRt2jRycXEhV1dXzuNHKpXS+PHjydHRkfr06UP9+vXjPIUuXbrEeWvx+XzatGkTERH9999/NGrUKO46yteWyWQkEAg4ryoiuRdUw4YNycrKiuzs7OjmzZvceldXV86tdO7cuSruuL/++iu1bduW2rVrp+K+qcmrSrF/u3bt1PY/e/Ysvf/++xrvT0hICP3xxx9EROTn50dHjx5V2b5s2TKaNGkSERGdPHmSOnbsSEKhkLy9venYsWPcfoWFhTRjxgxq164dubq6UqdOnThPpVclKSmJOnToQE5OTtS7d29KSEjgtgUHB5OzszM5OzvT7t27ufX79u2j7777jlvevHkzOTs7k4uLi4rbbkxMDPH5fGrbti1NmTKFWx8YGEguLi7k5uZGgwcPpocPHxIR0alTp8jBwYE6dOhAQ4YMKdPzb9KkSRQVFUVEckXfXr16EZ/Pp3Xr1nH7CIVCysjI4Jar06uqIg1GRwDXADx4uSyEPIVstTQcnp6emi2vB6If36ENF07Sw9Nr6PGSU/Ti/OUqu3Z1UlMbjtpMbZIiJyKaOHGiRndVIqLk5GTq169fFZeobnP+/HkaPXq0yrqa6o6rYDWAgQDSX/ZQrkCeEbBaKB0Vq0/SnxQhN0E+DdSo7SWYNqpZU0LK49NvEq8iq66LBED6pCqlyHVhC3d3d07BoDT29vYYPXp0jbc5UPOfi4qSkZGh4kAxbdo07Nq1S+vQl76pyJvQiIjul/KEqB7BKIAbM9Q3B+5fxbWnabBpKJ+MYqy6EsRqR1km5E3iVWTVlf3gaypVJUWuC1toGv9XRpGOtaZTG56LitCvXz+V5RUrVmDFihXVVJqKNRzJjLGOAOilVPpkALf0W6zqJ/75Yzg2sAMvxyCXbsCAAQPKVGSoagLkAXotATwB4PNy3RtPI3NLmPKqPpe4AQMGDNRkyu1xkDxHRo3pl1YmgOl1YHnGSC8uhHkNdliuiuC/2kJVPRe1AYMtSjDYQj+U23Awxn6DSti0HCIap5cSlUNV5RDmPbLAC2spXNs3AlIBZJcpoVUtiMXiKpMdqekUFhaWG91bVzDYogSDLfRDRb6nj0Eud34cwBkATQAUlnmEHlEE91QFHTpYo1Xjl7r4LZ2BGiAzooxyDpA3CbFYjHr16kEkEiE9PR0ikQgikQjNmjWDnZ0dt6z8EaEcKKWN33//Hampqdyyn5+fWiTvwIEDK/2VOmLECE7wriwmT56M6Oho7tqdO
pXkNYuJieGkStLS0uDv7w9LS0tMnTpV5Rz29vaceKOLiwtCQkK4PA0ymQz9+vVDkyZNEBQUpHKcn58fHB0dOdv99ddfAIBRo0ahcePGah5s06ZN0xitrakugNxRw9jYGBs3buTWSSQSNVtu3LhRpU6bN2/mRCQ9PDx0MuG7fPlyuLi4wMXFRSWKPDY2Fj4+PnBzc0NgYKBWj6uMjAy89957cHJygrOzMxfQ+b///Q8CgQBCoRD9+vXjnqVjx47BxsaGs+0PP/zAnUubfZVZsWIFXF1dMXDgQM75JzIyUiXAMTU1Fe+88w63fOLECfD5/Ep5HuqUyvrvQt7YROvKH7iyv6qSVV/6ZzTFXf+PUqLX0dNz/6uSa1YWg6x6CQZZdXmsyLFjx2j79u1qcuaagh2JiCIjI+ncuXMkFApV1iclJVFAQECF6kJEtHr1avLz86OePXty65SDKRX89ttvXJ3+/vtv8vT05PJXFBQU0G+//abxmhUlNjaWBAIB5efnU1FREfn5+dHdu3eJiEgkEtHp06eJiGjDhg307bffajzHRx99xAU4FhYWclLz2uTTNcnHK9BmX2U6depEUqmU5s+fTwcPHiSpVEp9+vRRe6ZHjBjB5VEhIrp9+7bW89aEOI7StAHQVJeNV01CUpCF9BsH0Ns2ETY5icgwy4Stzb/VXSwDWlDIqnft2rXOy6ozxtCrV69K+fZ3794db731ltr6du3a4fHjx3j69KnaNk11CQ0NxcqVK3H37l08fvy4Qtf+8ccfsXz5ck4J1tzcvFw34PJISEiAj48P6tWrBxMTE3Tp0oXrXd25cwe+vr4A5KrImmTVMzIycO7cOYwePRqAXI3bxsYGgHb59LLQZl9liAgSiQT5+fkwMTHBli1bMHjwYLXeWlBQEHbs2FHuNauCisxxPEfJHIcRgAwAs/VZqLLQd8CLpDAbsuIXuJLzNpxsk2EuvQd5No6ah/JQhz5ZvuXVBPjKYvoor9c+h7KsOhHhiy++qNOy6p6engCgtcH54IMPOJG8yMjIcofk3N3dER0dreaEUbouYrEYGRkZ8PT0xNChQ/Hnn3/iyy+/LLfsN27c4MpcFlu3bsXy5cvV1js6OmL37t0q69zc3PDdd98hIyMDZmZm+O+//+Dn5wcAcHJywj///IOBAwdiz549GmOF7t69i8aNG2PkyJG4du0avL29sXLlSk4NWJN8OiBXUhYKhbCzs8PPP/8MPp9fbr0UTJgwAZ06dYJAIEDHjh2xaNEiHD58WG0/Ly+v1xbi1BVlPq1M3qQKASiE52UvuzzVRmlZYl1SkH4Xhc/vgxmboZvdFrSpFw8ZMRQ9aw8zR71d9pVRfAnpG1285PWBsqw6IJ/nMMiqq6rXKrN79+5KjYk3adIEjx49Ultfui67du3iBCKHDRuGiRMn4ssvv9SZrPrIkSMxcuTICu3r6uqK6dOno3fv3rCysoK7uzv3zti8eTO+/PJLzJ8/H4GBgTAxMVE7XiKR4OLFi1izZg08PT0xefJkLF26FPPnzwegWT7d29sbYrEYVlZW+Pvvv/Hee+/h5s2bFa7f6NGjuR5OSEgIpk2bhn/++Qc7duxAy5Yt8fPPP4MxpvV+VAdlDlW9bCQOEpH05a/aw6eVk7vrirzU63gc8xue34yATFoMyVttYW5UgKtPgxD1pC+eR0/R+TV1QVnZ3eoCRHJZ9bi4OERGRiIxMRHz5s2Dra0trl69iq5du2LdunX4/PPPyzzPsGHDMHnyZDV13Fcpj0Ag4NRbr127hoiICABly6pnZmbqRFZdga4cSLTJeJeuS2hoKDZu3IjWrVvjvffew+XLl3H37l3weDwYGRmpRG8rZNUBuZrwpUuXyi3H1q1buYln5Z+2+zVu3DhcvnwZp06dgrm5OaeOy+fzcfToUVy6dAnBwcFwcHBQO9be3h4tW7aEl5cXGGMYMmSIxqx7w4cP54a6bGxsOGn4QYMGITc395XyBqWkpCAuLg4DBw7E8uXLsXv3blhYWCAyMhJA7ZNVj2OMueu9JNWItCgPls1c0cxnHJ41FWLxvauoL5HBLJ0HkjE0CHSq7iIa0IBBVl1VVl3XaJPxVq5LfHw8JBIJHj58CLFYDLFYjJkzZ3LzRt26deNS0Obn52PPnj3o0UMudTdnzhx89dVX3DxQYWEhNm1STxo2cuRIjbLqpYepFCjk08ViMQ4dOsTJoyjWy2QyLFy4EOPHj1c71t7eHk2bNuXqd/z4cW7YSZt8urKnXkxMDIyNjV8pfmTu3LncUNSLFy/AGIORkRH3IVCTZNW1NhyMMcUwljuAC4yxRMbYZcZYLGOsZiS+1SHMiIdzz5KRkHoUyxv9jHpWqZA2MYepVT7MOzSq7uIZ0ICbmxvmz5+P3r17w9fXF3379sWTJ0+QnJyMbt26QSQSYcyYMfjxxx8BAGPGjMGnn36q5sprZGSEmTNnqk1iBgcHw8nJCQKBAL1798by5cvRpEkTBAcHo2XLluDz+RgzZgynQWVmZoa9e/di+vTpEAgEcHd3x7lz5wAAAwYM4L4cSzNo0CA0bNhQZZ29vT1mzZqFTZs2wd7eHomJidy2rl27ws3NDT4+PmjXrh1++eUXblvnzp3x6aef4vDhw7C3t8fx42Un8hw6dCi6du2K+Ph42NvbcylhCwsLIRaL4e6u/s2oXJfQ0FC8++67KtuHDBmC0NBQAPLkRLt27YJIJIKPjw+GDx+OLl26AAAGDx6Mzz//HD179oSLiws8PT11IkoYFBQEPp+PoKAgLF++nGtUt23bBkdHRzg5OaFNmzb4+OOPAQDJyckYPHgwd/yaNWvwwQcfQCAQ4MaNG5g9Wz6lO3PmTLi6ukIgECAyMpKbd9m1axdcXFwgEokwbdo0lQZNm31Lc+HCBZiamkIgEACQz0e5urriwoUL6NOnDwC5C64iqVN1w7SNPjHGLhORB2OsnabtRHRHryXTAp/Pp/j4+Nc6R4GkECcS/oNlcR5avEgHkQzRMlNcMZIhuHEOnNgVpB4aj3yPeLwwu4IuXhVPXVmVxMXF6cWPOyEhAc7Ozjo/b0URi8UYOHAgrl+/XuFj8vPzVdKZ1jSICH5+foiIiNBL70AZXdhiz549iI+P58b2lanKurwuNf25qChEhK5du+Lff//l5jaTkpIQHByMuLg4tf01/Q8zxi4RkU4mLMsaqmIvC3xH008XF38VXuchICLcz8nAzSd3wM++BzseQ1GDlshq6Yte7kFYYncYXnQEvFuNYPNCCg1zZzWKagv+0TOvIqte018OVSmrrgtbEBGmTZumcVtV1uV1qenPRUVJS0vDrFmzuEbjxIkTePfdd7n5oqqmrB5HCgB1H7iXEJHWbfrE0dGRlLvtlSEtOw3Pr+6FKWMoNjZDh05jVbYX3hqEq3enoF6yKbIHXMGj7POwz2kEH++Vuii6zomMjOQ8enRJdfc4XoWcnBxYW1tXdzFqBAZblFBXbaHvHkdZ7rg8AFaoYUEMUmnlU4HIpMXIyxCjMC8dMsZDy44jYWQk704US6TIL5AgPasA
TYuykdtoJ5La5MIs3wZdZUF4Sym4q6aRlZVV3UWoMbzKc/GmYrBFCQZb6IeyGo7HRLTgdU7OGAsAsAryRmgjES0utX00gKUoiRNZS0QboWMS7l2B2aOLeCKphzxpI9yLeQySETKyX+BpRgHMbeNRv8VZDDYvwNvkCrfCtmh4+x7w7CxYq4oH8hgwYMBAXaCshuO1ehovkz6tA9AHQArknlkHiKj0zPZuIppU0fNqC24qi5zcIuQXWaNe894wkcpgZWEKIyMGPh7Bjm1CoewxiCSwKpKhzbl7MGZPgUb2MOo5HGiu0TegRmBQ/SyhskFlbzIGW5RgsIV+KKvh6PWa5+4IIImI7gIAY2wXgEAAr+US9cpeHEYED76qxJY09zqkackQFzeHZXo+pAn9YdVzEHhtW75OEauMgICA6i5CjaGqouhrAwZblGCwhX7Q2nAQUcZrntsOgLIYTAoATeJKQxhj3SBPRzuNiNQEZBhj4wCMA4CmTZsiPDyc29a9e3cA4IKwAHC+2ocOHUJhYSGkxkWwN5cBkLuwih/cQXGjK2hS7wGcLLLwLN0ZuOsI00xXnL94BY5GMrRu3VrlOk2bNoWPjw9iYmJUcn0HBgZCLBarSJx36tQJNjY2KpHdrVq1gkgkQmRkJDc3YWZmhoCAANy8eVPFT78idVI+Pi4uDvfv3+f27du3L7KysrgYAgAQCoUVrlOHDh1QWFioIlVuaWkJHo+nErlvamoKCwsL5OTkcGPJjDHY2NigoKCAKycALrJW2U/fzMwM9erVQ1ZWFiebwePxkJ6eDmdnZzg4OCA8PByBgYHg8XhITU2FkZERbG1tAQDR0dGwtrZWidI1NjaGlZUVcnNzVSKWGzRogA0bNsDf3x9Nm8o/IAYMGIBHjx4hNjaW2+/DDz/E2bNnkZycXOE6ffTRRwgMDMSAAQO01sna2hoTJkxAUFAQOnXqhICAABARTp8+jby8PFy4cAE//PADDh8+DDMzM8ydOxc7d+6EsbExli1bhkGDBiEzMxO2trbg8/koLi6Gubk5RowYgbFjx3I98VWrVmHr1q3g8XhYsmQJ/P39YWRkhIYNG8LFxYUrkyL2YP78+di2bRuMjY2xZMkSDBw4EC9evEDfvn3x999/g8fjqdUpLy8Pw4YNw8mTJ/HixQsUFRVhzZo1WLRoER4/fgwLCwvk5eVh69atSEhIwMqVK2FmZobMzEwEBARg6dKlcHd3h0wmw5QpU3Dy5EnY2NjA2toay5cvh0AgeOVnTyqV4uOPP+ak+deuXctJ0vz000/Yvn07AOCzzz7D9OnTNd6nf//9F19//TWkUikaN26MyMhISKVSjBw5EkePHsXbb7+NixcvcnXat28flixZglu3buHy5ctwcHBQefaeP38OV1dXzJ07FxMmTFCp05MnTzB8+HA8fvwYEydOxOTJk5GTk4OJEyfi888/h5ubG2xsbLB06VJYW1tzwYzz58/H7t278eWXX3LnVL5P+fn5CA8Ph42NDfz9/TW67L4WupLZLf0DEAz5vIZi+WPI5zCU97EFYPby788B/FfeeV9FVv1s7Bk6f3wbt5xd+Ij23h5DdxMXU+75QMratI+ydh6m7NNikhVJKn3+6sIgq17CmySrfuXKFXJ3d6fCwkJKSkoiBwcHkkqlajLlqamp5O/vTwsWLFA57smTJ2Uep0DbdYiI5s6dS7t27dJYl5UrV9LatWtV1nl4eJCfnx9t3bqVW6csoa5cb8U9GDJkCM2dO5dkMhkRyaXcDx48WKYdy2Pq1Km0cOFCIiK6fv06+fv7E5G63Lq/vz8nt65Meno6OTs7U3JyMhERPXnyhNumTSL9xo0blJiYqFW6PigoiIYMGUIrVqxQ2xYWFkaLFi0iiURCPj4+RER06dIl+uyzz1T2y8nJIQ8PD5V133zzjcZzEtVMWfWK8hBAC6Vle5RMgisarXQiUnzCbQRQvlRmJSh4loTnt47BtlCssp4kxTAtItglFcGkiMGy4QNYd24Oa99WYCaGHOO1iTdRVj08PBwffvghTE1N0a5dO7Rs2VKjplPTpk2xYcMGrFmzplLHVeQ6ZUl479ixQ0Ux99atW5BIJPj222+5iPHySExMRFxcHL777jtuHqJdu3ZqNqos8fHx6NmzJwDAxcUFSUlJSE9PV5Nb79atGye3rsz27dvx/vvvc+KUTZo04bZpk0jn8/mcHlZp9u7dCycnJ06epDQmJibIz89X6aGEhIRgwQJVvyQrKys0b95co25WdfBqWs4V4wKA9oyxNpA3GMMAfKS8A2PsbSJSiPcPBpCgywLcTo7F44IcpL6wBOW/DW/FhtxMNCjOAGtoB/DMwHu3fAnoukzq0tM6P2ezmX6vfQ5lWfXc3FzMmjXrjZBVf/jwoUp8jr29PR4+fKhR/qNDhw4oKChAenp6mcfl5ORw9XZwcMDevXu17u/t7Q2hUIiYmBi167148QIpKSkqKsKhoaEYNmwY/P39MWbMGDx79qzcwLQbN27A3d29Qs4uwcHBGnW+Zs6cieHDh6usEwqF2LdvHzp37oyzZ8/i0aNHSElJUZNbj4iI4HJzKHPr1i0wxtC9e3fk5eVh6tSpavetouTk5GDZsmU4fvw4J3tTmoCAAOzYsQM+Pj6YPXs29u3bBx8fHy5HiTJeXl6IioqCh4fHK5VHl+it4SAiCWNsEoDDkLvj/k5ENxhjCyDvMh0AMIUxNhiABPI8H6PLO29lgnkKpcWwtW2Dt034uHMvC5BkAMmjYF38FF3rvYDMoj5ePHJFDQ8Q14piLkTf6OIlrw+UZdWJCC9evDDIqqNk7kUZa2vrSo1zGxsbgzGGgoICFUXWtLQ0ta/uXbt24eDBg+DxeAgKCsLevXsxfvx4ncmqK3p6FeGbb77BlClTIBKJIBQKIRAIwOPxypRbV0YikeDatWs4evQo8vLy0LlzZ3Tu3Bnt2lXeu3LevHmYOXNmmdHrJiYmXI+3qKgIAQEBOHDgAKZOnYqUlBSMGTOG06dq0qQJxGJxpcuhD/TZ4wARHQRwsNS6EKW/5wCYo88ymBoZw9LYDAwMUkk6pNJc3DQLQkrGVXQr+AFFT7LxZogS1D3opaz6999/D4lEovLVfvXqVURERGDdunUICwvDr7/+qvU8w4YNw9ChQ187SQ69lFWPiopS21aWrPq8efNUZNXt7OxUkgylpKTAzs5O4zVv3boFCwsL2NraVuq4ilynqKhIzeW7dD1iY2Nx9+5dTvG2sLAQHTp0wPjx42Fra4vnz5+rHK+QVTc3N0dcXBxkMlm5vY7K9DhsbGywZcsWAPIhxdatW3N55ceNG4dx48YBAGbNmqVVVt3Ozg4WFhawsLCAr68vrl69+koNx/nz57F//35Mnz4dmZmZMDIygpmZGTeZXZo1a9Zg7NixiIqKQuPGjbFs2TL06tWLazhqm6x6jUKTLLZWCJDJCFKZ3KMqr+ApCqV5yL9lguY3e6A4NRdmbRuWc5Kai7LXVV1EWVY9Nzf3jZFVHzx4MEJDQ1FUVIQ7d+7g/v37GjPlpaWlYcK
ECZg8ebLKcRkZGWUeV5HrPHnyBHZ2dmov9caNG6OgoICbIwoNDcXChQs5SfVHjx7h3r17SElJQadOnXDq1ClOzvzcuXMgIjRv3hyOjo5wc3PDggULuB7TvXv3uPwlyuzdu1ejrHrpRgOQD0sWFxcDADZs2IBu3bpxWUOV5dYPHDjAeSgpExQUhKioKEilUuTl5eH8+fNa5yfKIzo6mrPLpEmTEBISorXRSE9Px+HDhzF8+HDk5+dzdlf2LqtJsup67XFUN9I8I6Qk5yFDcgl9nE7ALDMbxWQEEc8TLOcMzD40RIXXZpRl1RXuqevXrwePx8Mnn3wCIgJjDD/99BOAEln1evXqcS93oERWHYDKJGVwcDBiYmIgEAjAGFORVT9x4gT4fD5atmypJqs+ZcoUZGdnQyqVYsaMGXBxccGAAQOwZcsWLtObMoMGDcK8efO4ZaFQiKCgIDg7O8PY2Bi//PILjIyMIJPJuLmK4uJimJiYYNSoUVyaVsVxnTp1gqmpqcpxmtB2HaBsCe/evXsjOjoa3bt3x+7du1Wk2xljCAoKwu7duzFjxgwsW7YM/fr1AxHB2toaoaGh3FDVH3/8genTp8PBwQH16tVD48aN8fPPP5d908vh2rVrnHuym5sbli1bxm0LCgpCZmYmTE1NsX79ei4mbN26dTAzM8Onn34KV1dX9OzZE25ubjAyMsLEiRM5zaehQ4fi9OnTSE9Ph729PRYuXIjRo0djz549mDZtGp4+fYp+/frBy8sL//77b6XK/e233yIkJASMMfTv3x//93//h+3bt+OLL77g9jl79qzWuZIqR1fuWVX1q4w7bkzk7xR5cQM9eLSYCpL6kDjxc4qOHUKS/atJGhVW4fPUVOqaO25ZVMQdtzqRyWTUpUsXysrK0vu1dGGLwYMHU1JSksZt58+fp9GjR7/2NaqCmv5cVBRNNn9T3XH1grm5eYX35Zmmg4xSUSTLR6GRNTKYCJaJ/uAFToaR33t6LGXV4OhYAxOh64BXgx8Z4QAAIABJREFUkVWv6fIrVSlF/rq2KCwsRHBwsNZxfW9vb/j5+WntydQkavpzUVEyMjJUHCimTZuGXbt2ccNwVY1WWfWaipeXF1U0P/PFsz8jz7QR/Jrbgh7uQH78cBQ+M0Hj6QapjrKojbLqBgwYKKE6EznVSJRlB8rDSCaDVVoyJMf/gTSfQWZkAQvnN0e75tChQ9VdhBqDQWK+BIMtSjDYQj/UusnxynSPTSABr0ExjNq0AOXeRYMRr6vbWLNQ1k2q69S2nrM+MdiiBIMt9EOtazjKg2RS5KclAiRDQyMZzM2TYWRkgRc57jCt7sIZMGDAwBtArRuq0hTtqYzkRRayxdEozn8G4mXjcbEtio0WoOj5m9XbAAyS0cqU91zUJQy2KMFgC/1Q6xqOciVHJM/AwxM0kE5HiwZHYPHMFJl/JYCZv3GdK73kG68JKCSxRSIR0tPTIRKJIBKJ0KxZM9jZ2XHLiiA0oGJSNL///jtSU1O5ZT8/Py6qWMHAgQPRoEGDSpV3xIgR2L9/f7n7TZ48GdHR0dy1O3UqyTIQExPDSZWkpaXB398flpaWmDp1qso57O3tOfFGFxcXhISEcEOWMpkM/fr1Q4sWLRAUFKRynJ+fHxwdHTnbKQT+Dh48CEdHRzg4OGDp0qXc/kOHDsXdu3e11uXdd99VkfK/ePEiGGM4duwYty4pKUnNM27u3LlYuXIlAPkw0pIlS7hyeXt7axVWrAxfffUVXF1d4erqisOHD3Prjx07Bg8PD7i6umLs2LEqMTvKiMVi9O7dG3w+H3w+n4uuX7VqFdq1awfGmIqU/7Fjx2BjY8PZ9ocffuC2abOvMjNmzIBAIMCYMWO4dZs3b8batWu55bi4OHzyySfc8o4dO+Dg4KB2n6sMXfn1VtXP2dlZo9/ys+sH6MmlHZR6fhM9ObeISCahf/+bQ/8dWkcyqYyTbn6T0CThrAtqahxHWbLqeXl55Z63tsiq5+Tk0OnTp2nNmjVqsuR2dnZcbEJWVha9//77NHbsWCKSx4ocO3aMdu3aRYGBgWXWnYioqKiI2rRpQ2KxmF68eEGurq6UmJhIRETHjh2j8ePHa6xLXFwcBQcHq6ybPn06+fn5cWUhIrp9+7aaBLly7MGaNWsoICCAsrOziYgoMzOTtmzZovGaFWX//v3Ur18/kkgklJOTQyKRiHJyckgikZCdnR0XmzJnzhzavHmzxnP4+fnR8ePHiUh+L/Lz84mI6PLlyyQWi1XuARHR0aNH1exNVLZ9FTx79owCAgKIiGjUqFEUHx9Pubm51LNnTyouLlbZ19/fn1JSUsq9LpEhjkMN5a9MlfU5qWjQvicathTBxjQaD/ecgTGTgRjAjNgbmUJS+YuvrqKQVe/UqdMbI6tuZWUFX1/fcmOW6tevj19//RV//vknsrKywBhDr169Khy7EBMTA2dnZ7Rq1QpmZmZ4//33uURf/v7+OHToEJckSZnSsuoymQxhYWHYsmULIiIitP6PlubHH3/E+vXrud6ijY0NRo4cWaFjtREfH4/u3buDx+PBysoKTk5OOHLkCNLS0mBpacnFpvTp0wdhYWFqx1+9ehU8Ho+TZreysuL0odzd3dGqVasKl6Us+yrg8XgoLCwEESE/Px8mJiZYsmQJpk2bpqK9Bsh7w7t3766UPfTFmzN+Qy9g/Px7yIrTkVsvFRdE2/HiRSEapL+ZQXJVSWjiUJ2f80PHPa99jjdVVr0y2NjYoFWrVkhKSipTlwoAPvjgA+4lGBkZiYcPH6JFi5KUOfb29lwmSx6Ph9atW+P69esQCoUq5zlz5ozKsEpUVBQcHR3Rtm1b+Pn5ISIiQqVh0URGRgaKi4sr9CJevHgx17gr06NHD6xYsUJlnVAoxOLFizF16lTk5ubizJkz8PHxQVBQEAoKChAbGwuRSISwsDAVgUcFt27dQv369REUFIT79++jb9++WLRoUblCjKdPn4ZQKISdnR1+/vlnTrdMm30VNGjQAH369IG7uzv69u0Lc3NzxMbGalRL9vLywsqVKzF9+vQyy1IVvEENhwSoPxgFudmIzfkbQaJfsXvbAZg2qXikuQHN6OIlrw+UZdWlUimKiooMsuplsHv37kpF4zdp0gSPHj1SazhK10WRjwOQ9+BCQ0MRGBioM1n12bNnY/bs2RXa95133sHFixfRuXNnNGnSBN7e3uDxeDAyMsLOnTsxefJkFBUVoU+fPlpl1aOiohAbGws7OzsEBwdj27ZtGDVqlNZrent7QywWw8rKCn///Tfee+893Lx5s8L1mzNnDubMkYuEjxkzBgsXLsSGDRtw/PhxuLu7c9sU96MmUOuGqhTCZJrIPtsQzy+/hRypGaS7f4JFYT6AN2+ISkFZL8S6AJFcVj0uLg5XrlxBYmIi5s2bB1tbW1y9ehVdu3bFunXr8Pnnn5d5nmHDhmHy5Mlq6rivUh6BQMCpt167do1Tey1LVj0zMxMVVUMoTVZWFpKTk9G+fXtuXVn5H5QpT1Zdm4y3cl2Ki4uxb98+hISEoH
Xr1pg6dSoOHjyIvLy8MmXV33rrLZiYmFRIgmXx4sXcxLPyb9q0aRr3DwkJQVxcHI4cOQIjIyMuO5+fnx9Onz6N8+fPw9fXV2PWPnt7e3h4eKB169YwMTFBUFBQuVn3bGxsuBwogwYNQm5uLjIzMystc3/x4kWYmJigdevWCA8Px59//omEhATcu3cPgEFW/bXQNOYKACDAuJkVTN82hRF7AaMuQXjyVjPkvvV21RawCqnrUbHKsupSqfSNkVWvKDk5OZgwYQKGDh2q8kFV0SBZHx8fxMfH4/79+ygsLMSff/6JwYMHc9tv374NFxcXteOU63L06FF4e3sjOTkZYrEYDx48wKBBgxAeHo4GDRqgYcOGnH3S09Nx5MgRLvPe7NmzMXHiRO6eZGdnY9u2bWrXmz17tkZZ9dLDVIC8x5CRkQFAniskISEBvXrJXfEVsuovXrzAkiVLMH78eI02efr0KdLT0wEA//33H/j8slW0lT31YmJiYGxsjAYNGpRr39IoUsYWFRVx95Axhvz8fAA1S1a91jUceXl5WreZt7OFeUtLMCYDa+GIF2b1gDfYj/vcuXPVXYRqRVlWXSQSoW/fvnjy5AmSk5PRrVs3iEQijBkzhpOiVsiql3blVciql85sFxwcDCcnJwgEAvTu3VtFVr1ly5bg8/kYM2aMmqz69OnTIRAI4O7uzt2jAQMGIDIyUmM9Bg0ahIYNVfPC2NvbY9asWdi0aRPs7e2RmJjIbevatSvc3Nzg4+ODdu3a4ZdffuG2de7cGSNGjMDhw4dhb2+vInleGhMTE6xevRp9+vQBn8/HiBEjOOHMR48ewcbGRuPwmnJdQkND8e6776psHzJkCJd7fPv27QgJCYFIJEKvXr2wcOFCtG7dGoDcPdnX1xeenp5wdXVF9+7dX3muR0FhYSH8/PzA5/MxceJETmYfABYtWgRnZ2cIhUIMGTIE3bp1AyD/P1I0IsbGxli6dCl69OgBNzc3mJqaYuzYsQCA5cuXw97eHqmpqXBxceF6srt27YKLiwvXC1JMYJdl39Ls3bsXXbp0QbNmzdCoUSM4OTnBzc0NRMQ13mVJ3Vc5unLPqqqfNln1R6eXU3FWKqU+jKHwS8NoY8IZ+uGvkxR+KUHj/m8CBln1Emq6fHZtk1VfsmSJVnfVvLw88vHxIYlE8trX0Tc1/bmoKPn5+dSpUycVmxvccXXE/8WfxN67l0AAXN56Gy0sG6CFZe3N8FdXeRVZ9ZpOVcqq6wJbW1s1LzAFFhYWCAkJwePHj6u4VHWXBw8eYMmSJVzvaceOHZgyZYpaT7WqqHWy6gKBgK5evQoAuPT0AfIk8qjZ9nf/QUHrAJD0KRLSN2Cg+w5ERN1Dq+bW4LdrVJ1F1htisZjr9uuS2iirXlhY+MbkXnhdDLYooa7aQt+y6rXOHdfUtESqMC5tPuoZy2UD2jN33MhZhAImBZNa4fCZe3iUlotWzcuXoqit6KPRqK3UxZeDNgy2KMFgC/1Q6xqO53kPEfNYPhloxSvA4FZrAACZz7aj19vfITujCPtvpMK5izXsmlijjV3ldIdqE+Hh4eUGWtUVFEF+Bgy2UMZgC/2g14aDMRYAYBUAHoCNRLRYy35DAOwF4E1EZTu0MykaWzgBAFokSZD5RO69wUyKYRz2C4yKLWBu7g/X9m/m8JQBAwYMVDd6azgYYzwA6wD0AZAC4AJj7AARxZfazxrAlwAq5FvKkxCe7/8PANDc4W00a/ZSu0Wah62Wa5FPZrC2MGTeMGDAgAF9oU+vqo4AkojoLhEVAdgFQNO4yvcAfgKgHlargXqsHuzat4Bd+xYAJCgy2YYik214FPETBvi7YsQgV7zfv3ZN7L4qTZs2re4i6IVXkVWviP9/bZFVB4CFCxfCwcEBTk5OnFS5RCIBj8eDSCQCn8+HSCTCypUruWAxhRy7nZ2dVjl2he0U8SWbNm1C+/bt0b59e2zfvp3bv1evXloDTGUyGXr06IHc3Fxu3d69e8EYUwlyPHbsmJrst7KtiouLMWvWLDg4OMDDwwNdunRRkUF/FQoLCzFq1CiurjExMdy2nTt3ws3NDS4uLpyMhybi4uLg4+MDFxcXuLm5obi4GIA8ENHe3l7t+RCLxejZsycEAgF69OjByYJcunQJPj4+cHV1hUAg4EQyS1MbZdX1OVRlB0BZRSwFQCflHRhjHgBaENG/jLGZ2k7EGBsHYBwA8NvaITZdHpQU0H4xnp2RB/GkGtXDgzMnYMQjODo6wsnJCYcOHeJyFdjY2MDf3x9xcXEqqrJ9+/ZFVlaWSjCdUCjkwv4VNG3aFD4+PoiJieEUTwEgMDAQYrFYRbysU6dOsLGxwZEjR7h1rVq1gkgkQmRkJPcPaWZmhoCAANy8eVMlwKt79+4AwEXcAtBaJwA6r1OHDh1QWFiIgoICbp2lpSV4PJ5KzndTU1NYWFggJyeHi+hnjMHGxgYFBQUqqW0VkgzKLxszMzPUq1cPWVlZnN6Swt2wTZs2XJBZZGQk6tevj/nz58PU1BSTJ08GUKLRpMiNkJmZCWNjY1hZWSE3N1cl30KDBg2wceNGtG/fnlOdJSJYW1tz0c/Pnz/nXEwrU6eioiLk5eUhMzNTa51evHiBixcv4vvvv0dmZiYkEgkeP36MiIgIdO7cGTk5OZBIJCgsLERiYiL27NmD6OhoPHz4EEOHDsWtW7eQm5sLKysrzi6FhYV4//338fTpU8ycORNFRUX47rvvcPXqVcTHx3N2UTiUHDx4kFOiZYzh2bNnWLhwISIjIyGVStGjRw/0798fNjY2CAoKwooVKzB16lS1Oh08eBACgQBWVlbIz89HUVERtm7dCh8fH+zcuRNz5sxBXl4ecnNzUVxczHk2ZWZmcrbKzc3Ft99+iydPniA6OhqmpqZ48uQJrl279lrP3ubNm2FkZISoqCg8efIEw4YNQ69evfD06VN8/fXXOHnyJBo2bIgJEybg5MmTEIlEKvfJ3Nwcw4cPx6+//goXFxekp6eDMYbi4mL07NkTH3/8Mbp06aJSp0mTJmHYsGH46KOPEB0djVmzZmHt2rWQyWT4v//7P7i7u+POnTvo0qULOnXqBGtra65OinfHqVOnMGnSJCQkJKBhw4b4448/EBYWhqysLNjY2MDR0RG3bt1CfHw8mjdvjg8++ACNGjXC6tWrufusfJ/y8/MRHh6u8t7TKboKCCn9AxAM+byGYvljAGuVlo0ARAJo/XI5EoBXeecVOTcm2U13kt10J+k1H1q17RKt2naJVm69SEXFNT8gSZco8kjompoaAKgpH8fmzZvJ29ub3NzcaMKECSSVSqm4uJhGjBhBrq6u5OLiQqtWraJdu3aRpaUldejQgYRCIRUWFpKvry/98MMPXM6LDRs20OLFi7l8HFKplKZNm0YuLi7k6upKe/bs4dZPmDCBHB0dqXfv3tSvXz8uH8f58+epW7du5OHhQQEBAZSamkpEROvWraPvv/+eK7evry+tXr2aunXrRkSq+TgWLFhAS5Ys4fbt2bMnnT9/noqLi9VyhSQmJlLjx
o1V1pWXx0PB1q1baeLEidzy2LFj6c8//yQioqdPn5JAIFC7B0REQ4cOpaioKG45KyuL7OzsKCEhgfh8PrdeU4CaIndJdnY22draUk5OjsZrvCrjxo2jnTt3csu+vr506dIlio6Opr59+3Lrf//9d5o8ebLa8eHh4TRq1Cit59d0Dzp06ECPHj0iIiKJREL169fXeCyfz6e7d++qrHv+/Dn16NGDZDIZDR06lG7fvk0hISH0999/qx3/888/07Jly7jlNzUA8CGAFkrL9i/XKbAG4AogkjEmBuAD4ABjrEw/Y6nMBPdy/8K93L+QFvETJgwTYsIwIb74yB0mxm+uvIgmlHsJeuWWSPc/HaAsq37q1ClIJBLs2rULly5d4mTVr1+/jpEjR3J5OBR5ORRf4X369MF///3Hyaor61Upy6ofPXoU06ZNQ1paGvbu3cvJqv/xxx/c8JNCVj0sLAyXLl3CiBEjMG/ePAByKfLSsuddu3YFIFfhVUaTHPfDhw+hiQ4dOqCgoIDTVgK0a1V17doVIpEIXbp0Kfc6jRo1Qk5OjkqmOwXR0dHw8PDglv/66y8MGDAATk5OsLS0VJMO18Tt27fRpk0brtdWFlOmTNEocqgpo55QKER4eDikUinu3LmDq1evciKQN27cwIMHD1BcXIzw8HCtsupEhL59+8LDwwPLli0rt3xCoRD79u0DAISFhSE7O1ttmE/xjJR2oVeWVW/dujUnqz5w4EC163h5eSEqKqrc8lQF+hyqugCgPWOsDeQNxjAAHyk2ElEWAM71iTEWCeArKseryogAm0T5TTFq16jONRbVQgcdd3N1hEFWXQ5VMIg3KiqqUvM3jRs3xuPHj9WOyc7OVlHgDQ0Nxddffw2gRFZdKBTqTFZ99erVFd73s88+Q2JiIjw9PdGmTRt07NgRPB4PjRo1wrp16xAcHAxjY2P4+PhojOKXSCQ4c+YMzp07B3Nzc/To0QNeXl7c8LEmVqxYgUmTJmHTpk3o3r07mjVrpiLZ/vDhQ4wePRo7duzQWPfaKKuut4aDiCSMsUkADkPujvs7Ed1gjC2AvMt04JXOCylsPxLosqgGailEcll1xbyB8gvu6tWriIiIwLp16xAWFoZff/1V63mGDRuGoUOHaszGV9nyCAQCjV+FZcmqz5s3T0VWvTJy3Ldu3YKFhQVsbW0rXV47OzuVyeOUlBQV9VVtMt7KSY2ePn2KkydPIiEhAYwxSCQSmJiYYNGiRWXKqrdv3x737t3j5m3KYsqUKTh16pTa+uHDh2PmTNWpURMTE6xatYpb9vT05OTTAwMDubinX375RWOGRXt7e3Tv3p2zZ//+/XH58uUyGw47Ozsuh3t2djbCwsK4OmVlZWHAgAH46aef4O3tXWY9lWXVZ8+ejYMHD2LkyJG4d+8e2rRpU3dk1YnoIBF1IKJ2RPTDy3UhmhoNIvIvr7cBADyjNze/RmWp68F/yrLqDRo0eGNk1QcPHozQ0FAUFRXhzp07uH//vsbsfmlpaZgwYQLnLKCgovk4AgICEBERgczMTKSnp+P48eNc70wqleLZs2do2bKl2nEODg4Qi8UA5MN5Y8eOxf379yEWi5GSkoLmzZvj7NmzcHJywv3793Hr1i0AwL1793Djxg0IBAJYW1tj5MiRmDp1Kue1pBgGLM3q1as1yqqXbjQAuXq2QoY8IiICNjY2XMOhkFXPyMjA+vXr8emnn6od379/f8TFxaGgoAASiQSnTp0qV1b92bNnXK/vxx9/5M5bWFiIwMBAfPrpp2oKwpowyKrrldqlraVPFP+8dRVlWXU3N7c3RlZdKBQiKCgIzs7OeOedd/DLL79wX/k5OTkQiURwcXFB3759MXDgQHzzzTfcsWXJsZemcePGmDNnDry8vNCpUycsWLCA89S7cOEC/Pz8NKZMraisurm5ObZu3YqPP/4YIpEIH3zwAX7//XfOs2vx4sVo0KABnJ2d4ebmhsGDB3PXf1VSU1Ph7u4OZ2dnLF++XKWn+cUXX4DP58PPzw9z585F27ZtAcjnaBYsWABALu44ZcoUeHp6QiQSwcfHB/369QMATJ8+Ha1bt0Z2djbs7e25Hurx48fh6OiIDh06ICMjg8tWGBoaiujoaGzcuJGbl1GkMy6NQVZdzz9hB1UPkrqMQVa9hJoun13bZNUnTpxIkZGRGrclJydTv379XvsaVUFNfy4qikFW/TWRSmrGGJ8B/WGQVa9+3N3dtY7r29vbY/To0SoxOQb0i0FW/TXht+5A8eJb1V2MGoG+RA5ro6y6QcyuBIMtSqirttC3rHqt63GYWBhkkhUoS1bUdSwtLau7CDUGgy1KMNhCP9S6hoNnblLdRagxvO5E4psE7w3OLV9ZDLYowWAL/VDrGg5lvZq6jrIWVl3H8FyUYLBFCQZb6Ida13AYMGDAgIHqxdBwGKhxvIqsekUwyKqryqqXJfs9dOhQ3L17V2td3n33XRVF5osXL4IxxpUVAJKSktQ84+bOnYuVK1cCkIcCLFmyBI6OjhCJRPD29saOHTvKtWN5fPXVV3B1dYWrq6uKGvSxY8fg4eEBV1dXjB07VkU9WRmxWIzevXuDz+eDz+eraVpNnDhR4zOye/duMMY4JdqkpCTuORaJRPjiiy/UjiEiDBs2DAKBgNM1A4Bvv/0W//zzD7e8f/9+LtYEAJYuXYqWLVuq3ecqQ1d+vVX1c3Z21uLpXPeIjY3Vy3lrahyHJnVcBXl5eeWe19fXV8Vmvr6+5ObmxqkMp6enk5eXl5r6aXkoFF/LIi0tjbp06aJy7RYtWtCRI0eISFUd98qVK+Tu7k6FhYWUlJREDg4OnOqvctlSU1PJ39+fFixYQEREOTk5dPr0aVq2bFmF1HFv3rxJSUlJRCSPzWjatCllZ2cTEdGxY8do/PjxGusSFxdHwcHBKuumT59Ofn5+NHbsWG7d7du3SSgUquz3zTff0IoVK4hIruIbEBDAXTMzM5O2bNmi1YYVYf/+/dSvXz+SSCSUk5NDIpGIcnJySCKRkJ2dHVffOXPm0ObNmzWew8/Pj44fP05Ecpvm5+dz22JiYmjEiBFqz0hWVhZ169aNvLy8uGdMU/1Lc+nSJfr888+JiKhHjx6Um5tLKSkpNHjwYJX9ZDIZCYVCKigo4Nb99ttvavdZgSGOoxQVlVOoC7xJcQ6vypYtW9CxY0d06dIFEydOhEwmg0Qiwccffww3Nze4urpi9erVnCquQiVX0VsZNmwYdu3aBUAevRscHMydWyaTYfr06XB1dYWbmxv3RS6TyTBx4kQ4OTmhT58+ePbsGXfMhQsX0L17d3h6eqJ///6cgvGePXvQv39/lbLPnDlToz5WeHg4PvzwQ5iamqJdu3Zo2bIlLl26pLZf06ZNsWHDBqxZswaAPD+Ir68v6tevXyHbOTo6ol27dgDkPRJbW1uuLv7+/jh06BCX60KZHTt2qLiBy2QyhIWFYcuWLYiIiKhwT/DHH3/E+vXruUhyGxsbjBw5skLHaiM+Ph7du3cHj8eDlZUVBAIBjhw5grS0NFhaWnL1
7dOnD8LCwtSOv3r1Kng8Hnr27AlAblOFPpREIsHXX3+NxYvVM2D/73//w//+9z+YmVXO69PExAT5+fmQyWQoLi6GkZER5s2bh++//15lP8YYunbtioMHD1bq/PpCrznH9YEmraG6SmRkJKfMqk8enVmn83M291XvtlcWZVn1goICzJgxA7t27UK7du04WXWgxJd/zZo1WLt2rUqD26dPH3zyySecrPqmTZuwaNEiAKqy6k+fPoW3tze6deuGyMhITlb90aNH4PP5GD9+PCerfuDAATRq1Ag7duzAvHnz8Ouvv+LMmTMYMWKESvm7du2KvXv34vTp0yoZDB8+fKhyXxVy5+7u7mo2UJZVVwjzaRJTVFyPx+PBwsKCGzJTUFr2m8fjoXXr1rh+/TqEQqHKvmfOnFHJVhcVFQVHR0e0bdsWfn5+iIiIKDe+KCMjA8XFxWjVqlWZ+wFyaRJF465Mjx49sGLFCpV1QqEQixcvxtSpU5Gbm4vIyEh4eHggKCgIBQUFiI2NhUgkQlhYmFZZ9fr16yMoKAj3799H3759sWjRIhgZGWHVqlUYMmSIWubNCxcuIC0tDf369VN74SclJcHd3R02Njb48ccfOUl7BW5ubrCxsYGHhwdGjx6N+Ph4GBsbQyBQF3JVyKq/99575dpM39S6hkPTF1BdRVtqT12ji5e8PjDIqsuhUkG82vJxaJNV1yb7rZDxLt1wlK5LaGgohg0bBqBEVj0wMFBnsuqzZ8/m9J/K45133sHFixfRuXNnNGnSBF5eXuDxeDAyMsLOnTsxefJkFBUVoU+fPhpddSUSCaKiohAbGws7OzsEBwdj27Zt6NWrF/bv34/IyEgVe8tkMsyYMUPj3Iy9vT0ePHiAt956C+fPn8eQIUOQkJCgpgas6DECch2wTZs2YcGCBbh+/ToCAgIwduxYAHVEVt2AAX1DZJBVfx1ZdaBs2W9tMt7KdSkuLsa+ffvw77//4rvvvoNMJkNmZiby8vK0yqo7OzvjrbfegomJCR48eKBRgVeZyvQ4ALnKbEhICAD5JL5CHdfPz49LmnXw4EHcu3dP7Vh7e3t4eHhwPa+goCBcvnwZDRs2xO3bt7mhruzsbDg6OuLs2bOIj4/nknKlpqbinXfewb///gt3d3dOur1jx45o1aqVRocBBWFhYejcuTOeP3+O5ORk/Pnnn+jduzc++ugjmJub1x1ZdX2gSa2zrlLZ8dQ3DWVZdcaYQVb9JRX9oi9P9vv27ducMqsyynVR5GtPTk6GWCzGgwcPMGjQIISHh6OgosigAAAMFUlEQVRBgwZo2LAhZ5/09HQcOXIEvr6+AOQ9iYkTJ3L3JDs7G9u2bVO73uzZszXKqmtqNCQSCTIyMgAAsbGxuHXrFnr16sXZC5A3iEuWLMH48ePVjvfx8cHTp0+5jIr//fcf+Hw+Bg8ejNTUVIjFYiQlJaF+/fpITEzEW2+9hWfPnkEsFkMsFsPLywsHDx6Eu7s7nj59yo2QJCUl4e7du2pefAqKioqwdu1azJgxA/n5+dx7TiqVct5fNUlWvdb1OCo68VcXCAgIqO4iVCvKsuoymQwmJiZYv349eDwePvnkExARGGP46aefAJTIqterV497uQMlsuoAVFw0g4ODERMTA4FAAMaYiqz6iRMnwOfz0bJlSzVZ9SlTpiA7OxtSqRQzZsyAi4sLBgwYgC1btmD06NFq9Rg0aJCKK6ayrLqxsTEnqy6TyThZ9eLiYpiYmGDUqFH48ssvuWPt7e2Rn5+P4uJi7N27l5P81oRC9jszMxMbN24EAGzbtg1ubm549OgRbGxsNA6vKWTV/f39tcqq//HHH/joo4+wfft2fPHFF9yw6sKFC7mv+cmTJyMvLw+enp4wNTWFiYkJZs2apflmV5DCwkL4+fkBkE+2h4aGckNSixYtwqFDhyCTyTBp0iR069YNAHDu3Dn88ccfWL9+PYyNjbF06VL06NEDRISOHTtyQ0WV5cSJE/juu+9gYmICHo+H3377Tavaw+rVq/HJJ5+gXr168PDwwPPnz+Hm5oZBgwZxQ1snTpzQ2FhWC7pyz6qqX2Xltt9kEhIS9HLemuqOWxbKLpM1kaqUVdeFLZYsWaLVXTUvL498fHxUJL5rKjX9uagoDx8+pD59+qisM7jjVgJtHiN1kbKS9NRmXkVWvbCwUI8len2qUlZdF7awtbVV8wJTYGFhgZCQEDx+/Pi1r6NvavpzUVGSk5Px888/c8tLly7F0qVLq20EptbJqjs4OJC2seK6hj5l1Z2cnCrt/VKd1FX5bE0YbFFCXbQFEeHmzZsGWXUDVYu5uTnS09PV3DwNGDBQsyEipKenc95c+qLW9Tjc3d0pNja2uotRI9DX11RxcTFSUlJq1bCgTCYzeNy9xGCLEuqiLczNzWFvbw8TE9UUFLrscdQ6ryoD+sfExESr22BNpS4OSWjDYIsSDLbQD3ptihljAYyxRMZYEmNMLfSTMTaeMXaNMRbHGDvNGOOXd06D5EgJCv94AwZbKGOwRQkGW+gHvTUcjDEegHUA+gPgA/hQQ8Owk4jciEgEYAmA5foqjwEDBgwY0A367HF0BJBERHeJqAjALgAqLkBEpJyeyxJA7ZpwMWDAgIE6iD7nOOwAKMtPpgDoVHonxtgXAKYDMAXQU9OJGGPjAIx7uVjIGLuuw3LaAKiMWmB5+5e1XdO2iqxTXlb+uxGAZ9AdBluUXZbX2V/XtijLLgZbGGyhaZtmCYFXQVeRhKV/AIIBbFRa/hjA2jL2/wjAlgqcV2fRjy/P96su9y9ru6ZtFVmnvFzqb4Mt6qgtyrGLwRYGW+jVFvocqnoIoIXSsv3LddrYBSBIj+XRxt863r+s7Zq2VWTd32Vs0yUGW7z6uavaFmXZRdcYbPHq534jbaG3OA7GmDGAWwB6Qd5gXADwERHdUNqnPRHdfvn3IADzqRw/Y8bYxfL2qSsYbFGCwRYlGGxRgsEWJejSFnqb4yAiCWNsEoDDAHgAfieiG4yxBZB3mQ4AmMQY6w2gGMBzAKMqcGrtiRXqHgZblGCwRQkGW5RgsEUJOrNFrYscN2DAgAED1UvdisU3YMCAAQOvjaHhMGDAgAEDlcLQcBgwYMCAgUrxRjUcjDEjxtgPjLE1jLGKTLS/sTDG/BljUYyx9Ywx/+ouT3XDGLNkjF1kjA2s7rJUJ4wx55fPxF7G2ITqLk91whgLYoz9xhjbzRjrW93lqU4YY20ZY5sYY3srsn+NaTgYY78zxtJKR4WXJ5RYikDI40WKIY9Ur5XoyBYEIBeAOQy2AICvAfypn1JWDbqwBRElENF4AO8D8NVnefWJjmyxn4g+AzAewAf6LK8+0ZEt7hLRJxW+Zk3xqmKMdYP8RbeViFxfruNBHgvSB/KX3wUAH0Lu3ruo1CnGvvw9J6INjLG9RBRcVeXXJTqyxTMikjHGmgJYTkTDq6r8ukRHthACsIW8EX1GRP9UTel1iy5sQURpjLHBACYA2EZEO6uq/LpEV7Z4edwyADuI6HIVFV+n6NgWFXpv1ph8HER0ijHWutR
qTigRABhjuwAEEtEiAGpDDoyxFABFLxel+iutftGFLZR4/v/t3W+IVFUYx/HvL0izLMMiSAI1NE1LrSwkiTTtHxGRiBZmGUUZUVjYC9GgoECoXqRiFgWr4B+ytMS2SEKzZEst/5X9EcwXUpSERNj6Qn16cc42szKrc21nd3b5fWBg58y99zz7cPc+nnvHc4CetYizI7TTeTGONInmMKBZUmNEnKhl3LXQXudF/j9U6yR9BHTJwtFO54WA+cDHXbVoQLtfL6pSN4WjDVVNlFhmDbBQ0k3A5loG1gkK5ULSJOB24EJgUW1D63CFchERcwEkzSCPxGoaXccqel6MAyaR/jHRWNPIOl7R68VTwESgj6RBEbGklsF1sKLnxUXAy8A1kubkAtOmei8chUTEP0DV9+m6s4hYQyqklkVEQ2fH0NkiYhOwqZPDqAsRsQBY0Nlx1IOI+JP0rKcqdfNwvA1FJ0rszpyLEueixLkocS5KapqLei8c24DBkgZK6gHcB6zr5Jg6i3NR4lyUOBclzkVJTXNRN4VD0kqgCRgi6aCkRyLiGNAyUeIPwLvls+t2V85FiXNR4lyUOBclnZGLuvk6rpmZdQ11M+IwM7OuwYXDzMwKceEwM7NCXDjMzKwQFw4zMyvEhcPMzApx4bC6I+m4pJ1lrwGn2HbAydNJn2Gfm/IU1LskbZE05AyOMVPSg/nnGZL6lX32tqRh7RznNkmjqthnlqRz/2/fZi1cOKweNUfEqLLXgQ7qd1pEjASWAq8U3TkilkTEsvx2BtCv7LNHI2Jvu0RZinMx1cU5C3DhsHbjwmFdQh5ZfCHp2/y6scI2wyVtzaOU3ZIG5/YHytrfzGsVnMpmYFDed4KkHZL25AVzeub2+ZL25n5ezW0vSJotaTIwGlie++yVRwqj86jkv4t9HpksOsM4m0izoLYc6w2lVQ6/l/RibnuaVMA2StqY226T1JTzuFpS79P0Y9aKC4fVo15lt6nW5rY/gFsj4lrSam2VZjWdCbweEaNIF+6Dkq7M24/N7ceB0y1qdTewR9I5QAMwNSKuJs0m/USegvpeYHhEjABeKt85It4DtpNGBqMiorns4/fzvi2mAqvOMM47gA/K3s+NiNHACOBmSSPyDLC/AuMjYryki4F5wMScy+3As6fpx6yVbjWtunUbzfniWe5sYFG+p38cuKLCfk3AXEmXAWsiYp+kCcB1wDZJAL1IRaiS5ZKagQOktRqGAL9ExM/586XAk6T1TY4C70haD1S9omBEHJK0X9IYYB8wFNiSj1skzh5Ab6A8T1MkPUb6u76UtHDV7pP2HZPbt+R+epDyZlY1Fw7rKp4BfictA3sW6cLdSkSskPQ1cBfQKOlxQMDSiJhTRR/TImJ7yxtJfSttFBHHJN0ATAAmkyaTu6XA77KKtOb3j8DaiAilq3jVcQLfkJ5vLAQmSRoIzAauj4jDkhpIS+WeTMCGiLi/QLxmrfhWlXUVfYDf8up900lrJ7ci6XJgf7498yHpls1nwGRJl+Rt+krqX2WfPwEDJA3K76cDn+dnAn0iopFU0EZW2Pdv4Pw2jrsWuIe0BvSq3FYozkizkz4PjJE0FLgAOAL8pbTO/J1txPIVMLbld5J0nqRKozezNrlwWFexGHhI0i7S7Z0jFbaZAnwnaSdwFbAsf5NpHvCppN3ABtJtnNOKiKPAw8BqSXuAE8AS0kV4fT7el1R+RtAALGl5OH7ScQ+TprruHxFbc1vhOPOzk9eA5yJiF7CDNIpZQbr91eIt4BNJGyPiEOkbXytzP02kfJpVzdOqm5lZIR5xmJlZIS4cZmZWiAuHmZkV4sJhZmaFuHCYmVkhLhxmZlaIC4eZmRXiwmFmZoX8Cx2PXcerQ51mAAAAAElFTkSuQmCC\n",
- "text/plain": [
- ""
- ]
- },
- "metadata": {
- "needs_background": "light"
- },
- "output_type": "display_data"
- }
- ],
- "source": [
- "score_save_path = './IJBB/result'\n",
- "files = glob.glob(score_save_path + '/VGG2*.npy') \n",
- "methods = []\n",
- "scores = []\n",
- "for file in files:\n",
- " methods.append(Path(file).stem)\n",
- " scores.append(np.load(file)) \n",
- "methods = np.array(methods)\n",
- "scores = dict(zip(methods,scores))\n",
- "colours = dict(zip(methods, sample_colours_from_colourmap(methods.shape[0], 'Set2')))\n",
- "#x_labels = [1/(10**x) for x in np.linspace(6, 0, 6)]\n",
- "x_labels = [10**-6, 10**-5, 10**-4,10**-3, 10**-2, 10**-1]\n",
- "tpr_fpr_table = PrettyTable(['Methods'] + map(str, x_labels))\n",
- "fig = plt.figure()\n",
- "for method in methods:\n",
- " fpr, tpr, _ = roc_curve(label, scores[method])\n",
- " roc_auc = auc(fpr, tpr)\n",
- " fpr = np.flipud(fpr)\n",
- " tpr = np.flipud(tpr) # select largest tpr at same fpr\n",
- " plt.plot(fpr, tpr, color=colours[method], lw=1, label=('[%s (AUC = %0.4f %%)]' % (method.split('-')[-1], roc_auc*100)))\n",
- " tpr_fpr_row = []\n",
- " tpr_fpr_row.append(method)\n",
- " for fpr_iter in np.arange(len(x_labels)):\n",
- " _, min_index = min(list(zip(abs(fpr-x_labels[fpr_iter]), range(len(fpr)))))\n",
- " tpr_fpr_row.append('%.4f' % tpr[min_index])\n",
- " tpr_fpr_table.add_row(tpr_fpr_row)\n",
- "plt.xlim([10**-6, 0.1])\n",
- "plt.ylim([0.3, 1.0])\n",
- "plt.grid(linestyle='--', linewidth=1)\n",
- "plt.xticks(x_labels) \n",
- "plt.yticks(np.linspace(0.3, 1.0, 8, endpoint=True)) \n",
- "plt.xscale('log')\n",
- "plt.xlabel('False Positive Rate')\n",
- "plt.ylabel('True Positive Rate')\n",
- "plt.title('ROC on IJB-B')\n",
- "plt.legend(loc=\"lower right\")\n",
- "plt.show()\n",
- "#fig.savefig('IJB-B.pdf')"
- ]
- },
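The `min(list(zip(abs(fpr - x), range(len(fpr)))))` idiom in the cell above finds the index of the measured FPR closest to each target threshold, which is how the TPR@FPR table is filled. A minimal standalone sketch of the same lookup, with synthetic scores and labels standing in for the saved `.npy` score files:

```python
import numpy as np
from sklearn.metrics import roc_curve

# Synthetic verification scores: positive pairs score higher on average.
rng = np.random.RandomState(0)
label = np.concatenate([np.ones(1000), np.zeros(1000)])
score = np.concatenate([rng.normal(0.6, 0.2, 1000), rng.normal(0.2, 0.2, 1000)])

fpr, tpr, _ = roc_curve(label, score)
fpr, tpr = np.flipud(fpr), np.flipud(tpr)  # descending FPR: keep the largest TPR at ties

for target in [1e-3, 1e-2, 1e-1]:
    idx = np.argmin(np.abs(fpr - target))  # index of the measured FPR closest to the target
    print('TPR@FPR=%g: %.4f' % (target, tpr[idx]))
```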
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "+----------------------------------------+--------+--------+--------+--------+--------+--------+\n",
- "| Methods | 1e-06 | 1e-05 | 0.0001 | 0.001 | 0.01 | 0.1 |\n",
- "+----------------------------------------+--------+--------+--------+--------+--------+--------+\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N1D1F2) | 0.4044 | 0.8145 | 0.9056 | 0.9497 | 0.9779 | 0.9922 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N1D0F0) | 0.4035 | 0.8038 | 0.8976 | 0.9437 | 0.9755 | 0.9914 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N1D1F1) | 0.3940 | 0.8124 | 0.9028 | 0.9479 | 0.9770 | 0.9919 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N0D0F0) | 0.3893 | 0.8050 | 0.8990 | 0.9448 | 0.9759 | 0.9918 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N1D1F0) | 0.4098 | 0.8123 | 0.9022 | 0.9463 | 0.9766 | 0.9918 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N0D1F0) | 0.3949 | 0.8130 | 0.9036 | 0.9471 | 0.9767 | 0.9919 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N0D1F2) | 0.4011 | 0.8210 | 0.9069 | 0.9500 | 0.9779 | 0.9924 |\n",
- "+----------------------------------------+--------+--------+--------+--------+--------+--------+\n"
- ]
- }
- ],
- "source": [
- "print(tpr_fpr_table)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# setting N0D1F2 is the best"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Test Setting Conclusions"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### (1) add is better than concat for the flip test (N1D1F2 v.s. N1D1F1)\n",
- "#### (2) detection score contains some faceness information to decrease weights of noise samples within the template (N0D1F0 v.s. N0D0F0)"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "language": "python",
- "name": "python2"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 2
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.15"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/evaluation/IJB/IJBC_Evaluation_MS1MV2.ipynb b/evaluation/IJB/IJBC_Evaluation_MS1MV2.ipynb
deleted file mode 100644
index e364be8..0000000
--- a/evaluation/IJB/IJBC_Evaluation_MS1MV2.ipynb
+++ /dev/null
@@ -1,532 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/home/jd4615/miniconda3/envs/insightface/lib/python2.7/site-packages/sklearn/utils/fixes.py:313: FutureWarning: numpy not_equal will not check object identity in the future. The comparison did not return the same result as suggested by the identity (`is`)) and will change.\n",
- " _nan_object_mask = _nan_object_array != _nan_object_array\n"
- ]
- }
- ],
- "source": [
- "import os\n",
- "import numpy as np\n",
- "import cPickle\n",
- "from sklearn.metrics import roc_curve, auc\n",
- "import matplotlib.pyplot as plt\n",
- "import timeit\n",
- "import sklearn\n",
- "import cv2\n",
- "import sys\n",
- "import glob\n",
- "sys.path.append('./recognition')\n",
- "from embedding import Embedding\n",
- "from menpo.visualize import print_progress\n",
- "from menpo.visualize.viewmatplotlib import sample_colours_from_colourmap\n",
- "from prettytable import PrettyTable\n",
- "from pathlib import Path\n",
- "import warnings \n",
- "warnings.filterwarnings(\"ignore\") "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_template_media_list(path):\n",
- " ijb_meta = np.loadtxt(path, dtype=str)\n",
- " templates = ijb_meta[:,1].astype(np.int)\n",
- " medias = ijb_meta[:,2].astype(np.int)\n",
- " return templates, medias"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_template_pair_list(path):\n",
- " pairs = np.loadtxt(path, dtype=str)\n",
- " t1 = pairs[:,0].astype(np.int)\n",
- " t2 = pairs[:,1].astype(np.int)\n",
- " label = pairs[:,2].astype(np.int)\n",
- " return t1, t2, label"
- ]
- },
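Both readers above are thin `np.loadtxt` wrappers over whitespace-separated text files. A quick sketch of the two expected formats on in-memory samples (the file contents below are fabricated for illustration):

```python
import io
import numpy as np

# ijbc_face_tid_mid.txt format: image_name tid mid
meta = np.loadtxt(io.StringIO(u"1.jpg 1 69544\n2.jpg 1 69544\n"), dtype=str)
templates, medias = meta[:, 1].astype(int), meta[:, 2].astype(int)

# ijbc_template_pair_label.txt format: tid_1 tid_2 label
pairs = np.loadtxt(io.StringIO(u"1 11065 1\n1 11066 0\n"), dtype=str)
t1, t2, label = (pairs[:, i].astype(int) for i in range(3))

print('%d pairs, %d positive' % (len(label), label.sum()))
```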
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_image_feature(path):\n",
- " with open(path, 'rb') as fid:\n",
- " img_feats = cPickle.load(fid)\n",
- " return img_feats"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [],
- "source": [
- "def get_image_feature(img_path, img_list_path, model_path, gpu_id):\n",
- " img_list = open(img_list_path)\n",
- " embedding = Embedding(model_path, 0, gpu_id)\n",
- " files = img_list.readlines()\n",
- " img_feats = []\n",
- " faceness_scores = []\n",
- " for img_index, each_line in enumerate(print_progress(files)):\n",
- " name_lmk_score = each_line.strip().split(' ')\n",
- " img_name = os.path.join(img_path, name_lmk_score[0])\n",
- " img = cv2.imread(img_name)\n",
- " lmk = np.array([float(x) for x in name_lmk_score[1:-1]], dtype=np.float32)\n",
- " lmk = lmk.reshape( (5,2) )\n",
- " img_feats.append(embedding.get(img,lmk))\n",
- " faceness_scores.append(name_lmk_score[-1])\n",
- " img_feats = np.array(img_feats).astype(np.float32)\n",
- " faceness_scores = np.array(faceness_scores).astype(np.float32)\n",
- " return img_feats, faceness_scores"
- ]
- },
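Each line of the image list is `name x1 y1 ... x5 y5 score`; the loop above peels off the five landmark points used for alignment and the trailing faceness score. Parsing one such line in isolation (the sample values are fabricated):

```python
import numpy as np

line = u"img/1.jpg 30.0 40.0 70.0 40.0 50.0 60.0 35.0 80.0 65.0 80.0 0.99"
name_lmk_score = line.strip().split(' ')
img_name = name_lmk_score[0]
lmk = np.array([float(x) for x in name_lmk_score[1:-1]],
               dtype=np.float32).reshape((5, 2))   # five (x, y) landmark points
faceness = float(name_lmk_score[-1])               # detector score for this crop
print('%s lmk=%s faceness=%.2f' % (img_name, lmk.shape, faceness))
```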
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [],
- "source": [
- "def image2template_feature(img_feats = None, templates = None, medias = None):\n",
- " # ==========================================================\n",
- " # 1. face image feature l2 normalization. img_feats:[number_image x feats_dim]\n",
- " # 2. compute media feature.\n",
- " # 3. compute template feature.\n",
- " # ========================================================== \n",
- " unique_templates = np.unique(templates)\n",
- " template_feats = np.zeros((len(unique_templates), img_feats.shape[1]))\n",
- "\n",
- " for count_template, uqt in enumerate(unique_templates):\n",
- " (ind_t,) = np.where(templates == uqt)\n",
- " face_norm_feats = img_feats[ind_t]\n",
- " face_medias = medias[ind_t]\n",
- " unique_medias, unique_media_counts = np.unique(face_medias, return_counts=True)\n",
- " media_norm_feats = []\n",
- " for u,ct in zip(unique_medias, unique_media_counts):\n",
- " (ind_m,) = np.where(face_medias == u)\n",
- " if ct == 1:\n",
- " media_norm_feats += [face_norm_feats[ind_m]]\n",
- " else: # image features from the same video will be aggregated into one feature\n",
- " media_norm_feats += [np.mean(face_norm_feats[ind_m], 0, keepdims=True)]\n",
- " media_norm_feats = np.array(media_norm_feats)\n",
- " # media_norm_feats = media_norm_feats / np.sqrt(np.sum(media_norm_feats ** 2, -1, keepdims=True))\n",
- " template_feats[count_template] = np.sum(media_norm_feats, 0)\n",
- " if count_template % 2000 == 0: \n",
- " print('Finish Calculating {} template features.'.format(count_template))\n",
- " template_norm_feats = template_feats / np.sqrt(np.sum(template_feats ** 2, -1, keepdims=True))\n",
- " return template_norm_feats, unique_templates"
- ]
- },
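The aggregation above proceeds media-first: frames of the same video are averaged into one media feature, media features are summed into a template feature, and the template is L2-normalised, so a long video cannot dominate a template. A toy version with three image features, the last two sharing one media:

```python
import numpy as np

img_feats = np.eye(3, dtype=np.float32)   # three toy (already normalised) image features
medias = np.array([0, 1, 1])              # the last two features come from the same video

media_feats = [img_feats[medias == m].mean(0)   # frames of one media -> one mean vector
               for m in np.unique(medias)]
template = np.sum(media_feats, 0)               # sum media features into a template feature
template /= np.sqrt(np.sum(template ** 2))      # L2-normalise the template
print(template)   # the shared video contributes once, not twice
```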
- {
- "cell_type": "code",
- "execution_count": 19,
- "metadata": {},
- "outputs": [],
- "source": [
- "def verification(template_norm_feats = None, unique_templates = None, p1 = None, p2 = None):\n",
- " # ==========================================================\n",
- " # Compute set-to-set Similarity Score.\n",
- " # ==========================================================\n",
- " template2id = np.zeros((max(unique_templates)+1,1),dtype=int)\n",
- " for count_template, uqt in enumerate(unique_templates):\n",
- " template2id[uqt] = count_template\n",
- " \n",
- " score = np.zeros((len(p1),)) # save cosine distance between pairs \n",
- "\n",
- " total_pairs = np.array(range(len(p1)))\n",
- " batchsize = 100000 # small batchsize instead of all pairs in one batch due to the memory limiation\n",
- " sublists = [total_pairs[i:i + batchsize] for i in range(0, len(p1), batchsize)]\n",
- " total_sublists = len(sublists)\n",
- " for c, s in enumerate(sublists):\n",
- " feat1 = template_norm_feats[template2id[p1[s]]]\n",
- " feat2 = template_norm_feats[template2id[p2[s]]]\n",
- " similarity_score = np.sum(feat1 * feat2, -1)\n",
- " score[s] = similarity_score.flatten()\n",
- " if c % 10 == 0:\n",
- " print('Finish {}/{} pairs.'.format(c, total_sublists))\n",
- " return score"
- ]
- },
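Because the template features are already L2-normalised, the verification score reduces to a dot product, and the batching only bounds peak memory. A compact sketch with random unit vectors (the sizes are illustrative, not the IJB-C pair counts):

```python
import numpy as np

rng = np.random.RandomState(0)
feats = rng.randn(1000, 256).astype(np.float32)
feats /= np.linalg.norm(feats, axis=1, keepdims=True)   # unit-length template features

p1 = rng.randint(0, 1000, size=50000)    # index of the first template in each pair
p2 = rng.randint(0, 1000, size=50000)    # index of the second template in each pair

score = np.zeros(len(p1), dtype=np.float32)
batchsize = 10000                         # bound peak memory, as in the notebook
for start in range(0, len(p1), batchsize):
    s = slice(start, start + batchsize)
    score[s] = np.sum(feats[p1[s]] * feats[p2[s]], -1)   # batched cosine similarity
print(score[:5])
```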
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_score(path):\n",
- " with open(path, 'rb') as fid:\n",
- " img_feats = cPickle.load(fid)\n",
- " return img_feats"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step1: Load Meta Data"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Time: 1.73 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load image and template relationships for template feature embedding\n",
- "# tid --> template id, mid --> media id \n",
- "# format:\n",
- "# image_name tid mid\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "templates, medias = read_template_media_list(os.path.join('IJBC/meta', 'ijbc_face_tid_mid.txt'))\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 10,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Time: 63.98 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load template pairs for template-to-template verification\n",
- "# tid : template id, label : 1/0\n",
- "# format:\n",
- "# tid_1 tid_2 label\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "p1, p2, label = read_template_pair_list(os.path.join('IJBC/meta', 'ijbc_template_pair_label.txt'))\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 2: Get Image Features"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 12,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "('loading', './pretrained_models/MS1MV2-ResNet100-Arcface/model', 0)\n",
- "[====================] 100% (469375/469375) - done. \n",
- "Time: 6806.24 s. \n",
- "Feature Shape: (469375 , 1024) .\n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load image features \n",
- "# format:\n",
- "# img_feats: [image_num x feats_dim] (227630, 512)\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "#img_feats = read_image_feature('./MS1MV2/IJBB_MS1MV2_r100_arcface.pkl')\n",
- "img_path = './IJBC/loose_crop'\n",
- "img_list_path = './IJBC/meta/ijbc_name_5pts_score.txt'\n",
- "model_path = './pretrained_models/MS1MV2-ResNet100-Arcface/model'\n",
- "gpu_id = 1\n",
- "img_feats, faceness_scores = get_image_feature(img_path, img_list_path, model_path, gpu_id)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))\n",
- "print('Feature Shape: ({} , {}) .'.format(img_feats.shape[0], img_feats.shape[1]))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step3: Get Template Features"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 34,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Finish Calculating 0 template features.\n",
- "Finish Calculating 2000 template features.\n",
- "Finish Calculating 4000 template features.\n",
- "Finish Calculating 6000 template features.\n",
- "Finish Calculating 8000 template features.\n",
- "Finish Calculating 10000 template features.\n",
- "Finish Calculating 12000 template features.\n",
- "Finish Calculating 14000 template features.\n",
- "Finish Calculating 16000 template features.\n",
- "Finish Calculating 18000 template features.\n",
- "Finish Calculating 20000 template features.\n",
- "Finish Calculating 22000 template features.\n",
- "Time: 7.85 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# compute template features from image features.\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "# ========================================================== \n",
- "# Norm feature before aggregation into template feature?\n",
- "# Feature norm from embedding network and faceness score are able to decrease weights for noise samples (not face).\n",
- "# ========================================================== \n",
- "# 1. FaceScore (Feature Norm)\n",
- "# 2. FaceScore (Detector)\n",
- "\n",
- "use_norm_score = False # if Ture, TestMode(N1) \n",
- "use_detector_score = False # if Ture, TestMode(D1)\n",
- "use_flip_test = False # if Ture, TestMode(F1)\n",
- "\n",
- "if use_flip_test:\n",
- " # concat --- F1\n",
- " #img_input_feats = img_feats \n",
- " # add --- F2\n",
- " img_input_feats = img_feats[:,0:img_feats.shape[1]/2] + img_feats[:,img_feats.shape[1]/2:]\n",
- "else:\n",
- " img_input_feats = img_feats[:,0:img_feats.shape[1]/2]\n",
- " \n",
- "if use_norm_score:\n",
- " img_input_feats = img_input_feats\n",
- "else:\n",
- " # normalise features to remove norm information\n",
- " img_input_feats = img_input_feats / np.sqrt(np.sum(img_input_feats ** 2, -1, keepdims=True)) \n",
- " \n",
- "if use_detector_score:\n",
- " img_input_feats = img_input_feats * np.matlib.repmat(faceness_scores[:,np.newaxis], 1, img_input_feats.shape[1])\n",
- "else:\n",
- " img_input_feats = img_input_feats\n",
- "\n",
- "template_norm_feats, unique_templates = image2template_feature(img_input_feats, templates, medias)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
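The three switches compose as independent transforms of the raw per-image features, whose first half comes from the original crop and second half from the horizontally flipped crop. A condensed sketch of the same logic (the helper `apply_test_mode` and the toy shapes are mine, not part of the notebook):

```python
import numpy as np

def apply_test_mode(img_feats, faceness, use_norm=False, use_det=False, flip='none'):
    d = img_feats.shape[1] // 2                  # [original | flipped] feature halves
    if flip == 'add':                            # F2: sum the two halves
        feats = img_feats[:, :d] + img_feats[:, d:]
    elif flip == 'concat':                       # F1: keep both halves
        feats = img_feats
    else:                                        # F0: original half only
        feats = img_feats[:, :d]
    if not use_norm:                             # N0: strip the feature norm
        feats = feats / np.sqrt(np.sum(feats ** 2, -1, keepdims=True))
    if use_det:                                  # D1: weight each image by its faceness
        feats = feats * faceness[:, np.newaxis]  # broadcasting replaces np.matlib.repmat
    return feats

x = np.random.randn(4, 1024).astype(np.float32)
w = np.random.rand(4).astype(np.float32)
print(apply_test_mode(x, w, use_det=True, flip='add').shape)   # -> (4, 512)
```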
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 4: Get Template Similarity Scores"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 35,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Finish 0/157 pairs.\n",
- "Finish 10/157 pairs.\n",
- "Finish 20/157 pairs.\n",
- "Finish 30/157 pairs.\n",
- "Finish 40/157 pairs.\n",
- "Finish 50/157 pairs.\n",
- "Finish 60/157 pairs.\n",
- "Finish 70/157 pairs.\n",
- "Finish 80/157 pairs.\n",
- "Finish 90/157 pairs.\n",
- "Finish 100/157 pairs.\n",
- "Finish 110/157 pairs.\n",
- "Finish 120/157 pairs.\n",
- "Finish 130/157 pairs.\n",
- "Finish 140/157 pairs.\n",
- "Finish 150/157 pairs.\n",
- "Time: 67.17 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# compute verification scores between template pairs.\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "score = verification(template_norm_feats, unique_templates, p1, p2)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 36,
- "metadata": {},
- "outputs": [],
- "source": [
- "score_save_name = './IJBC/result/MS1MV2-ResNet100-ArcFace-TestMode(N0D0F0).npy'\n",
- "np.save(score_save_name, score)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 5: Get ROC Curves and TPR@FPR Table"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 38,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEaCAYAAAAG87ApAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzsnXl8VNXd/9/fe2fNDgGCJEgQZUtigqCAolAVxBVbtWo3a+3jY/3VWrW2Uq1W26cuTx+1te3TWvdqQdQqto8i2oprQVGCC5uIICBbgKyzz/3+/phkJiELiTJMAuf9es0rc8+ce873fO7kfuee5XtEVTEYDAaDobtYmTbAYDAYDH0L4zgMBoPB0COM4zAYDAZDjzCOw2AwGAw9wjgOg8FgMPQI4zgMBoPB0COM4zAYDAZDjzCOw9AnEZH1IhIUkUYR2SoiD4lIzh55jhWRf4lIg4jUicjfRWTsHnnyRORuEfm0uayPm48HpNn+aSKyqdXxQyLyy+b3pSKizfY0isg2EfmDiLj3UuZIEXlCRGqa2/ueiFwtInY622I4+DCOw9CXOVNVc4AqYBwwu+UDEZkMLATmA0OA4cBy4A0ROaw5jwf4J1AGzATygMnATuCY/deMTilobl8FCbv+X2cZRWQEsATYCFSoaj5wHjAByN0PthoOIozjMPR5VHUr8AIJB9LCHcAjqvobVW1Q1V2qegOwGPh5c55vAYcCX1bVFarqqOp2Vf2Fqj7XUV3NTzFvN/+if1tEjm312SIR+YWIvNH8lLNwXzy5qOp24EVgbBfZbgbeVNWrVXVL83mrVfVrqlr7RW0wGFpjHIehzyMiJcCpwNrm4yzgWOCJDrLPA6Y3vz8ZWKCqjd2spz/wf8BvgULgTuD/RKSwVbavARcDgwAP8KOetqeDeocAp5Bwep1xMvDkF63LYOgOxnEY+jLPiEgDie6Z7cBNzen9SXy3t3Rwzhag5SmgsJM8nXE68JGq/kVVY6o6B1gFnNkqz4OqukZVgyScVFVHBXWTGhGpBTYDTXTtGHraFoPhc2Mch6Evc7aq5gLTgNGkHMJuwAEO6eCcQ4Ca5vc7O8nTGUOADXukbQCKWx1vbfU+AOTw+RmgqgVAFvAGie44ROTrrQbOn2/O29O2GAyfG+M4DH0eVX0FeAj4dfNxE/BvEoPDe/JVEgPiAC8Bp4hIdjer+gwYtkfaoSSeCNJG89PLQ8AkERmgqo+pak7z69TmbC8B56TTDoOhBeM4DAcKdwPTRaSy+fg64CIR+YGI5IpIv+bprpNJDCQD/IVEN9dTIjJaRCwRKRSRn4rIaR3U8RwwUkS+JiIuETmfxID1P9LZMBHxAt8k8TSzs5NsNwHHish/i8jg5vMOF5FHRaQgnfYZDj6M4zAcEKjqDuAR4Mbm49dJDCh/hUTf/wYSU3anqOpHzXnCJAaVV5GYtVQPvEWiy2tJB3XsBM4AriFxA/8xcIaq1uyZ9/M2Y4/jWhFpBLaRcHhnaScb6Kjqx815SoEPRaQOeApYCjTsI/sMBgDEbORkMGQeEfkb8Kqq3p1pWwyGvWGeOAyGDCMixcAUEk8HBkOvJ22OQ0QeEJHtIvJBJ5+LiPxWRNY2h0Y4Kl22GAy9FRG5HFhGYhrv65m2x2DoDmnrqhKRE4BGEqt3yzv4/DTgCuA0YCLwG1WdmBZjDAaDwbDPSNsTh6q+CuzqIsssEk5FVXUxUCAiZh66wWAw9HIyOcZRTGIqZAubaLuQymAwGAy9EFemDegOInIpcClAVlbW+EMOST2Y5OYmAn82NKRmHPp8Pnw+H/X19TiOA4Bt2+Tm5hIIBIhEIsm8eXl5xONxmpqakmlZWVl4PB5qa1Ox4dxuN9nZ2TQ1NRGNRpPpBQUFRCIRAoFAMi07Oxvbtqmvr0+meTwesrKyaGhoIB6PA2BZFnl5eYRCIUKhkGmTaZNpk2lT2tq0cuXKGlUdyD4gk45jMzC01XEJnazAVdV7gXsBxo0bp8uWLUu/dX2A2tpaCgrM2i4wWrTGaJHiYNXCcRxi4TCxYBPRWIxYLMqA4kP3DJfzucmk43gW+L6IzCUxOF7XEg66K1p72IOdV155hVmzZmXajF6B0SKF0SJFb9MiHneIRiI01TcSCYQI1DUQDewmFNkJTgOiYZQIjoawiCHqYBHD7YqiCrY4uF1hLHGwrDjgQcRBABEQ3Kh6ERQRQR0fJD7dp+1Im+MQkTkkgs8NaN7p7CbADaCqfyQRvuE0EqGwAyRCURsMBkNaUFXijhKNOjiqxOMO8XiceCxKNBwlFggSadxJMFBLPFaDFasFCaI0ITgIcWwJATZYUWxcWFYMALEUW22QOJYFIIi6Uzd1BMGDoKiTjUgMdXLwA9kSAK8DOCgCKGKFSQQSSKSp2iAhHABxiKsLRyCuCrhAFLCwLAXbBtvGbecg3n64/EOwvf32qZZpcxyqeuFePle62NHMYDAc2DiOEo7GiccdYnEn0a3S1EAwGCIWixGNhomEm9BwE7Gogzgx0ChoE07cwXbCeO0mbAW3uwHLEsSO4HaHEBFsl8Mxgyy2LH4NcGFLDMUCBEsUVT+goB4sdeFSF7Z68GOBLWBHQaKAC7GCgCK4cNSNWg6iDoqNSAQHC9Shxak4eFAUtVxgu0C8iCsLy+vGlz0Ej78I29Mfj7cI285GZN8+EaSbPjE43hqfz5dpE3oNo0aNyrQJvQajRYovqoWqEosrkWiccCRONBojGg4TaAzQFAoQjkYJhGKoxolHoxCLEglFsCRKLB7FIw14iODVCG47iKWKZSkedwiPxHF7mvC4QogoIhaWONh24hezGzduwBLFURdqu3H8btRn4WgWaMsNtnn7dbFQHLC8KIojAriIATEcojioBWK5cEsuccuF2B4s24PbzsX298PlLcDtG4THXYDHm49lefvcjXx/YxxHH2b06NGZNqHXcDBr4ThKKBIjFI4TaAridedQvWw10UiYaDRKIBDGUYdYLE4kHAKioCE8hPFIBJeA3xXA5wnitcN4XSFcrggiifn6IuASi3yBfgKKDW4LARyXHxA0FxwnC7CbrYqCxElsi2KhEkOtKKqCY9mEJQtVC0cc4hLHFpuoy4VtZxEXF5YnD2w/4vYgto3HlY+Ihcedj217wHbhc+Xjsnx4rRwscZub/X6kzzmO1tPXDnYWLFjAzJkzM21Gr6CvaeE4SjgSJxiOEo7ECTcFCAdDRAIBAk2NBIMBQpEYkVgcidVjESfLqsfviuCy43itKFgOblcc2xXEFguvK4hLLNyiZEliOFRtH+TFESwc9YFaKDaq3oQhEgGJIFYIR6I4Kjh2iKjaqMSIIyAuHMtByMKxBaxsxM5CbQ/i8mC7c7E9PrBdiO3C68nHwoUtrsTN3s7BFg8eOweX+PbrDb6vfS/6Cn3OcbTMTzZAOBzOtAm9hkxo4ThKQyBCIBglEIzhqEM4EKQxEKCxIUAoFCYUChKPRwnHIrgI4beD5Nhhcj1N5LsayHI14nVFyXEpWeJHRFG
PG3XbKG5U3ag2/5tKNHGjt8I4EkFFAUUUYlaMEDYxK0I4rrg8PhAXghd12cRFsD2FWO4sbG8OHm8+LpeXbPcg/HYBLsuPbbn3u4bpxvyPpIc+5zgMhs+LquI4qf77WMwhGI4Ri8Wpa6ynMVhLMNxALLybaGMIS4N4tR5xFA9R/O5GbEfwuSP43QFclmLbiseK4rVsxIqg6gYRHH8O6rdQFcDGcXyJX/w4IDGQCA42MctDgwVRK0jY3oGKB9v2o1Y2uGxcviLE58PrziXLPQDb8pLnGYLXzsMSu8N2zp8/v1dNQTUcePQ5x2HbHf+zHIzk5+dn2oSM4DhKMBwjFI7RGIjQFIxSTyF/f/MjHCdCJFiLz9mGK9KIxwnjliZyPHW4JU62u45sdx0+bxNuN7jFTyyeSx4W8XgWjuMFt0U834+jXhKDsNr8NxtwUHGj4iWGELBCxESJ2GHCVmIwNm5BSLbjtfrjcmejluD1ZqNuF9meQfhd/XBZPkQscl398Vo5+FwFuCzvPtHnYP1edITRIj30uY2cJkyYoEuXmm0LDiQcRwmEogRDMRqaItQ11hOLNBINbUej29BIPZ7oTohauKwoHjtInq8Glzj4vY0ILlx2BLVciCjxeB4ad6GOh1g8H8WFg4WDFxAUF6BE7ShRCyK2Q9gO0WjVEZEg4vIQsx187n7ErTh+zyBsy43PlbgJZbkKW/XXW/jsPNxWFn5XP2zLk1EtDYbOEJF3VHXCviirzz1xtI73crBTXV1NVVVVps1og+MoTcEoDU0RAqEou+t2E4w0EgjW4mYHHv0MTzxIf3sbXg3htQJ4mufg53obyIrnMwgPsVgBjnpxJBt1u1G3hYMLJRuVfJBDQOMEYg4KxKKKumyiLiViR4h4okQI0KTbiUucqBXB5+qHumxcnlx87v6AkOUqxOvKI8vK4XD/4eS6D0Gkb+9v1hu/F5nCaJEe+pzjaB187GBnw4YNafmniMUdIpF4YhwgrgQjUWqamgiEAhBvItBUjxOtxcNuwsEQPhrId9eQZdWT664jL2sb+XgpdEcoJoso/Ym784jHs4lrNo74UIYDShwhGBNwlMaAQ1wh6hZwu9EsF0FPkCgh6nUHjc5uYhImJjE8nn64bT9eO48cdxEb1n3G2CMqcVtZ5NjZZLkGkOXqj9eVt99n8mSadH0v+iJGi/TQ5xyH4fPREm6hKRCltiHM7sYgm3fV0xAKEwwGcOsO+skn2Bon17OLAl8NfitAP992iry7KXEFcCwf8XguMV8OMbs/TiybmL8QVQ9xCnFkMKhNfaMicQuJuwAXjssm7hfifiHqtwh7ozRRS5Ozi8bYDqJOkIg24bPz8dp5+F39cdtZ+O0CstyHkm3nU+o9lGz3QNxWVoeDwluWzKfsWDMgbDDsD4zjOEBQTQwYB4JRdjUF2bCjlkA4Ss3OANGm3fR3b8WyovT3bcPrqaPAU8M4K8DAAZ/gcgcINx1CPNqfSLCEODlotAR1PIRCHsJYOC3T/h0Xlriw7VzU70Z9PuJZLhyvm0bdTYhGgk49O0IraYxuBSDPU4zXzsNn52OJTZ6nhEHu4XjsHDx2DnnuQ3DbWVhivo4GQ1+gzw2OH3XUUfruu+9m2oz9Rstagc07Gti8q576xgiNgSjhUJyYxomHwxT4PqOfezv9fNvJ8uwmz9VIrruWftmbcdkRYvECwsFiYqGBia6iuB8n7kctKzFDyFZAsCwPLk8BdnY+rryBRO0YcYkRkiBNuoudsQ2E4vXURTYSjicWYua4i3BZflziTYwVuArJ8wwhxz2YfO9QslyF+6WbKBgM4vf7015PX8BokcJokeKgHhxv2eDkQGLbzibqAxGawmFWb61hV2OQeBgijQoxC1AsTyOenC2MKljDkQM/Ic+uo9C7GSWXaCSfaGgI8UgB8dhg4jE/McfFjkjzDdsBO+7CFg+unAF4BpbgzumHuLwEtJbdsc0EqaMpWkMotpaG6DbqGzchCPneQxNPBlYWOe7BlOROJNs9kBx3EV47r9eMHdTV1ZkbRDNGixRGi/TQ5xxH6124+gL1jWE2b29k+64ADaEwm+vqcFSJxeNkRbcz2LeRbE8tjjtEP+92jpIIAwfW4JUIthXF5WsgHs8lGu9HNDyQWKwfsWA5cfGzPSCgYEW9iPhweb14BxRg5fTDlTMIT14RatnURzaxNfA+9ZGNhGIf0BB9icYdW1AST5sF3mEUeA/F7+pPf98Icj2HUOA9FI+V02scw95YsmSJWfTWjNEihdEiPfQ5x9FbicZjrNy6g+3BBnbUB9iyIQj1bsAhz78Nf94mBvo3MeOQNQzP+Sh5XrBhGLHGoUSdfjihQ4lrNsG4RUCsRIS5JhDHBnHh9hVh52aRlVOIJ38gC199izNnfZlIvIldoXXsCG+gKbqRmK6lsWkbgdoaGqPbAOjvG0F/72EMyT6KbPdAcj2H4HMV4LbMrzGDwdAzjOPoJvWNYT6p2c32QCN1uyMgys7GINGoQyykRJuU/p5dZOfUMDxnDSeWfkChfwsuK4zjuIkH8ok0DiVSM4btm6cRs7MhP/H0ZJGDnZOFKysff//BWO5s3NmFifDPbn+bX/3B2G4+a3yXrYFFNB22nCc/epqoE8Rl+Sn0jSDPU0yuezBFWeV47TzyPEPIdu+TbYYNBoMB6IOOIysrK21lP/fph+wOBtm9NUJ8t40TFHApoVgUb8RHzI6Bx8FnQ25uHbmeRkry6jnC/RqH5FYDoJFsYo25BDdXsFNOIV7QHJQxbiHiwcrLwZ3fD68vC29+Cd6CEsRqfxniTpTtwQ/ZUb+KYGwXgehOakIfEXOCiS4l73AOyzqN0UOPI9s18IAMUNcTKisrM21Cr8FokcJokR763KyqfRFyJOrEWbpjA8tqNuKg7GhqxL0tG4I2WcGEY8ofYDFm6CYKshpxS4zB1ntk2buwwisRDQIQD2cRDQ8kEhxKpK6UaGAI6nWgXz3YissahLv/AHKHjcfly9urXVualvPmlt8Qiaf2VR/oH0OuZzADfKPIdg+kwDssGfrCYDAYustBPauqtrb2c50XdeJ8uOsz3ty2jvd2fYaiVHgPxd6YRX5jDrZEOXfC8xR4tuJ3NWJFVgCg0UOJN+ThRBzqdlQStSYS97jBEwNfFADLlYt7yEB82d7EpjO5g/AVjsBytY9b1BDZwqbGtwjF6gjHG3A0SmN0Ow2RLUScRgq8wzh+yDUU+kbu9SnCREFNYbRIYbRIYbRID33OcfSUZ9Yv5/Wta2mIhrEci9EUc0xgDLkaIrdpFaOGr2RIv114o28DEPNcTzzuJbDeofG9fqhaMHIDOqAOBoHLnU1W/6F4cgfhyRuC7et4SqqjcerCm9jcuJSm6HbC8Xo2Nr4FKHmeYoZkH4Xf1Q+/qz9Dcycl1j94S8xgtcFg6PUcsI5jdzjAA6vf5KO6HVw8vJjhzhpCO5eTZ6+h4JDETKM4eeAqI940hoYdJ9P4ThFkR3D5A8Rzd8NRW1FvYt1Iv9Ez8fU/rFvTU1fu+jvVOx4BEqumC31HMMA/hu
KcoxmcXYnfVZC+hhsMBkOa6XOOw+3uuvvmiXXv8t7OTeyOBCny5HHZsGmMDPyIaKSWoDMMa8A5NO0YQ8P7u1CvoLm7oaARPDUwuSZRSFMQK68//uKj8eQdgie3CLE63wekNryB7YEVrNn9PMHYLmIa5rD8Exk/6Dv7bI+FjigqKkpb2X0No0UKo0UKo0V6SOvguIjMBH5DYgf7+1T1tj0+HwY8AAwEdgHfUNVNXZXZ2eC4qlITauSu9//FtILR7P7UYfPmxHTXb1bdyqqGSxhz2BioWU6kYTMAHsuPq3Y3nmAMb/YArGNOh4JBXYbVVlU2Ni5mW9P71EY+pSa4GoB8z1AG+kdzRL9TyHYPMl1OBoOhV9EnBsdFxAZ+D0wHNgFvi8izqrqiVbZfA4+o6sMiciJwK/DNrsptvXJ8W7Ce5zeuQNVhc1MdG5t2MyLbRVFgJYOy13Hy8V4KPe9BaBvZhW7qN72K1FlYm4vIrV2BL7sBOWI8Mmka0m9wl+1RdagJruGljT8DoDhnAodkj6NqwNcp9B+RkQB9ixcvZtKkSfu93t6I0SKF0SKF0SI9pPNudwywVlXXAYjIXGAW0NpxjAWubn7/MvDM3gqNRqPJ9xsbd/NZUy3n5a5gUNar5OesT+RxvDR5RuK3j+azDRWIHonqR7B5ENaWbPJL6vGd+i0YUILYnUsQiO3ivR1zWF//KkpiPUah7wimlszGa+d2X4k0sW3btkyb0GswWqQwWqQwWqSHdDqOYmBjq+NNwMQ98iwHvkKiO+vLQK6IFKrqzs4KDSHc/tLLBGuVQzw7OH/wcxRHPmL5jtOpc76P7T+Cgf38lNg72blhETRmYa/Jx+vZif+YItznTkJcHY+TOBpjQ/2b1IRW80ndIuIaIds9kMmH/IBDsqvw2NlfRA+DwWA4IMj04PiPgN+JyLeBV4HNQLvwtyJyKXApQEnpKApr6/FIjFG57zMoL8Juz2y27o6ANOHf8T79PsklHN2NFFlsrgmzebiQnz+CaROOp7q6mg0bNgCgEqNqSgnra99gS/TfyfoG28cxtv+XWbs4jsZ9VFPDlqL3mTRpEosXL27zK2bWrFmsX7+e5cuXJ9MmTpxIfn4+CxcuTKYNGzaMqqoqFi1aRF1dHQBer5eZM2eyatUqVq9encw7depUAF555ZVk2qhRoxg9ejQLFiwgHA630ad1mwBmzJhBXV0dS5YsSaZVVlZSWlrK/Pnzk2lFRUW9rk35+flMmzbtc7Vp/vz5B1ybPs91atHiQGrTF7lOLWu/DqQ2fZ7rtC9J2+C4iEwGfq6qpzQfzwZQ1Vs7yZ8DrFLVkq7KnVCepUv/Ngp1DQF1EZfpNK05GacusRd5+NMAlHyCDq4jK3cw+RPO7bSs92rmsmrXs+R7hnJo3rEcXjADWzwd7jBnMBgMfZk+MTgOvA0cISLDSTxJXAB8rXUGERkA7FJVB5hNYoZVl8SCg9j67F2gLrAFy+tCPatxZ21GPHGcyQIi5JYeR27xuE7LCccbaIhsoazwPMoKv/xF2pkx1q9fT2lpaabN6BUYLVIYLVIYLdJD2hyHqsZE5PvACySm4z6gqh+KyC3AUlV9FpgG3CoiSqKr6v/trdxY1MuA70zEsgI4dZ8R2PIBgeB2NH8wVt4gssQi99BjsFy+zuxiV2gtr3323wRjuzks/8R91eT9zvLly80/RTNGixRGixRGi/SQ1jEOVX0OeG6PtBtbvX8SeLInZcZdQaz35tOw7X2i+QXEvV78rlxyjjgZO7ew0/MaI9t4Z/sDfNaU2HZ2gG8kJw69iTxPcU+qNxgMhoOeTA+O9xi/RmhwamkqGULe8Cl4covw5Ha+BqM2/Cmf1L3C6t1/x7Z8nDj0JoqyyvejxQaDwXBg0eccR9zrwSk5nLycQeQMOXKv+Rdu+CnZ7oGUFZ7DyIJT8br2Ht68r9Ayg8ZgtGiN0SKF0SI99DnHYWcNoN/Ik9ulqypN0e1saapme3AlgWgNioOjUWaW3oEtB95GR/n5Zl+OFowWKYwWKYwW6aHzoEy9lPr6+g7TdwRX8Nz6q3h3x8O4LT+l+Sdw1KBvc/rwuw9IpwG0mQN+sGO0SGG0SGG0SA997omjNdF4gC1N1ShKbXgDA/1j+NLQn2XaLIPBYDig6XuOQxxqgh+xM7SG2vAGtja9xwD/SABKco7JsHEGg8Fw4NP3HIc3wJKtvyfmhCnOGc/4Qd+hJPfgdBjDhg3LtAm9BqNFCqNFCqNFekjrfhzp4LDy/rp06Tv09w3PtCkGg8HQZ9iXIUf63OC4OzrAOI1mFi1alGkTeg1GixRGixRGi/TQ5xxHPO5k2oReQ0sETYPRojVGixRGi/TQ5xyHwWAwGDJLn3McltXnTE4bXq830yb0GowWKYwWKYwW6aHPDY5PmDBBly5dmmkzDAaDoU9xUA+Oh0KhTJvQa1i1alWmTeg1GC1SGC1SGC3Sg3EcfZjWW0ke7BgtUhgtUhgt0kOfcxwGg8FgyCzGcRgMBoOhR/S5wfFx48bpsmXLMm1Gr6C2tpaCgoJMm9ErMFqkMFqkMFqkOKgHxw0Gg8GQWfqc42hoaMi0Cb2GV155JdMm9BqMFimMFimMFumhzzkOg8FgMGQW4zgMBoPB0CPS6jhEZKaIrBaRtSJyXQefHyoiL4vIMhF5T0RO21uZPp8vPcb2QUaNGpVpE3oNRosURosURov0kLZZVSJiA2uA6cAm4G3gQlVd0SrPvcAyVf1fERkLPKeqpV2Va0KOGAwGQ8/pK7OqjgHWquo6VY0Ac4FZe+RRIK/5fT7w2d4Kra+v36dG9mUWLFiQaRN6DUaLFEaLFEaL9JDOrWOLgY2tjjcBE/fI83NgoYhcAWQDJ++tUMcx+3G0EA6HM21Cr8FokcJokcJokR4yvef4hcBDqvo/IjIZ+IuIlKtqG+8gIpcClwIMHDiQ+fPnJz+bOnUq0Hba3ahRoxg9ejQLFixIfnHy8/OZNm0a1dXVbNiwIZl3xowZ1NXVsWTJkmRaZWUlpaWlbeopKipi0qRJLF68mG3btiXTZ82axfr161m+fHkybeLEieTn57Nw4cJk2rBhw6iqqmLRokXJzWW8Xi8zZ85k1apVbWLqdLdNLRxIbfoi12n+/PkHXJs+z3Vq0eJAatMXuU61tbUHXJs+z3Xal6RzjGMy8HNVPaX5eDaAqt7aKs+HwExV3dh8vA6YpKrbOyt31KhRagKXJVi0aBHTpk3LtBm9AqNFCqNFCqNFin05xtEtxyEiHuBQVV3b7YJFXCQGx08CNpMYHP+aqn7YKs/zwOOq+pCIjAH+CRRrF0aZwXGDwWDoOft1cFxETgfeB15sPq4Skaf3dp6qxoDvAy8AK4F5qvqhiNwiImc1Z7sG+A8RWQ7MAb7dldMACAQCe6v6oGFfP372ZYwWKYwWKYwW6aE7s6puITGoXQugqtXA4d0pXFWfU9WRqjpCVf+rOe1GVX22+f0KVT1OVStVtUpVF3ZdIkQike5UfVDQuh/2YMdokcJokcJokR664ziiqlq7R1rfCqlrMBgMh
n1Gd2ZVrRSRrwKWiAwHfgAsTq9ZBoPBYOit7HVwXESygRuBGc1JLwA3q2owzbZ1yFFHHaXvvvtuJqrudQSDQfx+f6bN6BUYLVIYLVIYLVLs75Xjp6jqT1R1XPPrOuDUfVH55yEej2eq6l5Hy1xvg9GiNUaLFEaL9NAdx3FDB2nX72tDuktTU1Omqu51tF6QdLBjtEhhtEhhtEgPnY5xiMgpwEygWETubPVRHmDifhgMBsNBSleD49uBD4AQ8GGr9AagXYh0g8FgMBwcdOo4VHUZsExEHlPV0H60qUuysrIybUKvobKyMtMm9BqMFimMFimMFumhO9Nxi0Xkv4CxQHIXJVUdmTarusDj8WSi2l5JaWlppk3oNRgtUhgtUhgt0kN3BscfAh4EhMRsqnnA42m0qUtaIl0a2kZAPdgxWqQwWqQwWqSH7jiOLFV9AUBVP1bVG8jgdFyDwWAwZJbudFWM81EXAAAgAElEQVSFRcQCPhaRy0hEus1Nr1kGg8Fg6K10x3FcRWJ3vh8A/0Vii9fvpNOornC73ZmqutdRVFSUaRN6DUaLFEaLFEaL9PC5NnISkWJV3ZwGe/aK2Y/DYDAYes5+CzkiIkeLyNkiMqD5uExEHgEythzTrBxPsXixiTXZgtEihdEihdEiPXTqOETkVuAx4OvAAhH5OfAysBzIyFRcgGg0mqmqex2t9zU+2DFapDBapDBapIeuxjhmAZWqGhSR/sBGoEJV1+0f0wwGg8HQG+mqqyrUEjpdVXcBa4zTMBgMBkOng+MiUgv8q+UQ+FKrY1T1K2m3rgPM4LjBYDD0nH05ON5VV9U5exz/bl9U+EUxe46nWL9+vQmp0IzRIoXRIoXRIj10FeTwn/vTkO4SCAQybUKvYfny5eafohmjRQqjRQqjRXroTsgRg8FgMBiSpNVxiMhMEVktImtFpN0eHiJyl4hUN7/WNI+rGAwGg6EX052QIwCIiFdVwz3IbwO/B6YDm4C3ReRZVV3RkkdVr2qV/wpg3N7Kzc7O7q4JBzwTJ07MtAm9BqNFCqNFCqNFetjrE4eIHCMi7wMfNR9Xisg93Sj7GGCtqq5T1Qgwl8TakM64EJizt0Jt2+5G1QcH+fn5mTah12C0SGG0SGG0SA/d6ar6LXAGsBNAVZeTmJq7N4pJLBpsYVNzWjtEZBgwnFbTfTujvr6+G1UfHCxcuDDTJvQajBYpjBYpjBbpoTtdVZaqbhCR1mnxfWzHBcCTqtphuSJyKXApwMCBA9tszjJ16lQAXnnllWTaqFGjGD16NAsWLCAcTvSu5efnM23aNKqrq9mwYUMy74wZM6irq2PJklT4rcrKSkpLS9vUU1RUxKRJk1i8eHGbMAazZs1i/fr1LF++PJk2ceJE8vPz23xphw0bRlVVFYsWLaKurg4Ar9fLzJkzWbVqFatXr+5xm1o4kNr0Ra7T/PnzD7g2fZ7r1KLFgdSmL3KdWjZ/O5Da9Hmu0z5FVbt8AU+R6HZ6F7CBHwJPdOO8ycALrY5nA7M7ybsMOHZvZaoqI0aMUEOCZ555JtMm9BqMFimMFimMFimApdqNe2x3Xt3pqvoecDVwKLANmNSctjfeBo4QkeEi4iHxVPHsnplEZDTQD/h3N8o0e463YtiwYZk2oddgtEhhtEhhtEgPe92PQ0T6ayJWVc8LFzkNuJvEk8oDqvpfInILCc/3bHOenwM+VW03XbcjTMgRg8Fg6Dn7bT+OZt4WkedE5CIR6dGWsar6nKqOVNURqvpfzWk3tjiN5uOfd9dpADQ0NPTEhAOaRYsWZdqEXoPRIoXRIoXRIj3s1XGo6gjgl8B44H0ReUZELki7ZZ0Qj+/rcfm+S8sAmsFo0RqjRQqjRXro1spxVX1TVX8AHAXUk9jgyWAwGAwHId1ZAJgjIl8Xkb8DbwE7gGPTblknWJYJr9WC1+vNtAm9BqNFCqNFCqNFeujO4Ph64O/APFV9bX8Y1RVmcNxgMBh6zv4eHD9MVa/oDU4DIBQKZdqEXsOqVasybUKvwWiRwmiRwmiRHjp1HCLyP81vnxKRv+352k/2tcM4jhStV5Ie7BgtUhgtUhgt0kNXIUceb/7bK3b+M+w/otEomzZt6lNOuqSkhJUrV2bajF6B0SLFwaiFz+ejpKQEt9udtjq62gHwrea3Y1S1jfMQke8DvXKHQMMXZ9OmTeTm5lJaWsoeMcp6LbW1tRQUFGTajF6B0SLFwaaFqrJz5042bdrE8OHD01ZPd8Y4vtNB2iX72pDukpvbozWIBzQtgc72NaFQiMLCwj7jNABycnIybUKvwWiR4mDTQkQoLCxMe29Bp08cInI+ifhSw/cY08gFzE59Bzh9yWkYDIYU++N/t6sxjrdI7MFRQmInvxYaSESzzQgm5EiKV155hVmzutob6+ChsbHxoOqS6AqjRQqjRXroaozjE+AT4KX9Z47BYDAYejtdTcd9pfnvbhHZ1eq1W0Q+V7Rcg6E7rF+/Hr/fT1VVFTt37qSqqoqqqioGDx5McXFx8jgSifSo3AceeICtW7cmj6dMmdJuAPGMM87o8S/Ub3zjGzzzzDN7zXfFFVfw5ptvJutuvR/24sWLOfnkkwHYvn0706ZNIzs7mx/+8IdtyigpKaGiooLy8nLKysq48cYbk5v2OI7DKaecwrBhwzj77LPbnDdlyhRGjRqV1O7pp58G4KKLLmLgwIFUVVW1yX/VVVfx6quvdqstANu2bcPlcnHfffcl02KxWDst77vvvjZteuihhygvL6eiooKjjjqKu+66q9M6u8udd95JWVkZZWVl3Hvvvcn0ZcuWMWnSJCoqKpg1axaNjY0dnt+icVVVVZtrdMMNN7T5/r3wwgvJz6qrq5k0aRJlZWVUVFQQjUZpaGjgtNNOY/To0ZSVlXH99dd3WN+8efMoKyvjhBNOYPfu3QB89NFHfO1rX0vmCYVCnHDCCclYfatXr6aqqipzT1OdbdRBYuc/SIREb/faVxuC9PRVVlbW4w1MDlRWrlyZlnJXrFiRlnK7yyeffKIdXeebbrpJ//u//7vDcwKBwF7LPe6443TZsmVtjisqKvTf//63qqru3LlTJ0yYoPn5+T2y9+tf/7o+/fTTXebZvn27HnvssW3qHjp0qC5cuFBVVf/973/rSSedpKqqDQ0N+vrrr+s999yjV155ZZtyiouLdffu3aqqWldXp1/96lf1O9/5jqqqOo6jL730ks6dO1dnzZrVZdtbWLRokS5ZskQrKyvbpK9du1ZnzpzZrbaoqv72t7/VKVOm6IknnphMi0aj7bT885//nGzT3//+dx0/frxu2bJFVVWDwaD++c9/7rDO7rJs2TI98sgjNRAIaCQS0RNOOEHXrVunqqpVVVX6+uuvq6rqn/70J/35z3/eYRmtNW7N9ddfr3fddVe79EgkouXl5free++pquqOHTs0Ho9rQ0ODLlq0SFVVQ6GQTp48OXm9W3P88cdrMBjUBx98UP/whz+oqup5552nH3/8cZt8N9xwg86dOzd53JG+LXT0P8z+2MhJVZ3mt0ObHUWcxK5+/wlk
p82T7QWfz5epqnsdo0ePzrQJGefhhx/mmGOOYfLkyVx++eU4jkMsFuOb3/xm8pf5b3/7Wx5//HGqq6s5//zz2zytXHDBBcydOxeAJ598knPPPTdZtuM4XH311clfxE8++WQy/fLLL2f06NFMnz6dmpqa5Dlvv/02U6dOZfz48Zx66qnJLUSfeOIJTj311Da2X3vttfzyl79s16acnByOO+64vX7X8/LyuPfee5k3bx51dXWICCeddBKFhYXd1m/q1Kn079+/XfqIESPYsmULO3bsaPdZR22ZM2cOd999N+vWrWPLli3dqvtXv/oVd955J4MHDwYS/9vf/e53u217R6xcuZJJkybh9/txu91MmzYt+XT18ccfc9xxxwEwffp0nnrqqS9UVwvPP/8848ePp6KiAoABAwZgWRY5OTnJmY9er5dx48axadOmdudblkU4HCYQCOB2u3n55ZcZNmwYhx12WJt8Z599No891jviy3Znz/FngKNFZATwIPAP4K/AGek0rDPq6+szUW2vZMGCBcycOTPt9fzna3/d52X+6fiv7T3TXvjggw94+umnefPNN2lqauLaa69l7ty5jBgxgpqaGt5//30gNZf/nnvu4Xe/+12bbpnp06dzySWX4DgOjz/+OPfffz+33norkLhBrly5kuXLl7Njxw6OPvpoTjjhBBYtWsQnn3zCihUr+Oyzzxg7diyXXXYZ4XCYK6+8kmeffZYBAwbw2GOP8bOf/Yx7772XN954g2984xtt7D/++ON58sknef3113G5uvOv2J78/HyGDRvG2rVrGT9+PABNTU0d5j3//PPx+/1AYp+KvXVzjBs3jjfffLPdBIw927J+/Xp27drF+PHjOe+885g3bx5XXnnlXm3/8MMPkzZ3xSOPPMKdd97ZLn3UqFE8/vjjbdIqKiq4+eab2bVrF16vl3/84x+ccMIJQOKH1j/+8Q/OOOMMnnjiCTZu3NhhfSLCiSeeiIhw+eWXc8klqdUHv/nNb3jggQc45phj+J//+R/y8/NZs2YNqsqMGTOoqanh61//Otdcc02bMnfv3s1zzz3Hj3/843b1XXfddZx44okUFxfz6KOPcs455/DEE0+0y1dZWcnixYv3qtf+oDvfVkdVoyLyFeAeVf2tiGRsVpXjOHvPdJDQ0redbvbFTT4dvPTSS7z99ttMmDCBeDxOJBJh6NChnHLKKaxevZof/OAHnH766cyYMaPTMtxuN5MmTWLu3LnE43FKSkqSn73++utceOGF2LbN4MGDmTJlCkuXLuXVV1/lwgsvxLIsSkpKmDZtGpD4tfvhhx8mxypal7dlyxYGDhzYrv7rr7+eX/ziF9x8882fWwfdI1DpnsctPP744+3GMrpi0KBBfPbZZ+3S92zL3LlzOf/884HEE9zll1/OlVde2em00J5OF/3Wt77Ft771rW7lLS8v5+qrr+bkk08mJyeHiooKbNsGEuMpV155JTfddBOzZs3qdGX14sWLKS4uZuvWrUyfPp0xY8Zw7LHHcsUVV3DzzTcjIsyePZtrr72We++9l1gsxhtvvMGSJUvw+Xx86UtfYsKECcmnjWg0yvnnn88111zT4Va2M2fOTP4AfOCBB5g1axYffvghd911F/379+c3v/kNfr8fl8uFiBAMBpM/ADJFdxYAxkTkPOCbJJ42ANK3lt1g6Caqyne+8x2qq6t57bXXWL16NT/72c8oLCzkvffe4/jjj+f3v/89//mf/9llORdccAFXXHFF8ub3Rew58sgjqa6uprq6mvfff5/nn38eAL/f3+GirBkzZlBbW8vnjfhcV1fHxo0bOeKII76Q7R0RCoU6vEHt2ZY5c+Zw3333UVpayle+8hXeffdd1q1bh23bWJZFLBZL5t21axcDBgwAYOzYsbzzzjt7teORRx5JDki3fnV2vS699FLeffddXn31VfLy8hg5cmSyvhdffJF33nmHc889l8MPP7zD84uLiwEYPHgws2bN4q23EkE0ioqKkm36j//4j2R6SUkJU6dOpbCwkOzsbE499VTeffddIPGduOSSSygvL+f73/9+l+1samri0Ucf5bLLLuPmm2/mkUceYeLEicmuVIBIJNIrQsV3d+X4l4A7VHWdiAwH5qTXrM5p+fVgSHRTHMycfPLJzJs3j5qaGmzbZufOnXz66afs2LEDVeW8887jlltuSf4T5+bmdrgOaNq0aVx33XXtbkTHH388c+fOxXEctm3bxhtvvMGECRM44YQTePzxx3Ech82bN/PKK68AiRvT5s2bkzeUSCTChx9+CMCYMWNYu3Zth+24/vrrueOOO3rc/oaGBr73ve9x3nnnkZeXl0zfV3vWrFmzhvLy8nbprduyYsUKYrEYmzdvZv369axfvz7ZZQhwwgkn8Ne/Jro6A4EATzzxBF/60pcAmD17Nj/60Y+S40DhcJj777+/XX3f+ta3ks649WvPbqoWtm/fDiS60BYsWMAFF1zQJt1xHH75y19y2WWXtTu3sbExOduqqamJF198MalB67Gbp59+Opl+6qmnUl1dTTAYJBaL8eqrrzJ27NhkG0OhEL/+9a87UTnFbbfdxtVXX43L5SIQCCAiWJZFIBAAEjPXiouLe8eeRN0ZQSfRpTW6+eXaVyPzn+c1fvz4DmcRGPYdfWlW1WOPPaaVlZVaUVGhRx11lL711lv6zjvvaFVVlVZWVmpVVZW+8MILqqr6+OOP68iRI7WyslLD4XCHM41az1SJx+N61VVXaVlZmZaXl+sTTzyRTL/ssst01KhROn36dD3llFOSs6reeeed5GytsWPH6v3336+qqv/617/0oosuStbTum7HcfTII49MzqpSTczs6devn+bk5GhxcbGuWrUqmV5eXq7l5eU6duxYveGGGzQUCiXPmzRpkg4YMEB9Pp8WFxfrSy+91K6+1px77rk6ePBgdbvdWlxcrA8++KCqJmYBjR49WmOxWLtzWrflhhtu0Ouvv77N5++8846Wl5erquqnn36qp556avIa7Tkr6b777tOxY8fq2LFjtaysTO++++529fWUyZMn65gxY7SyslJffvnlZPqvf/1rHTlypB5xxBH605/+VB3HSdp45plnqqrqmjVr9Mgjj9QjjzxSx44dq7feemvy/AsvvFDLy8u1oqJCZ82alZwNpqr60EMP6ZgxY7SsrEyvu+46VU18j4GkLZWVlfrAAw90aHNrG1RV58yZo2PHjtUpU6ZoTU1NMu3HP/5xMk8mZ1V1x2kcD6wH3gDeBNYBx+0rA3r6GjNmTIdCHYx0dCPYF/RWx9EVTU1NabJm3+A4jh577LFaV1eX9rr2hRbz5s3rdLrq/mzLF6W3fy96wllnnaVr165NHvfK6bituAs4TVWPU9VjgdOB3+zb557u09NFXwcyGzZsyLQJacG2berq6no0kNvbvxciwq9//Ws+/fTTtNe1L7RQVa666qoOP9ufbfmi9PbvRXcJh8Oce+65jBgxAkgsAJwwYQJFRUUZsac7s6o8qrqi5UBVV4qIpzuFi8hMEk7GBu5T1ds6yPNV4Oe
AAstVtXdO4THsN4YOHdrpVMm+zOTJkzNtQrf56le/2uXnfaktBwJer5dvfvObyeNRo0ZRXV2dMXu64zjeFZE/Ao82H3+dbgQ5FBGbRHDE6cAm4G0Reba1ExKRI4DZJLq+dovIoJ42wGAwGAz7l+50VV1GYlzjx82vdSRWj++NY4C1qrpOVSPAXGDPUK7/AfxeVXcDqOr2vRXaevbIwU5X6xMONsz3IoXRIoXRIj10+cQhIhXACOBpVe3pfMFioHV/wyZg4h55RjbX8waJ7qyfq+qCrgptCfJlSMzhz/RCoN5CPB7vHdMUewFGixRGi/TQ1UZOPyWx09+7JEKO3KKqD6Sh/iOAaST2/XhVRCpUtc1GUSJyKXApwMCBA5k/f37ys5bVmS1z6SHR/zd69GgWLFiQXF2dn5/PtGnTqK6ubjOoPGPGDOrq6liyZEkyrbKyktLS0jb1FBUVMWnSJBYvXpycdw4wa9Ys1q9fz/Lly5NpEydOJD8/n4ULFybThg0bRlVVFYsWLaKurg5I9FvOnDmTVatWsXr16h63qaX+fd2mkSNHEg6HCQaDybTs7Gxs224T8sXj8ZCVlUVDQ0PSoYsI+fn5BIPBNna27MTWOiKp1+vF7/dTV1eXXO1s2za5ubkEAoE2A5t5eXnE4/E24TT8fj9er5fa2tTXxeVykZOTQ2NjY5uFZwUFBQdNm/YMOXIgtOlAvE7pbFMgEGD+/Plt7nv7lM6mWwEfAtnN7wcCb/dkuhaJgIgvtDqeDczeI88fgYtbHf8TOLqrckeMGNHh9LODkWeeeSYt5faG6bg+n08rKyu1pqYmOQe+qKhIhwwZkjwOh8PJczqKZron999/f5u598cdd5yWlpa2yXP66aenJTququr3v/99feONN5J1H3PMMcnPWkfHVVX9xS9+oSNGjNBRo0bpiy++qKqJ6ZeWZWllZWVybcBdd92l8XhcVVW3bdumU6dO1aysrA6j6paXlye1W7x4sS5dulQnTpyoZWVlWlFRkVynoppY37FndNbWnH322bp+/frk8dtvv61A0lZV1Y8++qhd1N3WEWYdx9Hbb789ubZmwoQJ+uijj+5Vx71xzTXXaFlZmZaVlelDDz2UTH/xxRd13LhxWlZWphdffLFGo9F257bWuLKyUs8+++zkZ47j6E9+8hM94ogjdPTo0fq73/1OVVU/+OADnTRpkno8nnbrVP7v//5PR44cqSNGjNA77rijQ3uvvvpqraio0G9/+9vJtAcffFDvueee5PGyZcuSUZBVVR999FEdMWJEuyjILWRsHQfw7h7H7/So4MTTxDpgOOABlgNle+SZCTzc/H4Aia6twq7KNY4jxYHsOHoaVr07jqOvhFVfvny5jhs3TsPhsK5du1YPP/xwjcfj7ebtb926VadNm6a33HKLqqbCsd9+++1dhmNvYdWqVcl1ARs3btSioiKtr69XVdWXXnpJL7vssg7bUl1dreeee26btKuvvlqnTJnS5ua2N8dxzz336MyZM5N11tbW6sMPP9ypht3hmWee0VNOOUVjsZg2NDRoZWWlNjQ0aCwW0+Li4mR7Z8+e3captNDV2oh7771XL7744uTCwW3btqlq4jq8/fbb+pOf/KSN44hEIjp8+HBdv369hkIhLS8v19WrV7cps6amJhm+/qKLLtIVK1ZoY2Ojnnjiie0c27Rp03TTpk3J4xdffDFjjqOrzr/DRORvza+ngRGtjv/WxXktTzIx4PvAC8BKYJ6qfigit4jIWc3ZXgB2isgK4GXgWlXd2VW5WVlZe6v6oKGysjLTJmSclrDqU6dOPWDCqs+fP58LL7wQj8fDiBEjOPTQQzuM6VRUVMSf/vQn7rnnHiAVjj03N7db2o0aNSq5LqCkpITCwsJkW6ZNm8aCBQs6HFN87LHH2kTMdRyHp556iocffpjnn3++22snfvWrX/HHP/4xaW9+fn63gxl2xooVK5g6dSq2bZOTk0N5eTkLFy5k+/btZGdnJ9v7ecKq/+///i833nhjMkjjoEGJSaBFRUVMmDChXYTjxYsXM2bMGIYNG4bX6+WrX/1qm65iSHSPhcNhVDUZVv2OO+7gqquualfeGWec0WmYlf1NV4Pj5+xx/LueFq6qzwHP7ZF2Y6v3Clzd/OoWHk+3lpAcFJSWlu6XeuJ3XrL3TD3Evrp9TKKe0jqsusvl4tJLLz0gwqpv3rw5GXEXEjf1zZs3M27cuHYajBw5kmAwyM6dO5P7cHQW9fX444/Htm2ysrLa7N4HJI9bvlO2bVNaWsoHH3zQ7gfKG2+8wcUXX5w8fu211xg1ahSHHXYYU6ZM4fnnn28Xin1Pdu3aRTQa7TBa7J7cdtttbQL9tfClL32p3Y6BlZWV3Hbbbfzwhz+ksbGR119/naOPPpqzzz6bYDDIsmXLqKqq4qmnnup0rVBTUxPjx4/H4/Hw05/+lDPPPBOATz75hEcffZRnnnmGQYMGcc899yQdUUds3ryZoUOHJo9LSkrajIVCYvxj+vTpjBs3jhkzZuDz+Vi2bFmH0ZInTJjA3XffzdVXd/t2mTa62nP8n/vTkO7SejDqYGf+/Pl7/QfdF+yLm3w6MGHVE6i2DaPeEhRvT1577bUO9+DYvHkz3/72t3nsscfahDxvCau+p+PYsy1z5sxJBhK84IILmDNnDrNmzdpnYdWvu+46rrvuum7lPe2001i6dCmTJ09m0KBBjB8/PhnR9q9//StXXHEFkUiE6dOndxgw1bZtNmzYwJAhQ1i7di0nnXQSFRUVlJaWEgqFyMnJYenSpcybN4/vfve7vPzyyz1qS0fMnj2b2bNnA3DxxRfzy1/+kj/96U/885//ZNy4ccnPOgtznwnMPDVDn0X1wAyrXlxc3ObX8KZNm5KhvvdkzZo1ZGVl9WjXv9bU1dVx+umnc/vtt3P00Ue3+aw7YdWj0Sh/+9vfuPHGGyktLeWHP/whzz33HE1NTRQWFib30G6hJax6//79cbvd3Qpbctttt3UYVr2zkCg33ngj1dXVLFy4EMdxkmHVp0yZwuuvv85bb73Fcccdl0xvjYgwZMgQAA4//HCOP/745Iyk4uJizjkn0RFzzjnn7HWmUk+uI8DSpUtxu93J2Y/z5s1j5cqVfPLJJ0Dn1yMTGMdh6LO0DqsOHDBh1c866yzmzJlDJBLh448/ZsOGDR3ulLd9+3a+973vccUVV/RUOiAR/2jWrFl897vf5ctf/nK7zz/66CPKysrapbduy4svvsjRRx/Nxo0bWb9+PZ9++ilnnnkm8+fPp6CggH79+iX12blzJwsXLkxu33rddddx+eWXJ69JfX09f/nLX9rVd91113UYVn3PbiqAWCzGrl27AFi2bBmrV6/mpJNOAlJh1UOhEHfccUeHYdV37dqVnPa6Y8cO/v3vfzNmzBggsXVryxPGyy+/vNetmydNmsSKFSvYsGED4XCYefPmcdZZZ3Wa/8Ybb+SWW24hEokkN6wTkeQTZGdh7jNBt/erFBGvqu6fLee6oLP+24ORTAU46y1UVFRw0003cfLJJxOLxf
B6vfzxj3/Etm0uueQSVBUR4fbbbwcS3QDf/e538fv9yZs7JPavuPbaawHazL8/99xzWbx4MUceeSQiwp133smgQYM499xzefnllxk7diyHHnpoMm6T1+vlySef5Ac/+AH19fXE43GuueYaysrKOP3003n44Yf59re/3a4dZ555Jj/72c+Sx5WVlZx99tmMGTMGl8vFH/7wByzLwnEcGhoaqKqqIhqN4na7ueiii9ps01pSUkIgECAajfLkk0/yz3/+k1GjRnWo35w5c/j/7Z13WFRH28bvARFQEWOJBaIQkQ4LWEAliopKgopBNNbEFl97LDExiZpYony2GCyxt1eDBQsmryVqxBJrjJpYgqKuAnYQqdL2+f5YdnaX3aXoLiwyv+vaS8+cmTlznj2c2Zl55n5Onz6NlJQUrF27FgDw3//+Fx4eHnjw4AGsra21Tq8FBwcjJiYGAQEBiIyM1Oh0evXqhQ0bNqB///7YsmULxowZw/cuzZkzh6+jjBs3Tm09wczMTGto1dKQnZ0Nf39/APLF9vXr1/MpqXnz5uHgwYOQyWQYO3YsDyl77tw5bNiwAStXrsS1a9cwevRomJiYgIgwffp0br+vv/4aAwYMwIIFC2BlZYXVq1cDkI8k/Pz8kJqaChMTEyxcuJCPBCMiItC5c2fk5+djxIgROr+LqKgotGnThsdfd3Z2hoeHB7y9vXnnfezYsTKZmi4RxbldQS4d8g+A+wXHEshDyDvujzMAACAASURBVOrFrau0HxGPw/AYqztuRaYiSZETEc2fP1+ruyqRXKrcz89Pa6wOgWHIzMwkX19fNZsbqzuugggA3QAkFXQ0VyCPCFguFN4VW5kxlsD1+uZVZNVVd9AaI2UpRa4PW9SpU0fDC0xBtWrVMGPGDLWIeMaKsT8XJeX+/fuYP38+Hz1t3boV48ePx1tvvVUu7SnJVJUJEd0r5AlRboJRubm55XVpo0NVJuRN4lVk1VWnmIyVspIi14cthg4dWuT5wntSjJWK8FyUBCcnJ7VprgEDBmDAgAHl1p6SdBzxjLFWAKhAKn0cgJuGbZZAIBAIjJWSTFWNgnyDXmMAjwH4FaQJBAKBoBJS7IiD5DEy+pZBW0qEtg1MlRWj8bAwAsRzoUTYQomwhWEotuNgjK2BPKyrGkQ0wiAtKoY3JYawPpBKpWUmO2LsZGdnw9zcvLybYRQIWygRtjAMJZmqOgK53PlRAH8AeBtAue3n0CWnUBkprHvzpiCVSmFpaQkvLy8kJSXxncINGjSAjY0NP1b9EaEa60AX69evx6NHj/ixv78/7O3t1fJ069at1L9SBw4ciL179xabb9y4cVwTyt/fH76+yrhmZ8+e5VIlT548QUBAAKpXr44JEyao1WFra8vFG93c3DBjxgy+YU0mk6Fr1654++230bNnT7Vy/v7+cHJy4rbbs2cP7t27h4CAALi6usLNzQ3Llinl6CZOnIgTJ06U6F4AuaNGlSpV+H4QQL4wXdiWa9euVbunjRs3chFJHx8frZv6SsvixYvh5uYGNzc3/Pjjjzz90qVL8PPzg4eHB0JCQnR6XCls7OXlpfYdAcAPP/wAJycnuLq64uuvvwYAHDx4ED4+PvDw8EDz5s0RExPD8xe2e1KSpobrjh074Obmhnbt2vGd9rdu3UL//v15npcvX6Jdu3ZcdDI2NhZeXl7lN6Iqrf8u5J3NaX35A5f2I2TVlQhZdSVvkqy6Qh596dKlRcqjv3jxgvr06cOlzGUyGR05coS2bNmi4d9f+N6JiBITE3naixcv6N133+Wy33FxcVzuu7h7ISKKiIggf39/6tixI0/TJlG+Zs0afk+//PILNW/enMdIycrKojVr1mi9Zkm5dOkSeXp6UmZmJuXk5JC/vz/duXOHiIi8vLzo1KlTRES0atUq+u6777TWoU2Cnojot99+oy5dutDLly+JSCmrfvHiRXrw4AERySXnbW1teRltdi/Me++9R1lZWbRhwwZasWIFERH17t1bIx7KtGnTaNu2bfy4KAl4Y9jHURh7AJV7y7LAaFDIqr/33ntvjKy6Qh7dwsKiyHuvWbMmVq9ejR07duDFixdgjKFTp06oXr16iWzXqFEjvlemZs2acHZ2RmJiIgCgadOmePjwIZ4+fapRTtu9REZGYsmSJbhz506J93fMnTsXixcv5rulLSwsMHz48BKV1cWNGzfg5+cHS0tLmJmZoU2bNtizZw8A4Pbt21zu5FVl1b/66is+9aWQVffx8UHDhg0ByNUM0tPTS7VtwMTEBNnZ2VxW/dixY2jSpAneffddtXw9e/bE1q1bS9VmQ1GSNY7nUK5xmABIBlAyqUoDUNI/ispA4WG0oVi86c/iM5WSSZ+0eO06VGXViQhjxox5I2TVS4O1tTWaNGmCuLg4rmelq8P56KOPuEheTEyM2jTHnTt3cPXqVTWhQ29vb5w+fVrDCaPwvUilUiQnJ6N58+bo3bs3duzYoSaDootr165p1eAqzObNm7F48WKNdCcnJ434FB4eHpg5cyaSk5Nhbm6O33//nUuQODs749dff0W3bt2wc+dOnXuFGGPo2LEjGGMYPXo0hg2ThxW4efMmYmJi8OWXX8LS0hKLFi3SaP+OHTvg6+urJo00aNAgmJqaok+fPnx6S5WpU6eiY8eOsLGxwZYtW9CrVy/s3LlTI59EIjGaTb9FPq1MvutPAiCxIElWMOQpN7RJIVdWrK2ty+Q6+njJGwJVWXVAvs4hZNXlv2C1sX37dq278VNTU9GrVy8sXbqUx7IGdMt4F76Xbdu2cYHIvn37YvTo0fjss8/0Jqv+8ccflzjAk7u7OyZNmoTAwEDUqFED3t7e/J2xceNGfPbZZ/j2228REhKiU/fu7NmzsLGxwaNHj9C5c2e4uLigTZs2yMvLw4sXL3Du3DmcOXMGH330kZpw5T///INp06bh8OHDPG379u2wsbFBamoqPvzwQ9jZ2amtXQBAUFAQgoKCAMjX4UJCQnDt2jX88MMPqF27Nn788UdYWlqiSpUqYIwhKyur3FVyi5yqKugk9hNRfsGnXDsNAGrB3Ss7v/32W3k3oVwhUsqqx8TEvDGy6qXhxYsXiI+PR7NmzXhaaRxIcnJyEBoaiiFDhmgot5ZEVh2QT1OtXbsWdnZ2CA0NxV9//YU7d+7wOBiqu7cVsuqAXE1YW2TDwmzevFmrrLqu72vEiBH466+/cOLECVhYWHD5dFdXVxw+fBgXL15EWFgYHBwctJZXSJ83aNAAISEhXBDT1tYWoaGhAOQqALm5uXwx+/79+wgNDcWWLVvUHC4UddWsWRP9+vVTE9csTEZGBrZs2YKRI0di5syZ2Lx5M3x9fdWCWOXk5BiFl1hJ1jguM8Y0Q48JBOXMmyqrXlLS0tIwatQo9O7dGzVr1ix1eSLC4MGD4eXlhfHjx2uc1yXjrXov169fR15eHhITEyGVSiGVSjFlyhT+smvXrh1+/vlnAPIObefOnejQQS5199VXX+Hzzz/n60DZ2dlYt04zaNjHH3+sVVZdVxhVhXy6VCrFw
YMHeZApRbpMJsOcOXO0yqqnp6dzb6uMjAwcPnyY20BVVv3GjRsAgLfeegvPnz9HcHAwFi5cCD8/P15Xbm4ufzZzc3Pxv//9r0hZ9PDwcEyaNAlVqlRBZmYmGGMwMTHhPwQeP34MGxsbnSPKMkXXqjmAKgX/XgOQByAWwF8ALgH4S1+r86X9CK8qJcKrimjr1q0kkUjI1dWVfHx86Pz583Tx4kXy8vIiiURCXl5edOjQISIi2r59Ozk6OpJEIqHs7GytHi+qnir5+fk0ceJEcnNzI3d3d9q5cydPHzlyJDk5OVHnzp2pa9eu3Kvq4sWL3FvL1dWV1q1bR0REv//+O33yySf8OqrXlslk5Onpyb2qiOSePW+99RbVqFGDbGxs6N9//+Xp7u7u5O7uTq6urjRt2jTu5UNE5OfnR3Xq1CELCwuysbGhI0eOaFxPwbFjxwgAeXp6kkQiIYlEQgcPHiQiopcvX5Kzs7NWBVzVe5k2bRp98803aucvXrxI7u7uRER0//59ev/990kikZCHhwf98MMPannXrl1Lrq6u5OrqSm5ubrRkyRKN65WW1q1bk4uLC0kkEvrll194+sKFC8nR0ZGaNWtGX3/9NclkMt7G7t27ExHRzZs3ydPTkzw9PcnV1ZXmzZvHy798+ZL69u1Lbm5u5OPjQzExMUQkfy6rV6/ObSiRSOjZs2eUmppKPj4+5OHhQS4uLjRhwgTKz8/X2mbVNhARRUZGkqurK/n7+9OzZ8942hdffMHzlKdXVVEdx18F/zbV9tFXA0r7cXFx0Wqoykhxbn6virF2HEWRkZFhoNboh7KUVdeHLXbs2KHTXbUiScQb+3NRGnr06EFxcXH82FjdcVnBiOS2to/+xz4lo1q1auV1aaOjNLLjFYlXkVU39ueiLGXV9WELItIZmrUs7+V1MfbnoqRkZ2cjLCwMTZs2BSDfANiiRYtyC+bGSMd6N2MsAYCmD1wBRKTznCFxcnKi2NjY8ri00aGIwqZvbty4wcNlVhTS0tJgZWVV3s0wCoQtlFRWW2j7G2aMXSQivbhIFuWOawqgBgpGHsaCYsu9ADwcp0A8F6oIWygRtjAMRXUcD4lo1utUzhgLAvAj5J3QWiIKL3R+MIAFUO4TWUZEayEQCAQCo6WojuO1RhoFQZ+WA+gMIAHABcbYPiK6XijrdiIaW9J6jcIVzUgwBn9uY6G0m8reZIQtlAhbGIai3sKdXrPuVgDiiOgOEeUA2AbgtQNIvIq/+puKYrepoOx20VcEhC2UCFsYBp0jDiJKfs26bQCoisEkANAmrtSLMdYO8nC0E4lIQ0CGMTYCwAgAqF+/PqKjo/m59u3bAwDfhAXINWycnZ1x8OBBLjltbW2NgIAAXL58Gffu3eN5u3TpwmUEFEgkEtjZ2aldp379+vDz88PZs2fVYn2HhIRAKpWqSZz7+vrC2tpabWd3kyZN4OXlhZiYGL42YW5ujqCgIPz7779QXfAv6T0pyuv7nhwdHZGdna0mVV69enWYmpqq7dyvWrUqqlWrhrS0ND6XzBiDtbU1srKyeDsBcCkLVSlrc3NzWFpa4sWLF1w2w9TUFElJSXBxcYGDgwOio6MREhICU1NTPHr0CCYmJqhTpw4A4PTp07CyskJKSgqvs0qVKqhRowbS09PVdizXqlULq1atQkBAAPdECQ4OxoMHD3Dp0iWer1+/fjhz5gzi4+NLfE/9+/dHSEgIgoODdd6TlZUVRo0ahZ49e8LX1xdBQUEgIpw6dQoZGRm4cOECvv/+exw6dAgvXrxAaGgoLl26hI8//hgLFizg9+Tk5ARra2u5S6SJCUJCQvDZZ5/x0eeOHTswf/58EBG++OILfPTRR6hatSocHR1hbW3NR+xLlixBp06d8NNPP2HhwoUAgC+++ILrMnXq1AmbN2+GtbW1xj3JZDL07NkT+/fvh4mJCXJychAdHY3BgwcjNjYW9vb2yMjIQExMDNasWYPdu3fD3NwcKSkpGDFiBEJCQhASEgJzc3NMmTIF+/btg5WVFczNzTF79mwEBAS88rNnYWGBYcOG4a+//oKpqSnCw8PRtWtXAMCGDRvwww8/gIjQrVs3LFiwQON7evz4MTw8PPiucl9fX6xevRr5+fnIyMgAAPTu3RtPnjzBlStXkJKSglmzZuHQoUMwMTFBo0aNsGLFCtSpUwfPnz/H6NGjkZiYCAsLC0RERMDZ2Vntnh4/fowBAwbg4cOHGD16NMaNG4e0tDSMHj0a//nPf+Dh4QFra2ssWLAAVlZWfDPjt99+i+3bt+Ozzz7DqFGjNP6eMjMzER0drfbe0yv68ust/AEQBvm6huJ4EORrGKp56gAwL/j/fwD8Xly9YgOgErEBUImQVSd6+vQp2dvbk1QqpWfPnpGdnR2lpKRolFOgyP/8+XON/GvXrqXw8HCt97J37176/PPP1dJCQ0PJ39+fZs2axdMOHz6sIe+uaqvJkyfTkCFDKDs7m4iIHj58yDdZvipLliyh4cOH8/q8vLxIJpPR48ePqXHjxvTs2TOSyWTUv39/voFPlVu3bpFEItFZ//bt26lfv35qeVT3syxatIjGjBlDREQTJkygOXPmEBHR1atXKTAwUKO+Xbt20bx58ygvL4/8/PyISL6B8tNPP1XLl5aWRj4+Pmpp33zzjcaGSgXGKKteUhIBvKNybAvlIrii00oiIsVPuLUAipfKFAhUELLqSln1AwcO4P3334e1tTXq1KmDjh07Fqlnpshfq1YtjfwhISFcKqQwW7duVVPMTU1Nxblz57BmzRo1XaWiSEtLw8aNGxEREYGqVasCkGtDqdr/Vbh+/To6duzI66tWrRouXbqE27dvw9nZGXXq1AFjDIGBgaWWVU9NTUVERAS++uortXTV6XOFVEjhtri5ueHmzZsagZzMzMyQmZmpNjqeMWMGZs1S90uqUaMGGjVqxOVzyptX03IuGRcANGOM2UPeYfQFoCYLyRhrSEQK8f4eAG4YsD2CV+TRglN6r7PBFP/XrkNVVj09PR1ffPFFpZZVT0xMxDvvKH+r2dra8vgaiuuZmpqiWrVqOH36dJH569ati7S0NG47VU6fPo2NGzfy4z179iA4OBjOzs6oXr06rly5AolEUmS7b926BXt7ezU1Xl2MHz9eazTCAQMGYMqUKWppEokE0dHR6NOnD6RSKf755x/Ex8ejbdu2uHbtGu7fv4+GDRsiOjpa58J5XFwcvL29YW1tjblz56JNmzYA5JpiCkn1wkydOhVbtmxB7dq1uZ6VRCLB7t270bp1a5w5cwYJCQlISEjgU62AfJ1y69at8PPzw9SpU7F79274+fnxGCWqtGjRAidPnoSPj0+xNjM0Bus4iCiPMTYWwCHI3XHXE9E1xtgsyIdM+wCMZ4z1gFwLKxnA4OLqrYybeXShWAsxNPp4yRsCVVl1IsLLly+FrDqg82V88uTJUoUarVevHh4+fKhRJjU1VW1HdmRkJL788ksA8hFcZGQkJBKJ3mTVIyIiSpz3008/RWxsLJo3bw57e3u0bt0apqamqFu3LpYv
X46wsDBUqVIFfn5+Wne+29ra4v79+6hduzbOnz+PXr164caNG4iNjUVCQgK6d++uVawyPDwc4eHhmD17NlasWIHp06fjm2++wfjx4+Hl5QWJRAKJRKIRFsLMzIyP0nJychAUFIR9+/ZhwoQJSEhIwJAhQxAcHAxALnMvlUpLYTnDYcgRB4hoP4D9hdJmqPz/KwBfFS4nEJQEIrms+uzZs5GXl6f2q/3vv//GgQMHsHz5cuzatQurV6/WWU/fvn3Ru3dvrdNGpW2Pp6cnTp48qXGuKFn16dOn60VW/fr162qBfhISEopUY7WxsSkyvy5ZdVWX+KdPn+L48eO4ceMGGGPIy8uDmZkZ5s2bxxeIVVHIqjdr1gx3795Fenp6saOO0ow4zMzM1OKMt2zZksuqKxblAWDFihVapwItLCx4eqtWrfho7syZMzh37hzs7OyQl5eHJ0+eoFOnTjh69KhGm0JDQzF9+nRYW1tj06ZNAOTTm3Z2dhox7lVZunQphg4dipMnT6JevXpYtGgROnXqxDsOXd9HeVDhNkVok8WurKh6XVVGVGXV09PTK72selBQEA4cOIDExEQkJSXh6NGjRY62FPlTUlI08ufn5+PZs2do3LixRjkHBwf+y3fnzp0YOnQo7t27B6lUioSEBDRq1AhnzpyBs7Mz7t27h5s3bwIA7t69i2vXrsHT0xNWVlb4+OOPMWHCBB5m9cmTJ3wdSZWIiAitsuqFOw1ALoWukCE/cOAALC0tecehkFVPTk7GypUrtYapffr0KffSiouLw507d2Bvb4+xY8fiwYMHkEqliImJgaurK+80bt26xctHR0dzz6mUlBR+b6tWrUJgYKDOCKZJSUk4dOgQBgwYgMzMTN45q3qX6ZK5Lw8MOuIQCAyJh4cHvv32WwQGBiI3NxcWFhZYuXIlTE1NMWzYMBARGGP4v//7PwDAkCFDMHz4cFhaWqoF1DExMeEvIdVFyrCwMJw9exaenp5gjGHx4sV4++23ERYWhmPHjsHV1RWNGzdG69atAcjdIaOiojB+/HikpqYiPz8fkydPhpubG4KDg7Fp0yYMHjxY4z66d++O6dOnq6XZ2toiMzMTubm5iIqKwtGjR+Hk5ARA3qEB8l+xoaGhmDZtGgD51NJXX32FDh06wMTEBLNmzSpyH4MivyKComr+CxcuwN/fX+uG2+DgYMTExGDw4MGIjIzEt99+q3a+V69eiIyMRJs2bbB582YMGjQI2dnZqFq1KtavX8+nm8PDw/H111/DxcUFlpaWqF69OmbPnq2zvSXh0aNH+OCDD/g04ooVK/i5MWPG8I78u+++4zG99+zZg3/++QczZszAsWPHMHPmTJiZmcHU1BRr1qwpdi/IlClTEBcXBxMTE9jb2+Onn34CII8IOHToUJiYmMDDwwNr1+oWxfjuu+8wY8YMMMbw/vvv46effsKWLVswZswYnufMmTOYO3fuK9tGr+jLPausPsIdV0llc8ctipK445YnZSlFrg9bjB49Wqu7KhFRfHw8de3a9bWvURYY+3NRUs6fP0+DBw9WS3tT3XENQnEuipUJxS/QN41XkVU3dvmVspQi14ctvL29dTpf2NraYvDgwWqbOY0VY38uSkpycrKaA8XEiROxbds2nVNfhkanrLqx0qJFC3rVhURByaiIsuoCgUCJoWXVK9yIQ1V2oLJz8ODB8m6C0SAk5pUIWygRtjAMFa7jkMlk5d0Eo0FVN6myU9FGzoZE2EKJsIVhqHAdh0AgEAjKlwrXcRTeeVmZEZLRSsRzoUTYQomwhWGocB2HkBxRYoh448aAVCqFpaUlvLy8kJSUBC8vL3h5eaFBgwawsbHhxwqhQqBkz8X69evx6NEjfuzv76+xk7dbt26lkuUAgIEDB2Lv3r3F5hs3bhxOnz7Nr+3rq4wycPbsWS5VAgBz5syBg4MDnJ2dceTIEQDyPSampqbw8vKCq6srvLy8sGTJErXp2zlz5sDb21tnOcUnPj5e53VevnyJdu3a6Qy7mpGRgYCAALXrLly4kMucK1i7di0mTJigVtbf359LfKempuLTTz9F06ZN0bx5c3To0AEXLlwo1o5FkZycjB49esDT0xO+vr78PgFg8eLFcHNzg5ubG5YuXaq1/JEjR2Btbc3t9P333/N7btWqFbe9qgjhwIEDYW9vz8soNNKuXbuG1q1bw9zcHEuWLNF6vaysLHTp0gXu7u5YtWoVTx82bBj+/vtvfrxkyRJs3ryZH0+cOBENGjTQWa/B0Zdfb1l9XFxctPotV0ZUJcL1ibHu4yhKVj0jI6PYeiuKrPqVK1fI29ubsrOzKS4ujhwcHCg/P59yc3PV2vbo0SMKCAjgUuaKcs+fPy+ynAJd1yEimjZtGm3btk3rvSxZsoSWLVumlubj40P+/v60efNmnrZmzRoNWXjV76BXr140bdo0kslkREQUFxdH+/fvL9KOxVFYyrxDhw5EJP9b8fT0pMzMTMrJyaGAgAC6c+eORnltUvBERPn5+ZSenk5ERDk5OdS8eXO6cOECEen+/h89ekQXLlygL7/8Uud+CyGrXkao/sqs7KgGb6qsKGTVfX193xhZ9ejoaPTr1w9Vq1ZF06ZN0bhxY1y8eFEjX/369bFq1Sr+61lRDkCR5UpynZ49e2Lr1q1ayxWWVb958yby8vLw3XffITIyUuf1VImNjcXly5cxc+ZMLnrYtGlTDRuVlsJS5rdu3UJSUhJu3LgBPz8/WFpawszMDO3atcOePXtKXK+JiQnfM5GTk4Pc3NxixRrr16+PFi1aFKl8LGTVBW8skbG99V5nP6edr13HmyqrnpiYqDYNqZA79/b21rCBo6MjsrKykJSUVGS5tLQ0ft8ODg6IiorSmb9ly5aQSCRqAogKXr58iYSEBDUV4cjISPTt2xcBAQEYMmQInj17hrp16xb53V27dg3e3t5aJU0KExYWplXna8qUKRgwYIBaWmEp8wcPHiAhIQEeHh6YOXMmkpOTYW5ujgMHDqBt27Zar3fq1ClIJBLY2Nhg4cKFcHV1BSDvMFq1aoW4uDh89tlnaN5cGT5o6tSpmDFjBrp06YK5c+fyGCPFIWTVBW8s+njJGwJVWfX8/Hzk5OQIWXUdWFlZlSp8aJUqVcAYQ1ZWlpoi65MnT1C7dm21vNu2bcP+/fthamqKnj17IioqCiNHjtSbrLo24UNdFJYyd3d3h6mpKdzd3TFp0iQEBgaiRo0a8Pb21rpw3rJlS0ilUtSoUQO//PILQkND8e+//wKQh6u9fPkynj9/jg8//JBvsps/fz4aNmyInJwcDBs2DAsXLsTXX39dovYKWfUyQjXaVmWnqBdiZYBIKasuk8nUfr1WZFl1GxsbtUXdhIQE2NjYaL3mzZs3Ua1aNdSpU4eXU/yNFFWuJNfJycnRkOwofB+XLl3CnTt30KFDBwDyvUWOjo4YOXJkkbLqFhYWuHz5ssb3po3SjDiKkjIfMWIERowYAUAeX10RV7xweQXdu3fHqFGjNIJZvfXWW2jXrh0OHToEFxcXNGrUCIBc3mTw4MF
YtmxZkfejCyGrbkB0eXpURir7rlhVWfX8/Pw3Rla9R48eiIyMRE5ODm7fvo179+6pTYsoePLkCUaNGoVx48aplcvKyiqyXEmu8/jxY9jY2Gi81OvVq4esrCy+RhQZGYk5c+ZAKpVCKpXiwYMHuHv3LhISEuDr64sTJ05wOfNz586BiNCoUSM4OTnBw8MDs2bN4iOmu3fv4sCBAxrtjIqK0iqrXrjTADSlzDt27MjXJhTtkEql2LdvH/r27atRXtXr7uzZs6hSpQpq1aqFJ0+e8L+3zMxMHDlyhMunP3woD2JKRIiOjn4l6XMhq25gMjIyyrsJRsO5c+fUFikrG2+qrLpEIkHPnj3h4uKCKlWqYMWKFTAxMYFMJuNrFbm5uTAzM8Mnn3yCzz77TK2cu7s7qlatqlZOG7quAwDHjh3jv3QLExgYiNOnT6N9+/bYvn27WjAjxhh69uyJ7du3Y/LkyVi0aBG6du0KIoKVlRUiIyP5VNWGDRswadIkODg4wNLSEvXq1cPChQuL/tKLobCU+aJFi/i5nj17IiUlBVWrVsXKlSv5yGz58uUwNzfH8OHDsW3bNqxZswZmZmawtLTE9u3bAQAPHjzA4MGDQUTIz89Hv379EBQUBEA+Yn3+/DlkMhmaN2+O8PBwAPIRnJ+fH1JTU2FiYoKFCxfyEWJhhKy6gT9CVl2JkFVXYuzy2RVNVr1Hjx4UFxen9Zw2iW9jxdifi5IiZNUFgmJ4FVl1Y6csZdVfl+zsbISFhaFp06Zaz7ds2RL+/v5CN64MEbLqr4mnpyep7qiszEilUtjZ2em93oooq56dnf3GxF54XYQtlFRWWwhZ9UKU1D+6MmCITqOiUhlfDroQtlAibGEYKlzHkZKSUt5NMBqio6PLuwlGg3gulAhbKBG2MAwG7TgYY0GMsVjGWBxjbGoR+XoxxogxppdhlEAgEAgMh8E6DsaYKYDlAN4H4AqgH2PMVUs+KwCfAThnqLYIBAKBQH8YcsTRCkAcEd0hohwADiP/SwAAIABJREFU2wBo23QwG8D/AdDcVqsFMzMz/bWwglO/fv3yboJBeBVZ9aKE5BRUFFn1J0+eICAgANWrV9eQJbe1teXijW5ubpgxYwaPBCmTydC1a1fY2dmhZ8+eauX8/f3h5OTEbacQ+Nu/fz+cnJzg4OCABQsW8Py9e/fGnTt3dN7Lhx9+qCay+eeff4IxxqXZASAuLk7DM27atGlcCpyIMH/+fN6uli1b6hRWLA2ff/453N3d4e7ujl9//ZWnHzlyBD4+PnB3d8fQoUPV9uwoKCxB/+GHH2rkGT16tNozsnbtWtSrV4+X2bBhAz83adIkuLm5wcXFBRMnTtSQhyEi9O3bF56enmp7eb777ju1tu/du1dN+HDBggVo3LixxvNRVhhyA6ANgHiV4wQAvqoZGGM+AN4hov8xxqboqogxNgLACABo1KiR2tx++/btAYDv3gUAJycnODs74+DBg/yPytraGgEBAbh8+bLaA9+lSxe8ePEC584pBzwSiQR2dnZq16lfvz78/Pxw9uxZrngKACEhIZBKpbhy5QpP8/X1hbW1NX777Tee1qRJE3h5eSEmJobvQDU3N0dQUBD+/fdfxMbGvtI9AdD7PTk6OiI7O1tt12r16tVhamqqFvO9atWqPAaDYkc/YwzW1tbIyspSC21bo0YNAEB6ejpPMzc3h6WlJV68eMH/oBT6Qfb29oiJiQEAxMTEoGbNmvj2229RtWpVvlNaUUYxj52SkoIqVaqgRo0aSE9PV3sx1KpVC2vXrkWzZs1gYWHBy1tZWeHw4cNo2bIlnj9/zncBl+aecnJykJGRgZSUFJ339PLlS/z555+YPXs2UlJSkJeXh4cPH+LAgQNo3bo10tLSkJeXh+zsbFSrVg1Tp07F33//jbt37yI9PZ3fExHh119/hbW1NUxMTDBs2DAMHz4cS5cuBRFh0qRJyMjIwLp167hdFA4l69evh5ubG7+n3NxcjBkzBtHR0ahfvz46dOiA4OBgODo6YuDAgZgzZw4WL16scU8K8cgmTZogMzMTOTk52LhxI/z8/PDzzz+jffv2yMjI4JsgFZ5NKSkpePnyJbKyspCeno6NGzfiyJEjOHLkCKysrPDixQscP378tZ69mJgYXLlyBcePH0dWVha6deuGbt26wdLSEp988gl+/fVX2Nvb4/vvv8fWrVvRs2dPte/J0tISVlZW/NkD5B1yfn4+MjIy8OeffyIpKYmfS0lJQWZmJsLCwrBgwQL+PaWkpOCPP/7A2bNn8c8//yArKwvt2rXDwYMH0bp1a35PJ0+ehKWlJU6cOIGQkBBkZGQgISEB58+fx4QJE/DixQtYW1tzaZoRI0bAwsICEydORM2aNXH58mX+Pat+T5mZmYiOjlZ77+kVfW0IKfwBEAZgrcrxIADLVI5NAMQAsCs4jgHQorh6nZ2dtW54qYwo4kjoG2PdAKgtHsfGjRupZcuW5OHhQaNGjeLxJwYOHEju7u7k5uZGP/74I23bto2qV69Ojo6OJJFIKDs7m9q2bUvff/89jxmxatUqCg8P57Er8vPzaeLEieTm5kbu7u60c+dOnj5q1ChycnKiwMBA6tq1K4/HcP78eWrXrh35+PhQUFAQPXr0iIiIli9fTrNnz+btbtu2LUVERFC7du2ISD0ehwJt8SxsbGzUNrWlpKRQjRo1KCUlhaft27dPI6ZE4VgkREQnTpygDz74gB/PmjWL5s+fT0REeXl5ZGdnR3l5eRrfw5QpU+i///0vP87Pz6cmTZrQ7du3qWHDhpSdnU1ERLdu3SKJRKJWVnXTWsOGDUkqlWrU/zrMnTuX5s6dy4/79etHu3btogcPHpCjoyNP//3336l79+4a5XXFLlGca9++PSUkJKjl0fY9Ecnt27JlS8rKyqL09HTy9vam2NhYtTx///03DRo0iPLz88nf358yMzNpyJAhdOXKFY36xo4dS7t27Sr2ukQVewNgIoB3VI5tC9IUWAFwBxDDGJMC8AOwr7gFcoUOjQBqowSDctNL/x89oCqrfuLECeTl5WHbtm24ePEil1W/evUqPv74Yx6HQxGXQ/ErvHPnzvj999+5rLqqXpWqrPrhw4cxceJEPHnyBFFRUVxWfcOGDXz6SSGrvmvXLly8eBEDBw7k0w9//PGHhm7Ue++9B0CuwvuqWFtbo0mTJmo6WLr03BQ28PLyQkpKChITE/HOO8o/UYWsOiD/9W1nZ4erV69q1FP4Xk6ePAknJye8++678Pf316o3VZjk5GTk5uaiSZMmxeYNDw9Xi16o+EycOFEjr0QiwYEDB5CVlYWnT5/i1KlTiI+PR/369ZGVlYVLly6BiLBr1y41gUdVMjIy0Lx5c7Ru3Rq//PILT//xxx/Rq1cvrVPEO3bsgKenJ/r06cNt+N5776FNmzZo0KABGjVqhO7du8PR0VGtnIeHB6ytreHj44NevXrh+vXrqFKlCjw9PTWuoZBVNwYMOVV1AUAzxpg95B1GXwD9FSeJ6AUALtrPGIsB8DkR/QmBceGo52Gunh
Cy6nKohJt4t2/fXqrd+G+//TYePHgAiUSill74XhTxOAC5blNkZCRCQkL0Jqs+depUTJ2q0ylTjQ8++AB//vknWrdujbfffhstW7aEqakpTExM8PPPP2PcuHHIyclB586dtcqqm5qa4t69e2jUqBHi4uLQqVMneHh4oEqVKti7dy9iYmI07N2zZ08MGjQI5ubmWL58OYYMGYLffvsNsbGxuH37NhITE5Gfn4/AwEB07doVbdq0USuvGsY2ODgY69atw6xZs3D16lUEBQVh6NChAJTfhzFgsI6DiPIYY2MBHAJgCmA9EV1jjM2CfMi0z1DXFlQOiJSy6oWlryuyrHppePHiBeLj49GsWbNSly1OVl2XjLfqveTm5mL37t343//+h5kzZ0ImkyElJQUZGRk6ZdVdXFxQu3ZtmJmZ4f79+2jcuHGR7QwPD+cxK1Tp0KEDfvjhB430GTNmYMaMGQDki/iKX/n+/v58dLd//37cvXtXoyxjjMukOzg44L333sPly5dhYmKCW7ducRmW1NRUODk5ITY2Vi1o1YgRIzBt2jQAwO7du9GmTRsuCxIUFISzZ89qdBwKdu3ahdatW+P58+eIj4/Hjh07EBgYiP79+8PCwqLyyKoT0X4iciSipkT0fUHaDG2dBhEFlGS0UVqPlzeZyqyMC6jLqteqVeuNkVUvKWlpaRg1ahR69+6tFqdGsWhfHH5+frh+/Tru3buH7Oxs7NixAz169ODnb926xRfTVVG9F4VjQXx8PKRSKe7fv4/u3bsjOjoatWrVwltvvcXtk5SUhN9++41H3ps6dSpGjx7Nv5PU1FT897//1bje1KlTtcqqa+s08vLykJycDEAeK0QxagCUsuovX77E/PnzMXLkSI3yycnJ3AHi6dOnOHPmDFxcXNCjRw88evQIUqkUcXFxqFmzJndoUThUAHLvJ4XNGjdujOPHjyMvLw+5ubk4fvy4TimfnJwcLFu2DJMnT1aTVc/Pz+dOHkJW/TUQMceVGEqrqqKgKquen5/P5bIruqw6IF9vyMzMRG5uLqKionD06FE4OTkBUK6NyGQyhIaG8l+4ANC6dWvExcUhPT0dtra22LRpE39xFsbMzAwRERHo3Lkz8vPzMWLECH6NBw8ewNraWuv0WnBwMGJiYhAQEIDIyEgNl9VevXphw4YN6N+/P5cGV3gSzpkzhz+z48aN4+sJVatWhZmZGb744gutbS0p2dnZ8Pf3ByBf/9mwYQOfkpo3bx4OHjwImUyGsWPHol27dgDk4Qk2bNiAlStX4tq1axg9ejRMTExARJg+fTq3iS4WL16MAwcOwNTUFHXr1sW6desAyEeyMTExfL0iODhYZ0z1iIgIDBs2DJaWlvDx8cHz58/h4eGB7t278x8Cx44d09pZlgv6WmUvq4+QVVciZNWVGLt8dkWTVZ8/fz5t3LhR67mMjAzy8/PT6nFlbBj7c1FSEhMTqXPnzmppb6pXlUDwSghZ9fKnTp06GDhwoNZz1apVw4wZM9SmaASGJT4+Xi3I1YIFC7BgwYJyC6Vd4WTVHRwcSNdccWUjOjraIOscFVFWvfDieGVG2EJJZbWFkFUvRHkFLjFGVCUrKjviuVAibKFE2MIwVLiOQ5vvdWVFITkiEM+FKsIWSoQtDEOF6zhU9WoqO6paWJUd8VwoEbZQImxhGCpcxyEQCASC8kV0HAKj41Vk1UtCRZFVB+T7HRwcHODs7MylylUlv11dXeHl5YUlS5ZAJpMBUMqx29jY6JRjV9hOoZy8bt06NGvWDM2aNcOWLVt4/k6dOvG9F4WRyWTo0KGDmtJxVFQUGGNqmxyPHDmiIe+uaqvc3Fx88cUXcHBwgI+PD9q0aYNDhw4Va8eiyM7OxieffMLv9Y8//uDnfv75Z3h4eMDNzQ1fffWV1vJxcXH82fPy8sKYMWM08nzwwQdqHn9ff/01PD09IZFI0LVrV/6MJScno0ePHvD09ISvry+uX7+uUVdWVha6dOkCd3d3rFq1iqcPGzYMf//9Nz9esmQJNm/ezI8nTpyIBg0acIn6Mkdffr1l9XFxcdHqt1wZKax2qi+MdR+HNnVcBRkZGcXWW1ghtm3btuTh4cFVhpOSkqhFixY61VF1MWDAAK6Oq4snT55QmzZt1K79zjvv0G+//UZE6uq4V65cIW9vb8rOzqa4uDhycHDgqr+qbXv06BEFBATQrFmziIgoLS2NTp06RYsWLSpWVZeI6OnTp2Rvb0/Pnz+nZ8+ekZ2dHVfZXbt2LYWHh2u9l71799Lnn3+ulhYaGkr+/v68LUREhw8f1lDpVbXV5MmTaciQIVxN9+HDh1yB+FVZsmQJDR8+nNfn7e1NMpmMHj9+TI0bN6Znz56RTCaj/v37U0xMjEZ5bYq+qmzfvp369eunlkd1b86iRYtozJgxREQ0YcIEmjNnDhERXb16lQIDAzXq27VrF82bN4/y8vLIz8+PiIguXrxIn376qVq+tLQ08vHxUUtTVRoujNjHUYhq1aqVdxOMhjdpn8OrsmnTJrRq1Qpt2rTB6NGjIZPJkJeXh0GDBvGARxEREVwVV6EQqxit9O3bl+sgRUVFISwsjNctk8kwadIkuLu7w8PDA1FRUTx99OjRcHZ2RufOnfHs2TNe5sKFC2jfvj2aN2+O999/nysY79y5U2PX8JQpU7TqY0VHR6Nfv36oWrUqmjZtisaNG+PixYsa+erXr49Vq1ZxkbwaNWqgbdu2JfbtP3DgAN5//33UqlULderUQceOHfm6WUhICH7++Wet5bZu3armBp6amopz585hzZo1WjWltJGWloaNGzciIiKCKxU3aNBAzf6vwvXr19GxY0den5WVFS5duoTbt2/D2dkZderUAWMMgYGB2LVrV6nqTk1NRUREhMZoRdXemZmZXMRRtS1ubm64efOmWiwPQL57PzMzU02xYMaMGWpBmwD5d9uoUSMun1PeVDjJEW1aQ5UVheyDoXnwx3K919moreYUQGlRlVXPysrC5MmTsW3bNjRt2pTLqgNKX/6lS5di2bJlah1u586dMWzYMC6rvm7dOsybNw+Auqz606dP0bJlS7Rr1w4xMTFcVv3BgwdwdXXFyJEjuaz6vn37ULduXWzduhXTp0/H6tWr8ccff2hsqHvvvfcQFRWFU6dOqUUwTExMVPteFXLn3t7eGjZwdHREVlYWkpKSUKdOHQDQKqaouJ6pqSmqVauG06dPFymrXrduXaSlpWndB3H69Gls3LiRH+/ZswfBwcFwdnZG9erVceXKFQ1F3cLcunUL9vb2JdLVGj9+PE6cOKGRPmDAAC4Vo0AikSA6Ohp9+vSBVCrFpUuXEB8fj7Zt2+LatWu4f/8+GjZsiOjoaJ0qvXFxcfD29oa1tTXmzp3LRQm/+eYbfPnll1qFBqdOnYotW7agdu3aOHbsGG/L7t270bp1a5w5cwYJCQlISEjg3xMgFz7cunUr/Pz8MHXqVOzevRt+fn5o0KCBxjUUsuo+Pj7F2szQVLiOQ1esgcqIrjlofaOPl7whELLqcqjQJl7FmkdhTp48W
ar1m3r16uHhw4caZVJTU9VG/pGRkfjyyy8BKGXVJRKJ3mTVIyIiSpz3008/RWxsLJo3bw57e3u0atWKa0gtX74cYWFhqFKlCvz8/LTu4re1tcX9+/dRu3ZtnD9/Hr169cKNGzcQGxuLhIQEdO/eXatYZXh4OMLDwzF79mysWLEC06dPxzfffIPx48fDy8sLEokEEolEwz3YzMyMj9JycnIQFBSEffv2YcKECUhISMCQIUMQHBwMQC6rLpVKS2E5w1HhOg6BQAHRmymrXpzcuSo3b95EtWrV1H7FlhQbGxucPXtW7Tqq6qu6ZLwVyq2AXEH2+PHjuHHjBhhjyMvLg5mZGebNm6dTVr1u3bpo1qyZWljcoijNiMPMzAw//vgjP27evDmXVQ8JCeFTbCtWrOAhhFWxsLDg6a1ateJBss6cOYNz587Bzs4OeXl5ePLkCTp16oSjR49qtCk0NBTTp0+HtbU1Nm3aBEDemdvZ2Wk4Y6iydOlSDB06FCdPnkS9evWwaNEidOrUiXcclUZW3RCoPrSVHXNz8/JuQrmiKqvOGHtjZNV79OiByMhI5OTk4Pbt27h3755G9EBA7kU1atQoHoNdQUl/0QcFBeHAgQNISUlBUlISjh49ykdn+fn5ePbsmdZYGQ4ODvyX786dOzF06FDcu3cPUqkUCQkJaNSoEc6cOQNnZ2fcu3cPN2/eBADcvXsX165dg6enJ6ysrPDxxx9jwoQJPKqnIrpiYSIiIrTKqhfuNAB59L7MzEwA8jWc6tWr845DIauenJyMlStXYvjw4Rrlnz59ymc14uLicOfOHdjb22Ps2LF48OABpFIpYmJi4OrqyjuNW7du8fLR0dFwdnYGIJ8iVdzbqlWrEBgYqHMne1JSEg4dOoQBAwaoyaqrxl43Jln1cveSKu2nefPmWr0IBPqjInlVbd26lSQSCXl4eJCPjw+dP3+eLl68SF5eXiSRSMjLy4sOHTpERHKPmMIxxwt7pql6LhUVc3zkyJHk5OREnTt3Vos5fvHiRe6t5erqSuvWrSMieYzrTz75hF9H9doymYw8PT3VYo7PnDmT3n33XXJ0dOTtz83NJRMTE5JIJOTq6koSiYQWL15M+fn5vJyNjQ299dZbVKNGDbKxsaF///2Xp2tTil29ejU1bdqUmjZtSps2beLpZ86coT59+mj9fmbMmEEbNmwgIiJ/f386fPiw2vlFixbR2LFjiYjo+PHj1KpVK5JIJNSyZUs6cuQIz5ednU2TJ0+mpk2bkru7O/n6+nIvs1clLi6OHB0dydnZmQIDA+n+/fv8XFhYGLm4uJCLiwtt376dp+/evZtmzpxJRPJnRGFbHx8f+vXXXzWuUdjzKiQkhNzc3MjDw4N69OhBiYmJRCSPOe7g4ECOjo7Uq1evIpV6x44dSydPniQiuYdgp06dyNXVlZYvX87zSCQSSk5O5sfl6VVV7h1BaT+lldt+k7lx44ZB6jXWjqMoMjMzDdQa/VCWsur6sMXo0aO1uqsSEcXHx1PXrl1f+xplgbE/FyXl/PnzNHjwYLU04Y5bCnR5jFRGFBHI3jReRVZdEbXNWClLWXV92MLb2xvt27fXes7W1haDBw9W2wBorBj7c1FSkpOT1RwoJk6ciG3btpWbiKOQVa/AGFJW3dnZudTeL+VJZZXP1oawhZLKaAsiwr///itk1QVli4WFBZKSklDRflQIBJUdIkJSUpJWjzF9UuFGHN7e3nTp0qXyboZRYKhfU7m5uUhISKhQ04IymUx43BUgbKGkMtrCwsICtra2MDMzU0vX54hD7OMQaGBmZlakv7kxUhmnJHQhbKFE2MIwGLQrZowFMcZiGWNxjLGpWs6PZIz9wxi7zBg7xRhzLa5OITmiRLF/QCBsoYqwhRJhC8NgsI6DMWYKYDmA9wG4AuinpWP4mYg8iMgLwHwAiw3VHoFAIBDoB0OOOFoBiCOiO0SUA2AbADUXICJSDc9VHUDFWnARCASCSogh1zhsAMSrHCcA8C2ciTE2BsAkAFUBdNRWEWNsBIARBYfZjLGremynNYDSqAUWl7+o89rOlSRN9Vj1/3UBPIP+ELYoui2vk1/ftijKLsIWwhbazjkV19gSo6+dhIU/AMIArFU5HgRgWRH5+wPYVIJ69bb7saC+1frMX9R5bedKkqZ6XOj/whaV1BbF2EXYQtjCoLYw5FRVIoB3VI5tC9J0sQ1AzyLOG4pf9Jy/qPPazpUk7ZcizukTYYtXr7usbVGUXfSNsMWr1/1G2sJg+zgYY1UA3ATQCfIO4wKA/kR0TSVPMyK6VfD/7gC+pWL8jBljfxaXp7IgbKFE2EKJsIUSYQsl+rSFwdY4iCiPMTYWwCEApgDWE9E1xtgsyIdM+wCMZYwFAsgF8BzAJyWoWndghcqHsIUSYQslwhZKhC2U6M0WFW7nuEAgEAjKl8q1F18gEAgEr43oOAQCgUBQKkTHIRAIBIJS8UZ1HIwxE8bY94yxpYyxkiy0v7EwxgIYYycZYysZYwHl3Z7yhjFWnTH2J2OsW3m3pTxhjLkUPBNRjLFR5d2e8oQx1pMxtoYxtp0x1qW821OeMMbeZYytY4xpBn3XgtF0HIyx9YyxJ4V3hRcnlFiIEMj3i+RCvlO9QqInWxCAdAAWELYAgC8B7DBMK8sGfdiCiG4Q0UgAfQC0NWR7DYmebLGXiD4FMBLAR4ZsryHRky3uENGwEl/TWLyqGGPtIH/RbSYi94I0U8j3gnSG/OV3AUA/yN175xWqYmjB5zkRrWKMRRFRWFm1X5/oyRbPiEjGGKsPYDERDSir9usTPdlCAqAO5J3oMyL6tWxar1/0YQsiesIY6wFgFID/EtHPZdV+faIvWxSUWwRgKxH9VUbN1yt6tkWJ3ptGE4+DiE4wxuwKJXOhRABgjG0DEEJE8wBoTDkwxhIA5BQc5huutYZFH7ZQ4TkAc0O0syzQ03MRALmIpiuALMbYfiKSGbLdhkBfz0XBHqp9jLH/AaiQHYeengsGIBzAgYraaQB6f1+UCKPpOHRQIqFEFXYDWMoYew/ACUM2rBwolS0YY6EAugKoBWCZYZtW5pTKFkT0DQAwxgajYCRm0NaVLaV9LgIAhEL+Y2K/QVtW9pT2fTEOQCAAa8aYAxGtNGTjypjSPhd1AHwPwJsx9lVBB6MTY+84SgURZQIo8TzdmwwR7Ya8IxUUQEQby7sN5Q0RxQCIKedmGAVEFAEgorzbYQwQURLkaz0lwmgWx3VQWqHENxlhCyXCFkqELZQIWygxqC2MveO4AKAZY8yeMVYVQF8A+8q5TeWFsIUSYQslwhZKhC2UGNQWRtNxMMYiAZwB4MQYS2CMDSOiPAAKocQbAHaoquu+qQhbKBG2UCJsoUTYQkl52MJo3HEFAoFAUDEwmhGHQCAQCCoGouMQCAQCQakQHYdAIBAISoXoOAQCgUBQKkTHIRAIBIJSIToOgUAgEJQK0XEIjA7GWD5j7LLKx66IvHaF5aRf8ZoxBRLUVxhjfzDGnF6hjpGMsY8L/j+YMdZI5dxaxpirntt5gTHmVYIy
Exhj1V732gKBAtFxCIyRLCLyUvlIy+i6A4hIAmATgAWlLUxEK4loc8HhYACNVM4NJ6Lremmlsp0rULJ2TgAgOg6B3hAdh6BCUDCyOMkY+6vg00ZLHjfG2PmCUcrfjLFmBekDVdJXFcQqKIoTABwKynZijF1ijP1TEDDHvCA9nDF2veA6CwvSvmOMfc4YCwPQAsDWgmtaFowUWhSMSvjLvmBksuwV23kGchVURV0/MXmUw2uMsZkFaeMh78COMcaOFaR1YYydKbDjTsZYjWKuIxCoIToOgTFiqTJNtacg7QmAzkTkA3m0Nm2qpiMB/EhEXpC/uBMYYy4F+dsWpOcDKC6oVXcA/zDGLABsBPAREXlAriY9qkCC+kMAbkTkCWCOamEiigLwJ+QjAy8iylI5vaugrIKPAGx7xXYGAdircvwNEbUA4AmgPWPMs0AB9gGADkTUgTFWF8A0AIEFtvwTwKRiriMQqPFGyaoL3hiyCl6eqpgBWFYwp58PwFFLuTMAvmGM2QLYTUS3GGOdADQHcIExBgCWkHdC2tjKGMsCIIU8VoMTgLtEdLPg/CYAYyCPb/ISwDrG2K8AShxRkIieMsbuMMb8ANwC4Azgj4J6S9POqgBqAFC1Ux/G2AjI/64bQh646u9CZf0K0v8ouE5VyO0mEJQY0XEIKgoTATyGPAysCeQvbjWI6GfG2DkAwQD2M8b+A4AB2EREX5XgGgOI6E/FAWOstrZMRJTHGGsFoBOAMMjF5DqW4l62QR7z+18Ae4iImPwtXuJ2ArgI+frGUgChjDF7AJ8DaElEzxljGyEPlVsYBuAwEfUrRXsFAjXEVJWgomAN4GFB9L5BkMdOVoMx9i6AOwXTM9GQT9kcBRDGGHu7IE9txliTEl4zFoAdY8yh4HgQgOMFawLWRLQf8g5NoqVsGgArHfXuARACeQzobQVppWonydVJpwPwY4w5A6gJIAPACyaPM/++jracBdBWcU+MseqMMW2jN4FAJ6LjEFQUVgD4hDF2BfLpnQwtefoAuMoYuwzAHcDmAk+maQB+Y4z9DeAw5NM4xUJELwEMAbCTMfYPABmAlZC/hH8tqO8UtK8RbASwUrE4Xqje55BLXTchovMFaaVuZ8HaySIAU4joCoBLkI9ifoZ8+kvBagAHGWPHiOgp5B5fkQXXOQO5PQWCEiNk1QUCgUBQKsSIQyAQCASlQnQcAoFAICgVouMQCAQCQakQHYdAIBAISoXoOAQCgUBQKkTHIRAIBIJSIToOgUAgEJQK0XEIBAKBoFT8P2uUmsU/iY9JAAAAAElEQVQ1rwYeAAAAAElFTkSuQmCC\n",
- "text/plain": [
- ""
- ]
- },
- "metadata": {
- "needs_background": "light"
- },
- "output_type": "display_data"
- }
- ],
- "source": [
- "score_save_path = './IJBC/result'\n",
- "files = glob.glob(score_save_path + '/MS1MV2*.npy') \n",
- "methods = []\n",
- "scores = []\n",
- "for file in files:\n",
- " methods.append(Path(file).stem)\n",
- " scores.append(np.load(file)) \n",
- "methods = np.array(methods)\n",
- "scores = dict(zip(methods,scores))\n",
- "colours = dict(zip(methods, sample_colours_from_colourmap(methods.shape[0], 'Set2')))\n",
- "#x_labels = [1/(10**x) for x in np.linspace(6, 0, 6)]\n",
- "x_labels = [10**-6, 10**-5, 10**-4,10**-3, 10**-2, 10**-1]\n",
- "tpr_fpr_table = PrettyTable(['Methods'] + map(str, x_labels))\n",
- "fig = plt.figure()\n",
- "for method in methods:\n",
- " fpr, tpr, _ = roc_curve(label, scores[method])\n",
- " roc_auc = auc(fpr, tpr)\n",
- " fpr = np.flipud(fpr)\n",
- " tpr = np.flipud(tpr) # select largest tpr at same fpr\n",
- " plt.plot(fpr, tpr, color=colours[method], lw=1, label=('[%s (AUC = %0.4f %%)]' % (method.split('-')[-1], roc_auc*100)))\n",
- " tpr_fpr_row = []\n",
- " tpr_fpr_row.append(method)\n",
- " for fpr_iter in np.arange(len(x_labels)):\n",
- " _, min_index = min(list(zip(abs(fpr-x_labels[fpr_iter]), range(len(fpr)))))\n",
- " tpr_fpr_row.append('%.4f' % tpr[min_index])\n",
- " tpr_fpr_table.add_row(tpr_fpr_row)\n",
- "plt.xlim([10**-6, 0.1])\n",
- "plt.ylim([0.3, 1.0])\n",
- "plt.grid(linestyle='--', linewidth=1)\n",
- "plt.xticks(x_labels) \n",
- "plt.yticks(np.linspace(0.3, 1.0, 8, endpoint=True)) \n",
- "plt.xscale('log')\n",
- "plt.xlabel('False Positive Rate')\n",
- "plt.ylabel('True Positive Rate')\n",
- "plt.title('ROC on IJB-C')\n",
- "plt.legend(loc=\"lower right\")\n",
- "plt.show()\n",
- "#fig.savefig('IJB-B.pdf')"
- ]
- },
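The `min(list(zip(...)))` idiom in the plotting cell above simply finds the measured FPR closest to each target operating point and reads off the TPR there. A minimal sketch of the same lookup on hypothetical labels and scores (not the notebook's data); `np.argmin` is the idiomatic equivalent:

```python
import numpy as np
from sklearn.metrics import roc_curve

# Hypothetical verification labels (1 = genuine pair, 0 = impostor) and scores.
label = np.array([1, 1, 0, 1, 0, 0, 1, 0])
score = np.array([0.90, 0.80, 0.70, 0.60, 0.50, 0.40, 0.85, 0.20])

fpr, tpr, _ = roc_curve(label, score)
fpr, tpr = np.flipud(fpr), np.flipud(tpr)  # descending FPR, as in the cell above

for target in (1e-2, 1e-1):
    idx = np.argmin(np.abs(fpr - target))  # closest measured operating point
    print('TPR@FPR=%g: %.4f (at measured FPR %.4f)' % (target, tpr[idx], fpr[idx]))
```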
- {
- "cell_type": "code",
- "execution_count": 39,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "+-------------------------------------------+--------+--------+--------+--------+--------+--------+\n",
- "| Methods | 1e-06 | 1e-05 | 0.0001 | 0.001 | 0.01 | 0.1 |\n",
- "+-------------------------------------------+--------+--------+--------+--------+--------+--------+\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N1D1F1) | 0.8997 | 0.9434 | 0.9618 | 0.9744 | 0.9832 | 0.9907 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N0D1F2) | 0.8829 | 0.9400 | 0.9607 | 0.9746 | 0.9833 | 0.9910 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N1D1F2) | 0.8985 | 0.9447 | 0.9628 | 0.9753 | 0.9836 | 0.9908 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N1D0F0) | 0.8906 | 0.9394 | 0.9603 | 0.9731 | 0.9829 | 0.9904 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N0D0F0) | 0.8625 | 0.9315 | 0.9565 | 0.9720 | 0.9818 | 0.9901 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N1D1F0) | 0.8943 | 0.9413 | 0.9610 | 0.9735 | 0.9829 | 0.9905 |\n",
- "| MS1MV2-ResNet100-ArcFace-TestMode(N0D1F0) | 0.8795 | 0.9387 | 0.9591 | 0.9731 | 0.9824 | 0.9904 |\n",
- "+-------------------------------------------+--------+--------+--------+--------+--------+--------+\n"
- ]
- }
- ],
- "source": [
- "print(tpr_fpr_table)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# setting N1D1F2 is the best"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "language": "python",
- "name": "python2"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 2
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.15"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/evaluation/IJB/IJBC_Evaluation_VGG2.ipynb b/evaluation/IJB/IJBC_Evaluation_VGG2.ipynb
deleted file mode 100644
index 8f32634..0000000
--- a/evaluation/IJB/IJBC_Evaluation_VGG2.ipynb
+++ /dev/null
@@ -1,532 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/home/jd4615/miniconda3/envs/insightface/lib/python2.7/site-packages/sklearn/utils/fixes.py:313: FutureWarning: numpy not_equal will not check object identity in the future. The comparison did not return the same result as suggested by the identity (`is`)) and will change.\n",
- " _nan_object_mask = _nan_object_array != _nan_object_array\n"
- ]
- }
- ],
- "source": [
- "import os\n",
- "import numpy as np\n",
- "import cPickle\n",
- "from sklearn.metrics import roc_curve, auc\n",
- "import matplotlib.pyplot as plt\n",
- "import timeit\n",
- "import sklearn\n",
- "import cv2\n",
- "import sys\n",
- "import glob\n",
- "sys.path.append('./recognition')\n",
- "from embedding import Embedding\n",
- "from menpo.visualize import print_progress\n",
- "from menpo.visualize.viewmatplotlib import sample_colours_from_colourmap\n",
- "from prettytable import PrettyTable\n",
- "from pathlib import Path\n",
- "import warnings \n",
- "warnings.filterwarnings(\"ignore\") "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_template_media_list(path):\n",
- " ijb_meta = np.loadtxt(path, dtype=str)\n",
- " templates = ijb_meta[:,1].astype(np.int)\n",
- " medias = ijb_meta[:,2].astype(np.int)\n",
- " return templates, medias"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_template_pair_list(path):\n",
- " pairs = np.loadtxt(path, dtype=str)\n",
- " t1 = pairs[:,0].astype(np.int)\n",
- " t2 = pairs[:,1].astype(np.int)\n",
- " label = pairs[:,2].astype(np.int)\n",
- " return t1, t2, label"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_image_feature(path):\n",
- " with open(path, 'rb') as fid:\n",
- " img_feats = cPickle.load(fid)\n",
- " return img_feats"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [],
- "source": [
- "def get_image_feature(img_path, img_list_path, model_path, gpu_id):\n",
- " img_list = open(img_list_path)\n",
- " embedding = Embedding(model_path, 0, gpu_id)\n",
- " files = img_list.readlines()\n",
- " img_feats = []\n",
- " faceness_scores = []\n",
- " for img_index, each_line in enumerate(print_progress(files)):\n",
- " name_lmk_score = each_line.strip().split(' ')\n",
- " img_name = os.path.join(img_path, name_lmk_score[0])\n",
- " img = cv2.imread(img_name)\n",
- " lmk = np.array([float(x) for x in name_lmk_score[1:-1]], dtype=np.float32)\n",
- " lmk = lmk.reshape( (5,2) )\n",
- " img_feats.append(embedding.get(img,lmk))\n",
- " faceness_scores.append(name_lmk_score[-1])\n",
- " img_feats = np.array(img_feats).astype(np.float32)\n",
- " faceness_scores = np.array(faceness_scores).astype(np.float32)\n",
- " return img_feats, faceness_scores"
- ]
- },
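`get_image_feature` above expects each line of the meta list as `<image> <x1> <y1> ... <x5> <y5> <faceness>`. A minimal parse of one hypothetical line (made-up coordinates; the `Embedding` class itself comes from the `recognition/embedding.py` module added to `sys.path` in the imports cell and is not reproduced here):

```python
import numpy as np

# One hypothetical line of ijbc_name_5pts_score.txt.
line = '1.jpg 66.2 86.0 111.9 84.9 89.8 113.3 70.6 138.9 108.2 138.1 0.98'

name_lmk_score = line.strip().split(' ')
img_name = name_lmk_score[0]                      # image file name
lmk = np.array([float(x) for x in name_lmk_score[1:-1]],
               dtype=np.float32).reshape((5, 2))  # five (x, y) landmarks
faceness = float(name_lmk_score[-1])              # detector confidence
print(img_name, lmk.shape, faceness)              # 1.jpg (5, 2) 0.98
```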
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [],
- "source": [
- "def image2template_feature(img_feats = None, templates = None, medias = None):\n",
- " # ==========================================================\n",
- " # 1. face image feature l2 normalization. img_feats:[number_image x feats_dim]\n",
- " # 2. compute media feature.\n",
- " # 3. compute template feature.\n",
- " # ========================================================== \n",
- " unique_templates = np.unique(templates)\n",
- " template_feats = np.zeros((len(unique_templates), img_feats.shape[1]))\n",
- "\n",
- " for count_template, uqt in enumerate(unique_templates):\n",
- " (ind_t,) = np.where(templates == uqt)\n",
- " face_norm_feats = img_feats[ind_t]\n",
- " face_medias = medias[ind_t]\n",
- " unique_medias, unique_media_counts = np.unique(face_medias, return_counts=True)\n",
- " media_norm_feats = []\n",
- " for u,ct in zip(unique_medias, unique_media_counts):\n",
- " (ind_m,) = np.where(face_medias == u)\n",
- " if ct == 1:\n",
- " media_norm_feats += [face_norm_feats[ind_m]]\n",
- " else: # image features from the same video will be aggregated into one feature\n",
- " media_norm_feats += [np.mean(face_norm_feats[ind_m], 0, keepdims=True)]\n",
- " media_norm_feats = np.array(media_norm_feats)\n",
- " # media_norm_feats = media_norm_feats / np.sqrt(np.sum(media_norm_feats ** 2, -1, keepdims=True))\n",
- " template_feats[count_template] = np.sum(media_norm_feats, 0)\n",
- " if count_template % 2000 == 0: \n",
- " print('Finish Calculating {} template features.'.format(count_template))\n",
- " template_norm_feats = template_feats / np.sqrt(np.sum(template_feats ** 2, -1, keepdims=True))\n",
- " return template_norm_feats, unique_templates"
- ]
- },
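`image2template_feature` works in two stages: frames from the same media (video) are averaged first, so a long video cannot dominate a template, and then the media-level features are summed per template and L2-normalized. A toy check of that order of operations with hypothetical 2-D features:

```python
import numpy as np

# Three images with 2-D features; images 0 and 1 are frames of the same video.
img_feats = np.array([[1.0, 0.0], [0.0, 1.0], [3.0, 4.0]])

media1 = img_feats[:2].mean(axis=0)         # average within the video -> [0.5, 0.5]
template = media1 + img_feats[2]            # sum over the two medias  -> [3.5, 4.5]
template /= np.sqrt((template ** 2).sum())  # L2-normalize, as in the cell above
print(template)                             # ~[0.614, 0.789]
```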
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [],
- "source": [
- "def verification(template_norm_feats = None, unique_templates = None, p1 = None, p2 = None):\n",
- " # ==========================================================\n",
- " # Compute set-to-set Similarity Score.\n",
- " # ==========================================================\n",
- " template2id = np.zeros((max(unique_templates)+1,1),dtype=int)\n",
- " for count_template, uqt in enumerate(unique_templates):\n",
- " template2id[uqt] = count_template\n",
- " \n",
- " score = np.zeros((len(p1),)) # save cosine distance between pairs \n",
- "\n",
- " total_pairs = np.array(range(len(p1)))\n",
- " batchsize = 100000 # small batchsize instead of all pairs in one batch due to the memory limiation\n",
- " sublists = [total_pairs[i:i + batchsize] for i in range(0, len(p1), batchsize)]\n",
- " total_sublists = len(sublists)\n",
- " for c, s in enumerate(sublists):\n",
- " feat1 = template_norm_feats[template2id[p1[s]]]\n",
- " feat2 = template_norm_feats[template2id[p2[s]]]\n",
- " similarity_score = np.sum(feat1 * feat2, -1)\n",
- " score[s] = similarity_score.flatten()\n",
- " if c % 10 == 0:\n",
- " print('Finish {}/{} pairs.'.format(c, total_sublists))\n",
- " return score"
- ]
- },
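Since the template features are already L2-normalized, the similarity in `verification` is a plain dot product; the batching only bounds peak memory. A compact sketch with random features (written for Python 3 and NumPy >= 1.17, unlike the Python 2 notebook):

```python
import numpy as np

rng = np.random.default_rng(0)
feats = rng.normal(size=(5, 4))
feats /= np.linalg.norm(feats, axis=1, keepdims=True)  # L2-normalized templates

p1 = np.array([0, 1, 2, 3])  # first template of each pair
p2 = np.array([1, 2, 3, 4])  # second template of each pair

score = np.zeros(len(p1))
batchsize = 2  # tiny batch, purely to mirror the memory-bounded loop above
for i in range(0, len(p1), batchsize):
    s = np.arange(i, min(i + batchsize, len(p1)))
    score[s] = np.sum(feats[p1[s]] * feats[p2[s]], axis=-1)
print(score)  # cosine similarities in [-1, 1]
```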
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [],
- "source": [
- "def read_score(path):\n",
- " with open(path, 'rb') as fid:\n",
- " img_feats = cPickle.load(fid)\n",
- " return img_feats"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step1: Load Meta Data"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Time: 1.76 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load image and template relationships for template feature embedding\n",
- "# tid --> template id, mid --> media id \n",
- "# format:\n",
- "# image_name tid mid\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "templates, medias = read_template_media_list(os.path.join('IJBC/meta', 'ijbc_face_tid_mid.txt'))\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
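`read_template_media_list`, defined earlier in this notebook, just slices the tid and mid columns out of the whitespace-separated meta file described in the comment above. A sketch on hypothetical rows:

```python
import numpy as np

# Hypothetical rows of ijbc_face_tid_mid.txt: image_name tid mid.
rows = np.array([['a.jpg', '1', '100'],
                 ['b.jpg', '1', '100'],
                 ['c.jpg', '2', '101']])
templates = rows[:, 1].astype(int)  # template id per image
medias = rows[:, 2].astype(int)     # media (still image / video) id per image
print(np.unique(templates), np.unique(medias))  # [1 2] [100 101]
```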
- {
- "cell_type": "code",
- "execution_count": 10,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Time: 63.31 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load template pairs for template-to-template verification\n",
- "# tid : template id, label : 1/0\n",
- "# format:\n",
- "# tid_1 tid_2 label\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "p1, p2, label = read_template_pair_list(os.path.join('IJBC/meta', 'ijbc_template_pair_label.txt'))\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 2: Get Image Features"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 11,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "('loading', './pretrained_models/VGG2-ResNet50-Arcface/model', 0)\n",
- "[====================] 100% (469375/469375) - done. \n",
- "Time: 5087.25 s. \n",
- "Feature Shape: (469375 , 1024) .\n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# load image features \n",
- "# format:\n",
- "# img_feats: [image_num x feats_dim] (227630, 512)\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "#img_feats = read_image_feature('./MS1MV2/IJBB_MS1MV2_r100_arcface.pkl')\n",
- "img_path = './IJBC/loose_crop'\n",
- "img_list_path = './IJBC/meta/ijbc_name_5pts_score.txt'\n",
- "model_path = './pretrained_models/VGG2-ResNet50-Arcface/model'\n",
- "gpu_id = 0\n",
- "img_feats, faceness_scores = get_image_feature(img_path, img_list_path, model_path, gpu_id)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))\n",
- "print('Feature Shape: ({} , {}) .'.format(img_feats.shape[0], img_feats.shape[1]))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step3: Get Template Features"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 34,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Finish Calculating 0 template features.\n",
- "Finish Calculating 2000 template features.\n",
- "Finish Calculating 4000 template features.\n",
- "Finish Calculating 6000 template features.\n",
- "Finish Calculating 8000 template features.\n",
- "Finish Calculating 10000 template features.\n",
- "Finish Calculating 12000 template features.\n",
- "Finish Calculating 14000 template features.\n",
- "Finish Calculating 16000 template features.\n",
- "Finish Calculating 18000 template features.\n",
- "Finish Calculating 20000 template features.\n",
- "Finish Calculating 22000 template features.\n",
- "Time: 9.98 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# compute template features from image features.\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "# ========================================================== \n",
- "# Norm feature before aggregation into template feature?\n",
- "# Feature norm from embedding network and faceness score are able to decrease weights for noise samples (not face).\n",
- "# ========================================================== \n",
- "# 1. FaceScore (Feature Norm)\n",
- "# 2. FaceScore (Detector)\n",
- "\n",
- "use_norm_score = True # if True, TestMode(N1) \n",
- "use_detector_score = True # if True, TestMode(D1)\n",
- "use_flip_test = True # if True, TestMode(F1)\n",
- "\n",
- "if use_flip_test:\n",
- " # concat --- F1\n",
- " img_input_feats = img_feats \n",
- " # add --- F2\n",
- " # img_input_feats = img_feats[:,0:img_feats.shape[1]/2] + img_feats[:,img_feats.shape[1]/2:]\n",
- "else:\n",
- " img_input_feats = img_feats[:,0:img_feats.shape[1]/2]\n",
- " \n",
- "if use_norm_score:\n",
- " img_input_feats = img_input_feats\n",
- "else:\n",
- " # normalise features to remove norm information\n",
- " img_input_feats = img_input_feats / np.sqrt(np.sum(img_input_feats ** 2, -1, keepdims=True)) \n",
- " \n",
- "if use_detector_score:\n",
- " img_input_feats = img_input_feats * np.matlib.repmat(faceness_scores[:,np.newaxis], 1, img_input_feats.shape[1])\n",
- "else:\n",
- " img_input_feats = img_input_feats\n",
- "\n",
- "template_norm_feats, unique_templates = image2template_feature(img_input_feats, templates, medias)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
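The flags above are what the TestMode codes in the score file names encode: N keeps or strips the embedding norm, D multiplies by the detector's faceness score, and F controls the horizontally flipped crop (F1 keeps the concatenated 2x512 vector, F2 adds the two halves, F0 drops the flip). A sketch of the same switches as one hypothetical helper, with broadcasting in place of `np.matlib.repmat`:

```python
import numpy as np

def apply_test_mode(img_feats, faceness, use_norm=True, use_det=True, flip='F2'):
    """Hypothetical helper mirroring the N/D/F switches of the cell above."""
    d = img_feats.shape[1] // 2                  # layout: [original 512 | flipped 512]
    if flip == 'F1':
        x = img_feats                            # keep the concatenation
    elif flip == 'F2':
        x = img_feats[:, :d] + img_feats[:, d:]  # add original and flipped halves
    else:                                        # F0: original half only
        x = img_feats[:, :d]
    if not use_norm:                             # N0: strip the norm information
        x = x / np.sqrt(np.sum(x ** 2, -1, keepdims=True))
    if use_det:                                  # D1: weight by faceness score
        x = x * faceness[:, np.newaxis]
    return x

feats = np.random.rand(3, 1024).astype(np.float32)
scores = np.random.rand(3).astype(np.float32)
print(apply_test_mode(feats, scores).shape)  # (3, 512) under N1D1F2
```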
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 4: Get Template Similarity Scores"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 35,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Finish 0/157 pairs.\n",
- "Finish 10/157 pairs.\n",
- "Finish 20/157 pairs.\n",
- "Finish 30/157 pairs.\n",
- "Finish 40/157 pairs.\n",
- "Finish 50/157 pairs.\n",
- "Finish 60/157 pairs.\n",
- "Finish 70/157 pairs.\n",
- "Finish 80/157 pairs.\n",
- "Finish 90/157 pairs.\n",
- "Finish 100/157 pairs.\n",
- "Finish 110/157 pairs.\n",
- "Finish 120/157 pairs.\n",
- "Finish 130/157 pairs.\n",
- "Finish 140/157 pairs.\n",
- "Finish 150/157 pairs.\n",
- "Time: 146.08 s. \n"
- ]
- }
- ],
- "source": [
- "# =============================================================\n",
- "# compute verification scores between template pairs.\n",
- "# =============================================================\n",
- "start = timeit.default_timer()\n",
- "score = verification(template_norm_feats, unique_templates, p1, p2)\n",
- "stop = timeit.default_timer()\n",
- "print('Time: %.2f s. ' % (stop - start))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 36,
- "metadata": {},
- "outputs": [],
- "source": [
- "score_save_name = './IJBC/result/VGG2-ResNet50-ArcFace-TestMode(N1D1F1).npy'\n",
- "np.save(score_save_name, score)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Step 5: Get ROC Curves and TPR@FPR Table"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 39,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEaCAYAAAAG87ApAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzs3Xl8VOW9+PHPc2bNTPaFsATCpgl7EJRNARcQl4qtWqWLrbU/2+uvarXtvfXXVlvrbe+1vXax7b21ttfa6wUUq7jiDooIggKVVbaEJCzZJ7Nv5/v7Y5JMAkkIyGSB5/168SJz5sw53+c7k3znnOec51EigqZpmqb1lNHXAWiapmkDiy4cmqZp2knRhUPTNE07KbpwaJqmaSdFFw5N0zTtpOjCoWmapp0UXTg0TdO0k6ILhzYgKaXKlVJBpZRPKXVEKfW4Uir9mHVmK6XeUkp5lVIepdQLSqnxx6yTqZT6tVLqYMu29rU8zk9x/POVUlXtHj+ulHqw5eeRSilpicenlDqqlPqDUsp2gm2eq5R6WilV19Lefyil7lFKWVLZFu3sowuHNpB9RkTSgTJgKnBv6xNKqVnAa8BKYCgwCtgKvKeUGt2yjh14E5gALAIygVlAPXBB7zWjS9kt7ZtEIq7/29WKSqkxwAagEpgkIlnADcB0IKMXYtXOIrpwaAOeiBwBXiVRQFo9BDwhIr8REa+INIjID4H1wI9b1rkZGAF8VkR2iIgpIjUi8lMRebmzfbUcxWxs+Ua/USk1u91zq5VSP1VKvddylPPa6ThyEZEa4HVgfDer/QRYJyL3iMjhltftFpEviEjTp41B09rThUMb8JRSRcAVwN6Wxy5gNvB0J6s/BSxo+fkyYJWI+Hq4n1zgJeC3QB7wMPCSUiqv3WpfAG4BBgF24Lsn255O9jsUuJxE0evKZcCKT7svTesJXTi0gew5pZSXxOmZGuD+luW5JD7bhzt5zWGg9Sggr4t1unIVsEdE/iYiMRFZCuwCPtNunf8WkU9EJEiiSJV1tqEeqlNKNQHVgJ/uC8PJtkXTTpkuHNpAdq2IZADzgVKSBaERMIEhnbxmCFDX8nN9F+t0ZShQccyyCmBYu8dH2v0cANI5dfkikg24gPdInI5DKfXFdh3nr7Sse7Jt0bRTpguHNuCJyBrgceCXLY/9wPskOoeP9XkSHeIAbwCXK6XcPdzVIaD4mGUjSBwRpEzL0cvjwEylVL6IPCki6S3/rmhZ7Q3gulTGoWmtdOHQzhS/BhYopaa0PP4+8BWl1J1KqQylVE7L5a6zSHQkA/yNxGmuZ5RSpUopQymVp5T6f0qpKzvZx8vAuUqpLyilrEqpG0l0WL+YyoYppRzAl0kczdR3sdr9wGyl1C+UUoNbXjdWKfU/SqnsVMannX104dDOCCJSCzwB3NfyeC2JDuXPkTj3X0Hikt0LRWRPyzphEp3Ku0hctdQMfEDilNeGTvZRD1wNfIfEH/B/Bq4Wkbpj1z3VZhzzuEkp5QOOkih410gXE+iIyL6WdUYC25VSHuAZYBPgPU3xaRoASk/kpGl9Tyn1d+AdEfl1X8eiaSeijzg0rY8ppYYBF5I4OtC0fi9lhUMp9RelVI1SalsXzyul1G+VUntbhkY4L1WxaFp/pZS6HdhM4jLetX0dj6b1RMpOVSml5gI+EnfvTuzk+SuBO4ArgRnAb0RkRkqC0TRN006blB1xiMg7QEM3qywmUVRERNYD2UopfR26pmlaP9eXfRzDSFwK2aqKjjdSaZqmaf2Qta8D6Aml1G3AbQAul2vakCHJA5OMjMTAn15v8opDp9OJ0+mkubkZ0zQBsFgsZGRkEAgEiEQibetmZmYSj8fx+/1ty1wuF3a7naam5NhwNpsNt9uN3+8nGo22Lc/OziYSiRAIBNqWud1uLBYLzc3Nbcvsdjsulwuv10s8HgfAMAwyMzMJhUKEQiHdJt0m3SbdphS1yc/OnbvqRKSA06AvC0c1MLzd4yK6uANXRB4FHgWYOnWqbN68OfXRDQBNTU1kZ+t7u0Dnoj2di6T+nAsRQeIxYvEI4VgYb9CH3+8hFgkRCwch6sOMejGiYRRhrBIAM45FRTCIYRiChTgohUUpBIXCQIkDVByFQkwnkJiOZdiF3zp2uJxT1peF43ngW0qpZSQ6xz2tw0F3p32FPdutWbOGxYsX93UY/YLORZLORdKnzYXEY0RjQUKRIL6Qn3A0RCDQQCwaQmJe4lEfllgIFQtgIYJV4lgxUcQwMLAoBYBF7KDigEJhAbGS+IOugBiGimAAGUpQKgTKRGEiYkVswUQhEIVphBMFRxQmMQQLInEipg2FQQwLhjgRixXMTHBmYbOl43Lkdd3IU5CywqGUWkpi8Ln8lpnO7gdsACLyXySGb7iSxFDYARJDUWuapp0WIoKJiTfkxeNvwu/3EfTXI7FGYpE6bPEQdglgiZtYiGMTCxYVR2FDiUFiVHyAGBYjiAG4VJR0FcUwAigVRzAxjTiGI0YUhYiBaSpELAhRlBJicSGOFUWciGkHZRAjA6slG4cjE4d7MBZ7Fi73cBwZQ1BWJ8qwYhj99za7lBUOEVlygueFbmY00zRNayUiiBnDH2ii0VtJo6eKWKgZiTZhlRA2M46NGHYEq+nGwA4IM/IV3g+fwKJCZKsIOUYIwcQwAphGFDBRVj8xM3FkEDOFWMSNRJ2gHFgkE6eRh9UyFEdaPhZ7Btb0HFRGFobbDQ47huXsm5l3QHSOt+d0Ovs6hH6jpKSkr0PoN3QukgZKLoJhP/5AI15fLV5/HUboEBJrxBEPYphG4ggAhRI7CgOIYhhh0owAaSqUOKdvBIhjIoZgikJJiJhpIR4ziMetGFE7yijEYebhMPKx2fOwuZ2orCws2RmodDc43SjVf7/d90e6cAxgpaWlfR1Cv6FzkdQXuYibcTxBDw3eOoKBBgKhACrkh4gfZ7wRG2EMTBzKxCo2jJY/PRZLE24VI8PwEccgrgIY1iCxmB0xTWJxRTzkxBpyY/UXgRqJMyMLR54Lw+1EZWejMrNQThfKZu82Ru30GXCFo/3la2e7VatWsWjRor4Oo1/QuUg6nbmImTG8AQ/BkJeA9ygxfy3RaJRYOIQ17sVJCCsmdiwoDNKMZtKNIBYjhN3SBPYYUUuQiGkFiYOYmDErgbCDqD8Xq78QdySPDPtE7IX5WAoKMHIywZ2OcrpRVlu/yYWWNOAKR+v1yRqEw+G+DqHf0LlI6kkuRAQzGiQcbKLJV0fYX08w0AihADYzhEWiGGJiV2BiYhLBafFhs9ZjszZhTwsSM2KETYN43IIQIR7MRJryIZyBMguIRkfhsA3GnZFNZrYdS242KjMblZ2Psjl6IRP6c5EqA65waJrWMxKPEQnU0+yrpdlbQyjowRpqwmJGcJoxDCBMEMPwk2bxkmWrxZVej6EixIgTMGJEBIg5iYayIZBNLOAmGB6Nw1dEhjUXR2Ee9qIcjNxcyMxH2fWp5LPBgCsclrPwCoauZGVl9XUI/cb
ZmgszFsHjq6XBc4jmoAfT38io3Hr2v/c7nCjiKoTFUk+WtZECSzMWl4+oihKz+AkaMYilEQ1n4vMUQbgAa/MI7KFsMpQbt8NFRm4+lpxMjMJcVMFwSEtHtdybMBCcrZ+LVBtwEzlNnz5dNm3S0xZoZ4d4NEg06MHTfJgmbw3xkBcj0owtHsZqxrEpRYwIhsWH3VqPy1qHGH4Clgg+0yQSykT5Cog3DsHqy8AVsJBmycRms+BwgSXLgjGoACNvMOQMAXcWqh/fP6CdOqXUhyIy/XRsa8AdcbQf7+Vst2XLFsrKyvo6jH5hIOfCjIaIeI8S8NUSCnmIRENE/DU4Y0EQk6gKYbE0k2Wtw2nxYHc3IEYQv0rcOxyJZBNrHIFqGkZT8xhcfgtZ7nwy8zKx5LuxjsxGzRwJzoF1tHA6DOTPRX824ApH+8HHznYVFRX6l6JFf8+FiGBGAvi9R2horMLnq8Ea8pJuBlv6GiKIpRGHxUe2tR5nWj2m4cNrCROLZhH2FRKqHYwZGI2tMYv0qAuXVeGyCbk5DqwFmRizSlD5w3j+xZf0kCMt+vvnYqAacIVD0/orMxYiHvYR8DcS8NcRjPiJ+mohGsIdC2AoCCofhqWBPFs9aelHSLM2ElBRAmYm8WAuwWAmR+uKMTzTcPut5AbTKXA7sY4cin1sPsbQoZCWcdYdOWj9iy4cmnaSYmacBm89/uZDhHw1EKjHEfZiNWMERBEljNUI4LQ0k2k/TG56LWGLDx8gZhrRYDbB+hE0+Mdgbcgly+ekMN2JpTAb66BMrEPTUVl5iauU9E1tWj804DrHzzvvPPnoo4/6Oox+IRgMkpaW1tdh9AupyIU31Myh2n0EmqpRYS9GJIjTjJKuYgD4JDHYndV2mJy0vWTajhBF4RUXgZibkH8Q0jCceN1YsuszSDNCWLMdGNlurIVZ2IoKIX8YyuE6rXHrz0WSzkXSWd053jrBiQYej0f/UrT4tLkwxaTa18jRxkpC9fvIDNSTI1EC8TTiRFHGEdyucgqdB0m3eggAdnHij+YS8w6l4dAcmg+XYvNmY3d4yMlRWPPTMPIysI51o4aMQqX3zrwQ+nORpHORGgOucLSfhetst2HDBt0J2qKnuRARorEw1Y1VNDQcxBdoxBZupiAeJA0hT8URSy2Gq4L8tN0MU2H8OAiaucRD+dRUXkjjgSm4fHGslhDZDgNbjgXLoExslw5BjRzfa3dFd0V/LpJ0LlJjwBUOTeupeDRIsKmao03VRH21mBEfGTE/FhLDaORZmxliqcGVVkeatQGx1lKvXPhi+aQFhlC773qMAyXYgulYDQ+OLCErz4Jj7lCMUSUoV2ZfN1HT+oQuHNoZwarihBor8DUdosl7FAL12OMhfCqA09pMrrWGDFc1ytpIlBjKsOAzM2gOjEHVTMFxeDCu+iHY4yYFqhFrehxLVhq2mYVYS87FyMjp6yZqWr8x4AqHy3V6OxIHsilTpvR1CL1OzBixoIeIvw5foAlv8yEk4mdiXjPlu54lQoB8+xEy3QcwrLW4DCg386iKDMFSP4es2kJsPhvpDUPINIU8mwdbng3r0HTsFw7FGDG6z081fVpn4+eiKzoXqTHgCofdri9PbDVy5Mi+DiGlzFiYWLCRgOcQTQ0VLUcRYUIoQhLGYvhx2WrJsB8m3V3OEdIIKkW1mcUngckUHJxAzp4hFEVt2DiMLTOIfUgmlnNHYBmUi8opQDndfd3M0+5M/1ycDJ2L1BhwhaOpqamvQ+g3Vq5ceUZ0/CWG+A4Q8dXS3FSF33MIQh6cZoQQMaKqGZftEJmuSjLs1fiUDS9WmnFwOJ5NdayA6Cc3Udg8DGdjDqMMwRJtwGGtwjYygmX6bNSQmSjLgPu4n5Iz5XNxOuhcpMbZ8Zuk9StmPEqk+TD+ur34mw9hhJqJKgOvxEmzHCHTUUFOVgVRFSaqHDTjpEEyiUTPIbv+86RXZ+OodpMTc5BvCWCTSkR5SCuoxzbNmZjvoXAC5A3Td1hrWgrowqGllJhxwk2VRHw1hDyHCPuOYjFjhJTiKF4y7HWMyNqC21aPAJWSyX6zlEjsKnIaRpC210FhYz4Wh4nNqMMSrcVmPYilKA/L6LEY2YNg+EU8/8pr+pulpvWSAVc4bLZPN5XkmaSwsLCvQ+hAxCTmrycaaCDQfIhQQzlGNDGacR0mfqOeoc4DDEnbRkgcYLrxWOBDyUEFLiLLW0T+/pGMqrVjSTdxRHZhN8uxFjowRpSgRk2CgoWQ5kapjkN/97dc9CWdiySdi9RI6ZAjSqlFwG8AC/CYiPzbMc8XA38BCoAG4EsiUtXdNvV8HP2HGQsTDTTSeHQXweZqHKEm/IadpngMG37s9koKXTvJsDYRAxoljSrlxsNYMplATkMh2Z8MReqj2DKi2OUgtsh+LJEajKHDMObdBENG69NNmnYaDIghR5RSFuD3wAKgCtiolHpeRHa0W+2XwBMi8lel1CXAz4Evd7ddfed40vr165k5c2av7MuMhQk3HSQabMLbeJB4oAFrPEKDWPCZBhgeChw7GOLewyhLgCOSRQM2tpnFhOOXMyQ6GvfBNIqrMsFvYnXHsNkbsYffxmY7iFE8DYaMQRVfCenZJ92R3Zu56O90LpJ0LlIjlaeqLgD2ish+AKXUMmAx0L5wjAfuafn5beC5E200Go2e5jAHrqNHj6Zs27Gwl1D9AWKhJvy1e1CxEE2WNCpjJodjLjJs+WSHKxmX9SYTM/ZzSLKpwk6FZQpDHBfjri9kWHkOhRVeJBrHnh3GJoewhtdhzQHL4KEwcgJq2CzILvzURxWpzMVAo3ORpHORGqksHMOAynaPq4AZx6yzFfgcidNZnwUylFJ5IlKfwri0Y5ixCBHfUcKNFYSaqohF/BAL02hxsi8mHIhmEA+OoogmRjo3MbNgI1mOWurjgzhgzWILUxieexWlTbOIbfYQKW/CXmhgje7EHduE1dqEMWIW6pxpMPS60z4arKZpvauvO8e/C/xOKfVV4B2gGjhu+Ful1G3AbQAFBQWsXLmy7bl58+YBsGbNmrZlJSUllJaWsmrVKsLhMJCYtH7+/Pls2bKFioqKtnUXLlyIx+Nhw4YNbcumTJnCyJEjO+ynsLCQmTNnsn79+g7fYhYvXkx5eTlbt25tWzZjxgyysrJ47bXX2pYVFxdTVlbG6tWr8Xg8ADgcDhYtWsSuXbvYvXv3Sbep1am0yVAmIwrSGJLmIeavo8a0sk+sVCgLUSMdW9MQxpiVFGXsYWbWJxQOO0hUXBw17ew0HFSHJ+NUoxjHXDLWWXB4FR7zE2y2PeQbHxGOFnDAksPh4ikEbG7mXdDSplWvp+x9WrlyZb98n3r7s9eaizOpTZ/mfWq99+tMatOpvE+nU8o6x5VSs4Afi8jlLY/vBRCRn3exfjqwS0SKutuu7hw/dWY0hGf/OwTr9gDgt2eyJQbvRK3MsU/AFfTjjm0j07qP84a+QVxlE7EN5oiRzrbQUULYmOy4kcEV44h/EsD0R7C6w1gjR3CEtmBNj2NMW4AaPQWVO7iPW6tpWn
sDonMc2Aico5QaReJI4ibgC+1XUErlAw0iYgL3krjCqlt6zvGk8vLybodUCHsOEWk+RNRfR6h+HwBRi51NtkGsa7KQU5/D8EiQa9w7KctcSkHuQYK4qVY5vGIOI0AGgyzjGOQtYcon6bgqs8AUjMEe3PkVWELvofLHoIaPQ5X8C6Tn9NkVUCfKxdlE5yJJ5yI1UlY4RCSmlPoW8CqJy3H/IiLblVIPAJtE5HlgPvBzpZSQOFX1f0+03UAgkKqQB5ytW7ce90sRjwQI1u0hVL+fSPMhArZ0qg0nO8iisjGbc6SJsc6dfCfvHwxyV2I1wvgsw6gwTTYaszg37zpyKGbOLpP47gCxGj+2fCvObA+2nLUYNbtQzTmokgtQ83+Iyukf18l3louzlc5Fks5FaqS0j0NEXgZePmbZfe1+XgGsSGUMZzozFibiPUqwdjehxgokFgaLjSp7DqvMDAJ+O9e6dnKlbTfF5+4EwGeMxW/LZV1EUSsOBjmnMMSczIVV5xBe3Ui04SjWXDtOYzcOYwOGkYNyjoDR56OGLkEVdHs2UdO0M1xfd45rpyAeCeCt3MjU3GqObHgMZXEQcmRS4xrKS82HKYzVUab2cGfWFrKdtcRMO3VqGnszb2Gj512IK0a7pjK6bjIT92URqWgCBeaoJtzZlVjNrVg8ByGnEOPGO1BFJX3dZE3T+pEBVzjc7jNvGOyeMGMhmg+sI1i3FzGjKHs6da7BPBEMcqVjNUMND+ONWmYUNBKOpdEYHslhNZFdaQUcCGwG6hgWCzLZvoSCD0dhlgewDU7HPjaD9LwDGDteRVUEUVMugTEXocbPRtmdfd3sHmu9mkjTuWhP5yI1BlzhsFgsfR1Cr2kd+8l3+B8Ea3YBEMgaxPvBfQy2bGGStZJfZxwB4L2Di9mgLsY13MDvPkSdZQdQxWjbOczKvoO88lGE19USqwtgKbKSefVI7OWvIuvWwZAxGIv/Lwwdi7IOzLHAsrKy+jqEfkPnIknnIjUGXOFobm7u6xBSSsw4/iPb8FV9iBkNAmBxWDDdexiWthqlIFcV0ugdzN6GWVQMmkbUHSB67n5qg2+Rbh3MCOdsSkKX4fowk1i1F4BwQS3OQVGcubtRoVp4/SAyciLGzT9B5Q/8PovXXtOj47bSuUjSuUiNAVc4zkTxaJDA0R2EGyqIeA8D4MoMk56zBqtUUBPLoco/lo8P30qF6cKR4SFvUBP+7G2gtpFnPYc8xjLGdwfpe3KIVHpQNgPLKDsZc61Yqt5HVWxLDCM5YjxqzFTU5beisgv6tuGapg1IunD0sebydfiqN6MMG5HcURy1N3GB8VsAnmu+iP0Hb8VhBEnL30Ha6HfIAoalT6fAOZ39G4YyO+NiYjubiXvCWAvcWIakkV2Wj237Mti3DfaBGjcTdf33YHiJHmlW07RPbcAVjjNlznERk4adLxNurMA5YgYv1q7F5XuJ6zNW4yeb1yPjaVYmQ8b/DYWVQtcEijNvZ1TmPILbamh+fR/T4rmYQ0I4zskjLbcJS9UG5Eg1bD8EykAtvAVj4oV93dReUVxc3Nch9Bs6F0k6F6mR0vk4UmGgDzkSC3oINx2kufx9xIxSn9XEcMvLZFv8eJjMYa/J1vQgkUAheeZVXDB+HLnOYgxlIe6PUPuHDwBwluaTMdqPOrIL2fpWYuOjJqPOnY4aMQ6VkduHrdQ0rb8ZKEOOpITX6+3rEE5Z7dYVRH1HsbryKDdsTMh4mqH2WrbXzuY1bwnBvAPYM6spSruYi0q+1eG1sYYA3rcPAJBf/AFqz3bYAzJmKuqi61FTLxuwV0SdDqtXr2b+/Pl9HUa/oHORpHORGgOucMTjxw2e22+JGSNYv59wUyXxkIeo7yiO4QEC3pXMctcA8MrR/wODPcTz11LoGMWk/H9hWHryS4H/gyq8a8oBsObayDZeQvkN1FXf4IWdh1i8+Nq+aFq/0zqaqKZz0Z7ORWoMuMLRn5nREGFPFWYsRNRXS6h+P2YshGQOpQY/E7KfJR5oZpd/Ju97L8YytIbIoNWMcM9hatZ9DHZPatuWmILv/YP411XiKhuCe2gtvPooDB6F5Ys/TKy0a2UXkWiapqXOgCschmH0dQidkniMIx/8GQBH9nAsziycRdP4fdU+RoTXc6X7Q+ojObxZcymWoR9hZHgpzr2Qcbn/D7cteVls+EAjod11BD9OjOfvKo7i3vlL+DgCoyZjXHtH27oOh6N3G9mP6Vwk6Vwk6Vykhu4c/xREhHjIQzzspXHPm5gRP7bJN3KocTVGYB1jeY80IzGhyh7/aLa6IIqFkuyrmDroKx0ujTXDMQKbD+N7twLrIDfuMQrbhkdQCtTkeag5n0WlZfRVUzVNG+DO6s7xUCjU1yEQD/sJ1u2hufw9ALzKSkxCZKSvY+jhP1EA1EULOKDy2W2zE8JCXs54xrjGUpp7DWnW7A7b82+sxrs60fHtGmni8q9EfVAJOYVYvvazLuPYtWsXpaWlKWvnQKJzkaRzkaRzkRq6cJwEMxYiWL8fz963EYuDT2yZ+C0buDpjLQCV3ik8XXU9sWFbwK4ocl3EgsIbSbcfP2eFxEwC/ziC9639IOAa3Iyr5hlUJTBkNMY134LRk7uNZ/fu3fqXooXORZLORZLORWoMuMLRm8xYhGDd3rbTUa1Trm4lnZrGMNeNeIx0m4ePDl/KNmsatvzdkL6VUZnzmV74dazG8aPLigj+dZX4P6xGwnHsHCDTWIuqF9Ssa1DTFqAcrt5uqqZpWo/pwtEJMx7Fe3AD/kOJyeXNnFFU+ivIdnyMjWbmOA6TWdCIR4r5h/0makccwRbZwawhdzE8YwYW1fn9FMEdNXhe+gQAl2M3acYmjMFDMK7/lS4WmqYNGAOuc3zq1KmyefPmlO6jad8aAke2kTV6Hltxcm7jreRbPRz1F3GQQnyuwzThIM1xDm5bPmnWXApdkyhKv6DTsaBijUGaVu4iVuvHdY4F177HUCMnYiz4Mioz/9TjbGoiOzv7xCueBXQuknQuknQuks7qzvFeISZpxRfy/r4XyLIcJD/Pw2NbHsI95U84LFBW8D0uzrr4BJsQvG/tJ7A5MdqtxRYmx/EG1v01qIlzMC7/Wm+0RNM07bTrnzdFdCOVQ47EI34aq3ZQVVnF0QPPcNmg5RRlOzjquI8h578CwDWjf8/oExSN4I4aah5ZT2DzYTLHB8g3Hid3xMfYFl6L8U+/Pm1FY82aNadlO2cCnYsknYsknYvUOOuPOCK+GiKeaprL1wEQNK3kplVQkLmOyKCf43ZN4Y2DPyIcb+aKkf/RaYd3K4mZeF7bS2h7Dc7xBaSPjqBe+W/UzKsw5nyut5qkaZqWUmd14TBjEXyVmzDjUdKLzsPj+09GWbdhKJPD+T/mUOQInxz9C3bDzZUjHybLMbzT7YgpBDZVt40plXn5WJx1byKvvAVjylCzP9uLrdI0TUutlBYOpdQi4DeABXhMRP7tmOdHAH8FslvW+b6IvNzdNp3Orr/x91Qs1Exz+TpC9fsA8OVfjKvxFwx27OQ1/6WE0qsJ1
j5JoWsSUwtupiTnKpTq/Kxe9IiPxr9vx/RHcU4YROaC0bD2KWTPh6j5SzDOu+xTx9uVkpKSlG17oNG5SNK5SNK5SI2UXVWllLIAnwALgCpgI7BERHa0W+dRYLOI/KdSajzwsoiM7G67n3bIETMWobnifcLNR6hzZbH3UBVxLywY81fepZioM4PJ+UsYmj4Vh6X7IT4kZnL0V+uwFrjIvnosRvVHyAcvgbcBNe/GxFAhNj1WjqZpfW+gXFV1AbBXRPYDKKWWAYuBHe3WESCz5ecs4NCJNtrc3HxKwQRr9+Ct2kQs0ABAdsZbFKp9jCl04Rl8Ll6rkzT7MOYM/laXp6TaMyMx/B9UJwLPXI964vcIoMbPRp1/BSpv6CnFeTJWrVrFokWLUr6fgUDnIknnIknnIjVSWTiGAZXtHlcBM45Z58fAa0qpOwA3cMLzOqZpnlIwjYd2EA3uIzd9E964g3XZ03qtAAAgAElEQVSBBUwf/ggWlyIaqWRTzWNcPPgOshxF3W4n1hgksOUwgU2JGpeu3sOo2IPx1X+FnMJendM7HA732r76O52LJJ2LJJ2L1OjrzvElwOMi8h9KqVnA35RSE0WkQ3VQSt0G3AZQUFDAypXJeSjmzZsHdLzsrqSkhNLSUlatWkU4HCZmGkwv/JjMtH2sC89kn7OSHPebvHL0LVBCmjGISFMmb+94H2XamTJlCiNHjuywn5G2AgbvN5BgjLA1Tm1umClNz2E5/2IODv88W9/d0LbujBkzyMrK4rXXXmtbVlxcTFlZGatXr26bXMbhcLBo0SJ27drF7t27T6pN7W3ZsoWKioq2xwsXLsTj8bBhQzKmztpUWFjIzJkzWb9+PUePHm1bvnjxYsrLy9m6dWuvtykrK4v58+efUptWrlx5xrXpVN6n1lycSW36NO9TU1PTGdemU3mfTqdU9nHMAn4sIpe3PL4XQER+3m6d7cAiEalsebwfmCkiNV1tt6SkRNonsCdWvLKNC7JewOcOsdW5A6VymTfsdgpdEzBU97UzfLCJxuXbALCPyCJjfhFWsxE5uBNZ/wLGbb9EpaWfVDyni54WM0nnIknnIknnIul09nH0qHAopezACBHZ2+MNK2Ul0Tl+KVBNonP8CyKyvd06rwDLReRxpdQ44E1gmHQTVI87x80weF/Cu/95vIHzAZNP3PspdxRzQeGNjM8d0qN2NKzYjoSiZF97Lrz9OOz5MPFE7hBUyQWoGVej+unkUpqmaa1OZ+E44V88pdRVwMfA6y2Py5RSz57odSISA74FvArsBJ4Ske1KqQeUUte0rPYd4P8opbYCS4Gvdlc0AAKBwIl2jZhxop7NNB94m2b/dNbGnFQVz8ZdOJEZhaN7VDREhHB5I2YwinvmcHjzz7DnQ9SiWzHu/hOWrz6IMeuaPi0ap/vwcyDTuUjSuUjSuUiNnvzVe4BEp3YTgIhsAcb2ZOMi8rKInCsiY0TkX1uW3Sciz7f8vENE5ojIFBEpE5HXut8iRCKRbp/3HdrK4Q/+m9odH+ILjODDumLOm3QO1aH/Ym/zKhyWzG5f36rxmR00Pr0da5YTa15i5Frjmm9hjJ/d5T0dva39ediznc5Fks5Fks5FavSkczwqIk3HXC3Ub4fUbajYQljqyc34gK3RQYRKq/iorp7RWfMZn/tZXLa8bl8vpuDfWE3kYBN5X5qCbUgGUnMQc99WmHBhL7VC0zSt/+pJ4diplPo8YCilRgF3AutTG9bJETGReJSor4ZYNILkVHLI7qHSlsPI9GLG5X6T/LRzMZSl623ETLzvVhDYlLg3wz2jCOvgdMzdG5GX/gsKR8KwHh1oaZqmndFO2DmulHID9wELWxa9CvxERIIpjq1T5513nnz00UcE6/YSCzRgxkL4D3/cEqxBVPzk5b7ENnMITZk/ZNHw8T3abt0TW4gd9ZFx6WjShsTgyB6kchfs/Qg1ZT7GpV9OYatOTTAYJC0tra/D6Bd0LpJ0LpJ0LpJ6+87xy0XkX4B/aRfA54C/n44ATlY8HgegcferuApGQbSCzPxmLLbDBPwHGGQ7zBuREqKu8/hMD4tGzBPCDETJ/9p5WOwhzBf+ADYHatAI1Gduh7HnpbJJp8zj8ehfihY6F0k6F0k6F6nRk17eH3ay7AenO5CeCgW8eA9+AEB2QT3Z7tcw0xvYF21it2HwphpC1D2EssLu58xoz/vmPgy3DZVmQba8BbEoxmU3Y8y7EXXOtF69G/xktL8h6Wync5Gkc5Gkc5EaXR5xKKUuBxYBw5RSD7d7KhM4tXE/TgO7JUrMs55M9wZiDX521V3ANmc9kWAJgVAeS2ZdRaa96IR/7CNVHrxryoke8YEp5Nw4AbV+BbL9PdTFX0DlFPZSizRN0waW7k5V1QDbgBCwvd1yL/D9VAbVHUPFcWUEsGR+jh11F/GPulosox7DGxpNJH1wtwMUxjwhopUexBTC+xow0mxkLy7F2rAFteK7CGBceydq9JTea5CmadoA02XhEJHNwGal1JMiEurFmLonVl7cMoUKTwF21y7igz8giwYG56cztWBSty8NbD5M5GATtgI3hstO2oQC7EVZxP/+MWrO51AXXNFv7tHoiSlTdIFrpXORpHORpHORGj3pHB+mlPpXYDzQNouSiJybsqi6ETDdTJ02g0LjCY4GDxCMhZiYt5BxufO7nD8jethL6JM6IuVNpI0vwH1BYgRcCXqRgzsg6EMVDB9QRQNg5MiRfR1Cv6FzkaRzkaRzkRo9+Uv5OPDfgAKuAJ4Clqcwpm5FYoLdbqPS9x75aVcQlMWUFXyx20mXmt/cT/SID3txFo5z85BwEHPLW5j/+W3MZ38DThfkDOrFVpwe7UfmPNvpXCTpXCTpXKRGT444XCLyqlLqlyKyD/ihUmoT8KMUx9YpZQlRE3kfALd9HNJhyo+upV9UjH1oJuZHb2CuXprY1qS5qIuuRzndKYtX0zTtTNOTwhFWiXM4+5RS3yQx0m33c6qmkiXMzub38JjjePHgNorc2d2uHtxeQ9zfbnyrSBA142qMOZ9NcaCapmlnpp4UjrtJzM53J/CvJKZ4/Voqg+pOyHRjOD7DaEcal2bkMyjt+BoW94QI7qwlvLee6BEfrmnDsOa6kIodyN6PUKMm90Hkp19hob5kuJXORZLORZLORWqc0kROSqlhIlKdgnhOaELZFNn60UdYja7HnfK+V0H4k3rsw7Nwji/APjQxIq65Zjl4GxKnp7IKeitkTdO0Ptdr83Eopc5XSl2rlMpveTxBKfUE0Ge3Y5rhSLdFI+4NY/qjOM7NI/OyMW1FQz7ZhJRvg8GjzpiisX59vxprsk/pXCTpXCTpXKRGd3eO/xy4DthKokP8ReB24N+Bb/ZOeMeLRqNtP4sIpi9C3BsmUtmM751yACxZDjLmjerwOtm7GTVkDKpkRm+Gm1Lt5zU+2+lcJOlcJOlcpEZ3fRyLgSkiElRK5QKVwCQR2d87oZ1YeF8DTS/sBlOw5qThnjUc9wXDMOwdm2W+9F/Ino8wrvomKiOnj6LVNE07M3RXOEKtQ6eL
SINS6pP+VDQgMYeGc0wu2deUHv9c41HMpT8DMwbRCMaN/wJDxvRBlJqmaWeWLjvHlVJNwFutD4GL2z1GRD6X8ug6MX36dNm0aRMAwV21hD+p77xwHDmA+fpfMT7/z2BYUDZHb4eqaZrWb/TWfBzXHfP4d6djh5/WieYcl0N7kW1rE0OJuLNQDlcvRdb7ysvL9ZAKLXQuknQuknQuUqO7QQ7f7M1AeioQCHT7vPnqXyAzHzX1UtS55/dSVH1j69at+peihc5Fks5Fks5FavTkBsB+KVzRRPSQ9/gnBIxLvoDKGdz7QWmapp0FUlo4lFKLgN8AFuAxEfm3Y57/FYm+EwAXMEhEuh1DxBBF+EAjjSu24zg3D8eYXMwd78P+LUhtFXjrQXV9n4emaZr26fS4cCilHCISPon1LcDvgQVAFbBRKfW8iOxoXUdE7m63/h3A1BNtN8204VtbgbM0n+zPJDrF48v+ghpUjJo+AVVYDFn5PQ1zQJsx48y5J+XT0rlI0rlI0rlIjRMOq66UukAp9TGwp+XxFKXUIz3Y9gXAXhHZLyIRYBmJe0O6sgRYeuJ4IOPiUW1Fo215yfkYk+YmCkg/nSP8dMvKyurrEPoNnYsknYsknYvU6Ml8HL8FrgbqAURkK8nTS90ZBh3GPK9qWXYcpVQxMIp2l/t2JRaL92DXZ4fXXnutr0PoN3QuknQuknQuUqMnp6oMEak45lv86f7rfROwQkQ63a5S6jbgNoDSoWNY++5avK4YAPPmzSMtFmPDu+/SmJY4C1ZSUkJpaSmrVq0iHE6cXcvKymL+/Pls2bKFioqKtm0vXLgQj8fDhg3J4bemTJnCyJEjO0wCU1hYyMyZM1m/fn2HYQwWL15MeXk5W7dubVs2Y8YMsrKyOnxoi4uLKSsrY/Xq1Xg8HgAcDgeLFi1i165d7N69u23defPmAbBmzZq2ZZ21qdWZ1KZP8z6tXLnyjGvTqbxPrbk4k9r0ad6npqamM65Np/I+nVYi0u0/4BkSp50+ItHJ/W3g6R68bhbwarvH9wL3drHuZmD2ibYpIkweViKRo15pL7b0Z2JWfSJnm+eee66vQ+g3dC6SdC6SdC6SgE3Sg7+xPfnXk1NV/wTcA4wAjgIzW5adyEbgHKXUKKWUncRRxfPHrqSUKgVygPd7sE2imQa2QemIaSLxGBKPwSkMDX8mKC4u7usQ+g2diySdiySdi9ToyamqmIjcdLIbFpGYUupbwKskjlT+IiLblVIPkKh8rUXkJmBZS0U8IZdFYW54EXnvWWgdXl0pcKSdbIgDXllZWV+H0G/oXCTpXCTpXKTGCSdyUkrtA3YDy4G/i0gnd931nrLiIfLRfz0ABSMwpl/el6H0udWrVzN//vy+DqNf0LlI0rlI0rlI6rWJnABEZAzwIDAN+Fgp9ZxS6qSPQE4fQU2ad9YXDaCtA03TuWhP5yJJ5yI1etLHgYisE5E7gfOAZuDJlEalaZqm9Vs9uQEwXSn1RaXUC8AHQC0wO+WRaSfkcOih4lvpXCTpXCTpXKRGT/o4yoEXgKdE5N3eCKo708cUycY1b6GKzu3rUDRN0waMXu3jAEaLyB39oWgAmHGzr0PoN3bt2tXXIfQbOhdJOhdJOhep0WXhUEr9R8uPzyil/n7sv16K73hmrM923d+0v5P0bKdzkaRzkaRzkRrd3cexvOX/fjHzX6uIYYe8oX0dxhktGo1SVVVFKBTq61B6rKioiJ07d/Z1GP2CzkXS2ZgLp9NJUVERNpstZfvobgbAD1p+HCciHYpHy419fTJDYMjqRKWl98WuzxpVVVVkZGQwcuTIATPScFNTE9nZ3U7lctbQuUg623IhItTX11NVVcWoUaNStp+e9HF8rZNlt57uQHoqIyOjr3bd77QOdHa6hUIh8vLyBkzRAEhP118mWulcJJ1tuVBKkZeXl/KzBV0ecSilbiQxHMioY/o0MoCmlEal9bmBVDQ0TUvqjd/d7vo4PiAxB0cRiZn8WnlJjGbbJ7zePh3xpF9Zs2YNixd3NzfW2cPn851VpyS6o3ORpHORGt31cRwADgBv9F44mqZpWn/X3eW4a1r+b1RKNbT716iUaui9ELWzTXl5OWlpaZSVlVFfX09ZWRllZWUMHjyYYcOGtT2ORCIntd2//OUvHDlypO3xhRdeeFwH4tVXX33S31C/9KUv8dxzz51wvTvuuIN169a17bv9fNjr16/nsssuA6Cmpob58+fjdrv59re/3WEbRUVFTJo0iYkTJzJhwgTuu+++tkl7TNPk8ssvp7i4mGuvvbbD6y688EJKSkracvfss89SUVHB/PnzGT9+PBMmTOB3v0teA3P33Xfzzjvv9KgtAEePHsVqtfLYY4+1LYvFYsfl8rHHHuvQpscff5yJEycyadIkzjvvPH71q191n8QeePjhh5kwYQITJkzg0UcfbVu+efNmZs6cyaRJk1i8eDE+n++41+7YsaMtR2VlZWRkZLTlZfny5YwfPx7DMI6bGOnBBx9k7NixlJaW8sYbHb9rx2IxJk+efNx70uo73/kOkydP5pZbbmlb9vjjj3d4P7Zs2cKttya7lp988knGjh3b5TZTrquJOkjM/AeJIdGP+3e6JgQ52X8TJkw4tVlMzkA7d+5MyXZ37NiRku321IEDB6Sz9/n++++XX/ziF52+JhAInHC7c+bMkc2bN3d4PGnSJHn//fdFRKS+vl6mT58uWVlZJxXvF7/4RXn22We7XaempkZmz57dYd/Dhw+X1157TURE3n//fbn00ktFRMTr9cratWvlkUcekbvuuqvDdoYNGyaNjY0iIuLxeOTzn/+8fO1rXxMREdM05Y033pBly5bJ4sWLu227iEh1dXXbMo/HI6NHj5bdu3eLiMjevXtl0aJFPWqLiMhvf/tbufDCC+WSSy5pWxaNRo/L5Z/+9Ke2Nr3wwgsybdo0OXz4sIiIBINB+dOf/tTpPntq8+bNMnnyZAkEAhKJRGTu3Lmyf/9+EREpKyuTtWvXiojIH//4R/nxj3/c7bYikYgUFBRIZWWliIhs375ddu/efVwut27dKlOnTpVwOCx79+6VsWPHSjweb3v+3//932XJkiXHvSciInV1dW15/spXviI7duwQn88nl1xyiUSj0Q7rzp8/X6qqqtoev/76651uU6Tz32F6YyInEWm9RXt4S6GIk5jV7xuAO2WV7AScTmdf7brfKS0t7esQ+txf//pXLrjgAmbNmsXtt9+OaZrEYjG+/OUvt30z/+1vf8vy5cvZsmULN954Y4ejlZtuuolly5YBsGLFCq6//vq2bZumyT333NP2jXjFihVty2+//XZKS0tZsGABdXV1ba/ZuHEj8+bNY9q0aVxxxRVtU4g+/fTTXHHFFR1i/973vseDDz54XJvS09OZM2fOCT/rmZmZPProozz11FN4PB6UUlx66aXk5eX1KHdDhw5tm68iMzOT0tJSqqurARgzZgyHDx+mtrb2uNd11palS5fy61//mv3793P48OEe7f9nP/sZDz/8MIMHDwYSv9tf//rXe/Taruz
cuZOZM2eSlpaGzWZj/vz5PPvsswDs27ePOXPmALBgwQKeeeaZbrf1+uuvM27cOIqKigAYP3485557/FBHK1euZMmSJdjtdsaMGcOIESP48MMPAaioqOD111/vcDTRnsViIRwOIyIEAgFsNhsPPfQQd999N1Zrx56Eq6++muXLl3e6nd7Wk4mcngPOV0qNAf4beBH4X+DqVAbWlebm5r7Ybb+0atUqFi1alPL9fOPd/z3t2/zjRV/41NvYtm0bzz77LOvWrcPv9/O9732PZcuWMWbMGOrq6vj444+B5LX8jzzyCL/73e86TO6zYMECbr31VkzTZPny5fz5z3/m5z//OZD4A7lz5062bt1KbW0t559/PnPnzmX16tUcOHCAHTt2cOjQIcaPH883v/lNwuEwd911F88//zz5+fk8+eST/OhHP+LRRx/lvffe40tf+lKH+C+66CJWrFjB2rVrj/sj0VNZWVkUFxezd+9epk2bBoDf7+903RtvvJG0tMSEZ6tXr+5wGmn//v1s27aN888/v23Z1KlTWbdu3XEXYBzblvLychoaGpg2bRo33HADTz31FHfdddcJY9++fXtbzN154oknePjhh49bXlJSctwf0kmTJvGTn/yEhoYGHA4HL774InPnzgUSX7RefPFFrr76ap5++mkqKyu73e+yZctYsmTJCeOrrq7uMOdHUVER1dXVnH/++Xz729/mF7/4RYcvF+1lZ2ezYMECpk6dysKFC3E6nWzevJmf/OQnx607ffp0fv3rX3PPPfecMKZU68mn1RSRqFLqc8AjIvJbpVSfXVVlmnqsqlat57ZT7XT8kU+FN954g40bNzJ9+nTi8TiRSIThw4dz+eWXs3v3bu68806uuuoqFi5c2OU2bDYbM2fOZNmyZcTj8bZvlwBr165lyZIlWCwWBg8ezIUXXsimTZt45513WLJkCYZhUFRU1PZHY+fOnWzfvr2tr6L99g4fPkxBQcFx+//BD37AT3/6007/UPSUHDNQ6bGPWy1fvrzTGfGam5u57rrreOSRRzrc9zBo0CAOHTp03PrHtmXZsmXceOONQOII7vbbb+euu+7q8rLQk71c9Oabb+bmm2/u0boTJ07knnvu4bLLLiM9PZ1JkyZhsSRmCn388ce56667uP/++1m8eHG3d1aHQiFeeumlTgtWTz333HMMHz6csrKy4/o92rv33nu59957Abjlllt48MEH+eMf/8ibb77J1KlT257r6v3oCz2aOlYpdQPwZaC1JyZ197JrWg+JCF/72tf46U9/etwdwv/4xz945ZVX+P3vf88zzzzToZP0WDfddBM33HBDp6eNTjaeyZMn8+67x48HmpaW1ulNWQsXLuRHP/oRmzZtOqV9ejweKisrOeecc07p9ZFIhM997nPccsstXHPNNR2eC4VCbUco7R3blqVLl1JXV8df//pXAA4dOsT+/fsZPXo0hmEQi8XajqgaGhrIz88HEqd+Pvzww7Yjgq6czBEHwG233cZtt90GwJ133tl2emn8+PG8/vrrQKITfNWqVV3u86WXXmLGjBltsXZn2LBhHY5eqqqqGDZsGE8//TR///vfef755wmFQjQ3N/OVr3ylLU/H2rRpEzabjZEjR/L973+fl19+mZtvvpkDBw4watSoLt+PvtDTO8cvBh4Skf1KqVHA0tSG1bXWbw9a4jTF2eyyyy7jqaeeoq6uDovFQn19PQcPHqS2thYR4YYbbuCBBx7go48+AhKjDnR2H9D8+fP5/ve/3/atudVFF13EsmXLME2To0eP8t577zF9+nTmzp3L8uXLMU2T6upq1qxZAyT+MFVXV/PBB4nReiKRCNu3bwdg3Lhx7N27t9N2/OAHP+Chhx466fZ7vV7+6Z/+iRtuuIHMzMy25YbRo/nZEBG++tWvUlZWxp133nnc85988gkTJ048bnn7tuzYsYNYLEZ1dTXl5eWUl5e3nTIEmDt3Lv/7v4lTnYFAgKeffpqLL74YSHzT/u53v9vWDxQOh/nzn/983P5uvvlmtmzZcty/rs7319TUAIlTaKtWreKmm27qsNw0TR588EG++c1vdpmbpUuX9ug0FcA111zD0qVLiUQi7Nu3j4qKCqZNm8ZDDz1EVVUV5eXl/M///A8LFy7ssmgA3HfffTzwwANEIpG2MytKKQKBAND1+9EXejJ17DbgTmCTUqoUqBSRf015ZF3QQ44kne1zKU+aNIn777+fyy67jDlz5rBw4UKOHj1KZWUlc+fOpaysjFtuuYWf/exnQOI0wNe//vXjLuU1DIPvfe975Obmdtj+9ddfT2lpKZMnT+ayyy7j4YcfZtCgQVx//fWMGDGC8ePHc8sttzBr1iwgMWnQihUruOeee5g8eTJTp05lw4YNAFx11VWsXr2603Z85jOfIScnp8OyoqIi/vmf/5k///nPFBUVdRjl9aKLLmLSpEnMnDmTMWPG8Ic//KHtuVmzZnHrrbfy6quvUlRUxJtvdj2k3Jo1a1i6dCmvv/562+Wnr776KpD4I15eXs7UqVOPe137tixdupTPfvazHZ6/7rrrWLo08d3ykUceYdmyZZSVlTFz5ky++MUvMnt2Yh64a665hm984xtccsklTJgwgWnTpnV6iezJuvbaaxk/fjzXXnstjz76aFtR/dvf/kZJSQmlpaWMGjWKL3/5ywBUVlZ2ONryer28/fbbx13q+vTTT1NUVMTGjRu5/PLLueqqqwCYMmUK1157LePGjePKK6/kD3/4Q4+Ld6sVK1Ywe/ZsBg8eTH5+PqWlpUyaNKn1SlIA3n777bZ99rkTXXYFXASUA+8B64D9wJzTdVnXyf4bN25cp5efnY2OvbzydOmvl+N2x+/3pyia08M0TZk9e7Z4PJ6U7+t05OKpp57q8nLV3mzLp9XfPxc9FQgEZMaMGRKLxdqW9cvLcdv5FXCliMwRkdnAVcBvUlLFeuBkb/o6k1VUVPR1CClhsVjweDydduR2pb9/LpRS/PKXv+TgwYMp39fpyIWIcPfdd3f6XG+25dPq75+Lnjp48CAPPfRQ26n6J598kjvvvPO4I9Xe0pPOcbuI7Gh9ICI7lVL2nmxcKbWIRJGxAI+JyL91ss7ngR8DAmwVkf55CY/Wa4YPH37CSyUHotZTWgPB5/8/e+ceF1Xx/vHPgIAChoq3BBUFBZaFXbyBQYIXFPMCJZqlKVqaWppalpZimd80y0ua/jS11EIwL4WVeCsxTMlLoqbmJUWFVBQFucMuz++PZYdddlkW3RWQeb9e+9IzZ2bOzLOHMztn5vk8w4YZPF+b+vIk4O7uDnd3d348YsQIjBgxotraY8zA8RdjbDWA70qPR8AIkUPGmCVU4oghAFIBHGOM7dQchBhjHQDMgurV133GWPOqdkAgEAgEjxdjXlVNgGpd493SzxWovMcroxuAy0R0hYiKAMQCKC/lOg7ASiK6DwBElF5ZpZq7R+o6hvwT6hrivihD2KIMYQvzYHDGwRjzBuAK4Aciqup+QScAmu8bUgH4lcvTsfQ6f0D1OutDIqp4czVUTlUCFV
lZWTVmX3d1o1Qqq7yT5UlF2KIMYQvzYCiQ0/tQRfr7CyrJkXlE9LUZrt8BQDBUcT9+Z4x5E5FWoCjG2HgA4wGgWbNmiIuL4+fUUfDUe+kB8C13u3fv5t7VDg4OCA4ORnJystaict++fZGVlcW3TQKq7XUuLi5a12nRogX8/f2RlJTE950DQFhYGFJSUnDq1Cme5ufnBwcHB+zdu5entW3bFnK5HAkJCcjKygKg2r4ZGhqKf/75R2u7pbF9Ul/f1H3q2LEjCgsLkZ+fz9Ps7OxgaWmpJflibW0NW1tbZGdn8wGdMQYHBwfk5+drtVPtkay53dLGxgYNGjRAVlYW93a2tLREw4YNkZeXp7Ww+dRTT0GpVGrJaTRo0AA2NjbIzCy7XerVqwd7e3vk5ORAoVDw9EaNGtWZPpWXHHkS+vQkfk/m7FNeXh7i4uK0nnsmpaLtVgDOArAr/X8zAMeqsl0LKkHEPRrHswDMKpdnNYAxGse/AuhqqF5XV1e928/qIj/++KNZ6q0J23Hr169PMpmM7t69SzKZjGQyGbVo0YJatWrFjwsLC3kZtWKsIdavX8+VWIlUirEuLi5aeQYMGGAWdVwiojfffJP++OMPfu1u3brxc5rquLdv36agoCCytbXVq44rlUrJy8uLJBIJzZkzhwoKCvj5devWUfv27cnNzY2+/fZbnXJq2yUlJfH8bm5uOvl79epFmZmZevuhVCopODiYsrOzedrWrVsJAL0NxqoAACAASURBVF26dImn6dsuqmmroqIimjFjBrm6upKvry91796ddu/eXakdDVFQUECjRo3iff3555/5uejoaJJKpSSRSGjmzJkV1nHy5Eny8/MjiURCUqmUioqKDJb/9NNPycPDg3x8fKhPnz50/fp13n+1vWUyGVlbW9NPP/2kc73p06eTt7c3RUZG8rRvvvmGVqxYodUmtQoyEdF3331Hrq6u1bYd19CD/69yxyeqVLFqNnEFQDsA1gBOAfAqlycUwMbS/zeF6tWWo6F6xcBRxpM8cFRVVt2YgeNJl1W/c+cOtWvXjlJSUuju3bvk4uLCH/6a5dSo89+/f18n/7p162jhwoV6+/Ljjz/SO++8o5X2wgsvUGBgIM2bN4+nVTZwvP322zRmzBj+A+DmzZu0detWg3asjGXLltFrr73G65PL5VRSUkK3b9+mNm3a0N27d6mkpIRefvllSkhI0ClfVFREUqmUTp8+TUQqGymVSoPlf/31Vy7rv3z5cnr55Zd16k1PT6cmTZpQfn6+VvoTJ6sOoD1jbEfp5wcArhrHOwyUU89kFADeBLAHwHkA3xPRWcbYPMaY2k1zD4AMxtg5AAcAzCCiDEP12traVnbpOoNMJqvuJlQ7aln1oKCgOi+rHh8fj/79+6Nly5ZwdHREr169tF6Xlkedv1GjRjr5w8LCuFRIeaKjo7UUcx88eIA///wTa9eu5basjOzsbGzYsAHLly+HtbVqd3/Lli217P8wnDt3Dr169eL12dvb4+TJk/j333/h4eEBR0dHMMbQp08fvbLq8fHx6Ny5M7y9vQEATZs2hYWFhcHyvXr14muN/v7+SE1N1al369atGDhwoM53+iTKqg8pd/yl3lwGIKJdAHaVS4vS+D8BmF76MQr1TSYAXFxcHst1lEterTxTFbGcrqtJVFU0ZdXr1auH8ePH12lZ9bS0NLRu3Ro2NjYAyuS9Na9naWkJW1tbHD58mOdXo5m/adOmyM7O1hGPBIDDhw9jw4YN/PiHH37AgAED4OHhATs7O5w6darSHzWXLl1Cu3bttNR4K2LKlCl6oxGOGDECM2bM0EqTyWSIi4vDsGHD+NrjjRs3EBAQgLNnz+L69et4+umnERcXp1el9+LFiyAi9O3bF3fv3sWIESPw9ttvo0OHDkaVX79+vc4PBEClIPz+++/rpD9xsupEVLHITTWiuRhV14mLi9OJlWAOTPGQNwdCVl0FkbaMur6HPQAkJiZWKSxus2bNcPPmTZ0yDx480Jr5x8TE4L333gOgmsHFxMRAJpOZTFZ9+fLlRucdN24cLly4gM6dO6Ndu3bo1q0bLC0t0bRpU6xcuRIRERGoV68e/P399Xq+KxQK/PHHH/jzzz9Rv3599OzZE126dEFQUFCl5Tds2IAzZ87otDc1NRUXLlzg90V5nlRZdYGgRkIkZNU1ZdXPnTuHpKQkfi41NdWgmqqTk5PB/BXJeGtub71z5w4OHjyI8+fPgzEGhUIBKysrLFiwAI6Ojrh//75WWbWseocOHXD16lXk5ORUOuuoyozDysoKX3xRpojUuXNnLqseFhbGf2itWrVK76tAZ2dnBAUF8SiK/fv3x19//YWgoCCD5Xfv3o3PPvsMBw8e1HkrsmXLFgwZMqTSWeWTJqsuENRINGXVAdR5WfXQ0FDEx8cjKysLGRkZ+PXXXw3OttT5MzMzdfIrlUrcvXsXbdq00Snn5uaGlJQUAKrXeWPHjsW1a9eQkpKC1NRUtGrVCkeOHIGHhweuXbuGixcvAgCuXr2Ks2fPwsfHBw0bNsSoUaMwdepUFBcXA1DJnqvXkTRZvny5Xln18oMGoIp+qJYhj4+Ph52dHR841LLq9+7dw+rVq/WGqe3fvz+Sk5ORn58PhUKB33//HRKJxGD548eP44033uCvKMtjrER7bZJVN3rGwRizIaLHE3LOAIaidtU1WrRoUd1NqFY0ZdUVCgVsbGywevVqWFpa4tVXXwURgTGGTz/9FECZrHqDBg34wx0ok1UHoLX/PiIiAklJSfDx8QFjTEtW/cCBA5BIJGjTpo2OrPqUKVPw4MEDKJVKvP322/Dy8sKAAQOwceNGREZG6vRj0KBBmDNnjlaas7Mz8vLyUFxcjG3btuHXX3/lWkXPPvssANUi/QsvvIDZs2cDUL1amjVrFnr16gXGGObNm2cwZos6f5cuXQBAK/+xY8cQGBio13lOLaseGRmJmJgYzJ07V+u8Wlb9mWeewaZNm/DKK6+gsLAQ1tbW+Prrr3lohIULF+L999+Hp6cnGjRoADs7O3z88ccVttcYbt26heeee46/RtScab7xxht8IP/www/Rvn17AKo1mjNnziAqKgqOjo6YMmUKOnfuDAsLCwwaNAj9+vUzWP6dd95Bbm4uhgxRLQu3a9eOxzm/fPky0tPTERgYaLDdmrLqALisuq+vr5as+uN4NW0UlW27gko65AyA66XHMqhCyJpkW1dVP507d9a7/UxgOmrqdtzaTG2SIicimjRpkt7tqkREN27coH79+j3mFtVtaqOs+nIAAwFklA40p6CKCFgtlPeKrctovp9+kngYWXVTBAAyJ49TitwUtvD19eUKBuVxdnZGZGRkjbc5UPPvC2OpjbLqFkR0rdxOiGoTjFK/DxVASybkSeJhZNU1XzHVVB6XFLkpbKHv/b8m6nCsNZ3acF8YQ22UVb/BGOsGgEql0icDuGjeZgkEAoGgpmLMq6qJUDnotQFwG4B/aZpAIBAI6iCVzjhIFSOjxsxLq+LA9KRTY3ZY1ADEfVGGsEUZwhbmodKBg
zG2FqqwrloQ0XiztKgSnpQYwqYgJSXlscmO1HQKCwu51EZdR9iiDGEL82DMq6r9UMmd/wrgDwDNAVSbP4faGUYArRggTxIpKSlo0KAB5HI5MjIyIJfLIZfL0bJlSzg5OfFjzR8RmrEOKuLrr7/GrVu3+HFgYCDatWunlWfgwIFV/pU6cuRI/Pjjj5Xmmzx5Mg4fPsyv7edXFtcsKSmJS1Kkp6cjODgYdnZ2mDp1qlYdzs7OXLzRy8sLUVFRPE5DSUkJ+vXrh+bNmyM8PFyrXGBgINzd3bnt1H4Go0ePRrNmzXR2sE2bNk2vt7a+vgCqjRr16tXDunXreJpCodCx5bp167T6tGHDBi4i2alTJyxdurTCaxrLkiVL4OXlBS8vLy0v8pMnT8Lf3x/e3t4ICwvTu+Pq3Llz3EZyuRwNGzbEl1+qZPoyMjLQu3dvdOjQAf369eNxdRYuXMjze3l5oV69esjKyjJYlyZLly6FVCrFwIED+eafhIQELQdHtX+KGrUfUVV2HpqUqu7fhWqwOWyq/cBV/QhZ9TKErHoZQlZd5Suyf/9++u6773T295fvu5qEhAT6888/SSaTaaVfvnyZy31X1hcilZx4YGAg9erVi6cVFxfr2HLt2rW8Tz/99BN17tyZx0jJz8+ntWvX6r2msZw8eZJ8fHwoLy+PioqKKDAwkK5cuUJERHK5nA4dOkRERGvWrKEPP/zQYF1FRUXUrFkzunHjBhERTZs2jd9/H3/8Mb3//vs6ZXbs2EEhISGV1qWJn58fKZVKmjt3Lu3atYuUSiWFhITo3NMjR47kcVSIiC5duqTzvampCX4c5WkHoG67LAtqDGpZ9WeffbbOy6ozxtC7d2/Y2dkZbb+goCA0adJEJ93V1RU3b97EnTt3dM7p60tMTAyWLVuGK1eu4ObNm0Zd+5NPPsGSJUu4t3T9+vUr3QZcGefPn4e/vz8aNGgAKysrPPPMM3x29e+//yIgIACAShVZn6y6Jvv27YOnpycXqoyLi8Po0aMBqGZq+maZFcmLlK9LEyKCQqHgsuobN27E4MGDdWZr4eHhiI6ONsIK5seYNY77KFvjsABwD8BMczbKEFX5o3jS0XzVYU6WbHw4AT5DTB/d5ZHr0JRVJyK88cYbdVpWvXPnzgBQ4YDz4osvcpG8hISESl/J+fr64vDhwzqbMMr3JSUlBffu3UPnzp0xdOhQfP/993jrrbcqbfvZs2d5mw2xadMmLFmyRCfd3d1dJz6Ft7c3PvroI9y7dw82Njb47bffuNyHh4cHfv75ZwwcOBBbt26t1FcoNjZWaxDIyMjgCsdOTk46A2ROTg7279+PtWvXVlqXJhMnToSfnx98fHzQrVs3LFiwAHv27NHJ16VLl0cW4jQVBu9WpvL6kwFQi/qXlE55qg2156QABnWITIkpHvLmQFNWHVCtcwhZdejVlwJUKq1VeSdekYx3+b7ExsZygcjhw4dj0qRJeOutt0wmqz5q1CiMGjXKqLxSqRTTp09Hnz59YG9vD19fX/7M2LBhA9566y3MnTsXYWFhBnXvCgoK8Msvv+gdsCoiLi4OQUFBOn+XldUVGRnJNcyioqIwbdo0/Pzzz4iOjkabNm3w+eefgzFWo2TVDb6qKh0kdhGRsvRTrYMGAK3g7nUdQ9Hd6gJEKln15ORkJCQk4MKFC5gzZw4cHR1x+vRpPPvss1i5ciVef/11g/UMHz4ckydP1lHHfZj2+Pj4cPXWM2fOID4+HoBhWfXMzEyTyKqrMdUGkopkvMv3JSYmBuvWrYOLiwteeOEF/PXXX7hy5QosLS1hYWGh5b2tllUHVGrCJ06cqLQdmzZt0lpkVn8q+r7Gjx+Pv/76C7///jvq16/P1XElEgn27duHEydOICIiAm5ubhVe85dffoGfn5+W2q2joyN/dZeWloann35aq0xFswp9dekjNTUVycnJGDhwIJYsWYItW7bA1tYWCQkJACr+PqoDY9Y4khljvmZviUBQRYSsurasuqmpSMZbsy/nzp2DQqFAWloaUlJSkJKSghkzZvB1ox49evAQtHl5edi6dSt69lRJ3c2aNQvvvPMOXwcqLCzE+vW6QcNGjRqlV1a9ojCqavnzlJQU7N69m8ujqNNLSkowf/58TJgwocK+61urGDx4MDZu3AhAtbam+Qrv/v37OHz4MAYNGmRUXfqYPXs2fxVVUFAAxhgsLCxqpKy6od1T9Ur/PQtAAeACgL8AnATwl6lW56v6EbuqyhC7qoiio6NJJpORRCKhTp060dGjR+nEiRMkl8tJJpORXC6nPXv2EBHRli1bqGPHjiSTyaiwsFDvTiPNnUBKpZKmTZtGXl5eJJVKaevWrTx9woQJ5O7uTiEhIdSvXz++q+rEiRN8t5ZEIqH169cTEdFvv/1Go0eP5tfRvHZJSQn5+PjwXVVEqt1TjRs3Jnt7e3JycqJ//vmHp0ulUpJKpSSRSGj27NlUUFDAy/n7+5OjoyPVr1+fnJycaP/+/TrX0yQiIoJatmxJVlZW5OTkRN988w0RERUUFJCHh4eWGqsazb7Mnj2bPvjgA63zJ06cIKlUSkRE169fp/79+5NMJiNvb29aunSpVt5169aRRCIhiURCXl5etGzZMp3rVZXu3buTp6cnyWQy+umnn3j6559/Th07dqQOHTrQ+++/TyUlJbyNgwYN4vkePHhATZo0oQcPHmjVm56eTsHBweTm5kYhISF07949fm7t2rU0YsQInbZUVFd5jh49SuPGjdNqq0Qiof79+1NhYSERES1YsIBWrVrF81TnripDA8dfpf+66vuYqgFV/Xh6elZk+zqHvgeBKaipA4chcnNzzdQa0/A4ZdVNYYvvv/++wu2qtUkivqbfF8ZSUlJCAQEBlJmZydNq6nZcVjoj+VffxwyTH6PQjHVc16k25x8z8zCy6jX9vnicsuqmsAURYdq0aXrPPc6+PCo1/b4wlvT0dLz77rt84f3AgQN4/vnnK103MReMKljvZoylAqhwSwERGb/dwIS4u7vThQsXquPSNY6EhAS+o8eUnD9/Hp6eniav15xkZ2fzyHJ1HWGLMuqqLfT9DTPGThCRSbZIGtqOawnAHqUzj5qCUlltoUBqHGrJA4G4LzQRtihD2MI8GBo4bhLRvEepnDEWCuALqAahdUS0sNz5SACfocxP5EsiWgeBQCAQ1FgMDRyPNNMoDfq0EkAIgFQAxxhjO4noXLmsW4joTWPrrci5qS4iVD/LqKpT2ZOMsEUZwhbmwdBTuPcj1t0NwGUiukJERQBiATxyAAlz7FevrYSGhlZ3E2oMj8uLvjYgbFGGsIV5qHDGQUT3HrFuJwCaYjCpAPSJKw1hjPWAKhztNCLSEZBhjI0HMB4AWrRogbi4OH4uKCgIALgTFqDSsPHw8MDu3bu55LSDgwOCg4ORnJyMa9eu8bx9+/ZFVlYW/vzzT54mk8ng4uKidZ0WLVrA398fSUlJWrG+w8LCkJKSoiVx7ufnBwcHBy3P7rZt20IulyMhIYGvTdjY2CA0NBT//PMPNBf8je2Turyp
+9SxY0cUFhZqSZXb2dnB0tJSy3Pf2toatra2yM7O5u+SGWNwcHBAfn4+byegEu4DoCVlbWNjgwYNGiArK4vLZlhaWiIjIwOenp5wc3NDXFwcwsLCYGlpiVu3bsHCwgKOjo4AgMOHD6Nhw4bIzMzkddarVw/29vbIycnR8lhu1KgR1qxZg+DgYLRoodLoHDBgAP777z+cPHmS53vppZdw5MgR3Lhxw+g+vfzyywgLC8OAAQMq7FPDhg0xceJEhIeHw8/PD6GhoSAiHDp0CLm5uTh27Bj+97//Yc+ePbCxscHs2bOxefNm1KtXD4sXL8agQYOQmZkJR0dHSCQSFBcXo379+hg5ciTGjh3LZ+JffPEFNm3aBEtLSyxatAjBwcGwsLBA48aN4eXlxdsUGxsLLy8vzJ07F99++y3q1auHRYsWYeDAgSgoKEDfvn3x008/wdLSUqdPubm5GD58OA4ePIiCggIUFRVhxYoVWLBgAW7evAlbW1vk5uZi06ZNOH/+PJYtWwYbGxtkZmYiNDQUn332GXx9fVFSUoIpU6bg4MGDcHBwQMOGDbFkyRL4+Pg89L2nVCrxyiuvcGn+L7/8kkvSfPrpp/juu+8AAOPGjcP06dP1fk+//PIL3nvvPSiVSjRr1gwJCQlQKpVYtGgRLz9+/HhMmzaN33urVq3CN998AysrKzz33HOYPXs2AODzzz9HbGwsLC0t8emnn/LNLOo+3b59GyNGjMDNmzcxadIkTJ48GdnZ2Zg0aRJef/11eHt7w8HBAZ999hkaNmzInRnnzp2LLVu24K233sLEiRN1/p7y8vIQFxen9dwzKaba11v+AyACqnUN9fErUK1haOZxBGBT+v/XAfxWWb3CAbAM4QBYxpMkq37q1Cny9fWlwsJCunz5Mrm5uZFSqdSRKb916xYFBwfTvHnztMrdvn3bYDk1FV2HSOXYFxsbq7cvy5Ytoy+//FIrrVOnThQYGEibNm3iaZoS6pr9Vn8HQ4YModmzZ3NHvMuXL9OuXbsM2rEypk6dSvPnzycior///puCg4OJSFduPTg4mMuta5KRkUGenp5c/vz27duVlt+7dy/17duXO2Kqyxiyr5rt27fTggULSKFQkL+/PxGpHCg1nQGJVFL7nTp10kr74IMPdBwq1dREWXVjSQPQWuPYGWWL4OpBK4OI1D/h1gGoXCpTINDgSZRVj4uLw0svvQRra2u4urqiTZs2ejWdWrRogTVr1mDFihVVKmfMdQxJeEdHR2vJbVy8eBEKhQIffvghYmJiKryeJhcuXEBycjI++ugjvg7h6uqqY6Oqcu7cOfTq1QsA4OXlhcuXLyMjI0NHbr1Hjx5cbl2T7777DsOGDePilM2bNwegK9euWf7//u//MGvWLL7mqC5jzPdhZWWFvLw8rdlxVFQU5s3T3pdkb2+PVq1acfmc6ubhtJyN4xiADoyxdlANGMMBvKyZgTH2NBGptYkHAzhvxvYIHpJbnx0yeZ0tZwQ+ch2asuo5OTl49913nwhZ9bS0NC3/HGdnZ6SlpcHXV1cyrmPHjsjPz0dGRobBctnZ2bzfbm5u2LZtW4X5u3btCplMhqSkJJ3rFRQUIDU1VUtFOCYmBsOHD0dwcDDGjBmDu3fvVuqYdvbsWfj6+hq12SUiIkKvzteMGTMwYsQIrTSZTIYdO3age/fuOHLkCP777z+kpqbqyK3Hx8fz2ByaXLx4EYwxBAUFITc3F1OnTsXIkSMNlr948SISEhLw3nvvoUGDBli8eDE6d+5s0L5qQkNDER0dDX9/f8ycORM7duyAv78/j1GiSZcuXZCYmIhOnTpVajNzY7aBg4gUjLE3AeyBajvu10R0ljE2D6op004AUxhjg6HSwroHILKyeuuiM09FqNdCzI0pHvLmQFNWnYhQUFAgZNVRtvaiScOGDav0nrtevXpgjCE/P19LkTU9PV0n8FNsbCx27doFS0tLhIeHY9u2bZgwYYLJZNXVMz1j+OCDDzBlyhTI5XLIZDL4+PjA0tLSoNy6JgqFAmfOnMG+ffuQm5uL7t27o3v37gbLKxQKvqZ45MgRvPjiixUKWpbHysqKz3iLiooQGhqKnTt3YurUqUhNTcWYMWMwYMAAAKqZTEpKitG2MCfmnHGAiHYB2FUuLUrj/7MAzDJnGwRPLkQqWfWPP/4YCoVC61f76dOnER8fj5UrV2L79u346quvKqxn+PDhGDp06CMHySFSyaonJibqnDMkqz5nzhwtWXUnJyetIEOpqalwcnLSe82LFy/C1tYWjo6OVSpnzHWKiop0tnyX78fJkydx5coVrnhbWFiIjh07YsKECXB0dMT9+/e1yqtl1evXr4/k5GSUlJRUOuuoyozDwcGBK9iWlJTAxcWFx5UfP348xo8fDwB499139cqqOzs7w8nJCba2trC1tUVAQABOnz4NV1fXCss7OzvjhRdeAAB0794dxcXFuH//fpW/jxUrVmDs2LFITExEs2bNsHjxYvTu3ZsPHLVNVr1GoU8Wu66iueuqLqIpq56Tk/PEyKoPHjwYMTExKCoqwr///otr167pjZSXnp6OiRMnYvLkyVrl7t27Z7CcMde5ffs2nJycdB7qzZo1Q35+Pl8jiomJwfz587mk+n///YerV68iNTUVfn5++P3337mc+Z9//gkiQqtWreDu7g5vb2/MmzePz5iuXr3K45dosm3bNr2y6uUHDUD1WrK4uBgAsGbNGvTo0YNHDdWUW9+5cyffoaRJeHg4EhMToVQqkZubi6NHj8LDw8Ng+fDwcBw4cACAatYJAI0bNzb6ewRUIQH27NmDESNGIC8vj9tdc3dZTZJVN+uMQyAwJ97e3pg7dy769OnDt6euXr0alpaWePXVV0FEYIzh008/BQCMGTMGr732Gho0aMAf7oDKqXTGjBkAoLVIGRERgaSkJPj4+IAxhiVLlqB58+aIiIjAgQMHIJFI0KZNG3Tv3h2Aajvktm3bMGXKFDx48ABKpRJvv/02vLy8MGDAAGzcuJFHetNk0KBBmDNnDj+WyWQIDw+Hp6cn6tWrh1WrVsHCwgIlJSV8raK4uBhWVlYYPXo0D9OqLufn5wdra2utcvqo6DqASkRP/Uu3PH369MHhw4cRFBSELVu24Ndff+XnGGMIDw/Hli1b8Pbbb2Px4sXo168fiAgNGzZETEwMf1X1zTffYPr06XBzc0ODBg3QrFkzfP7554a/9Eo4c+YM357s7e2NxYsX83Ph4eHIzMyEtbU1Vq9ezX3CVq5cCRsbG7z22muQSqXo1asXvL29YWFhgUmTJnHNp4rKjxs3DpGRkZBKpbCxscGmTZsqtW95PvzwQ0RFRYExhv79++P//u//8N133+GNN97geY4cOYJPPvnkkexjMky1PetxfcR23DLq2nZcQxizHbc6eZxS5KawxeDBg+ny5ct6zx09epQiIyMf+RqPg5p+XxiLPps/qdtxzUL9+vWruwk1Bnd39+pugll4GFn1mi6/8jilyB/VFoWFhYiIiICrq6ve8127dkVgYGCFM5maRE2/L4zl3r1
7Whsopk2bhtjYWP4a7nFToax6TaVLly70sPGZBcZRG2XVBQJBGeaWVa91Mw5N2YG6zu7du6u7CTUGITFfhrBFGcIW5qHWDRy1YXr8uNDUTarr1LaZszkRtihD2MI81LqBQyAQCATVS60bOPR5e9ZVhGR0GeK+KEPYogxhC/NQ6wYOITlShjnijdcE1JLYcrkcGRkZkMvlkMvlaNmyJZycnPix2gkNMO6++Prrr3Hr1i1+HBgYyL2K1QwcOBCNGjWqUntHjhyJH3/8sdJ8kydPxuHDh/m1/fzKogwkJSVxqZL09HQEBwfDzs4OU6dO1arD2dmZizd6eXkhKiqKv7IsKSlBv3790Lp1a4SHh2uVCwwMhLu7O7edWqBv165dcHd3h5ubGz777DOef+jQobhy5UqFfXn++ee1pPyPHz8Oxhj279/P0y5fvqyzM2727NlYtmwZANVrpEWLFvF2de3atUJhxarwzjvvQCqVQiqVYs+ePTx9//796NSpE6RSKcaOHavls6NJSkoK+vTpA4lEAolEwr2/Kyq/cOFCblcvLy/Uq1cPWVlZuHbtGoKDgyGRSODl5YUvv/xS7/WWLl0KqVSKgQMHcufFhIQE7lsEALdu3cJzzz3Hj9V+RFXZeWhSTLWv93F9PD099e5brotoSoSbkprqx2FIVj03N7fSemuLrHp2djYdOnSIVqxYoSNL7uTkxH0TsrKyaNiwYTR27FgiUvmK7N+/n2JjYyksLMxg34mIioqKqF27dpSSkkIFBQUklUrpwoULRES0f/9+mjBhgt6+JCcnU0REhFba9OnTKTAwkLeFiOjSpUskk8m08mn6HqxYsYJCQ0PpwYMHRESUmZlJGzdu1HtNY/nxxx+pX79+pFAoKDs7m+RyOWVnZ5NCoSAnJyfumzJr1izasGGD3joCAwPp119/JSLVd5GXl2d0+R07dlBISAgREaWlpXGbZ2VlUfv27bl9NfHz8yOlUklz586lXbt2kVKppJCQEB0flJEjR1JSEQ3ZGQAAIABJREFUUhI/1mdfNcKPoxyavzLrOpq/+Ooqall1Pz+/J0ZW3d7eHgEBAZX6LD311FP46quv8P333yMrKwuMMfTu3dto34WkpCR4enqibdu2sLGxwbBhw3igr+DgYOzevZsHSdKkvKx6SUkJtm/fjo0bNyI+Pt7ov9FPPvkEq1ev5rNFBwcHjBo1yqiyFXHu3DkEBQXB0tIS9vb28PDwwN69e5Geng47OzvumxISEoLt27frlD99+jQsLS25NLu9vT0aNGhgdPmYmBi89NJLAIBWrVrxGcFTTz0FDw8PpKWl6ZQhIigUCuTl5cHKygobN27E4MGDdWa+hqTuHzdCckRQKTEXhpq8zpfctz5yHU+qrHpVcHBwQNu2bXH58mWDulQA8OKLL3KRvISEBKSlpaF167KQOc7OzjySpaWlJVxcXPD3339DJpNp1fPHH39gzJgx/DgxMRHu7u5o3749AgMDER8frzWw6OPevXsoLi5G27ZtK+3jwoUL+eCuSc+ePbF06VKtNJlMhoULF2Lq1KnIycnBH3/8AX9/f4SHhyM/Px8nT56EXC7H9u3btQQI1Vy8eBFPPfUUwsPDce3aNfTt2xcLFixAixYtKi2fk5OD/fv3Y+3atTr1XrlyBX///beWpLqaiRMnws/PDz4+PujWrRsWLFig9YpNTZcuXR5ZiNNUiIFDUCmmeMibA01ZdaVSiaKiIiGrboAtW7ZU6Z148+bN8d9//+kMHOX7oo7HAahmcDExMQgLCzOZrPrMmTMxc+ZMo/I+99xzOH78OLp3747mzZuja9eusLS0hIWFBTZv3ozJkyejqKgIISEhFcqqJyYm4uTJk3ByckJERAS+/fZbjB49utLycXFxCAoK0tm08uDBAwwZMgQrVqzQK3kfGRnJNcyioqIwbdo0/Pzzz4iOjkabNm3w+eefgzHGv4+aQK17VaUWFhPA4AOxLkCkklVPTk7GqVOncOHCBcyZMweOjo44ffo0nn32WaxcuRKvv/66wXqGDx+OyZMn66jjPkx7fHx8uHrrmTNnuNqrIVn1zMxMPKwaQlZWFm7cuIEOHTrwNFtbW6PKVib7XZGMt2ZfiouLsWPHDkRFRcHFxQVTp07Frl27kJuba1BWvUmTJrCysjJKgkVz8VnzM23aNL35o6KikJycjL1798LCwgIdO3YEoNogcOjQIRw9ehQBAQE8XRNnZ2d06tQJLi4usLKyQnh4OFdXrqx8bGwsf02lpqioCC+88ALGjBmDwYMHG+xnamoqkpOTMXDgQCxZsgRbtmyBra0tEhISAAhZ9UdC3zvXukpd94rVlFVXKpVPjKy6sWRnZ2PixIkYOnSo1g8qY51k/f39ce7cOVy7dg2FhYX4/vvvtR5uly5dgpeXl045zb7s27cPXbt2xY0bN5CSkoLr169j0KBBiIuLQ6NGjdC4cWNun4yMDOzdu5dHzps5cyYmTZrEv5MHDx7g22+/1bnezJkz9cqql39NBahmDPfu3QOgihVy/vx59O7dG0CZLHpBQQEWLVqECRMm6LXJnTt3kJGRAQD47bffIJFIKi1///59HD58GIMGDeJpRITIyEjI5XJMmTJFzzegzezZs/mrqIKCAjDGYGFhgby8PAA1S1a91g0cubm51d2EGsOff/5Z3U2oVjRl1eVyOfr27Yvbt2/jxo0b6NGjB+RyOcaMGcOlqNWy6uW38qpl1ctHtouIiICHhwd8fHzQp08fLVn1Nm3aQCKRYMyYMTqy6tOnT4ePjw98fX35dzRgwAD+y7E8gwYNQuPGjbXSnJ2d8e6772L9+vVwdnbGhQsX+Llnn30W3t7e8Pf3h6urK1atWsXPde/eHSNHjsSePXvg7OysJXleHisrKyxfvhwhISGQSCQYOXIkF87877//4ODgoPf1mmZfYmJi8Pzzz2udHzJkCI89/t133yEqKgpyuRy9e/fG/Pnz4eLiAkC1PTkgIACdO3eGVCpFUFDQQ6/1qCksLERgYCAkEgkmTZrEZfYBYMGCBfD09IRMJsOQIUPQo0cPAKq/I/UgUK9ePXz22Wfo2bMnvL29YW1tjbFjxxosDwDbt29H//79tWYEBw8eRExMDPbt28dnSfrWLgDVpgpra2v4+PgAUK1HSaVSHDt2DCEhIQAMS90/dky1PetxfYSsehlCVr2Mmi6fXdtk1RctWlThdtXc3Fzy9/cnhULxyNcxNzX9vjCWkpISCggIoMzMTJ4mtuMKBBo8jKx6TedxyqqbAkdHR51dYGpsbW0RFRWFmzdvPuZW1V3S09Px7rvv8oX3AwcO4Pnnn0fTpk2rpT21Tlbdx8eHTp8+Xd3NqBGkpKTwab8pqY2y6oWFhU9M7IVHRdiijLpqCyGrXg5ra+vqbkKNwRyDRm2lLj4cKkLYogxhC/NQ6waOzMzM6m5CjUHt5SsQ94UmwhZlCFuYB7MOHIyxUMbYBcbYZcZYhR48jLEhjDFijJlkGiUQCAQC82G2gYMxZglgJYD+ACQAXmKMSfTkawjgLQB1e2+pQC
AQ1BLMOePoBuAyEV0hoiIAsQD0Cdh8DOBTALputXqwsrIyXQtrOS1atKjuJpiFh5FVN2b/f22RVQeA+fPnw83NDR4eHlyqXKFQwNLSEnK5nEtqL1u2jDv8qeXYnZycKpRjV9tO7V+yfv16dOjQAR06dMB3333H8/fu3btCB9OSkhL07NkTOTk5PG3btm1gjGk5Oe7fv19H3l3TVsXFxXj33Xfh5uaGTp064ZlnnqnQz8FYCgsLMXr0aN7XpKQkfm7z5s3w9vaGl5cXZs2apbf8pk2btLzTGWP4+++/Aah8LaRSKdzc3LS81jMyMtC7d2906NAB/fr143Y7e/YsunfvDhsbGy4lX578/Hz07dsXUqkUa9as4emvvvoqNDcBLVu2DJs2beLH06ZNQ8uWLSus19yYU6vKCYCmClgqAD/NDIyxTgBaE9EvjLEZqADG2HgA4wGV4qTmu/2goCAA4N6pAODu7g4PDw/s3r2bxypwcHBAcHAwkpOTtVRl+/bti6ysLC1nOplMBhcXF63rtGjRAv7+/khKSuKKpwAQFhaGlJQULg4HAH5+fnBwcMDevXt5Wtu2bSGXy5GQkMBvLBsbG4SGhuKff/7RcvCqSp8AmLxPHTt2RGFhIfLz83manZ0dLC0ttWK+W1tbw9bWFtnZ2dyjnzEGBwcH5Ofna4W2VWv0aD5sbGxs0KBBA2RlZXG9JbWzVrt27biTWUJCAp566inMnTsX1tbWmDx5MoAyjSb1e+zMzEzUq1cP9vb2yMnJ0Yq30KhRI6xbtw4dOnTgqrNEhIYNG3Lv5/v37/MtplXpU1FREXJzc5GZmVlhnwoKCnD8+HF8/PHHyMzMhEKhwM2bNxEfH4/u3bsjOzsbCoUChYWFuHDhArZu3YrDhw8jLS0NQ4cOxcWLF5GTkwN7e3tul8LCQgwbNgx37tzBjBkzUFRUhI8++ginT5/GuXPnuF3UG0p27drFlWgZY7h79y7mz5+PhIQEKJVK9OzZE/3794eDgwPCw8OxdOlSTJ06VadPu3btgo+PD+zt7ZGXl4eioiJs2rQJ/v7+2Lx5M2bNmoXc3Fzk5OSguLiY72zKzMzktsrJycGHH36I27dv4/Dhw7C2tsbt27dx5syZR7r3NmzYAAsLCyQmJuL27dsYPnw4evfujTt37uC9997DwYMH0bhxY0ycOBEHDx6EXC7X+p5GjRqFiIgIFBUV4fTp0xg7diwkEgmKi4sxbtw4LF++HL6+vhg6dCi/b6KiohAcHIxp06Zh2bJlmD9/Pj744ANYWVnhk08+wZ49e6BQKLTWW9R92rZtG7p3744pU6bgueeew+uvv47ExEQolUq0adMGWVlZcHBwwIgRIxASEsI9+z/77DM0aNAA+fn5vF7N7ykvLw9xcXFazz2TYiqHkPIfABEA1mkcvwLgS41jCwAJAFxKjxMAdKmsXg8PjwqdZOoa6jgSpqamOgDqi8exYcMG6tq1K3l7e9PEiRNJqVRScXExjRw5kqRSKXl5edEXX3xBsbGxZGdnRx07diSZTEaFhYUUEBBA//vf/3jMizVr1tDChQt5PA6lUknTpk0jLy8vkkqltHXrVp4+ceJEcnd3pz59+lC/fv14PI6jR49Sjx49qFOnThQaGkq3bt0iIqKVK1fSxx9/zNsdEBBAy5cvpx49ehCRdjyOefPm0aJFi3jeXr160dGjR6m4uFgnVsiFCxeoWbNmWmmVxfFQs2nTJpo0aRI/Hjt2LH3//fdERHTnzh3y8fHR+Q6IiIYOHUqJiYn8OCsri5ycnOj8+fMkkUh4+r59+3Tigqhjlzx48IAcHR0pOztb7zUelvHjx9PmzZv5cUBAAJ04cYIOHz5Mffv25elff/01TZ482WBdM2bMoKioKCIiun79ulbfNG3Xvn17Sk9P15uPSDsGSXl27txJc+bMoYKCAvL39yciogEDBtDNmzd18g4cOJBOnDhhVL3mdgA054wjDUBrjWPn0jQ1DQFIASSUqmW2BLCTMTaYiCpUfFNHyBJAa5ZgVi6awRGv46P/AnpSZdXT0tK0ojs6OzsjLS0Nvr6+umbs2BH5+fnIyMiAo6MjgIq1qp599llYWlrC1taWz2bKy6qr40U0bdoU2dnZ3HaaHD58GBs2bODHP/zwAwYMGAAPDw/Y2dnh1KlTOoq65bl06RLatWunVy22PFOmTMHvv/+ukz5ixAitKHmAamYdFxeHYcOGISUlBadPn8aNGzcQEBCAs2fP4vr163j66acRFxdnUKWXiLBlyxb+6syQrTIyMrg0i5OTU5UcI0NDQxEdHQ1/f3/MnDkTO3bsgL+/P1q2bKmTt0uXLkhMTESnTp2Mrt9cmHPgOAagA2OsHVQDxnAAL6tPElEWAO72yBhLAPCOoUFDUE2Y4CFvDoSsugoy0ok3MTGxSus3zZo1w82bN3XKPHjwQEuBNyYmBu+99x6AMll1mUxmMln15cuXG5133LhxuHDhAjp37ox27dqhW7dusLS0RNOmTbFy5UpERESgXr168Pf3N+jFf/jwYTRp0gQeHh5VamtVsbKy4rFGioqKEBoaip07d2Lq1KlITU3FmDFjuD5V8+bNkZKSYtb2GIvZBg4iUjDG3gSwB4AlgK+J6CxjbB5UU6ad5rq2oG5ApJJVV68baD7gTp8+jfj4eKxcuRLbt2/HV199VWE9w4cPx9ChQx85SA6RSlY9MTFR55whWfU5c+ZoyapXJneuycWLF2Fra8tnG1XByclJa/E4NTVVS321IhlvC4uyPTV37tzBwYMHcf78eTDGoFAoYGVlhQULFhiUVe/QoQOuXr3K120MUZUZh5WVFb744gt+3LlzZy5/HhYWxgNMrVq1ymCExfIS6Ya+E0dHR9y5cwfNmjVDWloann76aYP9qYgVK1Zg7NixSExMRLNmzbB48WL07t2bDxx1RladiHYRUUciciWi/5WmRekbNIgo2JjZRlV3vDzJVBZl7UlHU1a9UaNGT4ys+uDBgxETE4OioiL8+++/uHbtmt7ofunp6Zg4cSLfLKDG2HgcoaGhiI+PR2ZmJjIyMvDrr7/y2ZlSqcTdu3fRpk0bnXJubm78l+/WrVsxduxYXLt2DSkpKUhNTUWrVq1w5MgReHh44Nq1a7h48SIA4OrVqzh79ix8fHzQsGFDjBo1ClOnTuWvn9PT03l4Xk2WL1+uV1a9/KABqNSz1TLk8fHxcHBw4AOHWhb93r17WL16NV577TW9dlEqldi2bRsPTgUArVu3ho2NDY4dOwYiwrfffsv//gYPHoyNGzcCUIUyfpi/y4yMDOzZswcjRoxAXl4eH5w1NwnUJFn1ale7rerH29tb72JQXeTq1atmqbc2LY5HR0eTTCYjqVRKnTp1oqNHj9KJEydILpeTTCYjuVxOe/bsISKiLVu26CyOnzx5Uqs+zQVoQ4vjEyZMIHd3dwoJCdFaHD9x4gQFBASQt7c3SSQSWr9+PRER/fbbbzR69Gh+Hc1rl5SUkI+PD18cJyL66KOPqH379tSxY0fe/uLiYrKwsCCZTEYSiYRkMhktW
bKElEolL+fk5ESNGzcme3t7cnJyon/++Yen61OK/eqrr8jV1ZVcXV1p48aNPP3IkSM0bNgwvd9PVFQUffPNN0REFBgYSPv27dM6v3jxYnrzzTeJiOjgwYPUrVs3kslk1LVrV9q/fz/PV1hYSG+//Ta5urqSVColPz8/2rt3r95rGsvly5epY8eO5OHhQX369KFLly7xcxEREeTp6Umenp60ZcsWnr5jxw766KOP+PG+ffsoICBAp+6kpCSSSCTUvn17mjJlCk9PT0+n4OBgcnNzo5CQELp37x4REd24cYOcnJyoYcOG5ODgQE5OTpSbm6u33W+++SbfcJCbm0u9e/cmiURCK1eu5HlkMhmvm6h6F8erfSCo6kfIqpchZNXLqOny2bVNVn3SpEmUkJCg99yNGzeoX79+j3yNx0FNvy+M5ejRoxQZGamVVp0DR63TqhI8+QhZ9erH19eX+xOVx9nZGZGRkVo+OQLzcu/ePa0NFNOmTUNsbCzs7OyqpT21Tlbdzc2NKnpXXNeIi4szyzpHbZRV17dttK4ibFFGXbWFkFUvR3WNsDURTcmKuo64L8oQtihD2MI81LqBQy1JISiTHBGI+0ITYYsyhC3MQ60bODT1auo6mlpYdR1xX5QhbFGGsIV5qHUDh0AgEAiqFzFwCGocDyOrbgxCVl1bVv3EiRPw9/eHVCqFj4+PlvPd0KFDceXKlQr78vzzz2spMh8/fhyMMd5WALh8+bLOzrjZs2dzKXAiwqJFi+Du7g65XI6uXbsiOjq6UjtWxjvvvAOpVAqpVKqlBr1//3506tQJUqkUY8eO1VJP1syjKatuY2ODn3/+GQDwxRdfwNXVFYwxLaXbTZs2wdvbGz4+PggICOAaaefOndOqq2HDhvjyyy91rrl06VJIpVIMHDiQO0MmJCRoOTjeunULzz33HD8+cOAAvweqBVPt631cH09PT737lusi5Z3XTEVN9ePQ5wCopiLHKk3KO/ypHfXUKsMZGRnUpUsXHQXaylArvhoiPT2dnnnmGa1rt27dmju8aarjnjp1inx9famwsJAuX75Mbm5uXPVXs223bt2i4OBgmjdvHhERZWdn06FDh2jx4sVGqeP+888/dPnyZSJS+Wa0aNGCHjx4QERE+/fvpwkTJujtS3JyMkVERGilTZ8+nQIDA2ns2LE87dKlSySTybTyafoerFixgkJDQ/k1MzMztZwQH4Yff/yR+vXrRwqFgrKzs0kul1N2djYpFApycnLi/Z01axZt2LDBYF3p6enUpEkTys/PJyKiv/76i1JSUnRseejQIX68c+dOre9ZTVFRETVr1oxu3Lihc87Pz4+USiXNnTuXdu3aRUqlkkJCQnS+r5EjR1JSUhI/1mdfNcKPoxzGyinUBZ4kP4eHZePGjejWrRueeeYZTJo0CSUlJVAoFHjllVfg7e0NqVSK5cuXY8uWLUhOTsaLL76oNVsZPnw4F5nbtm0bIiIieN0lJSWYPn06pFIpvL29+S/ykpISTJo0CR4eHggJCcHdu3d5mWPHjiEoKAidO3dG//79uYLx1q1b0b9/f622z5gxQ68+VlxcHF566SVYW1vD1dUVbdq0wYkTJ3TytWjRAmvWrMGKFSsAqOKDBAQE4KmnnjLKdu7u7nB1dQWgmpE4OjryvgQHB2P37t081oUm0dHRWtvAS0pKsH37dmzcuBHx8fFGzwQ/+eQTrF69mscIcXBwwKhRo4wqWxHnzp1DUFAQLC0tYW9vDx8fH+zduxfp6emws7Pj/Q0JCcH27dsN1rV161YMHDiQa1r5+vqibdu2OvkCAgL4LNXf3x+pqak6efbt2wdPT08tEU01RASFQoG8vDxYWVlh48aNGDx4sM7MNzw83CQzMlNgTnVcs6BPa6iukpCQoCW/bS7++2OlyetsFfDGI9ehKauen5+Pt99+W8iqA3rFFNXX05RV10R97OLiAkC1G8nFxQV///23jkT6H3/8gTFjxvDjxMREuLu7o3379ggMDER8fHyl/kX37t1DcXGx3gdxeRYuXMgHd0169uyJpUuXaqXJZDIsXLgQU6dORU5ODhISEtCpUyeEh4cjPz8fJ0+ehFwux/bt27VEC/URGxuL999/v9L2abJ+/XqdHwjqujRFEzWZOHEi/Pz84OPjg27dumHBggV6IyF26dLlkYU4TUWtGzj0/QKqq1QU2tPUmOIhbw6ErLoKKufEW1E8jopk1dPS0hAZGYno6GgtyfPmzZvjv//+0xk4yvclJiaGCwKqZdXDwsJMJqs+c+ZMzJw506i8zz33HI4fP47u3bujefPm6NKlCywtLWFhYYHNmzdj8uTJKCoqQkhIiMGtuqmpqbhw4YLWulNl7N+/H99++y0OHTqklV5QUIBffvkFS5Ys0VsuMjISkZGRAICoqChMmzYNP//8M6Kjo9GmTRt8/vnnYIzx76MmUOsGDoFADZGQVX8UWXVA9eNjwIAB+PTTT9G1a1etcxXJeGv2pbi4GDt27MAvv/yCjz76CCUlJcjMzERubm6Fsuqenp5o0qQJrKyscP36db0KvJpUZcYBqB6+UVFRAFSL+Gp13MDAQP5Q37VrF65evVrhNbds2YIhQ4YYFcseUIVvfv3117Fnzx40btxY69wvv/wCPz8/NG3atILSKlJTU5GcnIx58+bxme3cuXORkJCAnj171h1ZdXOgGQugrmNjY1PdTahWNGXVGWNCVr0UY3/RFxYWIiwsDK+99hqef/55nfOXLl2Cl5eXTrpmX9Rxt2/cuIGUlBRcv34dgwYNQlxcHBo1aoTGjRtz+2RkZGDv3r0ICAgAoJpJTJo0iX8nDx48wLfffqtzvZkzZ+qVVdc3aCgUCty7dw8AcPLkSVy8eBG9e/fm9gJUA+KiRYswYcKECm0TExNT4aul8qSkpCAiIgKbN2+Gm5vbQ9c1e/Zs/uOloKAAjDFYWFhwmXghq/4In86dO+vdRSAwHbVpV5VaVt3b21vIqlPVZNW/+eYbsrKyIplMxj+nT58mIqK0tDQeA7s8X3/9Nc2dO5eIVDt91q5dq3V++/btNHDgQCIiOnPmDPXo0YPXHxMTw/OVlJTQJ598Qh06dCAvLy+Sy+Va8cIfhpycHC6d7u/vT6dOneLnpk6dSh4eHtSxY0davnw5T09KSqLXX3+dH1+6dIlat25NJSUlWnUvXryYnJycyNLSklq1akXjx48nIqLRo0dT48aNeR+7devGyzx48ICaNGnCd45VxNGjR2ncuHH8+PPPPyeJREL9+/enwsJCIiJasGABrVq1Squd1bWrqtoHgqp+qiq3/SRz/vx5s9RbUwcOQ+Tl5ZmpNabhccqqm8IWixYtqnC7am5uLvn7+5NCoXjk65ibmn5fGEtJSQkFBARQZmYmTxPbcatARTtG6iIXLlyo7iaYhYeRVS8sLDRjix6dxymrbgpbODo66uwCU2Nra4uoqCjcvHnzka9jbmr6fWEs6enpePfdd7k+3YEDB/D8889Xum5iLsTiuEAvRFTl3S+monXr1pVulayNdO/evbqbYDRjx441
eF7fllOB+WjRogUGDx7Mj3v27Mm3m5dHNbkwL7VuxiEwP/Xr10dGRsZjuQEFAoHpICJkZGRwp0VzUesCOfn6+tLJkyeruxk1AnMFqSkuLkZqamqtei1YUlIidtyVImxRRl20Rf369eHs7AwrKyutdFMGchKvqgQ6WFlZ6Yj/1XTqaqQ3fQhblCFsYR7MOhQzxkIZYxcYY5cZYzqun4yxCYyxM4yxZMbYIcaYpLI6heRIGer98QJhC02ELcoQtjAPZhs4GGOWAFYC6A9AAuAlPQPDZiLyJiI5gEUA9PvkCwQCgaDGYM4ZRzcAl4noChEVAYgFoKV8RkSa4bnsANSuBReBQCCog5hzjcMJgOaeylQAfuUzMcbeADAdgDWAXvoqYoyNBzC+9LCQMfa3CdvpAKAqaoGV5Td0Xt85Y9I0jzX/3xTAXZgOYQvDbXmU/Ka2hSG7CFsIW+g7515ZY43GVJ6E5T8AIgCs0zh+BcCXBvK/DGCjEfWazPuxtL6vTJnf0Hl954xJ0zwu939hizpqi0rsImwhbGFWW5jzVVUagNYax86laRURCyDcjO2piJ9MnN/QeX3njEn7ycA5UyJs8fB1P25bGLKLqRG2ePi6n0hbmM2PgzFWD8BFAL2hGjCOAXiZiM5q5OlARJdK/z8IwFyqZJ8xY+x4ZXnqCsIWZQhblCFsUYawRRmmtIXZ1jiISMEYexPAHgCWAL4morOMsXlQTZl2AniTMdYHQDGA+wBGG1F1xYEV6h7CFmUIW5QhbFGGsEUZJrNFrfMcFwgEAkH1Urd88QUCgUDwyIiBQyAQCARVQgwcAoFAIKgST9TAwRizYIz9jzG2gjFmzEL7EwtjLJgxlsgYW80YC67u9lQ3jDE7xthxxtjA6m5LdcIY8yy9J7YxxiZWd3uqE8ZYOGNsLWNsC2Osb3W3pzphjLVnjK1njG0zJn+NGTgYY18zxtLLe4VXJpRYjjCo/EWKofJUr5WYyBYEIAdAfQhbAMB7AL43TysfD6awBRGdJ6IJAIYBCDBne82JiWzxIxGNAzABwIvmbK85MZEtrhDRq0Zfs6bsqmKM9YDqQbeJiKSlaZZQ+YKEQPXwOwbgJai29y4oV8XY0s99IlrDGNtGRBGPq/2mxES2uEtEJYyxFgCWENGIx9V+U2IiW8gAOEI1iN4lop8fT+tNiylsQUTpjLHBACYC+JaINj+u9psSU9mitNxiANFE9Ndjar5JMbEtjHpu1ph4HET0O2PMpVwyF0oEAMZYLIAwIloAQOeVA2MsFUBR6aHSfK1tdYlRAAAFnUlEQVQ1L6awhQb3AdiYo52PAxPdF8FQiWhKAOQzxnYRUYk5220OTHVflPpQ7WSM/QKgVg4cJrovGICFAOJr66ABmPx5YRQ1ZuCoAKOEEjXYAWAFY+xZAL+bs2HVQJVswRh7AUA/AI0AfGnepj12qmQLIvoAABhjkSidiZm1dY+Xqt4XwQBegOrHxC6ztuzxU9XnxWQAfQA4MMbciGi1ORv3mKnqfeEI4H8AfBljs0oHmAqp6QNHlSCiPABGv6d7kiGiHVANpIJSiGhDdbehuiGiBAAJ1dyMGgERLQewvLrbURMgogyo1nqMosYsjldAVYUSn2SELcoQtihD2KIMYYsyzGqLmj5wHAPQgTHWjjFmDWA4gJ3V3KbqQtiiDGGLMoQtyhC2KMOstqgxAwdjLAbAEQDujLFUxtirRKQAoBZKPA/ge0113ScVYYsyhC3KELYoQ9iijOqwRY3ZjisQCASC2kGNmXEIBAKBoHYgBg6BQCAQVAkxcAgEAoGgSoiBQyAQCARVQgwcAoFAIKgSYuAQCAQCQZUQA4egxsEYUzLGkjU+LgbyupSXk37IayaUSlCfYoz9wRhzf4g6JjDGRpX+P5Ix1krj3DrGmMTE7TzGGJMbUWYqY8z2Ua8tEKgRA4egJpJPRHKNT8pjuu4IIpIB2Ajgs6oWJqLVRLSp9DASQCuNc68R0TmTtLKsnatgXDunAhADh8BkiIFDUCsonVkkMsb+Kv08oyePF2PsaOks5TRjrENp+kiN9DWlsQoM8TsAt9KyvRljJxljZ0oD5tiUpi9kjJ0rvc7npWkfMsbeYYxFAOgCILr0mg1KZwpdSmcl/GFfOjP58iHbeQQqFVR1Xf/HVFEOzzLGPipNmwLVAHaAMXagNK0vY+xIqR23MsbsK7mOQKCFGDgENZEGGq+pfihNSwcQQkSdoIrWpk/VdAKAL4hIDtWDO5Ux5lmaP6A0XQmgsqBWgwCcYYzVB7ABwItE5A2VmvTEUgnq5wF4EZEPgPmahYloG4DjUM0M5ESUr3F6e2lZNS8CiH3IdoYC+FHj+AMi6gLAB0AQY8ynVAH2PwA9iagnY6wpgNkA+pTa8jiA6ZVcRyDQ4omSVRc8MeSXPjw1sQLwZek7fSWAjnrKHQHwAWPMGcAOIrrEGOsNoDOAY4wxAGgA1SCkj2jGWD6AFKhiNbgDuEpEF0vPbwTwBlTxTQoArGeM/QzA6IiCRHSHMXaFMeYP4BIADwB/lNZblXZaA7AHoGmnYYyx8VD9XT8NVeCq0+XK+pem/1F6HWuo7CYQGI0YOAS1hWkAbkMVBtYCqge3FkS0mTH2J4ABAHYxxl4HwABsJKJZRlxjBBEdVx8wxproy0RECsZYNwC9AURAJSbXqwp9iYUq5vc/AH4gImKqp7jR7QRwAqr1jRUAXmCMtQPwDoCuRHSfMbYBqlC55WEA9hHRS1Vor0CghXhVJagtOAC4WRq97xWoYidrwRhrD+BK6euZOKhe2fwKIIIx1rw0TxPGWFsjr3kBgAtjzK30+BUAB0vXBByIaBdUA5pMT9lsAA0rqPcHAGFQxYCOLU2rUjtJpU46B4A/Y8wDwFMAcgFkMVWc+f4VtCUJQIC6T4wxO8aYvtmbQFAhYuAQ1BZWARjNGDsF1eudXD15hgH4mzGWDEAKYFPpTqbZAPYyxk4D2AfVa5xKIaICAGMAbGWMnQFQAmA1VA/hn0vrOwT9awQbAKxWL46Xq/c+VFLXbYnoaGlaldtZunayGMAMIjoF4CRUs5jNUL3+UvMVgN2MsQNEdAeqHV8xpdc5ApU9BQKjEbLqAoFAIKgSYsYhEAgEgiohBg6BQCAQVAkxcAgEAoGgSoiBQyAQCARVQgwcAoFAIKgSYuAQCASC/2+vjgUAAAAABvlbj2F/ScQiDgAWcQCwBFxCY4ZIBzz0AAAAAElFTkSuQmCC\n",
- "text/plain": [
- ""
- ]
- },
- "metadata": {
- "needs_background": "light"
- },
- "output_type": "display_data"
- }
- ],
- "source": [
- "score_save_path = './IJBC/result'\n",
- "files = glob.glob(score_save_path + '/VGG2*.npy') \n",
- "methods = []\n",
- "scores = []\n",
- "for file in files:\n",
- " methods.append(Path(file).stem)\n",
- " scores.append(np.load(file)) \n",
- "methods = np.array(methods)\n",
- "scores = dict(zip(methods,scores))\n",
- "colours = dict(zip(methods, sample_colours_from_colourmap(methods.shape[0], 'Set2')))\n",
- "#x_labels = [1/(10**x) for x in np.linspace(6, 0, 6)]\n",
- "x_labels = [10**-6, 10**-5, 10**-4,10**-3, 10**-2, 10**-1]\n",
- "tpr_fpr_table = PrettyTable(['Methods'] + map(str, x_labels))\n",
- "fig = plt.figure()\n",
- "for method in methods:\n",
- " fpr, tpr, _ = roc_curve(label, scores[method])\n",
- " roc_auc = auc(fpr, tpr)\n",
- " fpr = np.flipud(fpr)\n",
- " tpr = np.flipud(tpr) # select largest tpr at same fpr\n",
- " plt.plot(fpr, tpr, color=colours[method], lw=1, label=('[%s (AUC = %0.4f %%)]' % (method.split('-')[-1], roc_auc*100)))\n",
- " tpr_fpr_row = []\n",
- " tpr_fpr_row.append(method)\n",
- " for fpr_iter in np.arange(len(x_labels)):\n",
- " _, min_index = min(list(zip(abs(fpr-x_labels[fpr_iter]), range(len(fpr)))))\n",
- " tpr_fpr_row.append('%.4f' % tpr[min_index])\n",
- " tpr_fpr_table.add_row(tpr_fpr_row)\n",
- "plt.xlim([10**-6, 0.1])\n",
- "plt.ylim([0.3, 1.0])\n",
- "plt.grid(linestyle='--', linewidth=1)\n",
- "plt.xticks(x_labels) \n",
- "plt.yticks(np.linspace(0.3, 1.0, 8, endpoint=True)) \n",
- "plt.xscale('log')\n",
- "plt.xlabel('False Positive Rate')\n",
- "plt.ylabel('True Positive Rate')\n",
- "plt.title('ROC on IJB-C')\n",
- "plt.legend(loc=\"lower right\")\n",
- "plt.show()\n",
- "#fig.savefig('IJB-C.pdf')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 38,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "+----------------------------------------+--------+--------+--------+--------+--------+--------+\n",
- "| Methods | 1e-06 | 1e-05 | 0.0001 | 0.001 | 0.01 | 0.1 |\n",
- "+----------------------------------------+--------+--------+--------+--------+--------+--------+\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N1D1F2) | 0.7444 | 0.8751 | 0.9279 | 0.9635 | 0.9841 | 0.9939 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N1D0F0) | 0.6863 | 0.8554 | 0.9199 | 0.9586 | 0.9820 | 0.9934 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N1D1F1) | 0.7586 | 0.8717 | 0.9253 | 0.9620 | 0.9836 | 0.9937 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N0D0F0) | 0.7081 | 0.8612 | 0.9214 | 0.9595 | 0.9823 | 0.9934 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N1D1F0) | 0.7470 | 0.8675 | 0.9245 | 0.9610 | 0.9830 | 0.9935 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N0D1F0) | 0.7637 | 0.8733 | 0.9258 | 0.9617 | 0.9831 | 0.9936 |\n",
- "| VGG2-ResNet50-ArcFace-TestMode(N0D1F2) | 0.7668 | 0.8796 | 0.9289 | 0.9636 | 0.9840 | 0.9941 |\n",
- "+----------------------------------------+--------+--------+--------+--------+--------+--------+\n"
- ]
- }
- ],
- "source": [
- "print(tpr_fpr_table)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# setting N0D1F2 is the best"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "language": "python",
- "name": "python2"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 2
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.15"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/evaluation/IJB/readme.txt b/evaluation/IJB/readme.txt
deleted file mode 100644
index f6027e1..0000000
--- a/evaluation/IJB/readme.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-To reproduce the figures and tables in the notebook, please download everything (model, code, data and meta info) from here:
-[Dropbox] https://www.dropbox.com/s/33a6haw7v79e5qe/IJB_release.tar?dl=0
-or
-[Baidu Cloud] https://pan.baidu.com/s/1oer0p4_mcOrs4cfdeWfbFg
-
-Please apply for the IJB-B and IJB-C by yourself and strictly follow their distribution licenses.
-
-Acknowledgement
-Many thanks to Weidi Xie for his instructions [2,3,4,5] on evaluating ArcFace [1] on IJB-B [6] and IJB-C [7].
-
-[1] Jiankang Deng, Jia Guo, Niannan Xue, Stefanos Zafeiriou. Arcface: Additive angular margin loss for deep face recognition[J]. arXiv:1801.07698, 2018.
-[2] https://github.com/ox-vgg/vgg_face2.
-[3] Qiong Cao, Li Shen, Weidi Xie, Omkar M Parkhi, Andrew Zisserman. VGGFace2: A dataset for recognising faces across pose and age. FG, 2018.
-[4] Weidi Xie, Andrew Zisserman. Multicolumn Networks for Face Recognition. BMVC 2018.
-[5] Weidi Xie, Li Shen, Andrew Zisserman. Comparator Networks. ECCV, 2018.
-[6] Whitelam, Cameron, Emma Taborsky, Austin Blanton, Brianna Maze, Jocelyn C. Adams, Tim Miller, Nathan D. Kalka et al. IARPA Janus Benchmark-B Face Dataset. CVPR Workshops, 2017.
-[7] Maze, Brianna, Jocelyn Adams, James A. Duncan, Nathan Kalka, Tim Miller, Charles Otto, Anil K. Jain et al. IARPA Janus Benchmark–C: Face Dataset and Protocol. ICB, 2018.
-
-
diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 0000000..a414efb
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,4 @@
+InsightFace Example
+---
+
+Before running the examples, please install the insightface package via `pip install -U insightface`.
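+
+To quickly verify the install, a minimal sketch that only checks the package imports:
+
+```python
+import insightface
+print(insightface.__version__)  # the demo scripts below assume version >= 0.3
+```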
diff --git a/examples/demo_analysis.py b/examples/demo_analysis.py
new file mode 100644
index 0000000..28045da
--- /dev/null
+++ b/examples/demo_analysis.py
@@ -0,0 +1,34 @@
+import argparse
+import cv2
+import sys
+import numpy as np
+import insightface
+from insightface.app import FaceAnalysis
+from insightface.data import get_image as ins_get_image
+
+assert insightface.__version__>='0.3'
+
+parser = argparse.ArgumentParser(description='insightface app test')
+# general
+parser.add_argument('--ctx', default=0, type=int, help='ctx id, <0 means using cpu')
+parser.add_argument('--det-size', default=640, type=int, help='detection size')
+args = parser.parse_args()
+
+app = FaceAnalysis()
+app.prepare(ctx_id=args.ctx, det_size=(args.det_size,args.det_size))
+
+img = ins_get_image('t1')
+faces = app.get(img)
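+# the bundled sample image 't1' contains six faces, hence the assertion below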
+assert len(faces)==6
+rimg = app.draw_on(img, faces)
+cv2.imwrite("./t1_output.jpg", rimg)
+
+# then print all-to-all face similarity
+feats = []
+for face in faces:
+ feats.append(face.normed_embedding)
+feats = np.array(feats, dtype=np.float32)
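+# face.normed_embedding is L2-normalized, so this dot product yields the
+# pairwise cosine-similarity matrix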
+sims = np.dot(feats, feats.T)
+print(sims)
+
+
diff --git a/examples/mask_renderer.py b/examples/mask_renderer.py
new file mode 100644
index 0000000..5b04742
--- /dev/null
+++ b/examples/mask_renderer.py
@@ -0,0 +1,22 @@
+import os, sys, datetime
+import numpy as np
+import os.path as osp
+import cv2
+import insightface
+from insightface.app import MaskRenderer
+from insightface.data import get_image as ins_get_image
+
+
+if __name__ == "__main__":
+    # make sure that you have downloaded the correct insightface model pack.
+    # make sure that BFM.mat and BFM_UV.mat have been generated.
+ tool = MaskRenderer()
+ tool.prepare(ctx_id=0, det_size=(128,128))
+ image = ins_get_image('Tom_Hanks_54745')
+ mask_image = "mask_blue"
+ params = tool.build_params(image)
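+    # build_params fits the 3D face model (the BFM assets above) to the image;
+    # render_mask then overlays the chosen mask texture using those parameters.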
+ mask_out = tool.render_mask(image, mask_image, params)
+
+ cv2.imwrite('output_mask.jpg', mask_out)
+
+
diff --git a/deploy/mxnet_to_ort.py b/examples/mxnet_to_onnx.py
similarity index 56%
rename from deploy/mxnet_to_ort.py
rename to examples/mxnet_to_onnx.py
index 16f9f2f..5a9c73a 100644
--- a/deploy/mxnet_to_ort.py
+++ b/examples/mxnet_to_onnx.py
@@ -2,10 +2,13 @@ import sys
import os
import argparse
import onnx
+import json
import mxnet as mx
from onnx import helper
from onnx import TensorProto
from onnx import numpy_helper
+import onnxruntime
+import cv2
print('mxnet version:', mx.__version__)
print('onnx version:', onnx.__version__)
@@ -23,12 +26,15 @@ def create_map(graph_member_list):
return member_map
-parser = argparse.ArgumentParser(description='convert arcface models to onnx')
+parser = argparse.ArgumentParser(description='convert mxnet model to onnx')
# general
parser.add_argument('params', default='./r100a/model-0000.params', help='mxnet params to load.')
parser.add_argument('output', default='./r100a.onnx', help='path to write onnx model.')
parser.add_argument('--eps', default=1.0e-8, type=float, help='eps for weights.')
parser.add_argument('--input-shape', default='3,112,112', help='input shape.')
+parser.add_argument('--check', action='store_true')
+parser.add_argument('--input-mean', default=0.0, type=float, help='input mean for checking.')
+parser.add_argument('--input-std', default=1.0, type=float, help='input std for checking.')
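+# example invocation (a sketch, using the default paths from the arguments above):
+#   python mxnet_to_onnx.py ./r100a/model-0000.params ./r100a.onnx --check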
args = parser.parse_args()
input_shape = (1,) + tuple( [int(x) for x in args.input_shape.split(',')] )
@@ -41,6 +47,29 @@ assert os.path.exists(sym_file)
assert os.path.exists(params_file)
sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)
+
+nodes = json.loads(sym.tojson())['nodes']
+bn_fixgamma_list = []
+for nodeid, node in enumerate(nodes):
+ if node['op'] == 'BatchNorm':
+ attr = node['attrs']
+ fix_gamma = False
+ if attr is not None and 'fix_gamma' in attr:
+ if str(attr['fix_gamma']).lower()=='true':
+ fix_gamma = True
+ if fix_gamma:
+ bn_fixgamma_list.append(node['name'])
+ #print(node, fix_gamma)
+
+print('fixgamma list:', bn_fixgamma_list)
+layer = None
+#layer = 'conv_2_dw_relu' #for debug
+
+if layer is not None:
+ all_layers = sym.get_internals()
+ sym = all_layers[layer + '_output']
+
+
eps = args.eps
arg = {}
@@ -50,24 +79,27 @@ ac = 0
for k in arg_params:
v = arg_params[k]
nv = v.asnumpy()
- #print(k, nv.dtype)
nv = nv.astype(np.float32)
+ #print(k, nv.shape)
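+    # MXNet BatchNorm with fix_gamma=True uses gamma=1 at inference regardless
+    # of the stored value; ONNX has no such flag, so force the exported scale
+    # to 1.0 to keep the converted model numerically equivalent.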
+ if k.endswith('_gamma'):
+ bnname = k[:-6]
+ if bnname in bn_fixgamma_list:
+ nv[:] = 1.0
ac += nv.size
invalid += np.count_nonzero(np.abs(nv) < eps)
diff --git a/gender-age/data.py b/gender-age/data.py
deleted file mode 100644
--- a/gender-age/data.py
+++ /dev/null
-    def next_sample(self):
-        if self.cur >= len(self.seq):
- raise StopIteration
- idx = self.seq[self.cur]
- self.cur += 1
- s = self.imgrec.read_idx(idx)
- header, img = recordio.unpack(s)
- label = header.label
- return label, img, None, None
-
- def brightness_aug(self, src, x):
- alpha = 1.0 + random.uniform(-x, x)
- src *= alpha
- return src
-
- def contrast_aug(self, src, x):
- alpha = 1.0 + random.uniform(-x, x)
- coef = nd.array([[[0.299, 0.587, 0.114]]])
- gray = src * coef
- gray = (3.0 * (1.0 - alpha) / gray.size) * nd.sum(gray)
- src *= alpha
- src += gray
- return src
-
- def saturation_aug(self, src, x):
- alpha = 1.0 + random.uniform(-x, x)
- coef = nd.array([[[0.299, 0.587, 0.114]]])
- gray = src * coef
- gray = nd.sum(gray, axis=2, keepdims=True)
- gray *= (1.0 - alpha)
- src *= alpha
- src += gray
- return src
-
- def color_aug(self, img, x):
- #augs = [self.brightness_aug, self.contrast_aug, self.saturation_aug]
- #random.shuffle(augs)
- #for aug in augs:
- # #print(img.shape)
- # img = aug(img, x)
- # #print(img.shape)
- #return img
- return self.CJA(img)
-
- def mirror_aug(self, img):
- _rd = random.randint(0, 1)
- if _rd == 1:
- for c in range(img.shape[2]):
- img[:, :, c] = np.fliplr(img[:, :, c])
- return img
-
- def compress_aug(self, img):
- buf = BytesIO()
- img = Image.fromarray(img.asnumpy(), 'RGB')
- q = random.randint(2, 20)
- img.save(buf, format='JPEG', quality=q)
- buf = buf.getvalue()
- img = Image.open(BytesIO(buf))
- return nd.array(np.asarray(img, 'float32'))
-
- def next(self):
- if not self.is_init:
- self.reset()
- self.is_init = True
- """Returns the next batch of data."""
- #print('in next', self.cur, self.labelcur)
- self.nbatch += 1
- batch_size = self.batch_size
- c, h, w = self.data_shape
- batch_data = nd.empty((batch_size, c, h, w))
- if self.provide_label is not None:
- batch_label = nd.empty(self.provide_label[0][1])
- i = 0
- try:
- while i < batch_size:
- #print('XXXX', i)
- label, s, bbox, landmark = self.next_sample()
- gender = int(label[0])
- age = int(label[1])
- assert age >= 0
- #assert gender==0 or gender==1
- plabel = np.zeros(shape=(101, ), dtype=np.float32)
- plabel[0] = gender
- if age == 0:
- age = 1
- if age > 100:
- age = 100
- plabel[1:age + 1] = 1
- label = plabel
- _data = self.imdecode(s)
- if _data.shape[0] != self.data_shape[1]:
- _data = mx.image.resize_short(_data, self.data_shape[1])
- if self.rand_mirror:
- _rd = random.randint(0, 1)
- if _rd == 1:
- _data = mx.ndarray.flip(data=_data, axis=1)
- if self.color_jittering > 0:
- if self.color_jittering > 1:
- _rd = random.randint(0, 1)
- if _rd == 1:
- _data = self.compress_aug(_data)
- #print('do color aug')
- _data = _data.astype('float32', copy=False)
- #print(_data.__class__)
- _data = self.color_aug(_data, 0.125)
- if self.nd_mean is not None:
- _data = _data.astype('float32', copy=False)
- _data -= self.nd_mean
- _data *= 0.0078125
- if self.cutoff > 0:
- _rd = random.randint(0, 1)
- if _rd == 1:
- #print('do cutoff aug', self.cutoff)
- centerh = random.randint(0, _data.shape[0] - 1)
- centerw = random.randint(0, _data.shape[1] - 1)
- half = self.cutoff // 2
- starth = max(0, centerh - half)
- endh = min(_data.shape[0], centerh + half)
- startw = max(0, centerw - half)
- endw = min(_data.shape[1], centerw + half)
- #print(starth, endh, startw, endw, _data.shape)
- _data[starth:endh, startw:endw, :] = 128
- data = [_data]
- for datum in data:
- assert i < batch_size, 'Batch size must be multiples of augmenter output length'
- #print(datum.shape)
- batch_data[i][:] = self.postprocess_data(datum)
- batch_label[i][:] = label
- i += 1
- except StopIteration:
- if i < batch_size:
- raise StopIteration
-
- return io.DataBatch([batch_data], [batch_label], batch_size - i)
-
- def check_data_shape(self, data_shape):
- """Checks if the input data shape is valid"""
- if not len(data_shape) == 3:
- raise ValueError(
- 'data_shape should have length 3, with dimensions CxHxW')
- if not data_shape[0] == 3:
- raise ValueError(
- 'This iterator expects inputs to have 3 channels.')
-
- def check_valid_image(self, data):
- """Checks if the input data is valid"""
- if len(data[0].shape) == 0:
- raise RuntimeError('Data shape is wrong')
-
- def imdecode(self, s):
- """Decodes a string or byte string to an NDArray.
- See mx.img.imdecode for more details."""
- img = mx.image.imdecode(s) #mx.ndarray
- return img
-
- def read_image(self, fname):
- """Reads an input image `fname` and returns the decoded raw bytes.
-
- Example usage:
- ----------
- >>> dataIter.read_image('Face.jpg') # returns decoded raw bytes.
- """
- with open(os.path.join(self.path_root, fname), 'rb') as fin:
- img = fin.read()
- return img
-
- def augmentation_transform(self, data):
- """Transforms input data with specified augmentation."""
- for aug in self.auglist:
- data = [ret for src in data for ret in aug(src)]
- return data
-
- def postprocess_data(self, datum):
- """Final postprocessing step before image is loaded into the batch."""
- return nd.transpose(datum, axes=(2, 0, 1))
-
-
-class FaceImageIterList(io.DataIter):
- def __init__(self, iter_list):
- assert len(iter_list) > 0
- self.provide_data = iter_list[0].provide_data
- self.provide_label = iter_list[0].provide_label
- self.iter_list = iter_list
- self.cur_iter = None
-
- def reset(self):
- self.cur_iter.reset()
-
- def next(self):
- self.cur_iter = random.choice(self.iter_list)
- while True:
- try:
- ret = self.cur_iter.next()
- except StopIteration:
- self.cur_iter.reset()
- continue
- return ret
diff --git a/gender-age/face_model.py b/gender-age/face_model.py
deleted file mode 100644
index 1ce54bb..0000000
--- a/gender-age/face_model.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from scipy import misc
-import sys
-import os
-import argparse
-#import tensorflow as tf
-import numpy as np
-import mxnet as mx
-import random
-import cv2
-import sklearn
-from sklearn.decomposition import PCA
-from time import sleep
-from easydict import EasyDict as edict
-from mtcnn_detector import MtcnnDetector
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'src', 'common'))
-import face_image
-import face_preprocess
-
-
-def do_flip(data):
- for idx in range(data.shape[0]):
- data[idx, :, :] = np.fliplr(data[idx, :, :])
-
-
-def get_model(ctx, image_size, model_str, layer):
- _vec = model_str.split(',')
- assert len(_vec) == 2
- prefix = _vec[0]
- epoch = int(_vec[1])
- print('loading', prefix, epoch)
- sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)
- all_layers = sym.get_internals()
- sym = all_layers[layer + '_output']
- model = mx.mod.Module(symbol=sym, context=ctx, label_names=None)
- #model.bind(data_shapes=[('data', (args.batch_size, 3, image_size[0], image_size[1]))], label_shapes=[('softmax_label', (args.batch_size,))])
- model.bind(data_shapes=[('data', (1, 3, image_size[0], image_size[1]))])
- model.set_params(arg_params, aux_params)
- return model
-
-
-class FaceModel:
- def __init__(self, args):
- self.args = args
- if args.gpu >= 0:
- ctx = mx.gpu(args.gpu)
- else:
- ctx = mx.cpu()
- _vec = args.image_size.split(',')
- assert len(_vec) == 2
- image_size = (int(_vec[0]), int(_vec[1]))
- self.model = None
- if len(args.model) > 0:
- self.model = get_model(ctx, image_size, args.model, 'fc1')
-
- self.det_minsize = 50
- self.det_threshold = [0.6, 0.7, 0.8]
- #self.det_factor = 0.9
- self.image_size = image_size
- mtcnn_path = os.path.join(os.path.dirname(__file__), 'mtcnn-model')
- if args.det == 0:
- detector = MtcnnDetector(model_folder=mtcnn_path,
- ctx=ctx,
- num_worker=1,
- accurate_landmark=True,
- threshold=self.det_threshold)
- else:
- detector = MtcnnDetector(model_folder=mtcnn_path,
- ctx=ctx,
- num_worker=1,
- accurate_landmark=True,
- threshold=[0.0, 0.0, 0.2])
- self.detector = detector
-
- def get_input(self, face_img):
- ret = self.detector.detect_face(face_img, det_type=self.args.det)
- if ret is None:
- return None
- bbox, points = ret
- if bbox.shape[0] == 0:
- return None
- bbox = bbox[0, 0:4]
- points = points[0, :].reshape((2, 5)).T
- #print(bbox)
- #print(points)
- nimg = face_preprocess.preprocess(face_img,
- bbox,
- points,
- image_size='112,112')
- nimg = cv2.cvtColor(nimg, cv2.COLOR_BGR2RGB)
- aligned = np.transpose(nimg, (2, 0, 1))
- input_blob = np.expand_dims(aligned, axis=0)
- data = mx.nd.array(input_blob)
- db = mx.io.DataBatch(data=(data, ))
- return db
-
- def get_ga(self, data):
- self.model.forward(data, is_train=False)
- ret = self.model.get_outputs()[0].asnumpy()
- g = ret[:, 0:2].flatten()
- gender = np.argmax(g)
- a = ret[:, 2:202].reshape((100, 2))
- a = np.argmax(a, axis=1)
- age = int(sum(a))
-
- return gender, age
diff --git a/gender-age/helper.py b/gender-age/helper.py
deleted file mode 100644
index 38f2c9c..0000000
--- a/gender-age/helper.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# coding: utf-8
-# YuanYang
-import math
-import cv2
-import numpy as np
-
-
-def nms(boxes, overlap_threshold, mode='Union'):
- """
- non max suppression
-
- Parameters:
- ----------
- box: numpy array n x 5
- input bbox array
- overlap_threshold: float number
- threshold of overlap
-    mode: string
-        how to compute the overlap ratio, 'Union' or 'Min'
- Returns:
- -------
- index array of the selected bbox
- """
- # if there are no boxes, return an empty list
- if len(boxes) == 0:
- return []
-
-    # if the bounding boxes are integers, convert them to floats
- if boxes.dtype.kind == "i":
- boxes = boxes.astype("float")
-
- # initialize the list of picked indexes
- pick = []
-
- # grab the coordinates of the bounding boxes
- x1, y1, x2, y2, score = [boxes[:, i] for i in range(5)]
-
- area = (x2 - x1 + 1) * (y2 - y1 + 1)
- idxs = np.argsort(score)
-
- # keep looping while some indexes still remain in the indexes list
- while len(idxs) > 0:
- # grab the last index in the indexes list and add the index value to the list of picked indexes
- last = len(idxs) - 1
- i = idxs[last]
- pick.append(i)
-
- xx1 = np.maximum(x1[i], x1[idxs[:last]])
- yy1 = np.maximum(y1[i], y1[idxs[:last]])
- xx2 = np.minimum(x2[i], x2[idxs[:last]])
- yy2 = np.minimum(y2[i], y2[idxs[:last]])
-
- # compute the width and height of the bounding box
- w = np.maximum(0, xx2 - xx1 + 1)
- h = np.maximum(0, yy2 - yy1 + 1)
-
- inter = w * h
- if mode == 'Min':
- overlap = inter / np.minimum(area[i], area[idxs[:last]])
- else:
- overlap = inter / (area[i] + area[idxs[:last]] - inter)
-
-        # delete all indexes from the index list whose overlap exceeds the threshold
- idxs = np.delete(
- idxs,
- np.concatenate(([last], np.where(overlap > overlap_threshold)[0])))
-
- return pick
-
-
-def adjust_input(in_data):
- """
- adjust the input from (h, w, c) to ( 1, c, h, w) for network input
-
- Parameters:
- ----------
- in_data: numpy array of shape (h, w, c)
- input data
- Returns:
- -------
- out_data: numpy array of shape (1, c, h, w)
- reshaped array
- """
- if in_data.dtype is not np.dtype('float32'):
- out_data = in_data.astype(np.float32)
- else:
- out_data = in_data
-
- out_data = out_data.transpose((2, 0, 1))
- out_data = np.expand_dims(out_data, 0)
- out_data = (out_data - 127.5) * 0.0078125
- return out_data
-
-
-def generate_bbox(map, reg, scale, threshold):
- """
- generate bbox from feature map
- Parameters:
- ----------
- map: numpy array , n x m x 1
- detect score for each position
- reg: numpy array , n x m x 4
- bbox
- scale: float number
- scale of this detection
- threshold: float number
- detect threshold
- Returns:
- -------
- bbox array
- """
- stride = 2
- cellsize = 12
-
- t_index = np.where(map > threshold)
-
- # find nothing
- if t_index[0].size == 0:
- return np.array([])
-
- dx1, dy1, dx2, dy2 = [reg[0, i, t_index[0], t_index[1]] for i in range(4)]
-
- reg = np.array([dx1, dy1, dx2, dy2])
- score = map[t_index[0], t_index[1]]
- boundingbox = np.vstack([
- np.round((stride * t_index[1] + 1) / scale),
- np.round((stride * t_index[0] + 1) / scale),
- np.round((stride * t_index[1] + 1 + cellsize) / scale),
- np.round((stride * t_index[0] + 1 + cellsize) / scale), score, reg
- ])
-
- return boundingbox.T
-
-
-def detect_first_stage(img, net, scale, threshold):
- """
- run PNet for first stage
-
- Parameters:
- ----------
-    img: numpy array, bgr order
-        input image
-    net: PNet
-        worker network
-    scale: float number
-        how much the input image should be scaled
-    threshold: float number
-        detection threshold
- Returns:
- -------
- total_boxes : bboxes
- """
- height, width, _ = img.shape
- hs = int(math.ceil(height * scale))
- ws = int(math.ceil(width * scale))
-
- im_data = cv2.resize(img, (ws, hs))
-
- # adjust for the network input
- input_buf = adjust_input(im_data)
- output = net.predict(input_buf)
- boxes = generate_bbox(output[1][0, 1, :, :], output[0], scale, threshold)
-
- if boxes.size == 0:
- return None
-
- # nms
- pick = nms(boxes[:, 0:5], 0.5, mode='Union')
- boxes = boxes[pick]
- return boxes
-
-
-def detect_first_stage_warpper(args):
- return detect_first_stage(*args)
diff --git a/gender-age/model/model-0000.params b/gender-age/model/model-0000.params
deleted file mode 100644
index 35118ad..0000000
Binary files a/gender-age/model/model-0000.params and /dev/null differ
diff --git a/gender-age/model/model-symbol.json b/gender-age/model/model-symbol.json
deleted file mode 100644
index cea9abc..0000000
--- a/gender-age/model/model-symbol.json
+++ /dev/null
@@ -1,2399 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "name": "data",
- "inputs": []
- },
- {
- "op": "_minus_scalar",
- "name": "_minusscalar0",
- "attrs": {"scalar": "127.5"},
- "inputs": [[0, 0, 0]]
- },
- {
- "op": "_mul_scalar",
- "name": "_mulscalar0",
- "attrs": {"scalar": "0.0078125"},
- "inputs": [[1, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_1_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "8",
- "num_group": "1",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_1_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "8",
- "num_group": "1",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[2, 0, 0], [3, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_1_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_1_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_1_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_1_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_1_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[4, 0, 0], [5, 0, 0], [6, 0, 0], [7, 0, 1], [8, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_1_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[9, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_2_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "8",
- "num_group": "8",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_2_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "8",
- "num_group": "8",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[10, 0, 0], [11, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_2_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_2_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_2_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_2_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_2_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[12, 0, 0], [13, 0, 0], [14, 0, 0], [15, 0, 1], [16, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_2_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[17, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_2_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "16",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_2_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "16",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[18, 0, 0], [19, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_2_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_2_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_2_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_2_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_2_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[20, 0, 0], [21, 0, 0], [22, 0, 0], [23, 0, 1], [24, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_2_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[25, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_3_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "16",
- "num_group": "16",
- "pad": "(1, 1)",
- "stride": "(2, 2)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_3_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "16",
- "num_group": "16",
- "pad": "(1, 1)",
- "stride": "(2, 2)"
- },
- "inputs": [[26, 0, 0], [27, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_3_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_3_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_3_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_3_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_3_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[28, 0, 0], [29, 0, 0], [30, 0, 0], [31, 0, 1], [32, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_3_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[33, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_3_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "32",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_3_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "32",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[34, 0, 0], [35, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_3_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_3_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_3_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_3_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_3_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[36, 0, 0], [37, 0, 0], [38, 0, 0], [39, 0, 1], [40, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_3_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[41, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_4_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "32",
- "num_group": "32",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_4_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "32",
- "num_group": "32",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[42, 0, 0], [43, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_4_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_4_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_4_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_4_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_4_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[44, 0, 0], [45, 0, 0], [46, 0, 0], [47, 0, 1], [48, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_4_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[49, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_4_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "32",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_4_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "32",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[50, 0, 0], [51, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_4_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_4_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_4_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_4_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_4_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[52, 0, 0], [53, 0, 0], [54, 0, 0], [55, 0, 1], [56, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_4_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[57, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_5_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "32",
- "num_group": "32",
- "pad": "(1, 1)",
- "stride": "(2, 2)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_5_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "32",
- "num_group": "32",
- "pad": "(1, 1)",
- "stride": "(2, 2)"
- },
- "inputs": [[58, 0, 0], [59, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_5_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_5_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_5_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_5_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_5_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[60, 0, 0], [61, 0, 0], [62, 0, 0], [63, 0, 1], [64, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_5_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[65, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_5_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_5_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[66, 0, 0], [67, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_5_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_5_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_5_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_5_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_5_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[68, 0, 0], [69, 0, 0], [70, 0, 0], [71, 0, 1], [72, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_5_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[73, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_6_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "64",
- "num_group": "64",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_6_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "64",
- "num_group": "64",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[74, 0, 0], [75, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_6_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_6_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_6_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_6_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_6_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[76, 0, 0], [77, 0, 0], [78, 0, 0], [79, 0, 1], [80, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_6_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[81, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_6_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_6_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[82, 0, 0], [83, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_6_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_6_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_6_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_6_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_6_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[84, 0, 0], [85, 0, 0], [86, 0, 0], [87, 0, 1], [88, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_6_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[89, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_7_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "64",
- "num_group": "64",
- "pad": "(1, 1)",
- "stride": "(2, 2)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_7_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "64",
- "num_group": "64",
- "pad": "(1, 1)",
- "stride": "(2, 2)"
- },
- "inputs": [[90, 0, 0], [91, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_7_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_7_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_7_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_7_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_7_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[92, 0, 0], [93, 0, 0], [94, 0, 0], [95, 0, 1], [96, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_7_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[97, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_7_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_7_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[98, 0, 0], [99, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_7_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_7_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_7_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_7_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_7_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[100, 0, 0], [101, 0, 0], [102, 0, 0], [103, 0, 1], [104, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_7_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[105, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_8_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_8_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[106, 0, 0], [107, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_8_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_8_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_8_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_8_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_8_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[108, 0, 0], [109, 0, 0], [110, 0, 0], [111, 0, 1], [112, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_8_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[113, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_8_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_8_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[114, 0, 0], [115, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_8_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_8_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_8_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_8_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_8_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[116, 0, 0], [117, 0, 0], [118, 0, 0], [119, 0, 1], [120, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_8_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[121, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_9_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_9_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[122, 0, 0], [123, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_9_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_9_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_9_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_9_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_9_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[124, 0, 0], [125, 0, 0], [126, 0, 0], [127, 0, 1], [128, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_9_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[129, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_9_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_9_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[130, 0, 0], [131, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_9_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_9_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_9_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_9_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_9_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[132, 0, 0], [133, 0, 0], [134, 0, 0], [135, 0, 1], [136, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_9_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[137, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_10_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_10_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[138, 0, 0], [139, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_10_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_10_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_10_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_10_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_10_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[140, 0, 0], [141, 0, 0], [142, 0, 0], [143, 0, 1], [144, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_10_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[145, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_10_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_10_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[146, 0, 0], [147, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_10_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_10_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_10_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_10_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_10_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[148, 0, 0], [149, 0, 0], [150, 0, 0], [151, 0, 1], [152, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_10_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[153, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_11_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_11_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[154, 0, 0], [155, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_11_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_11_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_11_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_11_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_11_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[156, 0, 0], [157, 0, 0], [158, 0, 0], [159, 0, 1], [160, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_11_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[161, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_11_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_11_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[162, 0, 0], [163, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_11_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_11_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_11_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_11_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_11_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[164, 0, 0], [165, 0, 0], [166, 0, 0], [167, 0, 1], [168, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_11_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[169, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_12_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_12_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[170, 0, 0], [171, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_12_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_12_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_12_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_12_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_12_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[172, 0, 0], [173, 0, 0], [174, 0, 0], [175, 0, 1], [176, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_12_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[177, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_12_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_12_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[178, 0, 0], [179, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_12_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_12_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_12_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_12_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_12_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[180, 0, 0], [181, 0, 0], [182, 0, 0], [183, 0, 1], [184, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_12_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[185, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_13_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(2, 2)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_13_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "128",
- "num_group": "128",
- "pad": "(1, 1)",
- "stride": "(2, 2)"
- },
- "inputs": [[186, 0, 0], [187, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_13_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_13_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_13_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_13_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_13_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[188, 0, 0], [189, 0, 0], [190, 0, 0], [191, 0, 1], [192, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_13_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[193, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_13_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "256",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_13_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "256",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[194, 0, 0], [195, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_13_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_13_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_13_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_13_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_13_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[196, 0, 0], [197, 0, 0], [198, 0, 0], [199, 0, 1], [200, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_13_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[201, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_14_dw_conv2d_weight",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "256",
- "num_group": "256",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_14_dw_conv2d",
- "attrs": {
- "kernel": "(3, 3)",
- "no_bias": "True",
- "num_filter": "256",
- "num_group": "256",
- "pad": "(1, 1)",
- "stride": "(1, 1)"
- },
- "inputs": [[202, 0, 0], [203, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_14_dw_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_14_dw_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_14_dw_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_14_dw_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_14_dw_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[204, 0, 0], [205, 0, 0], [206, 0, 0], [207, 0, 1], [208, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_14_dw_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[209, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_14_conv2d_weight",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "256",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": []
- },
- {
- "op": "Convolution",
- "name": "conv_14_conv2d",
- "attrs": {
- "kernel": "(1, 1)",
- "no_bias": "True",
- "num_filter": "256",
- "num_group": "1",
- "pad": "(0, 0)",
- "stride": "(1, 1)"
- },
- "inputs": [[210, 0, 0], [211, 0, 0]]
- },
- {
- "op": "null",
- "name": "conv_14_batchnorm_gamma",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_14_batchnorm_beta",
- "attrs": {"fix_gamma": "True"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_14_batchnorm_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "conv_14_batchnorm_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "fix_gamma": "True"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "conv_14_batchnorm",
- "attrs": {"fix_gamma": "True"},
- "inputs": [[212, 0, 0], [213, 0, 0], [214, 0, 0], [215, 0, 1], [216, 0, 1]]
- },
- {
- "op": "Activation",
- "name": "conv_14_relu",
- "attrs": {"act_type": "relu"},
- "inputs": [[217, 0, 0]]
- },
- {
- "op": "null",
- "name": "bn1_gamma",
- "attrs": {
- "eps": "2e-05",
- "fix_gamma": "False",
- "momentum": "0.9"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "bn1_beta",
- "attrs": {
- "eps": "2e-05",
- "fix_gamma": "False",
- "momentum": "0.9"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "bn1_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "eps": "2e-05",
- "fix_gamma": "False",
- "momentum": "0.9"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "bn1_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "eps": "2e-05",
- "fix_gamma": "False",
- "momentum": "0.9"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "bn1",
- "attrs": {
- "eps": "2e-05",
- "fix_gamma": "False",
- "momentum": "0.9"
- },
- "inputs": [[218, 0, 0], [219, 0, 0], [220, 0, 0], [221, 0, 1], [222, 0, 1]]
- },
- {
- "op": "null",
- "name": "relu1_gamma",
- "attrs": {
- "__init__": "[\"Constant\", {\"value\": 0.25}]",
- "act_type": "prelu"
- },
- "inputs": []
- },
- {
- "op": "LeakyReLU",
- "name": "relu1",
- "attrs": {"act_type": "prelu"},
- "inputs": [[223, 0, 0], [224, 0, 0]]
- },
- {
- "op": "Pooling",
- "name": "pool1",
- "attrs": {
- "global_pool": "True",
- "kernel": "(7, 7)",
- "pool_type": "avg"
- },
- "inputs": [[225, 0, 0]]
- },
- {
- "op": "Flatten",
- "name": "flatten0",
- "inputs": [[226, 0, 0]]
- },
- {
- "op": "null",
- "name": "pre_fc1_weight",
- "attrs": {"num_hidden": "202"},
- "inputs": []
- },
- {
- "op": "null",
- "name": "pre_fc1_bias",
- "attrs": {"num_hidden": "202"},
- "inputs": []
- },
- {
- "op": "FullyConnected",
- "name": "pre_fc1",
- "attrs": {"num_hidden": "202"},
- "inputs": [[227, 0, 0], [228, 0, 0], [229, 0, 0]]
- },
- {
- "op": "null",
- "name": "fc1_gamma",
- "attrs": {
- "eps": "2e-05",
- "fix_gamma": "True",
- "momentum": "0.9"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "fc1_beta",
- "attrs": {
- "eps": "2e-05",
- "fix_gamma": "True",
- "momentum": "0.9"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "fc1_moving_mean",
- "attrs": {
- "__init__": "[\"zero\", {}]",
- "eps": "2e-05",
- "fix_gamma": "True",
- "momentum": "0.9"
- },
- "inputs": []
- },
- {
- "op": "null",
- "name": "fc1_moving_var",
- "attrs": {
- "__init__": "[\"one\", {}]",
- "eps": "2e-05",
- "fix_gamma": "True",
- "momentum": "0.9"
- },
- "inputs": []
- },
- {
- "op": "BatchNorm",
- "name": "fc1",
- "attrs": {
- "eps": "2e-05",
- "fix_gamma": "True",
- "momentum": "0.9"
- },
- "inputs": [[230, 0, 0], [231, 0, 0], [232, 0, 0], [233, 0, 1], [234, 0, 1]]
- }
- ],
- "arg_nodes": [
- 0,
- 3,
- 5,
- 6,
- 7,
- 8,
- 11,
- 13,
- 14,
- 15,
- 16,
- 19,
- 21,
- 22,
- 23,
- 24,
- 27,
- 29,
- 30,
- 31,
- 32,
- 35,
- 37,
- 38,
- 39,
- 40,
- 43,
- 45,
- 46,
- 47,
- 48,
- 51,
- 53,
- 54,
- 55,
- 56,
- 59,
- 61,
- 62,
- 63,
- 64,
- 67,
- 69,
- 70,
- 71,
- 72,
- 75,
- 77,
- 78,
- 79,
- 80,
- 83,
- 85,
- 86,
- 87,
- 88,
- 91,
- 93,
- 94,
- 95,
- 96,
- 99,
- 101,
- 102,
- 103,
- 104,
- 107,
- 109,
- 110,
- 111,
- 112,
- 115,
- 117,
- 118,
- 119,
- 120,
- 123,
- 125,
- 126,
- 127,
- 128,
- 131,
- 133,
- 134,
- 135,
- 136,
- 139,
- 141,
- 142,
- 143,
- 144,
- 147,
- 149,
- 150,
- 151,
- 152,
- 155,
- 157,
- 158,
- 159,
- 160,
- 163,
- 165,
- 166,
- 167,
- 168,
- 171,
- 173,
- 174,
- 175,
- 176,
- 179,
- 181,
- 182,
- 183,
- 184,
- 187,
- 189,
- 190,
- 191,
- 192,
- 195,
- 197,
- 198,
- 199,
- 200,
- 203,
- 205,
- 206,
- 207,
- 208,
- 211,
- 213,
- 214,
- 215,
- 216,
- 219,
- 220,
- 221,
- 222,
- 224,
- 228,
- 229,
- 231,
- 232,
- 233,
- 234
- ],
- "node_row_ptr": [
- 0,
- 1,
- 2,
- 3,
- 4,
- 5,
- 6,
- 7,
- 8,
- 9,
- 12,
- 13,
- 14,
- 15,
- 16,
- 17,
- 18,
- 19,
- 22,
- 23,
- 24,
- 25,
- 26,
- 27,
- 28,
- 29,
- 32,
- 33,
- 34,
- 35,
- 36,
- 37,
- 38,
- 39,
- 42,
- 43,
- 44,
- 45,
- 46,
- 47,
- 48,
- 49,
- 52,
- 53,
- 54,
- 55,
- 56,
- 57,
- 58,
- 59,
- 62,
- 63,
- 64,
- 65,
- 66,
- 67,
- 68,
- 69,
- 72,
- 73,
- 74,
- 75,
- 76,
- 77,
- 78,
- 79,
- 82,
- 83,
- 84,
- 85,
- 86,
- 87,
- 88,
- 89,
- 92,
- 93,
- 94,
- 95,
- 96,
- 97,
- 98,
- 99,
- 102,
- 103,
- 104,
- 105,
- 106,
- 107,
- 108,
- 109,
- 112,
- 113,
- 114,
- 115,
- 116,
- 117,
- 118,
- 119,
- 122,
- 123,
- 124,
- 125,
- 126,
- 127,
- 128,
- 129,
- 132,
- 133,
- 134,
- 135,
- 136,
- 137,
- 138,
- 139,
- 142,
- 143,
- 144,
- 145,
- 146,
- 147,
- 148,
- 149,
- 152,
- 153,
- 154,
- 155,
- 156,
- 157,
- 158,
- 159,
- 162,
- 163,
- 164,
- 165,
- 166,
- 167,
- 168,
- 169,
- 172,
- 173,
- 174,
- 175,
- 176,
- 177,
- 178,
- 179,
- 182,
- 183,
- 184,
- 185,
- 186,
- 187,
- 188,
- 189,
- 192,
- 193,
- 194,
- 195,
- 196,
- 197,
- 198,
- 199,
- 202,
- 203,
- 204,
- 205,
- 206,
- 207,
- 208,
- 209,
- 212,
- 213,
- 214,
- 215,
- 216,
- 217,
- 218,
- 219,
- 222,
- 223,
- 224,
- 225,
- 226,
- 227,
- 228,
- 229,
- 232,
- 233,
- 234,
- 235,
- 236,
- 237,
- 238,
- 239,
- 242,
- 243,
- 244,
- 245,
- 246,
- 247,
- 248,
- 249,
- 252,
- 253,
- 254,
- 255,
- 256,
- 257,
- 258,
- 259,
- 262,
- 263,
- 264,
- 265,
- 266,
- 267,
- 268,
- 269,
- 272,
- 273,
- 274,
- 275,
- 276,
- 277,
- 280,
- 281,
- 282,
- 283,
- 284,
- 285,
- 286,
- 287,
- 288,
- 289,
- 290,
- 291,
- 294
- ],
- "heads": [[235, 0, 0]],
- "attrs": {"mxnet_version": ["int", 10300]}
-}
\ No newline at end of file
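
The symbol graph removed above is a MobileNet-style backbone (alternating 3×3 depthwise and 1×1 pointwise blocks) whose final `fc1` layer emits 202 values. Below is a minimal inference sketch, assuming the usual InsightFace gender-age output layout (2 gender logits followed by 100 two-way age bins) and a 112×112 RGB input; the checkpoint prefix `model` and epoch `0` are hypothetical placeholders:

```python
import mxnet as mx
import numpy as np

# Hypothetical checkpoint prefix/epoch; MXNet expects model-symbol.json
# and model-0000.params to sit next to each other.
sym, arg_params, aux_params = mx.model.load_checkpoint('model', 0)

mod = mx.mod.Module(symbol=sym, context=mx.cpu(), label_names=None)
mod.bind(for_training=False, data_shapes=[('data', (1, 3, 112, 112))])
mod.set_params(arg_params, aux_params)

img = np.random.uniform(size=(1, 3, 112, 112)).astype(np.float32)  # stand-in input
mod.forward(mx.io.DataBatch(data=[mx.nd.array(img)]), is_train=False)
out = mod.get_outputs()[0].asnumpy()[0]        # length-202 vector

gender = int(np.argmax(out[0:2]))              # 0 = female, 1 = male (assumed ordering)
age_bins = out[2:202].reshape((100, 2))        # 100 binary per-year classifiers
age = int(np.argmax(age_bins, axis=1).sum())   # summing fired bins estimates the age
```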
diff --git a/gender-age/mtcnn-model/det1-0001.params b/gender-age/mtcnn-model/det1-0001.params
deleted file mode 100644
index e4b04aa..0000000
Binary files a/gender-age/mtcnn-model/det1-0001.params and /dev/null differ
diff --git a/gender-age/mtcnn-model/det1-symbol.json b/gender-age/mtcnn-model/det1-symbol.json
deleted file mode 100644
index bd9b772..0000000
--- a/gender-age/mtcnn-model/det1-symbol.json
+++ /dev/null
@@ -1,266 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "param": {},
- "name": "data",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "10",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1",
- "inputs": [[0, 0], [1, 0], [2, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1",
- "inputs": [[3, 0], [4, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(2,2)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1",
- "inputs": [[5, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "16",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2",
- "inputs": [[6, 0], [7, 0], [8, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2",
- "inputs": [[9, 0], [10, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "32",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3",
- "inputs": [[11, 0], [12, 0], [13, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3",
- "inputs": [[14, 0], [15, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(1,1)",
- "no_bias": "False",
- "num_filter": "4",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv4_2",
- "inputs": [[16, 0], [17, 0], [18, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(1,1)",
- "no_bias": "False",
- "num_filter": "2",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv4_1",
- "inputs": [[16, 0], [20, 0], [21, 0]],
- "backward_source_id": -1
- },
- {
- "op": "SoftmaxActivation",
- "param": {"mode": "channel"},
- "name": "prob1",
- "inputs": [[22, 0]],
- "backward_source_id": -1
- }
- ],
- "arg_nodes": [
- 0,
- 1,
- 2,
- 4,
- 7,
- 8,
- 10,
- 12,
- 13,
- 15,
- 17,
- 18,
- 20,
- 21
- ],
- "heads": [[19, 0], [23, 0]]
-}
\ No newline at end of file
diff --git a/gender-age/mtcnn-model/det1.caffemodel b/gender-age/mtcnn-model/det1.caffemodel
deleted file mode 100644
index 79e93b4..0000000
Binary files a/gender-age/mtcnn-model/det1.caffemodel and /dev/null differ
diff --git a/gender-age/mtcnn-model/det1.prototxt b/gender-age/mtcnn-model/det1.prototxt
deleted file mode 100644
index c5c1657..0000000
--- a/gender-age/mtcnn-model/det1.prototxt
+++ /dev/null
@@ -1,177 +0,0 @@
-name: "PNet"
-input: "data"
-input_dim: 1
-input_dim: 3
-input_dim: 12
-input_dim: 12
-
-layer {
- name: "conv1"
- type: "Convolution"
- bottom: "data"
- top: "conv1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 10
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "PReLU1"
- type: "PReLU"
- bottom: "conv1"
- top: "conv1"
-}
-layer {
- name: "pool1"
- type: "Pooling"
- bottom: "conv1"
- top: "pool1"
- pooling_param {
- pool: MAX
- kernel_size: 2
- stride: 2
- }
-}
-
-layer {
- name: "conv2"
- type: "Convolution"
- bottom: "pool1"
- top: "conv2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 16
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "PReLU2"
- type: "PReLU"
- bottom: "conv2"
- top: "conv2"
-}
-
-layer {
- name: "conv3"
- type: "Convolution"
- bottom: "conv2"
- top: "conv3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 32
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "PReLU3"
- type: "PReLU"
- bottom: "conv3"
- top: "conv3"
-}
-
-
-layer {
- name: "conv4-1"
- type: "Convolution"
- bottom: "conv3"
- top: "conv4-1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 2
- kernel_size: 1
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-layer {
- name: "conv4-2"
- type: "Convolution"
- bottom: "conv3"
- top: "conv4-2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 0
- }
- convolution_param {
- num_output: 4
- kernel_size: 1
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prob1"
- type: "Softmax"
- bottom: "conv4-1"
- top: "prob1"
-}
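
The deleted `det1` files define MTCNN's PNet, a fully convolutional 12×12 proposal network: since it has no fully connected layers, it can be run on whole images at several scales, with each output cell scoring one 12×12 window (`prob1` for face/non-face, `conv4-2` for box regression). The sketch below shows the standard scale pyramid used to drive it; `min_face_size` and the 0.709 factor are the conventional MTCNN defaults, not values taken from this repository:

```python
def pnet_scales(height, width, min_face_size=20, factor=0.709):
    """Scale pyramid for running the 12x12 PNet fully-convolutionally.

    Lower min_face_size to recall smaller faces at the cost of speed.
    """
    scales = []
    m = 12.0 / min_face_size          # map the smallest face of interest to 12 px
    min_side = min(height, width) * m
    while min_side >= 12:             # stop once the image is smaller than PNet's window
        scales.append(m)
        m *= factor                   # shrink by roughly 1/sqrt(2) per level
        min_side *= factor
    return scales
```

Each scale yields a score map and a regression map, which are thresholded and merged with non-maximum suppression before being passed to the next stage.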
diff --git a/gender-age/mtcnn-model/det2-0001.params b/gender-age/mtcnn-model/det2-0001.params
deleted file mode 100644
index a14a478..0000000
Binary files a/gender-age/mtcnn-model/det2-0001.params and /dev/null differ
diff --git a/gender-age/mtcnn-model/det2-symbol.json b/gender-age/mtcnn-model/det2-symbol.json
deleted file mode 100644
index a13246a..0000000
--- a/gender-age/mtcnn-model/det2-symbol.json
+++ /dev/null
@@ -1,324 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "param": {},
- "name": "data",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1",
- "inputs": [[0, 0], [1, 0], [2, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1",
- "inputs": [[3, 0], [4, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1",
- "inputs": [[5, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2",
- "inputs": [[6, 0], [7, 0], [8, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2",
- "inputs": [[9, 0], [10, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2",
- "inputs": [[11, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3",
- "inputs": [[12, 0], [13, 0], [14, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3",
- "inputs": [[15, 0], [16, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "128"
- },
- "name": "conv4",
- "inputs": [[17, 0], [18, 0], [19, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4",
- "inputs": [[20, 0], [21, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "4"
- },
- "name": "conv5_2",
- "inputs": [[22, 0], [23, 0], [24, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "conv5_1",
- "inputs": [[22, 0], [26, 0], [27, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prob1_label",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "SoftmaxOutput",
- "param": {
- "grad_scale": "1",
- "ignore_label": "-1",
- "multi_output": "False",
- "normalization": "null",
- "use_ignore": "False"
- },
- "name": "prob1",
- "inputs": [[28, 0], [29, 0]],
- "backward_source_id": -1
- }
- ],
- "arg_nodes": [
- 0,
- 1,
- 2,
- 4,
- 7,
- 8,
- 10,
- 13,
- 14,
- 16,
- 18,
- 19,
- 21,
- 23,
- 24,
- 26,
- 27,
- 29
- ],
- "heads": [[25, 0], [30, 0]]
-}
\ No newline at end of file
diff --git a/gender-age/mtcnn-model/det2.caffemodel b/gender-age/mtcnn-model/det2.caffemodel
deleted file mode 100644
index a5a540c..0000000
Binary files a/gender-age/mtcnn-model/det2.caffemodel and /dev/null differ
diff --git a/gender-age/mtcnn-model/det2.prototxt b/gender-age/mtcnn-model/det2.prototxt
deleted file mode 100644
index 51093e6..0000000
--- a/gender-age/mtcnn-model/det2.prototxt
+++ /dev/null
@@ -1,228 +0,0 @@
-name: "RNet"
-input: "data"
-input_dim: 1
-input_dim: 3
-input_dim: 24
-input_dim: 24
-
-
-##########################
-######################
-layer {
- name: "conv1"
- type: "Convolution"
- bottom: "data"
- top: "conv1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu1"
- type: "PReLU"
- bottom: "conv1"
- top: "conv1"
- propagate_down: true
-}
-layer {
- name: "pool1"
- type: "Pooling"
- bottom: "conv1"
- top: "pool1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2"
- type: "Convolution"
- bottom: "pool1"
- top: "conv2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu2"
- type: "PReLU"
- bottom: "conv2"
- top: "conv2"
- propagate_down: true
-}
-layer {
- name: "pool2"
- type: "Pooling"
- bottom: "conv2"
- top: "pool2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-####################################
-
-##################################
-layer {
- name: "conv3"
- type: "Convolution"
- bottom: "pool2"
- top: "conv3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu3"
- type: "PReLU"
- bottom: "conv3"
- top: "conv3"
- propagate_down: true
-}
-###############################
-
-###############################
-
-layer {
- name: "conv4"
- type: "InnerProduct"
- bottom: "conv3"
- top: "conv4"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- inner_product_param {
- num_output: 128
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu4"
- type: "PReLU"
- bottom: "conv4"
- top: "conv4"
-}
-
-layer {
- name: "conv5-1"
- type: "InnerProduct"
- bottom: "conv4"
- top: "conv5-1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- inner_product_param {
- num_output: 2
- #kernel_size: 1
- #stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "conv5-2"
- type: "InnerProduct"
- bottom: "conv4"
- top: "conv5-2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 4
- #kernel_size: 1
- #stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prob1"
- type: "Softmax"
- bottom: "conv5-1"
- top: "prob1"
-}
\ No newline at end of file
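
The `det2` files above define RNet, MTCNN's 24×24 refinement stage. One detail worth noting is the `pooling_convention: "full"` attribute in the MXNet symbol (matching Caffe's default ceil rounding): it is what makes the 24×24 input collapse to exactly 3×3×64 before the 128-unit fully connected `conv4`. A quick shape check, offered as a sketch rather than anything taken from the repository:

```python
import math

def conv_out(size, kernel, stride=1, pad=0):
    return (size + 2 * pad - kernel) // stride + 1

def pool_out_full(size, kernel, stride, pad=0):
    # "full" pooling convention (MXNet) / Caffe's default: round up
    return math.ceil((size + 2 * pad - kernel) / stride) + 1

s = 24                      # RNet input side
s = conv_out(s, 3)          # conv1: 22
s = pool_out_full(s, 3, 2)  # pool1: 11
s = conv_out(s, 3)          # conv2: 9
s = pool_out_full(s, 3, 2)  # pool2: 4
s = conv_out(s, 2)          # conv3: 3
print(s * s * 64)           # 576 features feed the 128-unit FC
```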
diff --git a/gender-age/mtcnn-model/det3-0001.params b/gender-age/mtcnn-model/det3-0001.params
deleted file mode 100644
index cae898b..0000000
Binary files a/gender-age/mtcnn-model/det3-0001.params and /dev/null differ
diff --git a/gender-age/mtcnn-model/det3-symbol.json b/gender-age/mtcnn-model/det3-symbol.json
deleted file mode 100644
index 00061ed..0000000
--- a/gender-age/mtcnn-model/det3-symbol.json
+++ /dev/null
@@ -1,418 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "param": {},
- "name": "data",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "32",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1",
- "inputs": [[0, 0], [1, 0], [2, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1",
- "inputs": [[3, 0], [4, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1",
- "inputs": [[5, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2",
- "inputs": [[6, 0], [7, 0], [8, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2",
- "inputs": [[9, 0], [10, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2",
- "inputs": [[11, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3",
- "inputs": [[12, 0], [13, 0], [14, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3",
- "inputs": [[15, 0], [16, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(2,2)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool3",
- "inputs": [[17, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "128",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv4",
- "inputs": [[18, 0], [19, 0], [20, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4",
- "inputs": [[21, 0], [22, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "256"
- },
- "name": "conv5",
- "inputs": [[23, 0], [24, 0], [25, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu5",
- "inputs": [[26, 0], [27, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "10"
- },
- "name": "conv6_3",
- "inputs": [[28, 0], [29, 0], [30, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "4"
- },
- "name": "conv6_2",
- "inputs": [[28, 0], [32, 0], [33, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv6_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "conv6_1",
- "inputs": [[28, 0], [35, 0], [36, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prob1_label",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "SoftmaxOutput",
- "param": {
- "grad_scale": "1",
- "ignore_label": "-1",
- "multi_output": "False",
- "normalization": "null",
- "use_ignore": "False"
- },
- "name": "prob1",
- "inputs": [[37, 0], [38, 0]],
- "backward_source_id": -1
- }
- ],
- "arg_nodes": [
- 0,
- 1,
- 2,
- 4,
- 7,
- 8,
- 10,
- 13,
- 14,
- 16,
- 19,
- 20,
- 22,
- 24,
- 25,
- 27,
- 29,
- 30,
- 32,
- 33,
- 35,
- 36,
- 38
- ],
- "heads": [[31, 0], [34, 0], [39, 0]]
-}
\ No newline at end of file
diff --git a/gender-age/mtcnn-model/det3.caffemodel b/gender-age/mtcnn-model/det3.caffemodel
deleted file mode 100644
index 7b4b8a4..0000000
Binary files a/gender-age/mtcnn-model/det3.caffemodel and /dev/null differ
diff --git a/gender-age/mtcnn-model/det3.prototxt b/gender-age/mtcnn-model/det3.prototxt
deleted file mode 100644
index a192307..0000000
--- a/gender-age/mtcnn-model/det3.prototxt
+++ /dev/null
@@ -1,294 +0,0 @@
-name: "ONet"
-input: "data"
-input_dim: 1
-input_dim: 3
-input_dim: 48
-input_dim: 48
-##################################
-layer {
- name: "conv1"
- type: "Convolution"
- bottom: "data"
- top: "conv1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 32
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu1"
- type: "PReLU"
- bottom: "conv1"
- top: "conv1"
-}
-layer {
- name: "pool1"
- type: "Pooling"
- bottom: "conv1"
- top: "pool1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-layer {
- name: "conv2"
- type: "Convolution"
- bottom: "pool1"
- top: "conv2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-layer {
- name: "prelu2"
- type: "PReLU"
- bottom: "conv2"
- top: "conv2"
-}
-layer {
- name: "pool2"
- type: "Pooling"
- bottom: "conv2"
- top: "pool2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv3"
- type: "Convolution"
- bottom: "pool2"
- top: "conv3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 3
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu3"
- type: "PReLU"
- bottom: "conv3"
- top: "conv3"
-}
-layer {
- name: "pool3"
- type: "Pooling"
- bottom: "conv3"
- top: "pool3"
- pooling_param {
- pool: MAX
- kernel_size: 2
- stride: 2
- }
-}
-layer {
- name: "conv4"
- type: "Convolution"
- bottom: "pool3"
- top: "conv4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 128
- kernel_size: 2
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prelu4"
- type: "PReLU"
- bottom: "conv4"
- top: "conv4"
-}
-
-
-layer {
- name: "conv5"
- type: "InnerProduct"
- bottom: "conv4"
- top: "conv5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- #kernel_size: 3
- num_output: 256
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-layer {
- name: "drop5"
- type: "Dropout"
- bottom: "conv5"
- top: "conv5"
- dropout_param {
- dropout_ratio: 0.25
- }
-}
-layer {
- name: "prelu5"
- type: "PReLU"
- bottom: "conv5"
- top: "conv5"
-}
-
-
-layer {
- name: "conv6-1"
- type: "InnerProduct"
- bottom: "conv5"
- top: "conv6-1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- #kernel_size: 1
- num_output: 2
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "conv6-2"
- type: "InnerProduct"
- bottom: "conv5"
- top: "conv6-2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- #kernel_size: 1
- num_output: 4
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "conv6-3"
- type: "InnerProduct"
- bottom: "conv5"
- top: "conv6-3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- #kernel_size: 1
- num_output: 10
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-layer {
- name: "prob1"
- type: "Softmax"
- bottom: "conv6-1"
- top: "prob1"
-}
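
`det3` defines ONet, the 48×48 output stage. Its three heads are a 2-way face/non-face softmax (`conv6-1` feeding `prob1`), a 4-value bounding-box regression (`conv6-2`), and 10 values for five facial landmarks (`conv6-3`). Below is a small decoding sketch, assuming the reference MTCNN layout of five x-coordinates followed by five y-coordinates, each normalized to the candidate window:

```python
import numpy as np

def decode_landmarks(points, box):
    """Map ONet's normalized landmark output back to image coordinates.

    points: length-10 vector, assumed ordered as 5 x-coords then 5 y-coords
            (the layout used by the reference MTCNN implementation).
    box:    (x1, y1, x2, y2) candidate window in image coordinates.
    """
    x1, y1, x2, y2 = box
    w, h = x2 - x1 + 1, y2 - y1 + 1
    xs = x1 + points[:5] * w
    ys = y1 + points[5:] * h
    return np.stack([xs, ys], axis=1)   # (5, 2) array of landmark points
```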
diff --git a/gender-age/mtcnn-model/det4-0001.params b/gender-age/mtcnn-model/det4-0001.params
deleted file mode 100644
index efca9a9..0000000
Binary files a/gender-age/mtcnn-model/det4-0001.params and /dev/null differ
diff --git a/gender-age/mtcnn-model/det4-symbol.json b/gender-age/mtcnn-model/det4-symbol.json
deleted file mode 100644
index aa90e2a..0000000
--- a/gender-age/mtcnn-model/det4-symbol.json
+++ /dev/null
@@ -1,1392 +0,0 @@
-{
- "nodes": [
- {
- "op": "null",
- "param": {},
- "name": "data",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "SliceChannel",
- "param": {
- "axis": "1",
- "num_outputs": "5",
- "squeeze_axis": "False"
- },
- "name": "slice",
- "inputs": [[0, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_1",
- "inputs": [[1, 0], [2, 0], [3, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_1",
- "inputs": [[4, 0], [5, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_1",
- "inputs": [[6, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_1",
- "inputs": [[7, 0], [8, 0], [9, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_1",
- "inputs": [[10, 0], [11, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_1",
- "inputs": [[12, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_1",
- "inputs": [[13, 0], [14, 0], [15, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_1",
- "inputs": [[16, 0], [17, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_2",
- "inputs": [[1, 1], [19, 0], [20, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_2",
- "inputs": [[21, 0], [22, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_2",
- "inputs": [[23, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_2",
- "inputs": [[24, 0], [25, 0], [26, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_2",
- "inputs": [[27, 0], [28, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_2",
- "inputs": [[29, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_2",
- "inputs": [[30, 0], [31, 0], [32, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_2",
- "inputs": [[33, 0], [34, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_3",
- "inputs": [[1, 2], [36, 0], [37, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_3",
- "inputs": [[38, 0], [39, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_3",
- "inputs": [[40, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_3",
- "inputs": [[41, 0], [42, 0], [43, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_3",
- "inputs": [[44, 0], [45, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_3",
- "inputs": [[46, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_3",
- "inputs": [[47, 0], [48, 0], [49, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_3",
- "inputs": [[50, 0], [51, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_4",
- "inputs": [[1, 3], [53, 0], [54, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_4",
- "inputs": [[55, 0], [56, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_4",
- "inputs": [[57, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_4",
- "inputs": [[58, 0], [59, 0], [60, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_4",
- "inputs": [[61, 0], [62, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_4",
- "inputs": [[63, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_4",
- "inputs": [[64, 0], [65, 0], [66, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_4",
- "inputs": [[67, 0], [68, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv1_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "28",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv1_5",
- "inputs": [[1, 4], [70, 0], [71, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu1_5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu1_5",
- "inputs": [[72, 0], [73, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool1_5",
- "inputs": [[74, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv2_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(3,3)",
- "no_bias": "False",
- "num_filter": "48",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv2_5",
- "inputs": [[75, 0], [76, 0], [77, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu2_5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu2_5",
- "inputs": [[78, 0], [79, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Pooling",
- "param": {
- "global_pool": "False",
- "kernel": "(3,3)",
- "pad": "(0,0)",
- "pool_type": "max",
- "pooling_convention": "full",
- "stride": "(2,2)"
- },
- "name": "pool2_5",
- "inputs": [[80, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "conv3_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "Convolution",
- "param": {
- "cudnn_off": "False",
- "cudnn_tune": "off",
- "dilate": "(1,1)",
- "kernel": "(2,2)",
- "no_bias": "False",
- "num_filter": "64",
- "num_group": "1",
- "pad": "(0,0)",
- "stride": "(1,1)",
- "workspace": "1024"
- },
- "name": "conv3_5",
- "inputs": [[81, 0], [82, 0], [83, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu3_5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu3_5",
- "inputs": [[84, 0], [85, 0]],
- "backward_source_id": -1
- },
- {
- "op": "Concat",
- "param": {
- "dim": "1",
- "num_args": "5"
- },
- "name": "concat",
- "inputs": [[18, 0], [35, 0], [52, 0], [69, 0], [86, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "256"
- },
- "name": "fc4",
- "inputs": [[87, 0], [88, 0], [89, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4",
- "inputs": [[90, 0], [91, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_1",
- "inputs": [[92, 0], [93, 0], [94, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_1_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_1",
- "inputs": [[95, 0], [96, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_1_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_1_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_1",
- "inputs": [[97, 0], [98, 0], [99, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_2",
- "inputs": [[92, 0], [101, 0], [102, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_2_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_2",
- "inputs": [[103, 0], [104, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_2_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_2_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_2",
- "inputs": [[105, 0], [106, 0], [107, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_3",
- "inputs": [[92, 0], [109, 0], [110, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_3_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_3",
- "inputs": [[111, 0], [112, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_3_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_3_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_3",
- "inputs": [[113, 0], [114, 0], [115, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_4",
- "inputs": [[92, 0], [117, 0], [118, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_4_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_4",
- "inputs": [[119, 0], [120, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_4_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_4_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_4",
- "inputs": [[121, 0], [122, 0], [123, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc4_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "64"
- },
- "name": "fc4_5",
- "inputs": [[92, 0], [125, 0], [126, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "prelu4_5_gamma",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "LeakyReLU",
- "param": {
- "act_type": "prelu",
- "lower_bound": "0.125",
- "slope": "0.25",
- "upper_bound": "0.334"
- },
- "name": "prelu4_5",
- "inputs": [[127, 0], [128, 0]],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_5_weight",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "null",
- "param": {},
- "name": "fc5_5_bias",
- "inputs": [],
- "backward_source_id": -1
- },
- {
- "op": "FullyConnected",
- "param": {
- "no_bias": "False",
- "num_hidden": "2"
- },
- "name": "fc5_5",
- "inputs": [[129, 0], [130, 0], [131, 0]],
- "backward_source_id": -1
- }
- ],
- "arg_nodes": [
- 0,
- 2,
- 3,
- 5,
- 8,
- 9,
- 11,
- 14,
- 15,
- 17,
- 19,
- 20,
- 22,
- 25,
- 26,
- 28,
- 31,
- 32,
- 34,
- 36,
- 37,
- 39,
- 42,
- 43,
- 45,
- 48,
- 49,
- 51,
- 53,
- 54,
- 56,
- 59,
- 60,
- 62,
- 65,
- 66,
- 68,
- 70,
- 71,
- 73,
- 76,
- 77,
- 79,
- 82,
- 83,
- 85,
- 88,
- 89,
- 91,
- 93,
- 94,
- 96,
- 98,
- 99,
- 101,
- 102,
- 104,
- 106,
- 107,
- 109,
- 110,
- 112,
- 114,
- 115,
- 117,
- 118,
- 120,
- 122,
- 123,
- 125,
- 126,
- 128,
- 130,
- 131
- ],
- "heads": [[100, 0], [108, 0], [116, 0], [124, 0], [132, 0]]
-}
\ No newline at end of file
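
The graph above is the serialized MXNet symbol for MTCNN's LNet landmark-refinement stage: a 15-channel 24x24 input (five stacked BGR patches) runs through five small convolution towers, is concatenated, and ends in five 2-way `fc5_*` heads, one (x, y) offset per landmark. A minimal sketch of loading and running it with MXNet 1.x follows; the checkpoint prefix `det4` and epoch `1` mirror how `mtcnn_detector.py` below loads the models, and the zero input is only a placeholder.

```python
# Minimal sketch: run the LNet symbol above with MXNet 1.x.
# Assumes det4-symbol.json / det4-0001.params sit in the working directory.
import mxnet as mx

sym, arg_params, aux_params = mx.model.load_checkpoint('det4', 1)
mod = mx.mod.Module(symbol=sym, data_names=['data'], label_names=None,
                    context=mx.cpu())
# LNet input: five stacked 3-channel 24x24 patches -> (batch, 15, 24, 24)
mod.bind(data_shapes=[('data', (1, 15, 24, 24))], for_training=False)
mod.set_params(arg_params, aux_params)

batch = mx.io.DataBatch([mx.nd.zeros((1, 15, 24, 24))])
mod.forward(batch, is_train=False)
# Five heads, one per landmark; each output is a (batch, 2) offset pair
print([o.shape for o in mod.get_outputs()])  # [(1, 2)] * 5
```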
diff --git a/gender-age/mtcnn-model/det4.caffemodel b/gender-age/mtcnn-model/det4.caffemodel
deleted file mode 100644
index 38353c4..0000000
Binary files a/gender-age/mtcnn-model/det4.caffemodel and /dev/null differ
diff --git a/gender-age/mtcnn-model/det4.prototxt b/gender-age/mtcnn-model/det4.prototxt
deleted file mode 100644
index 4cdc329..0000000
--- a/gender-age/mtcnn-model/det4.prototxt
+++ /dev/null
@@ -1,995 +0,0 @@
-name: "LNet"
-input: "data"
-input_dim: 1
-input_dim: 15
-input_dim: 24
-input_dim: 24
-
-layer {
- name: "slicer_data"
- type: "Slice"
- bottom: "data"
- top: "data241"
- top: "data242"
- top: "data243"
- top: "data244"
- top: "data245"
- slice_param {
- axis: 1
- slice_point: 3
- slice_point: 6
- slice_point: 9
- slice_point: 12
- }
-}
-layer {
- name: "conv1_1"
- type: "Convolution"
- bottom: "data241"
- top: "conv1_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_1"
- type: "PReLU"
- bottom: "conv1_1"
- top: "conv1_1"
-
-}
-layer {
- name: "pool1_1"
- type: "Pooling"
- bottom: "conv1_1"
- top: "pool1_1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_1"
- type: "Convolution"
- bottom: "pool1_1"
- top: "conv2_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_1"
- type: "PReLU"
- bottom: "conv2_1"
- top: "conv2_1"
-}
-layer {
- name: "pool2_1"
- type: "Pooling"
- bottom: "conv2_1"
- top: "pool2_1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_1"
- type: "Convolution"
- bottom: "pool2_1"
- top: "conv3_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_1"
- type: "PReLU"
- bottom: "conv3_1"
- top: "conv3_1"
-}
-##########################
-layer {
- name: "conv1_2"
- type: "Convolution"
- bottom: "data242"
- top: "conv1_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_2"
- type: "PReLU"
- bottom: "conv1_2"
- top: "conv1_2"
-
-}
-layer {
- name: "pool1_2"
- type: "Pooling"
- bottom: "conv1_2"
- top: "pool1_2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_2"
- type: "Convolution"
- bottom: "pool1_2"
- top: "conv2_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_2"
- type: "PReLU"
- bottom: "conv2_2"
- top: "conv2_2"
-}
-layer {
- name: "pool2_2"
- type: "Pooling"
- bottom: "conv2_2"
- top: "pool2_2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_2"
- type: "Convolution"
- bottom: "pool2_2"
- top: "conv3_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_2"
- type: "PReLU"
- bottom: "conv3_2"
- top: "conv3_2"
-}
-##########################
-##########################
-layer {
- name: "conv1_3"
- type: "Convolution"
- bottom: "data243"
- top: "conv1_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_3"
- type: "PReLU"
- bottom: "conv1_3"
- top: "conv1_3"
-
-}
-layer {
- name: "pool1_3"
- type: "Pooling"
- bottom: "conv1_3"
- top: "pool1_3"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_3"
- type: "Convolution"
- bottom: "pool1_3"
- top: "conv2_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_3"
- type: "PReLU"
- bottom: "conv2_3"
- top: "conv2_3"
-}
-layer {
- name: "pool2_3"
- type: "Pooling"
- bottom: "conv2_3"
- top: "pool2_3"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_3"
- type: "Convolution"
- bottom: "pool2_3"
- top: "conv3_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_3"
- type: "PReLU"
- bottom: "conv3_3"
- top: "conv3_3"
-}
-##########################
-##########################
-layer {
- name: "conv1_4"
- type: "Convolution"
- bottom: "data244"
- top: "conv1_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_4"
- type: "PReLU"
- bottom: "conv1_4"
- top: "conv1_4"
-
-}
-layer {
- name: "pool1_4"
- type: "Pooling"
- bottom: "conv1_4"
- top: "pool1_4"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_4"
- type: "Convolution"
- bottom: "pool1_4"
- top: "conv2_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_4"
- type: "PReLU"
- bottom: "conv2_4"
- top: "conv2_4"
-}
-layer {
- name: "pool2_4"
- type: "Pooling"
- bottom: "conv2_4"
- top: "pool2_4"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_4"
- type: "Convolution"
- bottom: "pool2_4"
- top: "conv3_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_4"
- type: "PReLU"
- bottom: "conv3_4"
- top: "conv3_4"
-}
-##########################
-##########################
-layer {
- name: "conv1_5"
- type: "Convolution"
- bottom: "data245"
- top: "conv1_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 28
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu1_5"
- type: "PReLU"
- bottom: "conv1_5"
- top: "conv1_5"
-
-}
-layer {
- name: "pool1_5"
- type: "Pooling"
- bottom: "conv1_5"
- top: "pool1_5"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-}
-
-layer {
- name: "conv2_5"
- type: "Convolution"
- bottom: "pool1_5"
- top: "conv2_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 48
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu2_5"
- type: "PReLU"
- bottom: "conv2_5"
- top: "conv2_5"
-}
-layer {
- name: "pool2_5"
- type: "Pooling"
- bottom: "conv2_5"
- top: "pool2_5"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
-
-}
-layer {
- name: "conv3_5"
- type: "Convolution"
- bottom: "pool2_5"
- top: "conv3_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- convolution_param {
- num_output: 64
- kernel_size: 2
- stride: 1
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu3_5"
- type: "PReLU"
- bottom: "conv3_5"
- top: "conv3_5"
-}
-##########################
-layer {
- name: "concat"
- bottom: "conv3_1"
- bottom: "conv3_2"
- bottom: "conv3_3"
- bottom: "conv3_4"
- bottom: "conv3_5"
- top: "conv3"
- type: "Concat"
- concat_param {
- axis: 1
- }
-}
-##########################
-layer {
- name: "fc4"
- type: "InnerProduct"
- bottom: "conv3"
- top: "fc4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 256
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4"
- type: "PReLU"
- bottom: "fc4"
- top: "fc4"
-}
-############################
-layer {
- name: "fc4_1"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_1"
- type: "PReLU"
- bottom: "fc4_1"
- top: "fc4_1"
-}
-layer {
- name: "fc5_1"
- type: "InnerProduct"
- bottom: "fc4_1"
- top: "fc5_1"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-
-#########################
-layer {
- name: "fc4_2"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_2"
- type: "PReLU"
- bottom: "fc4_2"
- top: "fc4_2"
-}
-layer {
- name: "fc5_2"
- type: "InnerProduct"
- bottom: "fc4_2"
- top: "fc5_2"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-#########################
-layer {
- name: "fc4_3"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_3"
- type: "PReLU"
- bottom: "fc4_3"
- top: "fc4_3"
-}
-layer {
- name: "fc5_3"
- type: "InnerProduct"
- bottom: "fc4_3"
- top: "fc5_3"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-#########################
-layer {
- name: "fc4_4"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_4"
- type: "PReLU"
- bottom: "fc4_4"
- top: "fc4_4"
-}
-layer {
- name: "fc5_4"
- type: "InnerProduct"
- bottom: "fc4_4"
- top: "fc5_4"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-#########################
-layer {
- name: "fc4_5"
- type: "InnerProduct"
- bottom: "fc4"
- top: "fc4_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 64
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-
-}
-layer {
- name: "prelu4_5"
- type: "PReLU"
- bottom: "fc4_5"
- top: "fc4_5"
-}
-layer {
- name: "fc5_5"
- type: "InnerProduct"
- bottom: "fc4_5"
- top: "fc5_5"
- param {
- lr_mult: 1
- decay_mult: 1
- }
- param {
- lr_mult: 2
- decay_mult: 1
- }
- inner_product_param {
- num_output: 2
- weight_filler {
- type: "xavier"
- #type: "constant"
- #value: 0
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
-}
-
-#########################
-
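
The prototxt above is the Caffe counterpart of the same LNet: a `Slice` layer splits the 15-channel input into five 3-channel patches, each patch passes through its own conv1..conv3 + PReLU tower, the towers are concatenated into the 256-d `fc4`, and five `fc4_*`/`fc5_*` branches regress one landmark offset each. A pycaffe sketch, assuming the deleted `det4.prototxt` / `det4.caffemodel` pair is still available locally:

```python
# Sketch: run the Caffe LNet; file paths are illustrative.
import caffe
import numpy as np

net = caffe.Net('det4.prototxt', 'det4.caffemodel', caffe.TEST)
# Five 24x24 BGR patches stacked along channels -> (1, 15, 24, 24)
net.blobs['data'].data[...] = np.zeros((1, 15, 24, 24), dtype=np.float32)
out = net.forward()
for k in range(1, 6):
    # Each fc5_k holds one landmark's (x, y) offset in patch coordinates
    print('fc5_%d' % k, out['fc5_%d' % k])
```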
diff --git a/gender-age/mtcnn_detector.py b/gender-age/mtcnn_detector.py
deleted file mode 100644
index 1ce4146..0000000
--- a/gender-age/mtcnn_detector.py
+++ /dev/null
@@ -1,696 +0,0 @@
-# coding: utf-8
-import os
-import mxnet as mx
-import numpy as np
-import math
-import cv2
-from multiprocessing import Pool
-from itertools import repeat
-from itertools import izip  # Python 2 only; equivalent to the built-in zip on Python 3
-from helper import nms, adjust_input, generate_bbox, detect_first_stage_warpper
-
-
-class MtcnnDetector(object):
- """
- Joint Face Detection and Alignment using Multi-task Cascaded Convolutional Neural Networks
- see https://github.com/kpzhang93/MTCNN_face_detection_alignment
- this is a mxnet version
- """
- def __init__(self,
- model_folder='.',
- minsize=20,
- threshold=[0.6, 0.7, 0.8],
- factor=0.709,
- num_worker=1,
- accurate_landmark=False,
- ctx=mx.cpu()):
- """
- Initialize the detector
-
- Parameters:
- ----------
- model_folder : string
- path for the models
- minsize : float number
- minimum size of faces to detect
- threshold : list of float
- detection thresholds for the 3 cascade stages
- factor: float number
- scale factor for image pyramid
- num_worker: int number
- number of processes we use for first stage
- accurate_landmark: bool
- use accurate landmark localization or not
-
- """
- self.num_worker = num_worker
- self.accurate_landmark = accurate_landmark
-
- # load 4 models from folder
- models = ['det1', 'det2', 'det3', 'det4']
- models = [os.path.join(model_folder, f) for f in models]
-
- self.PNets = []
- for i in range(num_worker):
- worker_net = mx.model.FeedForward.load(models[0], 1, ctx=ctx)
- self.PNets.append(worker_net)
-
- #self.Pool = Pool(num_worker)
-
- self.RNet = mx.model.FeedForward.load(models[1], 1, ctx=ctx)
- self.ONet = mx.model.FeedForward.load(models[2], 1, ctx=ctx)
- self.LNet = mx.model.FeedForward.load(models[3], 1, ctx=ctx)
-
- self.minsize = float(minsize)
- self.factor = float(factor)
- self.threshold = threshold
-
- def convert_to_square(self, bbox):
- """
- convert bbox to square
-
- Parameters:
- ----------
- bbox: numpy array , shape n x 5
- input bbox
-
- Returns:
- -------
- square bbox
- """
- square_bbox = bbox.copy()
-
- h = bbox[:, 3] - bbox[:, 1] + 1
- w = bbox[:, 2] - bbox[:, 0] + 1
- max_side = np.maximum(h, w)
- square_bbox[:, 0] = bbox[:, 0] + w * 0.5 - max_side * 0.5
- square_bbox[:, 1] = bbox[:, 1] + h * 0.5 - max_side * 0.5
- square_bbox[:, 2] = square_bbox[:, 0] + max_side - 1
- square_bbox[:, 3] = square_bbox[:, 1] + max_side - 1
- return square_bbox
-
- def calibrate_box(self, bbox, reg):
- """
- calibrate bboxes
-
- Parameters:
- ----------
- bbox: numpy array, shape n x 5
- input bboxes
- reg: numpy array, shape n x 4
- bbox adjustments
-
- Returns:
- -------
- bboxes after refinement
-
- """
- w = bbox[:, 2] - bbox[:, 0] + 1
- w = np.expand_dims(w, 1)
- h = bbox[:, 3] - bbox[:, 1] + 1
- h = np.expand_dims(h, 1)
- reg_m = np.hstack([w, h, w, h])
- aug = reg_m * reg
- bbox[:, 0:4] = bbox[:, 0:4] + aug
- return bbox
-
- def pad(self, bboxes, w, h):
- """
- pad the bboxes and also restrict their size
-
- Parameters:
- ----------
- bboxes: numpy array, n x 5
- input bboxes
- w: float number
- width of the input image
- h: float number
- height of the input image
- Returns:
- -------
- dy, dx : numpy array, n x 1
- start point of the bbox in target image
- edy, edx : numpy array, n x 1
- end point of the bbox in target image
- y, x : numpy array, n x 1
- start point of the bbox in original image
- ey, ex : numpy array, n x 1
- end point of the bbox in original image
- tmpw, tmph : numpy array, n x 1
- width and height of the bbox
-
- """
- tmpw = bboxes[:, 2] - bboxes[:, 0] + 1
- tmph = bboxes[:, 3] - bboxes[:, 1] + 1
- num_box = bboxes.shape[0]
-
- dx, dy = np.zeros((num_box, )), np.zeros((num_box, ))
- edx, edy = tmpw.copy() - 1, tmph.copy() - 1
-
- x, y, ex, ey = bboxes[:, 0], bboxes[:, 1], bboxes[:, 2], bboxes[:, 3]
-
- tmp_index = np.where(ex > w - 1)
- edx[tmp_index] = tmpw[tmp_index] + w - 2 - ex[tmp_index]
- ex[tmp_index] = w - 1
-
- tmp_index = np.where(ey > h - 1)
- edy[tmp_index] = tmph[tmp_index] + h - 2 - ey[tmp_index]
- ey[tmp_index] = h - 1
-
- tmp_index = np.where(x < 0)
- dx[tmp_index] = 0 - x[tmp_index]
- x[tmp_index] = 0
-
- tmp_index = np.where(y < 0)
- dy[tmp_index] = 0 - y[tmp_index]
- y[tmp_index] = 0
-
- return_list = [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph]
- return_list = [item.astype(np.int32) for item in return_list]
-
- return return_list
-
- def slice_index(self, number):
- """
- slice the indices [0, number) into chunks of size num_worker (the last chunk may be smaller)
- Parameters:
- ----------
- number: int
- total number of indices to slice
- """
- def chunks(l, n):
- """Yield successive n-sized chunks from l."""
- for i in range(0, len(l), n):
- yield l[i:i + n]
-
- num_list = range(number)
- return list(chunks(num_list, self.num_worker))
-
- def detect_face_limited(self, img, det_type=2):
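- # Skip the PNet image pyramid: treat the whole (pre-cropped) input as a
- # single candidate box, refine it with RNet when det_type >= 2, then run
- # ONet (and LNet when accurate_landmark is set) to obtain the landmarks.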
- height, width, _ = img.shape
- if det_type >= 2:
- total_boxes = np.array(
- [[0.0, 0.0, img.shape[1], img.shape[0], 0.9]],
- dtype=np.float32)
- num_box = total_boxes.shape[0]
-
- # pad the bbox
- [dy, edy, dx, edx, y, ey, x, ex, tmpw,
- tmph] = self.pad(total_boxes, width, height)
- # (3, 24, 24) is the input shape for RNet
- input_buf = np.zeros((num_box, 3, 24, 24), dtype=np.float32)
-
- for i in range(num_box):
- tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.uint8)
- tmp[dy[i]:edy[i] + 1,
- dx[i]:edx[i] + 1, :] = img[y[i]:ey[i] + 1,
- x[i]:ex[i] + 1, :]
- input_buf[i, :, :, :] = adjust_input(cv2.resize(tmp, (24, 24)))
-
- output = self.RNet.predict(input_buf)
-
- # filter the total_boxes with threshold
- passed = np.where(output[1][:, 1] > self.threshold[1])
- total_boxes = total_boxes[passed]
-
- if total_boxes.size == 0:
- return None
-
- total_boxes[:, 4] = output[1][passed, 1].reshape((-1, ))
- reg = output[0][passed]
-
- # nms
- pick = nms(total_boxes, 0.7, 'Union')
- total_boxes = total_boxes[pick]
- total_boxes = self.calibrate_box(total_boxes, reg[pick])
- total_boxes = self.convert_to_square(total_boxes)
- total_boxes[:, 0:4] = np.round(total_boxes[:, 0:4])
- else:
- total_boxes = np.array(
- [[0.0, 0.0, img.shape[1], img.shape[0], 0.9]],
- dtype=np.float32)
- num_box = total_boxes.shape[0]
- [dy, edy, dx, edx, y, ey, x, ex, tmpw,
- tmph] = self.pad(total_boxes, width, height)
- # (3, 48, 48) is the input shape for ONet
- input_buf = np.zeros((num_box, 3, 48, 48), dtype=np.float32)
-
- for i in range(num_box):
- tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.float32)
- tmp[dy[i]:edy[i] + 1, dx[i]:edx[i] + 1, :] = img[y[i]:ey[i] + 1,
- x[i]:ex[i] + 1, :]
- input_buf[i, :, :, :] = adjust_input(cv2.resize(tmp, (48, 48)))
-
- output = self.ONet.predict(input_buf)
- #print(output[2])
-
- # filter the total_boxes with threshold
- passed = np.where(output[2][:, 1] > self.threshold[2])
- total_boxes = total_boxes[passed]
-
- if total_boxes.size == 0:
- return None
-
- total_boxes[:, 4] = output[2][passed, 1].reshape((-1, ))
- reg = output[1][passed]
- points = output[0][passed]
-
- # compute landmark points
- bbw = total_boxes[:, 2] - total_boxes[:, 0] + 1
- bbh = total_boxes[:, 3] - total_boxes[:, 1] + 1
- points[:, 0:5] = np.expand_dims(
- total_boxes[:, 0], 1) + np.expand_dims(bbw, 1) * points[:, 0:5]
- points[:, 5:10] = np.expand_dims(
- total_boxes[:, 1], 1) + np.expand_dims(bbh, 1) * points[:, 5:10]
-
- # nms
- total_boxes = self.calibrate_box(total_boxes, reg)
- pick = nms(total_boxes, 0.7, 'Min')
- total_boxes = total_boxes[pick]
- points = points[pick]
-
- if not self.accurate_landmark:
- return total_boxes, points
-
- #############################################
- # extended stage
- #############################################
- num_box = total_boxes.shape[0]
- patchw = np.maximum(total_boxes[:, 2] - total_boxes[:, 0] + 1,
- total_boxes[:, 3] - total_boxes[:, 1] + 1)
- patchw = np.round(patchw * 0.25)
-
- # make it even
- patchw[np.where(np.mod(patchw, 2) == 1)] += 1
-
- input_buf = np.zeros((num_box, 15, 24, 24), dtype=np.float32)
- for i in range(5):
- x, y = points[:, i], points[:, i + 5]
- x, y = np.round(x - 0.5 * patchw), np.round(y - 0.5 * patchw)
- [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph] = self.pad(
- np.vstack([x, y, x + patchw - 1, y + patchw - 1]).T, width,
- height)
- for j in range(num_box):
- tmpim = np.zeros((tmpw[j], tmpw[j], 3), dtype=np.float32)
- tmpim[dy[j]:edy[j] + 1,
- dx[j]:edx[j] + 1, :] = img[y[j]:ey[j] + 1,
- x[j]:ex[j] + 1, :]
- input_buf[j, i * 3:i * 3 + 3, :, :] = adjust_input(
- cv2.resize(tmpim, (24, 24)))
-
- output = self.LNet.predict(input_buf)
-
- pointx = np.zeros((num_box, 5))
- pointy = np.zeros((num_box, 5))
-
- for k in range(5):
- # do not make a large movement
- tmp_index = np.where(np.abs(output[k] - 0.5) > 0.35)
- output[k][tmp_index[0]] = 0.5
-
- pointx[:, k] = np.round(points[:, k] -
- 0.5 * patchw) + output[k][:, 0] * patchw
- pointy[:, k] = np.round(points[:, k + 5] -
- 0.5 * patchw) + output[k][:, 1] * patchw
-
- points = np.hstack([pointx, pointy])
- points = points.astype(np.int32)
-
- return total_boxes, points
-
- def detect_face(self, img, det_type=0):
- """
- detect face over img
- Parameters:
- ----------
- img: numpy array, BGR order, shape (h, w, 3)
- input image
- Returns:
- -------
- bboxes: numpy array, n x 5 (x1, y1, x2, y2, score)
- bboxes
- points: numpy array, n x 10 (x1 ... x5, y1 ... y5)
- landmarks
- """
-
- # check input
- height, width, _ = img.shape
- if det_type == 0:
- MIN_DET_SIZE = 12
-
- if img is None:
- return None
-
- # only works for color image
- if len(img.shape) != 3:
- return None
-
- # detected boxes
- total_boxes = []
-
- minl = min(height, width)
-
- # get all the valid scales
- scales = []
- m = MIN_DET_SIZE / self.minsize
- minl *= m
- factor_count = 0
- while minl > MIN_DET_SIZE:
- scales.append(m * self.factor**factor_count)
- minl *= self.factor
- factor_count += 1
-
- #############################################
- # first stage
- #############################################
- #for scale in scales:
- # return_boxes = self.detect_first_stage(img, scale, 0)
- # if return_boxes is not None:
- # total_boxes.append(return_boxes)
-
- sliced_index = self.slice_index(len(scales))
- total_boxes = []
- for batch in sliced_index:
- #local_boxes = self.Pool.map( detect_first_stage_warpper, \
- # izip(repeat(img), self.PNets[:len(batch)], [scales[i] for i in batch], repeat(self.threshold[0])) )
- local_boxes = map( detect_first_stage_warpper, \
- izip(repeat(img), self.PNets[:len(batch)], [scales[i] for i in batch], repeat(self.threshold[0])) )
- total_boxes.extend(local_boxes)
-
- # remove the Nones
- total_boxes = [i for i in total_boxes if i is not None]
-
- if len(total_boxes) == 0:
- return None
-
- total_boxes = np.vstack(total_boxes)
-
- if total_boxes.size == 0:
- return None
-
- # merge the detection from first stage
- pick = nms(total_boxes[:, 0:5], 0.7, 'Union')
- total_boxes = total_boxes[pick]
-
- bbw = total_boxes[:, 2] - total_boxes[:, 0] + 1
- bbh = total_boxes[:, 3] - total_boxes[:, 1] + 1
-
- # refine the bboxes
- total_boxes = np.vstack([
- total_boxes[:, 0] + total_boxes[:, 5] * bbw,
- total_boxes[:, 1] + total_boxes[:, 6] * bbh,
- total_boxes[:, 2] + total_boxes[:, 7] * bbw,
- total_boxes[:, 3] + total_boxes[:, 8] * bbh, total_boxes[:, 4]
- ])
-
- total_boxes = total_boxes.T
- total_boxes = self.convert_to_square(total_boxes)
- total_boxes[:, 0:4] = np.round(total_boxes[:, 0:4])
- else:
- total_boxes = np.array(
- [[0.0, 0.0, img.shape[1], img.shape[0], 0.9]],
- dtype=np.float32)
-
- #############################################
- # second stage
- #############################################
- num_box = total_boxes.shape[0]
-
- # pad the bbox
- [dy, edy, dx, edx, y, ey, x, ex, tmpw,
- tmph] = self.pad(total_boxes, width, height)
- # (3, 24, 24) is the input shape for RNet
- input_buf = np.zeros((num_box, 3, 24, 24), dtype=np.float32)
-
- for i in range(num_box):
- tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.uint8)
- tmp[dy[i]:edy[i] + 1, dx[i]:edx[i] + 1, :] = img[y[i]:ey[i] + 1,
- x[i]:ex[i] + 1, :]
- input_buf[i, :, :, :] = adjust_input(cv2.resize(tmp, (24, 24)))
-
- output = self.RNet.predict(input_buf)
-
- # filter the total_boxes with threshold
- passed = np.where(output[1][:, 1] > self.threshold[1])
- total_boxes = total_boxes[passed]
-
- if total_boxes.size == 0:
- return None
-
- total_boxes[:, 4] = output[1][passed, 1].reshape((-1, ))
- reg = output[0][passed]
-
- # nms
- pick = nms(total_boxes, 0.7, 'Union')
- total_boxes = total_boxes[pick]
- total_boxes = self.calibrate_box(total_boxes, reg[pick])
- total_boxes = self.convert_to_square(total_boxes)
- total_boxes[:, 0:4] = np.round(total_boxes[:, 0:4])
-
- #############################################
- # third stage
- #############################################
- num_box = total_boxes.shape[0]
-
- # pad the bbox
- [dy, edy, dx, edx, y, ey, x, ex, tmpw,
- tmph] = self.pad(total_boxes, width, height)
- # (3, 48, 48) is the input shape for ONet
- input_buf = np.zeros((num_box, 3, 48, 48), dtype=np.float32)
-
- for i in range(num_box):
- tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.float32)
- tmp[dy[i]:edy[i] + 1, dx[i]:edx[i] + 1, :] = img[y[i]:ey[i] + 1,
- x[i]:ex[i] + 1, :]
- input_buf[i, :, :, :] = adjust_input(cv2.resize(tmp, (48, 48)))
-
- output = self.ONet.predict(input_buf)
-
- # filter the total_boxes with threshold
- passed = np.where(output[2][:, 1] > self.threshold[2])
- total_boxes = total_boxes[passed]
-
- if total_boxes.size == 0:
- return None
-
- total_boxes[:, 4] = output[2][passed, 1].reshape((-1, ))
- reg = output[1][passed]
- points = output[0][passed]
-
- # compute landmark points
- bbw = total_boxes[:, 2] - total_boxes[:, 0] + 1
- bbh = total_boxes[:, 3] - total_boxes[:, 1] + 1
- points[:, 0:5] = np.expand_dims(
- total_boxes[:, 0], 1) + np.expand_dims(bbw, 1) * points[:, 0:5]
- points[:, 5:10] = np.expand_dims(
- total_boxes[:, 1], 1) + np.expand_dims(bbh, 1) * points[:, 5:10]
-
- # nms
- total_boxes = self.calibrate_box(total_boxes, reg)
- pick = nms(total_boxes, 0.7, 'Min')
- total_boxes = total_boxes[pick]
- points = points[pick]
-
- if not self.accurate_landmark:
- return total_boxes, points
-
- #############################################
- # extended stage
- #############################################
- num_box = total_boxes.shape[0]
- patchw = np.maximum(total_boxes[:, 2] - total_boxes[:, 0] + 1,
- total_boxes[:, 3] - total_boxes[:, 1] + 1)
- patchw = np.round(patchw * 0.25)
-
- # make it even
- patchw[np.where(np.mod(patchw, 2) == 1)] += 1
-
- input_buf = np.zeros((num_box, 15, 24, 24), dtype=np.float32)
- for i in range(5):
- x, y = points[:, i], points[:, i + 5]
- x, y = np.round(x - 0.5 * patchw), np.round(y - 0.5 * patchw)
- [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph] = self.pad(
- np.vstack([x, y, x + patchw - 1, y + patchw - 1]).T, width,
- height)
- for j in range(num_box):
- tmpim = np.zeros((tmpw[j], tmpw[j], 3), dtype=np.float32)
- tmpim[dy[j]:edy[j] + 1,
- dx[j]:edx[j] + 1, :] = img[y[j]:ey[j] + 1,
- x[j]:ex[j] + 1, :]
- input_buf[j, i * 3:i * 3 + 3, :, :] = adjust_input(
- cv2.resize(tmpim, (24, 24)))
-
- output = self.LNet.predict(input_buf)
-
- pointx = np.zeros((num_box, 5))
- pointy = np.zeros((num_box, 5))
-
- for k in range(5):
- # do not make a large movement
- tmp_index = np.where(np.abs(output[k] - 0.5) > 0.35)
- output[k][tmp_index[0]] = 0.5
-
- pointx[:, k] = np.round(points[:, k] -
- 0.5 * patchw) + output[k][:, 0] * patchw
- pointy[:, k] = np.round(points[:, k + 5] -
- 0.5 * patchw) + output[k][:, 1] * patchw
-
- points = np.hstack([pointx, pointy])
- points = points.astype(np.int32)
-
- return total_boxes, points
-
- def list2colmatrix(self, pts_list):
- """
- convert list to column matrix
- Parameters:
- ----------
- pts_list:
- input list
- Returns:
- -------
- colMat: 2n x 1 column matrix of interleaved (x, y) coordinates
-
- """
- assert len(pts_list) > 0
- colMat = []
- for i in range(len(pts_list)):
- colMat.append(pts_list[i][0])
- colMat.append(pts_list[i][1])
- colMat = np.matrix(colMat).transpose()
- return colMat
-
- def find_tfrom_between_shapes(self, from_shape, to_shape):
- """
- find the similarity transform mapping from_shape onto to_shape
- Parameters:
- ----------
- from_shape:
- to_shape:
- Returns:
- -------
- tran_m: 2 x 2 scaled rotation matrix
- tran_b: 2 x 1 translation vector
- """
- assert from_shape.shape[0] == to_shape.shape[0]
- assert from_shape.shape[0] % 2 == 0
-
- sigma_from = 0.0
- sigma_to = 0.0
- cov = np.matrix([[0.0, 0.0], [0.0, 0.0]])
-
- # compute the mean and cov
- from_shape_points = from_shape.reshape(from_shape.shape[0] / 2, 2)
- to_shape_points = to_shape.reshape(to_shape.shape[0] / 2, 2)
- mean_from = from_shape_points.mean(axis=0)
- mean_to = to_shape_points.mean(axis=0)
-
- for i in range(from_shape_points.shape[0]):
- temp_dis = np.linalg.norm(from_shape_points[i] - mean_from)
- sigma_from += temp_dis * temp_dis
- temp_dis = np.linalg.norm(to_shape_points[i] - mean_to)
- sigma_to += temp_dis * temp_dis
- cov += (to_shape_points[i].transpose() -
- mean_to.transpose()) * (from_shape_points[i] - mean_from)
-
- sigma_from = sigma_from / to_shape_points.shape[0]
- sigma_to = sigma_to / to_shape_points.shape[0]
- cov = cov / to_shape_points.shape[0]
-
- # compute the affine matrix
- s = np.matrix([[1.0, 0.0], [0.0, 1.0]])
- u, d, vt = np.linalg.svd(cov)
-
- if np.linalg.det(cov) < 0:
- if d[1] < d[0]:
- s[1, 1] = -1
- else:
- s[0, 0] = -1
- r = u * s * vt
- c = 1.0
- if sigma_from != 0:
- c = 1.0 / sigma_from * np.trace(np.diag(d) * s)
-
- tran_b = mean_to.transpose() - c * r * mean_from.transpose()
- tran_m = c * r
-
- return tran_m, tran_b
-
- def extract_image_chips(self, img, points, desired_size=256, padding=0):
- """
- crop and align face
- Parameters:
- ----------
- img: numpy array, BGR order, shape (h, w, 3)
- input image
- points: numpy array, n x 10 (x1 ... x5, y1 ... y5)
- desired_size: default 256
- padding: default 0
- Returns:
- -------
- crop_imgs: list, n
- cropped and aligned faces
- """
- crop_imgs = []
- for p in points:
- shape = []
- for k in range(len(p) / 2):
- shape.append(p[k])
- shape.append(p[k + 5])
-
- # clamp negative padding values to zero
- padding = max(padding, 0)
- # average positions of face points
- mean_face_shape_x = [
- 0.224152, 0.75610125, 0.490127, 0.254149, 0.726104
- ]
- mean_face_shape_y = [
- 0.2119465, 0.2119465, 0.628106, 0.780233, 0.780233
- ]
-
- from_points = []
- to_points = []
-
- for i in range(len(shape) / 2):
- x = (padding + mean_face_shape_x[i]) / (2 * padding + 1) * desired_size
- y = (padding + mean_face_shape_y[i]) / (2 * padding + 1) * desired_size
- to_points.append([x, y])
- from_points.append([shape[2 * i], shape[2 * i + 1]])
-
- # convert the points to Mat
- from_mat = self.list2colmatrix(from_points)
- to_mat = self.list2colmatrix(to_points)
-
- # compute the similarity transform
- tran_m, tran_b = self.find_tfrom_between_shapes(from_mat, to_mat)
-
- probe_vec = np.matrix([1.0, 0.0]).transpose()
- probe_vec = tran_m * probe_vec
-
- scale = np.linalg.norm(probe_vec)
- angle = 180.0 / math.pi * math.atan2(probe_vec[1, 0], probe_vec[0, 0])
-
- from_center = [(shape[0] + shape[2]) / 2.0,
- (shape[1] + shape[3]) / 2.0]
- to_center = [0, 0]
- to_center[1] = desired_size * 0.4
- to_center[0] = desired_size * 0.5
-
- ex = to_center[0] - from_center[0]
- ey = to_center[1] - from_center[1]
-
- rot_mat = cv2.getRotationMatrix2D((from_center[0], from_center[1]),
- -1 * angle, scale)
- rot_mat[0][2] += ex
- rot_mat[1][2] += ey
-
- chips = cv2.warpAffine(img, rot_mat, (desired_size, desired_size))
- crop_imgs.append(chips)
-
- return crop_imgs
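
For context, a typical invocation of the deleted `MtcnnDetector` looked like the sketch below (MXNet-era code); the model folder, the padding value, and the image file are placeholders, with the image name borrowed from `test.py`.

```python
import cv2
import mxnet as mx
from mtcnn_detector import MtcnnDetector

detector = MtcnnDetector(model_folder='mtcnn-model',
                         minsize=20,
                         threshold=[0.6, 0.7, 0.8],
                         accurate_landmark=True,
                         ctx=mx.cpu())
img = cv2.imread('Tom_Hanks_54745.png')  # BGR image, shape (h, w, 3)
ret = detector.detect_face(img)
if ret is not None:
    boxes, points = ret  # boxes: n x 5, points: n x 10
    # crop and align each detected face to 112x112
    chips = detector.extract_image_chips(img, points,
                                         desired_size=112, padding=0.37)
```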
diff --git a/gender-age/test.py b/gender-age/test.py
deleted file mode 100644
index 78fc3b0..0000000
--- a/gender-age/test.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import face_model
-import argparse
-import cv2
-import sys
-import numpy as np
-import datetime
-
-parser = argparse.ArgumentParser(description='face model test')
-# general
-parser.add_argument('--image-size', default='112,112', help='')
-parser.add_argument('--image', default='Tom_Hanks_54745.png', help='')
-parser.add_argument('--model',
- default='model/model,0',
- help='path to load model.')
-parser.add_argument('--gpu', default=0, type=int, help='gpu id')
-parser.add_argument(
- '--det',
- default=0,
- type=int,
- help='mtcnn option, 1 means using R+O, 0 means detect from beginning')
-args = parser.parse_args()
-
-model = face_model.FaceModel(args)
-#img = cv2.imread('Tom_Hanks_54745.png')
-img = cv2.imread(args.image)
-img = model.get_input(img)
-#f1 = model.get_feature(img)
-#print(f1[0:10])
-for _ in range(5):
- gender, age = model.get_ga(img)
-time_now = datetime.datetime.now()
-count = 200
-for _ in range(count):
- gender, age = model.get_ga(img)
-time_now2 = datetime.datetime.now()
-diff = time_now2 - time_now
-print('time cost', diff.total_seconds() / count)
-print('gender is', gender)
-print('age is', age)
diff --git a/gender-age/train.py b/gender-age/train.py
deleted file mode 100644
index 4948d78..0000000
--- a/gender-age/train.py
+++ /dev/null
@@ -1,420 +0,0 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import os
-import sys
-import math
-import random
-import logging
-import pickle
-import numpy as np
-import sklearn
-from data import FaceImageIter
-import mxnet as mx
-from mxnet import ndarray as nd
-import argparse
-import mxnet.optimizer as optimizer
-sys.path.append(os.path.join(os.path.dirname(__file__), 'common'))
-#import face_image
-import fresnet
-import fmobilenet
-
-logger = logging.getLogger()
-logger.setLevel(logging.INFO)
-
-AGE = 100
-
-args = None
-
-
-class AccMetric(mx.metric.EvalMetric):
- def __init__(self):
- self.axis = 1
- super(AccMetric, self).__init__('acc',
- axis=self.axis,
- output_names=None,
- label_names=None)
- self.losses = []
- self.count = 0
-
- def update(self, labels, preds):
- self.count += 1
- label = labels[0].asnumpy()[:, 0:1]
- pred_label = preds[-1].asnumpy()[:, 0:2]
- pred_label = np.argmax(pred_label, axis=self.axis)
- pred_label = pred_label.astype('int32').flatten()
- label = label.astype('int32').flatten()
- assert label.shape == pred_label.shape
- self.sum_metric += (pred_label.flat == label.flat).sum()
- self.num_inst += len(pred_label.flat)
-
-
-class LossValueMetric(mx.metric.EvalMetric):
- def __init__(self):
- self.axis = 1
- super(LossValueMetric, self).__init__('lossvalue',
- axis=self.axis,
- output_names=None,
- label_names=None)
- self.losses = []
-
- def update(self, labels, preds):
- loss = preds[-1].asnumpy()[0]
- self.sum_metric += loss
- self.num_inst += 1.0
- gt_label = preds[-2].asnumpy()
- #print(gt_label)
-
-
-class MAEMetric(mx.metric.EvalMetric):
- def __init__(self):
- self.axis = 1
- super(MAEMetric, self).__init__('MAE',
- axis=self.axis,
- output_names=None,
- label_names=None)
- self.losses = []
- self.count = 0
-
- def update(self, labels, preds):
- self.count += 1
- label = labels[0].asnumpy()
- label_age = np.count_nonzero(label[:, 1:], axis=1)
- pred_age = np.zeros(label_age.shape, dtype=np.int64)  # np.int is deprecated
- #pred_age = np.zeros( label_age.shape, dtype=np.float32)
- pred = preds[-1].asnumpy()
- for i in range(AGE):
- _pred = pred[:, 2 + i * 2:4 + i * 2]
- _pred = np.argmax(_pred, axis=1)
- #pred = pred[:,1]
- pred_age += _pred
- #pred_age = pred_age.astype(np.int)
- mae = np.mean(np.abs(label_age - pred_age))
- self.sum_metric += mae
- self.num_inst += 1.0
-
-
-class CUMMetric(mx.metric.EvalMetric):
- def __init__(self, n=5):
- self.axis = 1
- self.n = n
- super(CUMMetric, self).__init__('CUM_%d' % n,
- axis=self.axis,
- output_names=None,
- label_names=None)
- self.losses = []
- self.count = 0
-
- def update(self, labels, preds):
- self.count += 1
- label = labels[0].asnumpy()
- label_age = np.count_nonzero(label[:, 1:], axis=1)
- pred_age = np.zeros(label_age.shape, dtype=np.int64)  # np.int is deprecated
- pred = preds[-1].asnumpy()
- for i in range(AGE):
- _pred = pred[:, 2 + i * 2:4 + i * 2]
- _pred = np.argmax(_pred, axis=1)
- #pred = pred[:,1]
- pred_age += _pred
- diff = np.abs(label_age - pred_age)
- cum = np.sum((diff < self.n))
- self.sum_metric += cum
- self.num_inst += len(label_age)
-
-
-def parse_args():
- parser = argparse.ArgumentParser(description='Train face network')
- # general
- parser.add_argument('--data-dir',
- default='',
- help='training set directory')
- parser.add_argument('--prefix',
- default='../model/model',
- help='directory to save model.')
- parser.add_argument('--pretrained',
- default='',
- help='pretrained model to load')
- parser.add_argument(
- '--ckpt',
- type=int,
- default=1,
- help=
- 'checkpoint saving option. 0: discard saving. 1: save when necessary. 2: always save'
- )
- parser.add_argument('--loss-type', type=int, default=4, help='loss type')
- parser.add_argument(
- '--verbose',
- type=int,
- default=2000,
- help='evaluate and save the model every this many batches')
- parser.add_argument('--max-steps',
- type=int,
- default=0,
- help='max training batches')
- parser.add_argument('--end-epoch',
- type=int,
- default=100000,
- help='number of training epochs.')
- parser.add_argument('--network', default='r50', help='specify network')
- parser.add_argument('--image-size',
- default='112,112',
- help='specify input image height and width')
- parser.add_argument('--version-input',
- type=int,
- default=1,
- help='network input config')
- parser.add_argument('--version-output',
- type=str,
- default='GAP',
- help='network embedding output config')
- parser.add_argument('--version-act',
- type=str,
- default='prelu',
- help='network activation config')
- parser.add_argument('--multiplier', type=float, default=1.0, help='')
- parser.add_argument('--lr',
- type=float,
- default=0.1,
- help='start learning rate')
- parser.add_argument('--lr-steps',
- type=str,
- default='',
- help='steps of lr changing')
- parser.add_argument('--wd',
- type=float,
- default=0.0005,
- help='weight decay')
- parser.add_argument('--bn-mom', type=float, default=0.9, help='bn mom')
- parser.add_argument('--mom', type=float, default=0.9, help='momentum')
- parser.add_argument('--per-batch-size',
- type=int,
- default=128,
- help='batch size in each context')
- parser.add_argument('--rand-mirror',
- type=int,
- default=1,
- help='if do random mirror in training')
- parser.add_argument('--cutoff', type=int, default=0, help='cut off aug')
- parser.add_argument('--color',
- type=int,
- default=0,
- help='color jittering aug')
- parser.add_argument('--ce-loss',
- default=False,
- action='store_true',
- help='if output ce loss')
- args = parser.parse_args()
- return args
-
-
-def get_symbol(args, arg_params, aux_params):
- data_shape = (args.image_channel, args.image_h, args.image_w)
- image_shape = ",".join([str(x) for x in data_shape])
- margin_symbols = []
- if args.network[0] == 'm':
- fc1 = fmobilenet.get_symbol(AGE * 2 + 2,
- multiplier=args.multiplier,
- version_input=args.version_input,
- version_output=args.version_output)
- else:
- fc1 = fresnet.get_symbol(AGE * 2 + 2,
- args.num_layers,
- version_input=args.version_input,
- version_output=args.version_output)
- label = mx.symbol.Variable('softmax_label')
- gender_label = mx.symbol.slice_axis(data=label, axis=1, begin=0, end=1)
- gender_label = mx.symbol.reshape(gender_label,
- shape=(args.per_batch_size, ))
- gender_fc1 = mx.symbol.slice_axis(data=fc1, axis=1, begin=0, end=2)
- #gender_fc7 = mx.sym.FullyConnected(data=gender_fc1, num_hidden=2, name='gender_fc7')
- gender_softmax = mx.symbol.SoftmaxOutput(data=gender_fc1,
- label=gender_label,
- name='gender_softmax',
- normalization='valid',
- use_ignore=True,
- ignore_label=9999)
- outs = [gender_softmax]
- for i in range(AGE):
- age_label = mx.symbol.slice_axis(data=label,
- axis=1,
- begin=i + 1,
- end=i + 2)
- age_label = mx.symbol.reshape(age_label, shape=(args.per_batch_size, ))
- age_fc1 = mx.symbol.slice_axis(data=fc1,
- axis=1,
- begin=2 + i * 2,
- end=4 + i * 2)
- #age_fc7 = mx.sym.FullyConnected(data=age_fc1, num_hidden=2, name='age_fc7_%i'%i)
- age_softmax = mx.symbol.SoftmaxOutput(data=age_fc1,
- label=age_label,
- name='age_softmax_%d' % i,
- normalization='valid',
- grad_scale=1)
- outs.append(age_softmax)
- outs.append(mx.sym.BlockGrad(fc1))
-
- out = mx.symbol.Group(outs)
- return (out, arg_params, aux_params)
-
-
-def train_net(args):
- ctx = []
- cvd = os.environ['CUDA_VISIBLE_DEVICES'].strip()
- if len(cvd) > 0:
- for i in range(len(cvd.split(','))):
- ctx.append(mx.gpu(i))
- if len(ctx) == 0:
- ctx = [mx.cpu()]
- print('use cpu')
- else:
- print('gpu num:', len(ctx))
- prefix = args.prefix
- prefix_dir = os.path.dirname(prefix)
- if not os.path.exists(prefix_dir):
- os.makedirs(prefix_dir)
- end_epoch = args.end_epoch
- args.ctx_num = len(ctx)
- args.num_layers = int(args.network[1:])
- print('num_layers', args.num_layers)
- if args.per_batch_size == 0:
- args.per_batch_size = 128
- args.batch_size = args.per_batch_size * args.ctx_num
- args.rescale_threshold = 0
- args.image_channel = 3
-
- data_dir_list = args.data_dir.split(',')
- assert len(data_dir_list) == 1
- data_dir = data_dir_list[0]
- path_imgrec = None
- path_imglist = None
- image_size = [int(x) for x in args.image_size.split(',')]
- assert len(image_size) == 2
- assert image_size[0] == image_size[1]
- args.image_h = image_size[0]
- args.image_w = image_size[1]
- print('image_size', image_size)
- path_imgrec = os.path.join(data_dir, "train.rec")
- path_imgrec_val = os.path.join(data_dir, "val.rec")
-
- print('Called with argument:', args)
- data_shape = (args.image_channel, image_size[0], image_size[1])
- mean = None
-
- begin_epoch = 0
- base_lr = args.lr
- base_wd = args.wd
- base_mom = args.mom
- if len(args.pretrained) == 0:
- arg_params = None
- aux_params = None
- sym, arg_params, aux_params = get_symbol(args, arg_params, aux_params)
- else:
- vec = args.pretrained.split(',')
- print('loading', vec)
- _, arg_params, aux_params = mx.model.load_checkpoint(
- vec[0], int(vec[1]))
- sym, arg_params, aux_params = get_symbol(args, arg_params, aux_params)
-
- #label_name = 'softmax_label'
- #label_shape = (args.batch_size,)
- model = mx.mod.Module(
- context=ctx,
- symbol=sym,
- )
- val_dataiter = None
-
- train_dataiter = FaceImageIter(
- batch_size=args.batch_size,
- data_shape=data_shape,
- path_imgrec=path_imgrec,
- shuffle=True,
- rand_mirror=args.rand_mirror,
- mean=mean,
- cutoff=args.cutoff,
- color_jittering=args.color,
- )
- val_dataiter = FaceImageIter(
- batch_size=args.batch_size,
- data_shape=data_shape,
- path_imgrec=path_imgrec_val,
- shuffle=False,
- rand_mirror=False,
- mean=mean,
- )
-
- metric = mx.metric.CompositeEvalMetric(
- [AccMetric(), MAEMetric(), CUMMetric()])
-
- if args.network[0] == 'r' or args.network[0] == 'y':
- initializer = mx.init.Xavier(rnd_type='gaussian',
- factor_type="out",
- magnitude=2) #resnet style
- elif args.network[0] == 'i' or args.network[0] == 'x':
- initializer = mx.init.Xavier(rnd_type='gaussian',
- factor_type="in",
- magnitude=2) #inception
- else:
- initializer = mx.init.Xavier(rnd_type='uniform',
- factor_type="in",
- magnitude=2)
- _rescale = 1.0 / args.ctx_num
- opt = optimizer.SGD(learning_rate=base_lr,
- momentum=base_mom,
- wd=base_wd,
- rescale_grad=_rescale)
- #opt = optimizer.Nadam(learning_rate=base_lr, wd=base_wd, rescale_grad=_rescale)
- som = 20
- _cb = mx.callback.Speedometer(args.batch_size, som)
- lr_steps = [int(x) for x in args.lr_steps.split(',')]
-
- global_step = [0]
-
- def _batch_callback(param):
- _cb(param)
- global_step[0] += 1
- mbatch = global_step[0]
- for _lr in lr_steps:
- if mbatch == _lr:
- opt.lr *= 0.1
- print('lr change to', opt.lr)
- break
- if mbatch % 1000 == 0:
- print('lr-batch-epoch:', opt.lr, param.nbatch, param.epoch)
- if mbatch == lr_steps[-1]:
- arg, aux = model.get_params()
- all_layers = model.symbol.get_internals()
- _sym = all_layers['fc1_output']
- mx.model.save_checkpoint(args.prefix, 0, _sym, arg, aux)
- sys.exit(0)
-
- epoch_cb = None
- train_dataiter = mx.io.PrefetchingIter(train_dataiter)
- print('start fitting')
-
- model.fit(
- train_dataiter,
- begin_epoch=begin_epoch,
- num_epoch=end_epoch,
- eval_data=val_dataiter,
- eval_metric=metric,
- kvstore='device',
- optimizer=opt,
- #optimizer_params = optimizer_params,
- initializer=initializer,
- arg_params=arg_params,
- aux_params=aux_params,
- allow_missing=True,
- batch_end_callback=_batch_callback,
- epoch_end_callback=epoch_cb)
-
-
-def main():
- #time.sleep(3600*6.5)
- global args
- args = parse_args()
- train_net(args)
-
-
-if __name__ == '__main__':
- main()
diff --git a/python-package/README.md b/python-package/README.md
index 10777df..df6eced 100644
--- a/python-package/README.md
+++ b/python-package/README.md
@@ -1,4 +1,4 @@
-## Python package of insightface README
+## Python package
For insightface pip-package <= 0.1.5, we use MXNet as the inference backend; please download all models from [onedrive](https://1drv.ms/u/s!AswpsDO2toNKrUy0VktHTWgIQ0bn?e=UEF7C4) and put them under the `~/.insightface/models/` directory.
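+
+A minimal sanity check once the models are in place (a sketch assuming insightface <= 0.1.5; the model name is illustrative and may vary between releases):
+
+```python
+import insightface
+
+# Loads a bundled MXNet recognition model from ~/.insightface/models/.
+model = insightface.model_zoo.get_model('arcface_r100_v1')
+model.prepare(ctx_id=-1)  # ctx_id=-1 runs on CPU; 0 or higher selects a GPU
+```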
diff --git a/deploy/Tom_Hanks_54745.png b/python-package/insightface/data/images/Tom_Hanks_54745.png
similarity index 100%
rename from deploy/Tom_Hanks_54745.png
rename to python-package/insightface/data/images/Tom_Hanks_54745.png
diff --git a/sample-images/t1.jpg b/python-package/insightface/data/images/t1.jpg
similarity index 100%
rename from sample-images/t1.jpg
rename to python-package/insightface/data/images/t1.jpg
diff --git a/python-package/insightface/model_zoo/arcface_onnx.py b/python-package/insightface/model_zoo/arcface_onnx.py
index 02a6757..9c7de62 100644
--- a/python-package/insightface/model_zoo/arcface_onnx.py
+++ b/python-package/insightface/model_zoo/arcface_onnx.py
@@ -82,4 +82,14 @@ class ArcFaceONNX:
sim = np.dot(feat1, feat2) / (norm(feat1) * norm(feat2))
return sim
+ def forward(self, imgs):
+ if not isinstance(imgs, list):
+ imgs = [imgs]
+ input_size = self.input_size
+
+ blob = cv2.dnn.blobFromImages(imgs, 1.0 / self.input_std, input_size,
+ (self.input_mean, self.input_mean, self.input_mean), swapRB=True)
+ net_out = self.session.run(self.output_names, {self.input_name: blob})[0]
+ return net_out
+
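+# Usage sketch (hypothetical model path; inputs should be aligned 112x112
+# BGR face crops):
+#   rec = ArcFaceONNX(model_file='w600k_r50.onnx')
+#   feats = rec.forward([face_a, face_b])  # stacked raw (un-normalised) embeddings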
diff --git a/recognition/README.md b/recognition/README.md
index 5cda5a3..e494947 100644
--- a/recognition/README.md
+++ b/recognition/README.md
@@ -1,24 +1,46 @@
-## Angular Margin Loss for Deep Face Recognition
+## Face Recognition
-### Citation
-If you find this project useful in your research, please consider to cite the following related papers:
+
+
+
-```
-@inproceedings{deng2019arcface,
- title={Arcface: Additive angular margin loss for deep face recognition},
- author={Deng, Jiankang and Guo, Jia and Xue, Niannan and Zafeiriou, Stefanos},
- booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
- pages={4690--4699},
- year={2019}
-}
+## Introduction
+
+These are the face recognition methods of [InsightFace](https://insightface.ai)
+
+
+
+
+
+
+
+### Datasets
+
+ Please refer to the [datasets](_datasets_) page for details of the face recognition datasets used for training and evaluation.
+
+### Evaluation
+
+ Please refer to the [evaluation](_evaluation_) page for details of face recognition evaluation.
+
+
+## Methods
+
+
+Supported methods:
+
+- [x] [ArcFace_mxnet (CVPR'2019)](arcface_mxnet)
+- [x] [ArcFace_torch (CVPR'2019)](arcface_torch)
+- [x] [SubCenter ArcFace (ECCV'2020)](subcenter_arcface)
+- [x] [PartialFC_mxnet (Arxiv'2020)](partial_fc)
+- [x] [PartialFC_torch (Arxiv'2020)](arcface_torch)
+- [x] [VPL (CVPR'2021)](vpl)
+- [x] [OneFlow_face](oneflow_face)
+
+
+## Contributing
+
+We appreciate all contributions to improve the face recognition model zoo of InsightFace.
-@inproceedings{deng2020subcenter,
- title={Sub-center ArcFace: Boosting Face Recognition by Large-scale Noisy Web Faces},
- author={Deng, Jiankang and Guo, Jia and Liu, Tongliang and Gong, Mingming and Zafeiriou, Stefanos},
- booktitle={Proceedings of the IEEE Conference on European Conference on Computer Vision},
- year={2020}
-}
-```
diff --git a/recognition/_datasets_/README.md b/recognition/_datasets_/README.md
new file mode 100644
index 0000000..5cda5a3
--- /dev/null
+++ b/recognition/_datasets_/README.md
@@ -0,0 +1,24 @@
+## Angular Margin Loss for Deep Face Recognition
+
+### Citation
+
+If you find this project useful in your research, please consider citing the following related papers:
+
+```
+
+@inproceedings{deng2019arcface,
+ title={Arcface: Additive angular margin loss for deep face recognition},
+ author={Deng, Jiankang and Guo, Jia and Xue, Niannan and Zafeiriou, Stefanos},
+ booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
+ pages={4690--4699},
+ year={2019}
+}
+
+@inproceedings{deng2020subcenter,
+ title={Sub-center ArcFace: Boosting Face Recognition by Large-scale Noisy Web Faces},
+ author={Deng, Jiankang and Guo, Jia and Liu, Tongliang and Gong, Mingming and Zafeiriou, Stefanos},
+ booktitle={Proceedings of the European Conference on Computer Vision},
+ year={2020}
+}
+
+```
diff --git a/evaluation/IJB/README.md b/recognition/_evaluation_/ijb/README.md
similarity index 100%
rename from evaluation/IJB/README.md
rename to recognition/_evaluation_/ijb/README.md
diff --git a/evaluation/IJB/example.sh b/recognition/_evaluation_/ijb/example.sh
similarity index 59%
rename from evaluation/IJB/example.sh
rename to recognition/_evaluation_/ijb/example.sh
index 9a386c2..63f8269 100755
--- a/evaluation/IJB/example.sh
+++ b/recognition/_evaluation_/ijb/example.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-python -u IJB_11.py --model-prefix ./pretrained_models/r100-arcface/model --model-epoch 1 --gpu 0 --target IJBC --job arcface > ijbc_11.log 2>&1 &
+python -u ijb_11.py --model-prefix ./pretrained_models/r100-arcface/model --model-epoch 1 --gpu 0 --target IJBC --job arcface > ijbc_11.log 2>&1 &
-python -u IJB_1N.py --model-prefix ./pretrained_models/r100-arcface/model --model-epoch 1 --gpu 0 --target IJBB --job arcface > ijbb_1n.log 2>&1 &
+python -u ijb_1n.py --model-prefix ./pretrained_models/r100-arcface/model --model-epoch 1 --gpu 0 --target IJBB --job arcface > ijbb_1n.log 2>&1 &
diff --git a/evaluation/IJB/IJB_11.py b/recognition/_evaluation_/ijb/ijb_11.py
similarity index 100%
rename from evaluation/IJB/IJB_11.py
rename to recognition/_evaluation_/ijb/ijb_11.py
diff --git a/evaluation/IJB/IJB_1N.py b/recognition/_evaluation_/ijb/ijb_1n.py
similarity index 100%
rename from evaluation/IJB/IJB_1N.py
rename to recognition/_evaluation_/ijb/ijb_1n.py
diff --git a/evaluation/IJB/IJB_evals.py b/recognition/_evaluation_/ijb/ijb_evals.py
similarity index 100%
rename from evaluation/IJB/IJB_evals.py
rename to recognition/_evaluation_/ijb/ijb_evals.py
diff --git a/recognition/_evaluation_/ijb/ijb_onnx.py b/recognition/_evaluation_/ijb/ijb_onnx.py
new file mode 100644
index 0000000..eb2edbe
--- /dev/null
+++ b/recognition/_evaluation_/ijb/ijb_onnx.py
@@ -0,0 +1,267 @@
+import argparse
+import os
+import pickle
+import timeit
+
+import cv2
+import mxnet as mx
+import numpy as np
+import pandas as pd
+import prettytable
+import skimage.transform
+from sklearn.metrics import roc_curve
+from sklearn.preprocessing import normalize
+import insightface
+from insightface.model_zoo import ArcFaceONNX
+
+
+SRC = np.array([[30.2946, 51.6963],
+                [65.5318, 51.5014],
+                [48.0252, 71.7366],
+                [33.5493, 92.3655],
+                [62.7299, 92.2041]], dtype=np.float32)
+SRC[:, 0] += 8.0
+
+
+class AlignedDataSet(mx.gluon.data.Dataset):
+ def __init__(self, root, lines, align=True):
+ self.lines = lines
+ self.root = root
+ self.align = align
+
+ def __len__(self):
+ return len(self.lines)
+
+ def __getitem__(self, idx):
+ each_line = self.lines[idx]
+ name_lmk_score = each_line.strip().split(' ')
+ name = os.path.join(self.root, name_lmk_score[0])
+ img = cv2.cvtColor(cv2.imread(name), cv2.COLOR_BGR2RGB)
+ landmark5 = np.array([float(x) for x in name_lmk_score[1:-1]], dtype=np.float32).reshape((5, 2))
+ st = skimage.transform.SimilarityTransform()
+ st.estimate(landmark5, SRC)
+ img = cv2.warpAffine(img, st.params[0:2, :], (112, 112), borderValue=0.0)
+ img_1 = np.expand_dims(img, 0)
+ img_2 = np.expand_dims(np.fliplr(img), 0)
+ output = np.concatenate((img_1, img_2), axis=0).astype(np.float32)
+ output = np.transpose(output, (0, 3, 1, 2))
+ output = mx.nd.array(output)
+ return output
+
+
+def extract(model_file, dataset):
+ model = ArcFaceONNX(model_file=model_file)
+ model.check()
+ feat_mat = np.zeros(shape=(len(dataset), 2 * model.feat_dim))
+
+ def batchify_fn(data):
+ return mx.nd.concat(*data, dim=0)
+
+ data_loader = mx.gluon.data.DataLoader(
+ dataset, 128, last_batch='keep', num_workers=4,
+ thread_pool=True, prefetch=16, batchify_fn=batchify_fn)
+ num_iter = 0
+ for batch in data_loader:
+ batch = batch.asnumpy()
+ feat = model.forward(batch)
+ feat = np.reshape(feat, (-1, model.feat_dim * 2))
+ feat_mat[128 * num_iter: 128 * num_iter + feat.shape[0], :] = feat
+ num_iter += 1
+ if num_iter % 50 == 0:
+ print(num_iter)
+ return feat_mat
+
+
+def read_template_media_list(path):
+ ijb_meta = pd.read_csv(path, sep=' ', header=None).values
+ templates = ijb_meta[:, 1].astype(int)
+ medias = ijb_meta[:, 2].astype(int)
+ return templates, medias
+
+
+def read_template_pair_list(path):
+ pairs = pd.read_csv(path, sep=' ', header=None).values
+ t1 = pairs[:, 0].astype(int)
+ t2 = pairs[:, 1].astype(int)
+ label = pairs[:, 2].astype(int)
+ return t1, t2, label
+
+
+def read_image_feature(path):
+ with open(path, 'rb') as fid:
+ img_feats = pickle.load(fid)
+ return img_feats
+
+
+def image2template_feature(img_feats=None,
+ templates=None,
+ medias=None):
+ unique_templates = np.unique(templates)
+ template_feats = np.zeros((len(unique_templates), img_feats.shape[1]))
+ for count_template, uqt in enumerate(unique_templates):
+ (ind_t,) = np.where(templates == uqt)
+ face_norm_feats = img_feats[ind_t]
+ face_medias = medias[ind_t]
+ unique_medias, unique_media_counts = np.unique(face_medias, return_counts=True)
+ media_norm_feats = []
+ for u, ct in zip(unique_medias, unique_media_counts):
+ (ind_m,) = np.where(face_medias == u)
+ if ct == 1:
+ media_norm_feats += [face_norm_feats[ind_m]]
+ else: # image features from the same video will be aggregated into one feature
+ media_norm_feats += [np.mean(face_norm_feats[ind_m], axis=0, keepdims=True), ]
+ media_norm_feats = np.array(media_norm_feats)
+ template_feats[count_template] = np.sum(media_norm_feats, axis=0)
+ if count_template % 2000 == 0:
+ print('Finish Calculating {} template features.'.format(
+ count_template))
+ template_norm_feats = normalize(template_feats)
+ return template_norm_feats, unique_templates
+
+
+def verification(template_norm_feats=None,
+ unique_templates=None,
+ p1=None,
+ p2=None):
+ template2id = np.zeros((max(unique_templates) + 1, 1), dtype=int)
+ for count_template, uqt in enumerate(unique_templates):
+ template2id[uqt] = count_template
+ score = np.zeros((len(p1),))
+ total_pairs = np.array(range(len(p1)))
+ batchsize = 100000
+ sublists = [total_pairs[i: i + batchsize] for i in range(0, len(p1), batchsize)]
+ total_sublists = len(sublists)
+ for c, s in enumerate(sublists):
+ feat1 = template_norm_feats[template2id[p1[s]]]
+ feat2 = template_norm_feats[template2id[p2[s]]]
+ similarity_score = np.sum(feat1 * feat2, -1)
+ score[s] = similarity_score.flatten()
+ if c % 10 == 0:
+ print('Finish {}/{} pairs.'.format(c, total_sublists))
+ return score
+
+
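+# Note: verification2() below repeats the computation of verification() above.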
+def verification2(template_norm_feats=None,
+ unique_templates=None,
+ p1=None,
+ p2=None):
+ template2id = np.zeros((max(unique_templates) + 1, 1), dtype=int)
+ for count_template, uqt in enumerate(unique_templates):
+ template2id[uqt] = count_template
+ score = np.zeros((len(p1),)) # save cosine similarity between pairs
+ total_pairs = np.array(range(len(p1)))
+ batchsize = 100000 # use a small batch size instead of all pairs in one batch due to the memory limitation
+ sublists = [total_pairs[i:i + batchsize] for i in range(0, len(p1), batchsize)]
+ total_sublists = len(sublists)
+ for c, s in enumerate(sublists):
+ feat1 = template_norm_feats[template2id[p1[s]]]
+ feat2 = template_norm_feats[template2id[p2[s]]]
+ similarity_score = np.sum(feat1 * feat2, -1)
+ score[s] = similarity_score.flatten()
+ if c % 10 == 0:
+ print('Finish {}/{} pairs.'.format(c, total_sublists))
+ return score
+
+
+def main(args):
+ use_norm_score = True # if True, TestMode(N1)
+ use_detector_score = True # if True, TestMode(D1)
+ use_flip_test = True # if True, TestMode(F1)
+ assert args.target == 'IJBC' or args.target == 'IJBB'
+
+ start = timeit.default_timer()
+ templates, medias = read_template_media_list(
+ os.path.join('%s/meta' % args.image_path, '%s_face_tid_mid.txt' % args.target.lower()))
+ stop = timeit.default_timer()
+ print('Time: %.2f s. ' % (stop - start))
+
+ start = timeit.default_timer()
+ p1, p2, label = read_template_pair_list(
+ os.path.join('%s/meta' % args.image_path,
+ '%s_template_pair_label.txt' % args.target.lower()))
+ stop = timeit.default_timer()
+ print('Time: %.2f s. ' % (stop - start))
+
+ start = timeit.default_timer()
+ img_path = '%s/loose_crop' % args.image_path
+ img_list_path = '%s/meta/%s_name_5pts_score.txt' % (args.image_path, args.target.lower())
+ img_list = open(img_list_path)
+ files = img_list.readlines()
+ dataset = AlignedDataSet(root=img_path, lines=files, align=True)
+ img_feats = extract(args.model_file, dataset)
+
+ faceness_scores = []
+ for each_line in files:
+ name_lmk_score = each_line.split()
+ faceness_scores.append(name_lmk_score[-1])
+ faceness_scores = np.array(faceness_scores).astype(np.float32)
+ stop = timeit.default_timer()
+ print('Time: %.2f s. ' % (stop - start))
+ print('Feature Shape: ({}, {}).'.format(img_feats.shape[0], img_feats.shape[1]))
+ start = timeit.default_timer()
+
+ if use_flip_test:
+ img_input_feats = img_feats[:, 0:img_feats.shape[1] // 2] + img_feats[:, img_feats.shape[1] // 2:]
+ else:
+ img_input_feats = img_feats[:, 0:img_feats.shape[1] // 2]
+
+ if use_norm_score:
+ img_input_feats = img_input_feats
+ else:
+ img_input_feats = img_input_feats / np.sqrt(np.sum(img_input_feats ** 2, -1, keepdims=True))
+
+ if use_detector_score:
+ print(img_input_feats.shape, faceness_scores.shape)
+ img_input_feats = img_input_feats * faceness_scores[:, np.newaxis]
+ else:
+ img_input_feats = img_input_feats
+
+ template_norm_feats, unique_templates = image2template_feature(
+ img_input_feats, templates, medias)
+ stop = timeit.default_timer()
+ print('Time: %.2f s. ' % (stop - start))
+
+ start = timeit.default_timer()
+ score = verification(template_norm_feats, unique_templates, p1, p2)
+ stop = timeit.default_timer()
+ print('Time: %.2f s. ' % (stop - start))
+ save_path = os.path.join(args.result_dir, "{}_result".format(args.target))
+ if not os.path.exists(save_path):
+ os.makedirs(save_path)
+ score_save_file = os.path.join(save_path, "{}.npy".format(args.model_file.split('/')[-1]))
+ np.save(score_save_file, score)
+ files = [score_save_file]
+ methods = []
+ scores = []
+ for file in files:
+ methods.append(os.path.basename(file))
+ scores.append(np.load(file))
+ methods = np.array(methods)
+ scores = dict(zip(methods, scores))
+ x_labels = [10 ** -6, 10 ** -5, 10 ** -4, 10 ** -3, 10 ** -2, 10 ** -1]
+ tpr_fpr_table = prettytable.PrettyTable(['Methods'] + [str(x) for x in x_labels])
+ for method in methods:
+ fpr, tpr, _ = roc_curve(label, scores[method])
+ fpr = np.flipud(fpr)
+ tpr = np.flipud(tpr)
+ tpr_fpr_row = []
+ tpr_fpr_row.append("%s-%s" % (method, args.target))
+ for fpr_iter in np.arange(len(x_labels)):
+ _, min_index = min(
+ list(zip(abs(fpr - x_labels[fpr_iter]), range(len(fpr)))))
+ tpr_fpr_row.append('%.2f' % (tpr[min_index] * 100))
+ tpr_fpr_table.add_row(tpr_fpr_row)
+ print(tpr_fpr_table)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='do onnx ijb test')
+ # general
+ parser.add_argument('--model-file', default='', help='path to onnx model.')
+ parser.add_argument('--image-path', default='', type=str, help='')
+ parser.add_argument('--result-dir', default='.', type=str, help='')
+ parser.add_argument('--target', default='IJBC', type=str, help='target, set to IJBC or IJBB')
+ main(parser.parse_args())
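+
+# Example invocation (hypothetical paths):
+#   python ijb_onnx.py --model-file ./models/w600k_r50.onnx \
+#       --image-path ./IJBC --result-dir ./results --target IJBC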
diff --git a/evaluation/Megaface/README.md b/recognition/_evaluation_/megaface/README.md
similarity index 100%
rename from evaluation/Megaface/README.md
rename to recognition/_evaluation_/megaface/README.md
diff --git a/evaluation/Megaface/gen_megaface.py b/recognition/_evaluation_/megaface/gen_megaface.py
similarity index 100%
rename from evaluation/Megaface/gen_megaface.py
rename to recognition/_evaluation_/megaface/gen_megaface.py
diff --git a/evaluation/Megaface/remove_noises.py b/recognition/_evaluation_/megaface/remove_noises.py
similarity index 100%
rename from evaluation/Megaface/remove_noises.py
rename to recognition/_evaluation_/megaface/remove_noises.py
diff --git a/evaluation/Megaface/run.sh b/recognition/_evaluation_/megaface/run.sh
similarity index 100%
rename from evaluation/Megaface/run.sh
rename to recognition/_evaluation_/megaface/run.sh
diff --git a/recognition/tools/README.md b/recognition/_tools_/README.md
similarity index 100%
rename from recognition/tools/README.md
rename to recognition/_tools_/README.md
diff --git a/recognition/tools/cpp-align/FacePreprocess.h b/recognition/_tools_/cpp_align/face_align.h
similarity index 100%
rename from recognition/tools/cpp-align/FacePreprocess.h
rename to recognition/_tools_/cpp_align/face_align.h
diff --git a/recognition/tools/mask_renderer.py b/recognition/_tools_/mask_renderer.py
similarity index 100%
rename from recognition/tools/mask_renderer.py
rename to recognition/_tools_/mask_renderer.py
diff --git a/recognition/ArcFace/README.md b/recognition/arcface_mxnet/README.md
similarity index 93%
rename from recognition/ArcFace/README.md
rename to recognition/arcface_mxnet/README.md
index 0fc1555..3d463f7 100644
--- a/recognition/ArcFace/README.md
+++ b/recognition/arcface_mxnet/README.md
@@ -2,13 +2,13 @@
### Memory Consumption and Training Speed
-
+
Parallel acceleration on both the feature x and the centre W. Setting: ResNet 50, batch size 8 * 64, feature dimension 512, 32-bit floating point, GPU 8 * P40 (24GB).
### Illustration of Main Steps
-
+
Parallel calculation by simple matrix partition. Setting: ResNet 50, batch size 8 * 64, feature dimension 512, 32-bit floating point, 1 million identities, GPU 8 * 1080ti (11GB). Communication cost: 1MB (feature x). Training speed: 800 samples/second.
diff --git a/recognition/common/build_eval_pack.py b/recognition/arcface_mxnet/common/build_eval_pack.py
similarity index 100%
rename from recognition/common/build_eval_pack.py
rename to recognition/arcface_mxnet/common/build_eval_pack.py
diff --git a/recognition/common/face_align.py b/recognition/arcface_mxnet/common/face_align.py
similarity index 100%
rename from recognition/common/face_align.py
rename to recognition/arcface_mxnet/common/face_align.py
diff --git a/recognition/common/flops_counter.py b/recognition/arcface_mxnet/common/flops_counter.py
similarity index 100%
rename from recognition/common/flops_counter.py
rename to recognition/arcface_mxnet/common/flops_counter.py
diff --git a/recognition/common/rec2image.py b/recognition/arcface_mxnet/common/rec2image.py
similarity index 100%
rename from recognition/common/rec2image.py
rename to recognition/arcface_mxnet/common/rec2image.py
diff --git a/recognition/common/rec2shufrec.py b/recognition/arcface_mxnet/common/rec2shufrec.py
similarity index 100%
rename from recognition/common/rec2shufrec.py
rename to recognition/arcface_mxnet/common/rec2shufrec.py
diff --git a/recognition/common/rec_builder.py b/recognition/arcface_mxnet/common/rec_builder.py
similarity index 100%
rename from recognition/common/rec_builder.py
rename to recognition/arcface_mxnet/common/rec_builder.py
diff --git a/recognition/common/verification.py b/recognition/arcface_mxnet/common/verification.py
similarity index 100%
rename from recognition/common/verification.py
rename to recognition/arcface_mxnet/common/verification.py
diff --git a/recognition/ArcFace/image_iter.py b/recognition/arcface_mxnet/image_iter.py
similarity index 100%
rename from recognition/ArcFace/image_iter.py
rename to recognition/arcface_mxnet/image_iter.py
diff --git a/recognition/ArcFace/metric.py b/recognition/arcface_mxnet/metric.py
similarity index 100%
rename from recognition/ArcFace/metric.py
rename to recognition/arcface_mxnet/metric.py
diff --git a/recognition/ArcFace/parall_module_local_v1.py b/recognition/arcface_mxnet/parall_module_local_v1.py
similarity index 100%
rename from recognition/ArcFace/parall_module_local_v1.py
rename to recognition/arcface_mxnet/parall_module_local_v1.py
diff --git a/recognition/ArcFace/sample_config.py b/recognition/arcface_mxnet/sample_config.py
similarity index 100%
rename from recognition/ArcFace/sample_config.py
rename to recognition/arcface_mxnet/sample_config.py
diff --git a/recognition/symbol/fdensenet.py b/recognition/arcface_mxnet/symbol/fdensenet.py
similarity index 100%
rename from recognition/symbol/fdensenet.py
rename to recognition/arcface_mxnet/symbol/fdensenet.py
diff --git a/recognition/symbol/fmnasnet.py b/recognition/arcface_mxnet/symbol/fmnasnet.py
similarity index 100%
rename from recognition/symbol/fmnasnet.py
rename to recognition/arcface_mxnet/symbol/fmnasnet.py
diff --git a/recognition/symbol/fmobilefacenet.py b/recognition/arcface_mxnet/symbol/fmobilefacenet.py
similarity index 100%
rename from recognition/symbol/fmobilefacenet.py
rename to recognition/arcface_mxnet/symbol/fmobilefacenet.py
diff --git a/recognition/symbol/fmobilenet.py b/recognition/arcface_mxnet/symbol/fmobilenet.py
similarity index 100%
rename from recognition/symbol/fmobilenet.py
rename to recognition/arcface_mxnet/symbol/fmobilenet.py
diff --git a/recognition/symbol/fresnet.py b/recognition/arcface_mxnet/symbol/fresnet.py
similarity index 100%
rename from recognition/symbol/fresnet.py
rename to recognition/arcface_mxnet/symbol/fresnet.py
diff --git a/recognition/symbol/memonger.py b/recognition/arcface_mxnet/symbol/memonger.py
similarity index 100%
rename from recognition/symbol/memonger.py
rename to recognition/arcface_mxnet/symbol/memonger.py
diff --git a/recognition/symbol/memonger_v2.py b/recognition/arcface_mxnet/symbol/memonger_v2.py
similarity index 100%
rename from recognition/symbol/memonger_v2.py
rename to recognition/arcface_mxnet/symbol/memonger_v2.py
diff --git a/recognition/symbol/symbol_utils.py b/recognition/arcface_mxnet/symbol/symbol_utils.py
similarity index 100%
rename from recognition/symbol/symbol_utils.py
rename to recognition/arcface_mxnet/symbol/symbol_utils.py
diff --git a/recognition/symbol/vargfacenet.py b/recognition/arcface_mxnet/symbol/vargfacenet.py
similarity index 100%
rename from recognition/symbol/vargfacenet.py
rename to recognition/arcface_mxnet/symbol/vargfacenet.py
diff --git a/recognition/ArcFace/train.py b/recognition/arcface_mxnet/train.py
similarity index 99%
rename from recognition/ArcFace/train.py
rename to recognition/arcface_mxnet/train.py
index 8cae863..2f2ebad 100644
--- a/recognition/ArcFace/train.py
+++ b/recognition/arcface_mxnet/train.py
@@ -16,10 +16,10 @@ import argparse
import mxnet.optimizer as optimizer
from config import config, default, generate_config
from metric import *
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'common'))
+sys.path.append(os.path.join(os.path.dirname(__file__), 'common'))
import flops_counter
import verification
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'symbol'))
+sys.path.append(os.path.join(os.path.dirname(__file__), 'symbol'))
import fresnet
import fmobilefacenet
import fmobilenet
diff --git a/recognition/ArcFace/train_parall.py b/recognition/arcface_mxnet/train_parall.py
similarity index 99%
rename from recognition/ArcFace/train_parall.py
rename to recognition/arcface_mxnet/train_parall.py
index 1351baa..ce6b916 100644
--- a/recognition/ArcFace/train_parall.py
+++ b/recognition/arcface_mxnet/train_parall.py
@@ -20,11 +20,11 @@ import mxnet as mx
from mxnet import ndarray as nd
import argparse
import mxnet.optimizer as optimizer
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'common'))
+sys.path.append(os.path.join(os.path.dirname(__file__), 'common'))
import flops_counter
from config import config, default, generate_config
import verification
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'symbol'))
+sys.path.append(os.path.join(os.path.dirname(__file__), 'symbol'))
import fresnet
import fmobilefacenet
import fmobilenet
diff --git a/recognition/ArcFace/triplet_image_iter.py b/recognition/arcface_mxnet/triplet_image_iter.py
similarity index 100%
rename from recognition/ArcFace/triplet_image_iter.py
rename to recognition/arcface_mxnet/triplet_image_iter.py
diff --git a/recognition/ArcFace/verification.py b/recognition/arcface_mxnet/verification.py
similarity index 100%
rename from recognition/ArcFace/verification.py
rename to recognition/arcface_mxnet/verification.py
diff --git a/recognition/SubCenter-ArcFace/README.md b/recognition/subcenter_arcface/README.md
similarity index 89%
rename from recognition/SubCenter-ArcFace/README.md
rename to recognition/subcenter_arcface/README.md
index 56a91b5..ed642fd 100644
--- a/recognition/SubCenter-ArcFace/README.md
+++ b/recognition/subcenter_arcface/README.md
@@ -5,14 +5,14 @@
We introduce one extra hyperparameter (the sub-center number `loss_K`) to ArcFace to relax the intra-class compactness constraint. In our experiments, we find that `loss_K=3` achieves a good balance between accuracy and robustness.
-
+
### 2. Implementation
The training process of Subcenter ArcFace is almost the same as that of [ArcFace](https://github.com/deepinsight/insightface/tree/master/recognition/ArcFace).
The increased GPU memory consumption can be easily alleviated by our parallel framework.
-
+
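+
+A minimal sketch of the sub-center pooling step (illustrative names, not the repo's exact training code): each class keeps `loss_K` centre vectors, and the class logit is the maximum cosine similarity over its sub-centres, so noisy or hard samples can attach to a non-dominant sub-centre.
+
+```python
+import mxnet as mx
+
+def subcenter_logits(embedding, weight, num_classes, loss_K):
+    # embedding: (batch, emb_size); weight: (num_classes * loss_K, emb_size).
+    # Both are assumed L2-normalised, so fc7 holds cosine similarities.
+    fc7 = mx.sym.FullyConnected(data=embedding, weight=weight,
+                                num_hidden=num_classes * loss_K, no_bias=True)
+    fc7 = mx.sym.reshape(fc7, shape=(-1, num_classes, loss_K))
+    # Max-pool over the K sub-centres to obtain one logit per class.
+    return mx.sym.max(fc7, axis=2)
+```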
### 3. Training Dataset
diff --git a/recognition/subcenter_arcface/common/build_eval_pack.py b/recognition/subcenter_arcface/common/build_eval_pack.py
new file mode 100644
index 0000000..23208ce
--- /dev/null
+++ b/recognition/subcenter_arcface/common/build_eval_pack.py
@@ -0,0 +1,136 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+#import mxnet as mx
+#from mxnet import ndarray as nd
+import argparse
+import cv2
+import pickle
+import numpy as np
+import sys
+import os
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'common'))
+sys.path.append(
+ os.path.join(os.path.dirname(__file__), '..', '..', 'RetinaFace'))
+import face_align
+from retinaface import RetinaFace
+
+
+def to_rgb(img):
+ w, h = img.shape
+ ret = np.empty((w, h, 3), dtype=np.uint8)
+ ret[:, :, 0] = ret[:, :, 1] = ret[:, :, 2] = img
+ return ret
+
+
+def IOU(Reframe, GTframe):
+ x1 = Reframe[0]
+ y1 = Reframe[1]
+ width1 = Reframe[2] - Reframe[0]
+ height1 = Reframe[3] - Reframe[1]
+
+ x2 = GTframe[0]
+ y2 = GTframe[1]
+ width2 = GTframe[2] - GTframe[0]
+ height2 = GTframe[3] - GTframe[1]
+
+ endx = max(x1 + width1, x2 + width2)
+ startx = min(x1, x2)
+ width = width1 + width2 - (endx - startx)
+
+ endy = max(y1 + height1, y2 + height2)
+ starty = min(y1, y2)
+ height = height1 + height2 - (endy - starty)
+
+ if width <= 0 or height <= 0:
+ ratio = 0
+ else:
+ Area = width * height
+ Area1 = width1 * height1
+ Area2 = width2 * height2
+ ratio = Area * 1. / (Area1 + Area2 - Area)
+ return ratio
+
+
+parser = argparse.ArgumentParser(description='Package eval images')
+# general
+parser.add_argument('--data-dir', default='', help='')
+parser.add_argument('--image-size', type=int, default=112, help='')
+parser.add_argument('--gpu', type=int, default=0, help='')
+parser.add_argument('--det-prefix', type=str, default='./model/R50', help='')
+parser.add_argument('--output', default='./', help='path to save.')
+parser.add_argument('--align-mode', default='arcface', help='align mode.')
+args = parser.parse_args()
+
+gpu_id = args.gpu
+
+detector = RetinaFace(args.det_prefix, 0, gpu_id, network='net3')
+target_size = 400
+max_size = 800
+
+
+def get_norm_crop(image_path):
+ im = cv2.imread(image_path)
+ im_shape = im.shape
+ im_size_min = np.min(im_shape[0:2])
+ im_size_max = np.max(im_shape[0:2])
+ im_scale = float(target_size) / float(im_size_min)
+ # prevent bigger axis from being more than max_size:
+ if np.round(im_scale * im_size_max) > max_size:
+ im_scale = float(max_size) / float(im_size_max)
+ bbox, landmark = detector.detect(im, threshold=0.5, scales=[im_scale])
+ #print(im.shape, bbox.shape, landmark.shape)
+ if bbox.shape[0] == 0:
+ bbox, landmark = detector.detect(
+ im,
+ threshold=0.05,
+ scales=[im_scale * 0.75, im_scale, im_scale * 2.0])
+ print('refine', im.shape, bbox.shape, landmark.shape)
+ nrof_faces = bbox.shape[0]
+ if nrof_faces > 0:
+ det = bbox[:, 0:4]
+ img_size = np.asarray(im.shape)[0:2]
+ bindex = 0
+ if nrof_faces > 1:
+ bounding_box_size = (det[:, 2] - det[:, 0]) * (det[:, 3] -
+ det[:, 1])
+ img_center = img_size / 2
+ offsets = np.vstack([(det[:, 0] + det[:, 2]) / 2 - img_center[1],
+ (det[:, 1] + det[:, 3]) / 2 - img_center[0]])
+ offset_dist_squared = np.sum(np.power(offsets, 2.0), 0)
+ bindex = np.argmax(bounding_box_size - offset_dist_squared *
+ 2.0) # some extra weight on the centering
+ #_bbox = bounding_boxes[bindex, 0:4]
+ _landmark = landmark[bindex]
+ warped = face_align.norm_crop(im,
+ landmark=_landmark,
+ image_size=args.image_size,
+ mode=args.align_mode)
+ return warped
+ else:
+ return None
+
+
+bins = []
+issame_list = []
+pp = 0
+for line in open(os.path.join(args.data_dir, 'pairs_label.txt'), 'r'):
+ pp += 1
+ if pp % 100 == 0:
+ print('processing', pp)
+ line = line.strip().split()
+ assert len(line) == 3
+ path1 = os.path.join(args.data_dir, line[0])
+ path2 = os.path.join(args.data_dir, line[1])
+ im1 = get_norm_crop(path1)
+ im2 = get_norm_crop(path2)
+ issame = True
+ if line[2] == '0':
+ issame = False
+ issame_list.append(issame)
+ for im in [im1, im2]:
+ _, s = cv2.imencode('.jpg', im)
+ bins.append(s)
+
+with open(args.output, 'wb') as f:
+ pickle.dump((bins, issame_list), f, protocol=pickle.HIGHEST_PROTOCOL)
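+
+# Example invocation (hypothetical paths):
+#   python build_eval_pack.py --data-dir ./eval_images --gpu 0 \
+#       --det-prefix ./model/R50 --output ./eval_pack.bin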
diff --git a/recognition/subcenter_arcface/common/face_align.py b/recognition/subcenter_arcface/common/face_align.py
new file mode 100644
index 0000000..4f48a76
--- /dev/null
+++ b/recognition/subcenter_arcface/common/face_align.py
@@ -0,0 +1,71 @@
+import cv2
+import numpy as np
+from skimage import transform as trans
+
+src1 = np.array([[51.642, 50.115], [57.617, 49.990], [35.740, 69.007],
+ [51.157, 89.050], [57.025, 89.702]],
+ dtype=np.float32)
+#<--left
+src2 = np.array([[45.031, 50.118], [65.568, 50.872], [39.677, 68.111],
+ [45.177, 86.190], [64.246, 86.758]],
+ dtype=np.float32)
+
+#---frontal
+src3 = np.array([[39.730, 51.138], [72.270, 51.138], [56.000, 68.493],
+ [42.463, 87.010], [69.537, 87.010]],
+ dtype=np.float32)
+
+#-->right
+src4 = np.array([[46.845, 50.872], [67.382, 50.118], [72.737, 68.111],
+ [48.167, 86.758], [67.236, 86.190]],
+ dtype=np.float32)
+
+#-->right profile
+src5 = np.array([[54.796, 49.990], [60.771, 50.115], [76.673, 69.007],
+ [55.388, 89.702], [61.257, 89.050]],
+ dtype=np.float32)
+
+src = np.array([src1, src2, src3, src4, src5])
+src_map = {112: src, 224: src * 2}
+
+arcface_src = np.array(
+ [[38.2946, 51.6963], [73.5318, 51.5014], [56.0252, 71.7366],
+ [41.5493, 92.3655], [70.7299, 92.2041]],
+ dtype=np.float32)
+
+arcface_src = np.expand_dims(arcface_src, axis=0)
+
+
+# lmk is prediction; src is template
+def estimate_norm(lmk, image_size=112, mode='arcface'):
+ assert lmk.shape == (5, 2)
+ tform = trans.SimilarityTransform()
+ lmk_tran = np.insert(lmk, 2, values=np.ones(5), axis=1)
+ min_M = []
+ min_index = []
+ min_error = float('inf')
+ if mode == 'arcface':
+ assert image_size == 112
+ src = arcface_src
+ else:
+ src = src_map[image_size]
+ for i in np.arange(src.shape[0]):
+ tform.estimate(lmk, src[i])
+ M = tform.params[0:2, :]
+ results = np.dot(M, lmk_tran.T)
+ results = results.T
+ error = np.sum(np.sqrt(np.sum((results - src[i])**2, axis=1)))
+ # print(error)
+ if error < min_error:
+ min_error = error
+ min_M = M
+ min_index = i
+ return min_M, min_index
+
+
+def norm_crop(img, landmark, image_size=112, mode='arcface'):
+ M, pose_index = estimate_norm(landmark, image_size, mode)
+ warped = cv2.warpAffine(img, M, (image_size, image_size), borderValue=0.0)
+ return warped
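+
+
+# Usage sketch (hypothetical inputs): `lmk5` is a (5, 2) float32 array of
+# landmark coordinates predicted on `img`:
+#   aligned = norm_crop(img, landmark=lmk5, image_size=112, mode='arcface')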
diff --git a/recognition/subcenter_arcface/common/flops_counter.py b/recognition/subcenter_arcface/common/flops_counter.py
new file mode 100644
index 0000000..8094241
--- /dev/null
+++ b/recognition/subcenter_arcface/common/flops_counter.py
@@ -0,0 +1,120 @@
+'''
+@author: insightface
+'''
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import sys
+import os
+import json
+import argparse
+import numpy as np
+import mxnet as mx
+
+
+def is_no_bias(attr):
+ ret = False
+ if 'no_bias' in attr and (attr['no_bias'] == True
+ or attr['no_bias'] == 'True'):
+ ret = True
+ return ret
+
+
+def count_fc_flops(input_filter, output_filter, attr):
+ #print(input_filter, output_filter ,attr)
+ ret = 2 * input_filter * output_filter
+ if is_no_bias(attr):
+ ret -= output_filter
+ return int(ret)
+
+
+def count_conv_flops(input_shape, output_shape, attr):
+ kernel = attr['kernel'][1:-1].split(',')
+ kernel = [int(x) for x in kernel]
+
+ #print('kernel', kernel)
+ if is_no_bias(attr):
+ ret = (2 * input_shape[1] * kernel[0] * kernel[1] -
+ 1) * output_shape[2] * output_shape[3] * output_shape[1]
+ else:
+ ret = 2 * input_shape[1] * kernel[0] * kernel[1] * output_shape[
+ 2] * output_shape[3] * output_shape[1]
+ num_group = 1
+ if 'num_group' in attr:
+ num_group = int(attr['num_group'])
+ ret /= num_group
+ return int(ret)
+
+
+def count_flops(sym, **data_shapes):
+ all_layers = sym.get_internals()
+ #print(all_layers)
+ arg_shapes, out_shapes, aux_shapes = all_layers.infer_shape(**data_shapes)
+ out_shape_dict = dict(zip(all_layers.list_outputs(), out_shapes))
+
+ nodes = json.loads(sym.tojson())['nodes']
+ nodeid_shape = {}
+ for nodeid, node in enumerate(nodes):
+ name = node['name']
+ layer_name = name + "_output"
+ if layer_name in out_shape_dict:
+ nodeid_shape[nodeid] = out_shape_dict[layer_name]
+ #print(nodeid_shape)
+ FLOPs = 0
+ for nodeid, node in enumerate(nodes):
+ flops = 0
+ if node['op'] == 'Convolution':
+ output_shape = nodeid_shape[nodeid]
+ name = node['name']
+ attr = node['attrs']
+ input_nodeid = node['inputs'][0][0]
+ input_shape = nodeid_shape[input_nodeid]
+ flops = count_conv_flops(input_shape, output_shape, attr)
+ elif node['op'] == 'FullyConnected':
+ attr = node['attrs']
+ output_shape = nodeid_shape[nodeid]
+ input_nodeid = node['inputs'][0][0]
+ input_shape = nodeid_shape[input_nodeid]
+ output_filter = output_shape[1]
+ input_filter = input_shape[1] * input_shape[2] * input_shape[3]
+ #assert len(input_shape)==4 and input_shape[2]==1 and input_shape[3]==1
+ flops = count_fc_flops(input_filter, output_filter, attr)
+ #print(node, flops)
+ FLOPs += flops
+
+ return FLOPs
+
+
+def flops_str(FLOPs):
+ preset = [(1e12, 'T'), (1e9, 'G'), (1e6, 'M'), (1e3, 'K')]
+
+ for p in preset:
+ if FLOPs // p[0] > 0:
+ N = FLOPs / p[0]
+ ret = "%.1f%s" % (N, p[1])
+ return ret
+ ret = "%.1f" % (FLOPs)
+ return ret
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='flops counter')
+ # general
+ #parser.add_argument('--model', default='../models2/y2-arcface-retinat1/model,1', help='path to load model.')
+ #parser.add_argument('--model', default='../models2/r100fc-arcface-retinaa/model,1', help='path to load model.')
+ parser.add_argument('--model',
+ default='../models2/r50fc-arcface-emore/model,1',
+ help='path to load model.')
+ args = parser.parse_args()
+ _vec = args.model.split(',')
+ assert len(_vec) == 2
+ prefix = _vec[0]
+ epoch = int(_vec[1])
+ print('loading', prefix, epoch)
+ sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)
+ all_layers = sym.get_internals()
+ sym = all_layers['fc1_output']
+ FLOPs = count_flops(sym, data=(1, 3, 112, 112))
+ print('FLOPs:', FLOPs)
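+
+# Example invocation (hypothetical checkpoint path):
+#   python flops_counter.py --model ../models/r100-arcface/model,1
+# flops_str() renders raw counts for display, e.g. flops_str(24.2e9) -> '24.2G'.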
diff --git a/recognition/subcenter_arcface/common/rec2image.py b/recognition/subcenter_arcface/common/rec2image.py
new file mode 100644
index 0000000..21e5ec4
--- /dev/null
+++ b/recognition/subcenter_arcface/common/rec2image.py
@@ -0,0 +1,60 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import os
+import sys
+import mxnet as mx
+from mxnet import ndarray as nd
+import random
+import argparse
+import cv2
+import time
+import sklearn
+import numpy as np
+
+
+def main(args):
+ include_datasets = args.include.split(',')
+ rec_list = []
+ for ds in include_datasets:
+ path_imgrec = os.path.join(ds, 'train.rec')
+ path_imgidx = os.path.join(ds, 'train.idx')
+ imgrec = mx.recordio.MXIndexedRecordIO(path_imgidx, path_imgrec, 'r') # pylint: disable=redefined-variable-type
+ rec_list.append(imgrec)
+ if not os.path.exists(args.output):
+ os.makedirs(args.output)
+ for ds_id in range(len(rec_list)):
+ id_list = []
+ imgrec = rec_list[ds_id]
+ s = imgrec.read_idx(0)
+ header, _ = mx.recordio.unpack(s)
+ assert header.flag > 0
+ print('header0 label', header.label)
+ header0 = (int(header.label[0]), int(header.label[1]))
+ seq_identity = range(int(header.label[0]), int(header.label[1]))
+ pp = 0
+ for identity in seq_identity:
+ id_dir = os.path.join(args.output, "%d_%d" % (ds_id, identity))
+ os.makedirs(id_dir)
+ pp += 1
+ if pp % 10 == 0:
+ print('processing id', pp)
+ s = imgrec.read_idx(identity)
+ header, _ = mx.recordio.unpack(s)
+ imgid = 0
+ for _idx in range(int(header.label[0]), int(header.label[1])):
+ s = imgrec.read_idx(_idx)
+ _header, _img = mx.recordio.unpack(s)
+ _img = mx.image.imdecode(_img).asnumpy()[:, :, ::-1] # to bgr
+ image_path = os.path.join(id_dir, "%d.jpg" % imgid)
+ cv2.imwrite(image_path, _img)
+ imgid += 1
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='do dataset merge')
+ # general
+ parser.add_argument('--include', default='', type=str, help='')
+ parser.add_argument('--output', default='', type=str, help='')
+ args = parser.parse_args()
+ main(args)
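+
+# Example invocation (hypothetical paths): unpack one training .rec into
+# per-identity folders of .jpg images:
+#   python rec2image.py --include /data/ms1m --output /data/ms1m_images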
diff --git a/recognition/subcenter_arcface/common/rec2shufrec.py b/recognition/subcenter_arcface/common/rec2shufrec.py
new file mode 100644
index 0000000..cf916b4
--- /dev/null
+++ b/recognition/subcenter_arcface/common/rec2shufrec.py
@@ -0,0 +1,72 @@
+import os
+import os.path as osp
+import sys
+import datetime
+import glob
+import shutil
+import numbers
+import mxnet as mx
+from mxnet import ndarray as nd
+from mxnet import io
+from mxnet import recordio
+import random
+import argparse
+import cv2
+import time
+import numpy as np
+
+def main(args):
+ ds = args.input
+ path_imgrec = osp.join(ds, 'train.rec')
+ path_imgidx = osp.join(ds, 'train.idx')
+ imgrec = mx.recordio.MXIndexedRecordIO(path_imgidx, path_imgrec, 'r') # pylint: disable=redefined-variable-type
+ if not osp.exists(args.output):
+ os.makedirs(args.output)
+ writer = mx.recordio.MXRecordIO(osp.join(args.output, 'train.rec'), 'w')
+ s = imgrec.read_idx(0)
+ header, _ = recordio.unpack(s)
+ if header.flag > 0:
+ print('header0 label', header.label)
+ header0 = (int(header.label[0]), int(header.label[1]))
+ imgidx = list(range(1, int(header.label[0])))
+ else:
+ imgidx = list(imgrec.keys)
+ random.shuffle(imgidx)
+ label_stat = None
+ print('total images:', len(imgidx))
+ for i, idx in enumerate(imgidx):
+ if i%10000==0:
+ print('processing', i, idx)
+ s = imgrec.read_idx(idx)
+ header, img = mx.recordio.unpack(s)
+ label = header.label
+ if not isinstance(label, numbers.Number):
+ label = label[0]
+ if label_stat is None:
+ label_stat = [label, label]
+ else:
+ label_stat[0] = min(label, label_stat[0])
+ label_stat[1] = max(label, label_stat[1])
+ wheader = mx.recordio.IRHeader(0, label, i, 0)
+ ws = mx.recordio.pack(wheader, img)
+ writer.write(ws)
+ print('label_stat:', label_stat)
+ writer.close()
+ if args.copy_vers:
+ for binfile in glob.glob(osp.join(args.input, '*.bin')):
+ target_file = osp.join(args.output, binfile.split('/')[-1])
+ shutil.copyfile(binfile, target_file)
+ with open(osp.join(args.output, 'property'), 'w') as f:
+ f.write("%d,112,112\n"%(int(label_stat[1])+1))
+ f.write("%d\n"%len(imgidx))
+ f.write("shuffled\n")
+ f.write("%s\n"%(datetime.datetime.now()))
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='convert rec to shuffled rec')
+ # general
+ parser.add_argument('--input', default='', type=str, help='')
+ parser.add_argument('--output', default='', type=str, help='')
+ parser.add_argument('--copy-vers', action='store_true', help='copy verification bins')
+ args = parser.parse_args()
+ main(args)
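+
+# Example invocation (hypothetical paths): rewrite a .rec as a shuffled
+# sequential record and copy the verification .bin files along:
+#   python rec2shufrec.py --input /data/ms1m --output /data/ms1m_shuf --copy-vers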
diff --git a/recognition/subcenter_arcface/common/rec_builder.py b/recognition/subcenter_arcface/common/rec_builder.py
new file mode 100644
index 0000000..1d51715
--- /dev/null
+++ b/recognition/subcenter_arcface/common/rec_builder.py
@@ -0,0 +1,109 @@
+import os
+import sys
+import mxnet as mx
+from mxnet import ndarray as nd
+import random
+import argparse
+import cv2
+import time
+import sklearn
+import numpy as np
+
+
+class SeqRecBuilder():
+ def __init__(self, path, image_size=(112, 112)):
+ self.path = path
+ self.image_size = image_size
+ self.last_label = -1
+ self.widx = 0
+ if not os.path.exists(path):
+ os.makedirs(path)
+ self.writer = mx.recordio.MXIndexedRecordIO(
+ os.path.join(path, 'train.idx'), os.path.join(path, 'train.rec'),
+ 'w')
+ self.label_stat = [-1, -1]
+
+ def add(self, label, img, is_image=True):
+ #img should be BGR
+ #if self.sis:
+ # assert label>=self.last_label
+ idx = self.widx
+ self.widx += 1
+ header = mx.recordio.IRHeader(0, label, idx, 0)
+ if is_image:
+ s = mx.recordio.pack_img(header, img, quality=95, img_fmt='.jpg')
+ else:
+ s = mx.recordio.pack(header, img)
+ self.writer.write_idx(idx, s)
+ if self.label_stat[0] < 0:
+ self.label_stat = [label, label]
+ else:
+ self.label_stat[0] = min(self.label_stat[0], label)
+ self.label_stat[1] = max(self.label_stat[1], label)
+
+ def close(self):
+ with open(os.path.join(self.path, 'property'), 'w') as f:
+ f.write("%d,%d,%d\n" % (self.label_stat[1] + 1, self.image_size[0],
+ self.image_size[1]))
+
+
+class RecBuilder():
+ def __init__(self, path, image_size=(112, 112)):
+ self.path = path
+ self.image_size = image_size
+ self.last_label = -1
+ self.widx = 1
+ if not os.path.exists(path):
+ os.makedirs(path)
+ self.writer = mx.recordio.MXIndexedRecordIO(
+ os.path.join(path, 'train.idx'), os.path.join(path, 'train.rec'),
+ 'w')
+ self.label_stat = [-1, -1]
+ self.identities = []
+
+ def add(self, label, imgs):
+ #img should be BGR
+ assert label >= 0
+ assert label > self.last_label
+ assert len(imgs) > 0
+ idflag = [self.widx, -1]
+ for img in imgs:
+ idx = self.widx
+ self.widx += 1
+ header = mx.recordio.IRHeader(0, label, idx, 0)
+ if isinstance(img, np.ndarray):
+ s = mx.recordio.pack_img(header,
+ img,
+ quality=95,
+ img_fmt='.jpg')
+ else:
+ s = mx.recordio.pack(header, img)
+ self.writer.write_idx(idx, s)
+ idflag[1] = self.widx
+ self.identities.append(idflag)
+ if self.label_stat[0] < 0:
+ self.label_stat = [label, label]
+ else:
+ self.label_stat[0] = min(self.label_stat[0], label)
+ self.label_stat[1] = max(self.label_stat[1], label)
+ self.last_label = label
+
+ def close(self):
+ id_idx = self.widx
+ for id_flag in self.identities:
+ idx = self.widx
+ self.widx += 1
+ _header = mx.recordio.IRHeader(0, id_flag, idx, 0)
+ s = mx.recordio.pack(_header, b'')
+ self.writer.write_idx(idx, s)
+
+ print('id0:', (id_idx, self.widx))
+ idx = 0
+ _header = mx.recordio.IRHeader(0, (id_idx, self.widx), idx, 1)
+ s = mx.recordio.pack(_header, b'')
+ self.writer.write_idx(idx, s)
+ print('label stat:', self.label_stat)
+
+ with open(os.path.join(self.path, 'property'), 'w') as f:
+ f.write("%d,%d,%d\n" % (self.label_stat[1] + 1, self.image_size[0],
+ self.image_size[1]))
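+
+
+# Usage sketch (hypothetical data): labels must be added in strictly
+# increasing order, one identity at a time, each with a list of BGR images:
+#   builder = RecBuilder('./faces_rec')
+#   builder.add(0, [img_a, img_b])
+#   builder.add(1, [img_c])
+#   builder.close()  # writes the identity index entries and the property file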
diff --git a/recognition/subcenter_arcface/common/verification.py b/recognition/subcenter_arcface/common/verification.py
new file mode 100644
index 0000000..f46942a
--- /dev/null
+++ b/recognition/subcenter_arcface/common/verification.py
@@ -0,0 +1,423 @@
+"""Helper for evaluation on the Labeled Faces in the Wild dataset
+"""
+
+# MIT License
+#
+# Copyright (c) 2016 David Sandberg
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import argparse
+import sys
+import numpy as np
+from scipy import misc
+from sklearn.model_selection import KFold
+from scipy import interpolate
+import sklearn
+import cv2
+import math
+import datetime
+import pickle
+from sklearn.decomposition import PCA
+import mxnet as mx
+from mxnet import ndarray as nd
+
+
+class LFold:
+ def __init__(self, n_splits=2, shuffle=False):
+ self.n_splits = n_splits
+ if self.n_splits > 1:
+ self.k_fold = KFold(n_splits=n_splits, shuffle=shuffle)
+
+ def split(self, indices):
+ if self.n_splits > 1:
+ return self.k_fold.split(indices)
+ else:
+ return [(indices, indices)]
+
+
+def calculate_roc(thresholds,
+ embeddings1,
+ embeddings2,
+ actual_issame,
+ nrof_folds=10,
+ pca=0):
+ assert (embeddings1.shape[0] == embeddings2.shape[0])
+ assert (embeddings1.shape[1] == embeddings2.shape[1])
+ nrof_pairs = min(len(actual_issame), embeddings1.shape[0])
+ nrof_thresholds = len(thresholds)
+ k_fold = LFold(n_splits=nrof_folds, shuffle=False)
+
+ tprs = np.zeros((nrof_folds, nrof_thresholds))
+ fprs = np.zeros((nrof_folds, nrof_thresholds))
+ accuracy = np.zeros((nrof_folds))
+ indices = np.arange(nrof_pairs)
+ #print('pca', pca)
+
+ if pca == 0:
+ diff = np.subtract(embeddings1, embeddings2)
+ dist = np.sum(np.square(diff), 1)
+
+ for fold_idx, (train_set, test_set) in enumerate(k_fold.split(indices)):
+ #print('train_set', train_set)
+ #print('test_set', test_set)
+ if pca > 0:
+ print('doing pca on', fold_idx)
+ embed1_train = embeddings1[train_set]
+ embed2_train = embeddings2[train_set]
+ _embed_train = np.concatenate((embed1_train, embed2_train), axis=0)
+ #print(_embed_train.shape)
+ pca_model = PCA(n_components=pca)
+ pca_model.fit(_embed_train)
+ embed1 = pca_model.transform(embeddings1)
+ embed2 = pca_model.transform(embeddings2)
+ embed1 = sklearn.preprocessing.normalize(embed1)
+ embed2 = sklearn.preprocessing.normalize(embed2)
+ #print(embed1.shape, embed2.shape)
+ diff = np.subtract(embed1, embed2)
+ dist = np.sum(np.square(diff), 1)
+
+ # Find the best threshold for the fold
+ acc_train = np.zeros((nrof_thresholds))
+ for threshold_idx, threshold in enumerate(thresholds):
+ _, _, acc_train[threshold_idx] = calculate_accuracy(
+ threshold, dist[train_set], actual_issame[train_set])
+ best_threshold_index = np.argmax(acc_train)
+ #print('threshold', thresholds[best_threshold_index])
+ for threshold_idx, threshold in enumerate(thresholds):
+ tprs[fold_idx,
+ threshold_idx], fprs[fold_idx,
+ threshold_idx], _ = calculate_accuracy(
+ threshold, dist[test_set],
+ actual_issame[test_set])
+ _, _, accuracy[fold_idx] = calculate_accuracy(
+ thresholds[best_threshold_index], dist[test_set],
+ actual_issame[test_set])
+
+ tpr = np.mean(tprs, 0)
+ fpr = np.mean(fprs, 0)
+ return tpr, fpr, accuracy
+
+
+def calculate_accuracy(threshold, dist, actual_issame):
+ predict_issame = np.less(dist, threshold)
+ tp = np.sum(np.logical_and(predict_issame, actual_issame))
+ fp = np.sum(np.logical_and(predict_issame, np.logical_not(actual_issame)))
+ tn = np.sum(
+ np.logical_and(np.logical_not(predict_issame),
+ np.logical_not(actual_issame)))
+ fn = np.sum(np.logical_and(np.logical_not(predict_issame), actual_issame))
+
+ tpr = 0 if (tp + fn == 0) else float(tp) / float(tp + fn)
+ fpr = 0 if (fp + tn == 0) else float(fp) / float(fp + tn)
+ acc = float(tp + tn) / dist.size
+ return tpr, fpr, acc
+
+
+def calculate_val(thresholds,
+ embeddings1,
+ embeddings2,
+ actual_issame,
+ far_target,
+ nrof_folds=10):
+ assert (embeddings1.shape[0] == embeddings2.shape[0])
+ assert (embeddings1.shape[1] == embeddings2.shape[1])
+ nrof_pairs = min(len(actual_issame), embeddings1.shape[0])
+ nrof_thresholds = len(thresholds)
+ k_fold = LFold(n_splits=nrof_folds, shuffle=False)
+
+ val = np.zeros(nrof_folds)
+ far = np.zeros(nrof_folds)
+
+ diff = np.subtract(embeddings1, embeddings2)
+ dist = np.sum(np.square(diff), 1)
+ indices = np.arange(nrof_pairs)
+
+ for fold_idx, (train_set, test_set) in enumerate(k_fold.split(indices)):
+
+ # Find the threshold that gives FAR = far_target
+ far_train = np.zeros(nrof_thresholds)
+ for threshold_idx, threshold in enumerate(thresholds):
+ _, far_train[threshold_idx] = calculate_val_far(
+ threshold, dist[train_set], actual_issame[train_set])
+ if np.max(far_train) >= far_target:
+ f = interpolate.interp1d(far_train, thresholds, kind='slinear')
+ threshold = f(far_target)
+ else:
+ threshold = 0.0
+
+ val[fold_idx], far[fold_idx] = calculate_val_far(
+ threshold, dist[test_set], actual_issame[test_set])
+
+ val_mean = np.mean(val)
+ far_mean = np.mean(far)
+ val_std = np.std(val)
+ return val_mean, val_std, far_mean
+
+
+def calculate_val_far(threshold, dist, actual_issame):
+ predict_issame = np.less(dist, threshold)
+ true_accept = np.sum(np.logical_and(predict_issame, actual_issame))
+ false_accept = np.sum(
+ np.logical_and(predict_issame, np.logical_not(actual_issame)))
+ n_same = np.sum(actual_issame)
+ n_diff = np.sum(np.logical_not(actual_issame))
+ #print(true_accept, false_accept)
+ #print(n_same, n_diff)
+ val = float(true_accept) / float(n_same)
+ far = float(false_accept) / float(n_diff)
+ return val, far
+
+
+def evaluate(embeddings, actual_issame, nrof_folds=10, pca=0):
+ # Calculate evaluation metrics
+ thresholds = np.arange(0, 4, 0.01)
+ embeddings1 = embeddings[0::2]
+ embeddings2 = embeddings[1::2]
+ tpr, fpr, accuracy = calculate_roc(thresholds,
+ embeddings1,
+ embeddings2,
+ np.asarray(actual_issame),
+ nrof_folds=nrof_folds,
+ pca=pca)
+ thresholds = np.arange(0, 4, 0.001)
+ val, val_std, far = calculate_val(thresholds,
+ embeddings1,
+ embeddings2,
+ np.asarray(actual_issame),
+ 1e-3,
+ nrof_folds=nrof_folds)
+ return tpr, fpr, accuracy, val, val_std, far
+
+
+def load_bin(path, image_size):
+ try:
+ with open(path, 'rb') as f:
+ bins, issame_list = pickle.load(f) #py2
+ except UnicodeDecodeError as e:
+ with open(path, 'rb') as f:
+ bins, issame_list = pickle.load(f, encoding='bytes') #py3
+ data_list = []
+ for flip in [0, 1]:
+ data = nd.empty(
+ (len(issame_list) * 2, 3, image_size[0], image_size[1]))
+ data_list.append(data)
+ for i in range(len(issame_list) * 2):
+ _bin = bins[i]
+ img = mx.image.imdecode(_bin)
+ if img.shape[1] != image_size[0]:
+ img = mx.image.resize_short(img, image_size[0])
+ img = nd.transpose(img, axes=(2, 0, 1))
+ for flip in [0, 1]:
+ if flip == 1:
+ img = mx.ndarray.flip(data=img, axis=2)
+ data_list[flip][i][:] = img
+ if i % 1000 == 0:
+ print('loading bin', i)
+ print(data_list[0].shape)
+ return (data_list, issame_list)
+
+
+def test(data_set,
+ mx_model,
+ batch_size,
+ nfolds=10,
+ data_extra=None,
+ label_shape=None):
+ print('testing verification..')
+ data_list = data_set[0]
+ issame_list = data_set[1]
+ model = mx_model
+ embeddings_list = []
+ if data_extra is not None:
+ _data_extra = nd.array(data_extra)
+ time_consumed = 0.0
+ if label_shape is None:
+ _label = nd.ones((batch_size, ))
+ else:
+ _label = nd.ones(label_shape)
+ for i in range(len(data_list)):
+ data = data_list[i]
+ embeddings = None
+ ba = 0
+ while ba < data.shape[0]:
+ bb = min(ba + batch_size, data.shape[0])
+ count = bb - ba
+ _data = nd.slice_axis(data, axis=0, begin=bb - batch_size, end=bb)
+ #print(_data.shape, _label.shape)
+ time0 = datetime.datetime.now()
+ if data_extra is None:
+ db = mx.io.DataBatch(data=(_data, ), label=(_label, ))
+ else:
+ db = mx.io.DataBatch(data=(_data, _data_extra),
+ label=(_label, ))
+ model.forward(db, is_train=False)
+ net_out = model.get_outputs()
+ #_arg, _aux = model.get_params()
+ #__arg = {}
+ #for k,v in _arg.iteritems():
+ # __arg[k] = v.as_in_context(_ctx)
+ #_arg = __arg
+ #_arg["data"] = _data.as_in_context(_ctx)
+ #_arg["softmax_label"] = _label.as_in_context(_ctx)
+ #for k,v in _arg.iteritems():
+ # print(k,v.context)
+ #exe = sym.bind(_ctx, _arg ,args_grad=None, grad_req="null", aux_states=_aux)
+ #exe.forward(is_train=False)
+ #net_out = exe.outputs
+ _embeddings = net_out[0].asnumpy()
+ time_now = datetime.datetime.now()
+ diff = time_now - time0
+ time_consumed += diff.total_seconds()
+ #print(_embeddings.shape)
+ if embeddings is None:
+ embeddings = np.zeros((data.shape[0], _embeddings.shape[1]))
+ embeddings[ba:bb, :] = _embeddings[(batch_size - count):, :]
+ ba = bb
+ embeddings_list.append(embeddings)
+
+ _xnorm = 0.0
+ _xnorm_cnt = 0
+ for embed in embeddings_list:
+ for i in range(embed.shape[0]):
+ _em = embed[i]
+ _norm = np.linalg.norm(_em)
+ #print(_em.shape, _norm)
+ _xnorm += _norm
+ _xnorm_cnt += 1
+ _xnorm /= _xnorm_cnt
+
+ embeddings = embeddings_list[0].copy()
+ embeddings = sklearn.preprocessing.normalize(embeddings)
+ acc1 = 0.0
+ std1 = 0.0
+ #_, _, accuracy, val, val_std, far = evaluate(embeddings, issame_list, nrof_folds=10)
+ #acc1, std1 = np.mean(accuracy), np.std(accuracy)
+
+ #print('Validation rate: %2.5f+-%2.5f @ FAR=%2.5f' % (val, val_std, far))
+ #embeddings = np.concatenate(embeddings_list, axis=1)
+ embeddings = embeddings_list[0] + embeddings_list[1]
+ embeddings = sklearn.preprocessing.normalize(embeddings)
+ print(embeddings.shape)
+ print('infer time', time_consumed)
+ _, _, accuracy, val, val_std, far = evaluate(embeddings,
+ issame_list,
+ nrof_folds=nfolds)
+ acc2, std2 = np.mean(accuracy), np.std(accuracy)
+ return acc1, std1, acc2, std2, _xnorm, embeddings_list
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser(description='do verification')
+ # general
+ parser.add_argument('--data-dir', default='', help='')
+ parser.add_argument('--model',
+ default='../model/softmax,50',
+ help='path to load model.')
+ parser.add_argument('--target',
+ default='lfw,cfp_ff,cfp_fp,agedb_30',
+ help='test targets.')
+ parser.add_argument('--gpu', default=0, type=int, help='gpu id')
+ parser.add_argument('--batch-size', default=32, type=int, help='')
+ parser.add_argument('--max', default='', type=str, help='')
+ parser.add_argument('--mode', default=0, type=int, help='')
+ parser.add_argument('--nfolds', default=10, type=int, help='')
+ args = parser.parse_args()
+ #sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'common'))
+ #import face_image
+ #prop = face_image.load_property(args.data_dir)
+ #image_size = prop.image_size
+ image_size = [112, 112]
+ print('image_size', image_size)
+ ctx = mx.gpu(args.gpu)
+ nets = []
+ vec = args.model.split(',')
+ prefix = args.model.split(',')[0]
+ epochs = []
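+    # a bare prefix means: scan its directory for every saved checkpoint;
+    # otherwise the epochs are listed after the comma, separated by '|'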
+ if len(vec) == 1:
+ pdir = os.path.dirname(prefix)
+ for fname in os.listdir(pdir):
+ if not fname.endswith('.params'):
+ continue
+ _file = os.path.join(pdir, fname)
+ if _file.startswith(prefix):
+ epoch = int(fname.split('.')[0].split('-')[1])
+ epochs.append(epoch)
+ epochs = sorted(epochs, reverse=True)
+ if len(args.max) > 0:
+ _max = [int(x) for x in args.max.split(',')]
+ assert len(_max) == 2
+ if len(epochs) > _max[1]:
+ epochs = epochs[_max[0]:_max[1]]
+
+ else:
+ epochs = [int(x) for x in vec[1].split('|')]
+ print('model number', len(epochs))
+ time0 = datetime.datetime.now()
+ for epoch in epochs:
+ print('loading', prefix, epoch)
+ sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)
+ #arg_params, aux_params = ch_dev(arg_params, aux_params, ctx)
+ all_layers = sym.get_internals()
+ sym = all_layers['fc1_output']
+ model = mx.mod.Module(symbol=sym, context=ctx, label_names=None)
+ #model.bind(data_shapes=[('data', (args.batch_size, 3, image_size[0], image_size[1]))], label_shapes=[('softmax_label', (args.batch_size,))])
+ model.bind(data_shapes=[('data', (args.batch_size, 3, image_size[0],
+ image_size[1]))])
+ model.set_params(arg_params, aux_params)
+ nets.append(model)
+ time_now = datetime.datetime.now()
+ diff = time_now - time0
+ print('model loading time', diff.total_seconds())
+
+ ver_list = []
+ ver_name_list = []
+ for name in args.target.split(','):
+ path = os.path.join(args.data_dir, name + ".bin")
+ if os.path.exists(path):
+ print('loading.. ', name)
+ data_set = load_bin(path, image_size)
+ ver_list.append(data_set)
+ ver_name_list.append(name)
+
+ if args.mode == 0:
+ for i in range(len(ver_list)):
+ results = []
+ for model in nets:
+ acc1, std1, acc2, std2, xnorm, embeddings_list = test(
+ ver_list[i], model, args.batch_size, args.nfolds)
+ print('[%s]XNorm: %f' % (ver_name_list[i], xnorm))
+ print('[%s]Accuracy: %1.5f+-%1.5f' %
+ (ver_name_list[i], acc1, std1))
+ print('[%s]Accuracy-Flip: %1.5f+-%1.5f' %
+ (ver_name_list[i], acc2, std2))
+ results.append(acc2)
+ print('Max of [%s] is %1.5f' % (ver_name_list[i], np.max(results)))
+ elif args.mode == 1:
+ model = nets[0]
+ test_badcase(ver_list[0], model, args.batch_size, args.target)
+ else:
+ model = nets[0]
+ dumpR(ver_list[0], model, args.batch_size, args.target)
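+
+# Example invocation (a sketch; the script name and dataset path are
+# hypothetical, the flags match the argparse definitions above):
+#   python verification.py --data-dir ./faces_emore \
+#       --model ../model/softmax,50 --target lfw,cfp_fp,agedb_30 --gpu 0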
diff --git a/recognition/SubCenter-ArcFace/drop.py b/recognition/subcenter_arcface/drop.py
similarity index 100%
rename from recognition/SubCenter-ArcFace/drop.py
rename to recognition/subcenter_arcface/drop.py
diff --git a/recognition/SubCenter-ArcFace/image_iter.py b/recognition/subcenter_arcface/image_iter.py
similarity index 100%
rename from recognition/SubCenter-ArcFace/image_iter.py
rename to recognition/subcenter_arcface/image_iter.py
diff --git a/recognition/SubCenter-ArcFace/parall_module_local_v1.py b/recognition/subcenter_arcface/parall_module_local_v1.py
similarity index 100%
rename from recognition/SubCenter-ArcFace/parall_module_local_v1.py
rename to recognition/subcenter_arcface/parall_module_local_v1.py
diff --git a/recognition/SubCenter-ArcFace/sample_config.py b/recognition/subcenter_arcface/sample_config.py
similarity index 100%
rename from recognition/SubCenter-ArcFace/sample_config.py
rename to recognition/subcenter_arcface/sample_config.py
diff --git a/recognition/subcenter_arcface/symbol/fdensenet.py b/recognition/subcenter_arcface/symbol/fdensenet.py
new file mode 100644
index 0000000..b3d49ee
--- /dev/null
+++ b/recognition/subcenter_arcface/symbol/fdensenet.py
@@ -0,0 +1,169 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# coding: utf-8
+# pylint: disable= arguments-differ
+"""DenseNet, implemented in Gluon."""
+
+import sys
+import os
+import mxnet as mx
+import mxnet.ndarray as nd
+import mxnet.gluon as gluon
+import mxnet.gluon.nn as nn
+import mxnet.autograd as ag
+import symbol_utils
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from config import config
+
+
+def Act():
+ if config.net_act == 'prelu':
+ return nn.PReLU()
+ else:
+ return nn.Activation(config.net_act)
+
+
+# Helpers
+def _make_dense_block(num_layers, bn_size, growth_rate, dropout, stage_index):
+ out = nn.HybridSequential(prefix='stage%d_' % stage_index)
+ with out.name_scope():
+ for _ in range(num_layers):
+ out.add(_make_dense_layer(growth_rate, bn_size, dropout))
+ return out
+
+
+def _make_dense_layer(growth_rate, bn_size, dropout):
+ new_features = nn.HybridSequential(prefix='')
+ new_features.add(nn.BatchNorm())
+ #new_features.add(nn.Activation('relu'))
+ new_features.add(Act())
+ new_features.add(
+ nn.Conv2D(bn_size * growth_rate, kernel_size=1, use_bias=False))
+ new_features.add(nn.BatchNorm())
+ #new_features.add(nn.Activation('relu'))
+ new_features.add(Act())
+ new_features.add(
+ nn.Conv2D(growth_rate, kernel_size=3, padding=1, use_bias=False))
+ if dropout:
+ new_features.add(nn.Dropout(dropout))
+
+ out = gluon.contrib.nn.HybridConcurrent(axis=1, prefix='')
+ out.add(gluon.contrib.nn.Identity())
+ out.add(new_features)
+
+ return out
+
+
+def _make_transition(num_output_features):
+ out = nn.HybridSequential(prefix='')
+ out.add(nn.BatchNorm())
+ #out.add(nn.Activation('relu'))
+ out.add(Act())
+ out.add(nn.Conv2D(num_output_features, kernel_size=1, use_bias=False))
+ out.add(nn.AvgPool2D(pool_size=2, strides=2))
+ return out
+
+
+# Net
+class DenseNet(nn.HybridBlock):
+ r"""Densenet-BC model from the
+ `"Densely Connected Convolutional Networks" `_ paper.
+
+ Parameters
+ ----------
+ num_init_features : int
+ Number of filters to learn in the first convolution layer.
+ growth_rate : int
+ Number of filters to add each layer (`k` in the paper).
+ block_config : list of int
+ List of integers for numbers of layers in each pooling block.
+ bn_size : int, default 4
+        Multiplicative factor for the number of bottleneck layers.
+ (i.e. bn_size * k features in the bottleneck layer)
+ dropout : float, default 0
+ Rate of dropout after each dense layer.
+ classes : int, default 1000
+ Number of classification classes.
+ """
+ def __init__(self,
+ num_init_features,
+ growth_rate,
+ block_config,
+ bn_size=4,
+ dropout=0,
+ classes=1000,
+ **kwargs):
+
+ super(DenseNet, self).__init__(**kwargs)
+ with self.name_scope():
+ self.features = nn.HybridSequential(prefix='')
+ self.features.add(
+ nn.Conv2D(num_init_features,
+ kernel_size=3,
+ strides=1,
+ padding=1,
+ use_bias=False))
+ self.features.add(nn.BatchNorm())
+ self.features.add(nn.Activation('relu'))
+ self.features.add(nn.MaxPool2D(pool_size=3, strides=2, padding=1))
+ # Add dense blocks
+ num_features = num_init_features
+ for i, num_layers in enumerate(block_config):
+ self.features.add(
+ _make_dense_block(num_layers, bn_size, growth_rate,
+ dropout, i + 1))
+ num_features = num_features + num_layers * growth_rate
+ if i != len(block_config) - 1:
+ self.features.add(_make_transition(num_features // 2))
+ num_features = num_features // 2
+ self.features.add(nn.BatchNorm())
+ self.features.add(nn.Activation('relu'))
+ #self.features.add(nn.AvgPool2D(pool_size=7))
+ #self.features.add(nn.Flatten())
+
+ #self.output = nn.Dense(classes)
+
+ def hybrid_forward(self, F, x):
+ x = self.features(x)
+ #x = self.output(x)
+ return x
+
+
+# Specification
+densenet_spec = {
+ 121: (64, 32, [6, 12, 24, 16]),
+ 161: (96, 48, [6, 12, 36, 24]),
+ 169: (64, 32, [6, 12, 32, 32]),
+ 201: (64, 32, [6, 12, 48, 32])
+}
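+# Each entry maps DenseNet depth to (num_init_features, growth_rate, block_config).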
+
+
+# Constructor
+def get_symbol():
+ num_layers = config.num_layers
+ num_init_features, growth_rate, block_config = densenet_spec[num_layers]
+ net = DenseNet(num_init_features,
+ growth_rate,
+ block_config,
+ dropout=config.densenet_dropout)
+ data = mx.sym.Variable(name='data')
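+    # map uint8 pixel values from [0, 255] to roughly [-1, 1]: (x - 127.5) / 128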
+ data = data - 127.5
+ data = data * 0.0078125
+ body = net(data)
+ fc1 = symbol_utils.get_fc1(body, config.emb_size, config.net_output)
+ return fc1
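+
+# Minimal usage sketch (assumes a populated config, e.g. sample_config.py,
+# with num_layers set to one of 121/161/169/201):
+#   sym = get_symbol()
+#   mod = mx.mod.Module(symbol=sym, context=mx.gpu(0), label_names=None)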
diff --git a/recognition/subcenter_arcface/symbol/fmnasnet.py b/recognition/subcenter_arcface/symbol/fmnasnet.py
new file mode 100644
index 0000000..118beb9
--- /dev/null
+++ b/recognition/subcenter_arcface/symbol/fmnasnet.py
@@ -0,0 +1,213 @@
+import sys
+import os
+import mxnet as mx
+import mxnet.ndarray as nd
+import mxnet.gluon as gluon
+import mxnet.gluon.nn as nn
+import mxnet.autograd as ag
+import symbol_utils
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from config import config
+
+
+def Act():
+ if config.net_act == 'prelu':
+ return nn.PReLU()
+ else:
+ return nn.Activation(config.net_act)
+
+
+def ConvBlock(channels, kernel_size, strides, **kwargs):
+ out = nn.HybridSequential(**kwargs)
+ with out.name_scope():
+ out.add(
+ nn.Conv2D(channels,
+ kernel_size,
+ strides=strides,
+ padding=1,
+ use_bias=False), nn.BatchNorm(scale=True),
+ Act()
+ #nn.Activation('relu')
+ )
+ return out
+
+
+def Conv1x1(channels, is_linear=False, **kwargs):
+ out = nn.HybridSequential(**kwargs)
+ with out.name_scope():
+ out.add(nn.Conv2D(channels, 1, padding=0, use_bias=False),
+ nn.BatchNorm(scale=True))
+ if not is_linear:
+ #out.add(nn.Activation('relu'))
+ out.add(Act())
+ return out
+
+
+def DWise(channels, strides, kernel_size=3, **kwargs):
+ out = nn.HybridSequential(**kwargs)
+ with out.name_scope():
+ out.add(
+ nn.Conv2D(channels,
+ kernel_size,
+ strides=strides,
+ padding=kernel_size // 2,
+ groups=channels,
+ use_bias=False), nn.BatchNorm(scale=True),
+ Act()
+ #nn.Activation('relu')
+ )
+ return out
+
+
+class SepCONV(nn.HybridBlock):
+ def __init__(self,
+ inp,
+ output,
+ kernel_size,
+ depth_multiplier=1,
+ with_bn=True,
+ **kwargs):
+ super(SepCONV, self).__init__(**kwargs)
+ with self.name_scope():
+ self.net = nn.HybridSequential()
+ cn = int(inp * depth_multiplier)
+
+ if output is None:
+ self.net.add(
+ nn.Conv2D(in_channels=inp,
+ channels=cn,
+ groups=inp,
+ kernel_size=kernel_size,
+ strides=(1, 1),
+ padding=kernel_size // 2,
+ use_bias=not with_bn))
+ else:
+ self.net.add(
+ nn.Conv2D(in_channels=inp,
+ channels=cn,
+ groups=inp,
+ kernel_size=kernel_size,
+ strides=(1, 1),
+ padding=kernel_size // 2,
+ use_bias=False),
+ nn.BatchNorm(),
+ Act(),
+ #nn.Activation('relu'),
+ nn.Conv2D(in_channels=cn,
+ channels=output,
+ kernel_size=(1, 1),
+ strides=(1, 1),
+ use_bias=not with_bn))
+
+ self.with_bn = with_bn
+ self.act = Act()
+ #self.act = nn.Activation('relu')
+ if with_bn:
+ self.bn = nn.BatchNorm()
+
+ def hybrid_forward(self, F, x):
+ x = self.net(x)
+ if self.with_bn:
+ x = self.bn(x)
+ if self.act is not None:
+ x = self.act(x)
+ return x
+
+
+class ExpandedConv(nn.HybridBlock):
+ def __init__(self,
+ inp,
+ oup,
+ t,
+ strides,
+ kernel=3,
+ same_shape=True,
+ **kwargs):
+ super(ExpandedConv, self).__init__(**kwargs)
+
+ self.same_shape = same_shape
+ self.strides = strides
+ with self.name_scope():
+ self.bottleneck = nn.HybridSequential()
+ self.bottleneck.add(
+ Conv1x1(inp * t, prefix="expand_"),
+ DWise(inp * t, self.strides, kernel, prefix="dwise_"),
+ Conv1x1(oup, is_linear=True, prefix="linear_"))
+
+ def hybrid_forward(self, F, x):
+ out = self.bottleneck(x)
+ if self.strides == 1 and self.same_shape:
+ out = F.elemwise_add(out, x)
+ return out
+
+
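+# One stage: a shape-changing ExpandedConv with stride `first_strides`,
+# followed by `repeats - 1` stride-1 residual ExpandedConvs.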
+def ExpandedConvSequence(t, k, inp, oup, repeats, first_strides, **kwargs):
+ seq = nn.HybridSequential(**kwargs)
+ with seq.name_scope():
+ seq.add(ExpandedConv(inp, oup, t, first_strides, k, same_shape=False))
+ curr_inp = oup
+ for i in range(1, repeats):
+ seq.add(ExpandedConv(curr_inp, oup, t, 1))
+ curr_inp = oup
+ return seq
+
+
+class MNasNet(nn.HybridBlock):
+ def __init__(self, m=1.0, **kwargs):
+ super(MNasNet, self).__init__(**kwargs)
+
+ self.first_oup = int(32 * m)
+ self.second_oup = int(16 * m)
+ #self.second_oup = int(32*m)
+ self.interverted_residual_setting = [
+ # t, c, n, s, k
+ [3, int(24 * m), 3, 2, 3, "stage2_"], # -> 56x56
+ [3, int(40 * m), 3, 2, 5, "stage3_"], # -> 28x28
+ [6, int(80 * m), 3, 2, 5, "stage4_1_"], # -> 14x14
+ [6, int(96 * m), 2, 1, 3, "stage4_2_"], # -> 14x14
+ [6, int(192 * m), 4, 2, 5, "stage5_1_"], # -> 7x7
+ [6, int(320 * m), 1, 1, 3, "stage5_2_"], # -> 7x7
+ ]
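+        # columns: t = expansion factor, c = output channels, n = repeats,
+        #          s = stride of the first block, k = depthwise kernel size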
+ self.last_channels = int(1024 * m)
+
+ with self.name_scope():
+ self.features = nn.HybridSequential()
+ self.features.add(
+ ConvBlock(self.first_oup, 3, 1, prefix="stage1_conv0_"))
+ self.features.add(
+ SepCONV(self.first_oup,
+ self.second_oup,
+ 3,
+ prefix="stage1_sepconv0_"))
+ inp = self.second_oup
+ for i, (t, c, n, s, k,
+ prefix) in enumerate(self.interverted_residual_setting):
+ oup = c
+ self.features.add(
+ ExpandedConvSequence(t, k, inp, oup, n, s, prefix=prefix))
+ inp = oup
+
+ self.features.add(Conv1x1(self.last_channels, prefix="stage5_3_"))
+ #self.features.add(nn.GlobalAvgPool2D())
+ #self.features.add(nn.Flatten())
+ #self.output = nn.Dense(num_classes)
+ def hybrid_forward(self, F, x):
+ x = self.features(x)
+ #x = self.output(x)
+ return x
+
+ def num_output_channel(self):
+ return self.last_channels
+
+
+def get_symbol():
+ net = MNasNet(config.net_multiplier)
+ data = mx.sym.Variable(name='data')
+ data = data - 127.5
+ data = data * 0.0078125
+ body = net(data)
+ fc1 = symbol_utils.get_fc1(body,
+ config.emb_size,
+ config.net_output,
+ input_channel=net.num_output_channel())
+ return fc1
diff --git a/recognition/subcenter_arcface/symbol/fmobilefacenet.py b/recognition/subcenter_arcface/symbol/fmobilefacenet.py
new file mode 100644
index 0000000..f498264
--- /dev/null
+++ b/recognition/subcenter_arcface/symbol/fmobilefacenet.py
@@ -0,0 +1,224 @@
+import sys
+import os
+import mxnet as mx
+import symbol_utils
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from config import config
+
+
+def Act(data, act_type, name):
+    # act_type selects the nonlinearity: 'prelu' maps to LeakyReLU, anything else to mx.sym.Activation
+ if act_type == 'prelu':
+ body = mx.sym.LeakyReLU(data=data, act_type='prelu', name=name)
+ else:
+ body = mx.sym.Activation(data=data, act_type=act_type, name=name)
+ return body
+
+
+def Conv(data,
+ num_filter=1,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ num_group=1,
+ name=None,
+ suffix=''):
+ conv = mx.sym.Convolution(data=data,
+ num_filter=num_filter,
+ kernel=kernel,
+ num_group=num_group,
+ stride=stride,
+ pad=pad,
+ no_bias=True,
+ name='%s%s_conv2d' % (name, suffix))
+ bn = mx.sym.BatchNorm(data=conv,
+ name='%s%s_batchnorm' % (name, suffix),
+ fix_gamma=False,
+ momentum=config.bn_mom)
+ act = Act(data=bn,
+ act_type=config.net_act,
+ name='%s%s_relu' % (name, suffix))
+ return act
+
+
+def Linear(data,
+ num_filter=1,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ num_group=1,
+ name=None,
+ suffix=''):
+ conv = mx.sym.Convolution(data=data,
+ num_filter=num_filter,
+ kernel=kernel,
+ num_group=num_group,
+ stride=stride,
+ pad=pad,
+ no_bias=True,
+ name='%s%s_conv2d' % (name, suffix))
+ bn = mx.sym.BatchNorm(data=conv,
+ name='%s%s_batchnorm' % (name, suffix),
+ fix_gamma=False,
+ momentum=config.bn_mom)
+ return bn
+
+
+def ConvOnly(data,
+ num_filter=1,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ num_group=1,
+ name=None,
+ suffix=''):
+ conv = mx.sym.Convolution(data=data,
+ num_filter=num_filter,
+ kernel=kernel,
+ num_group=num_group,
+ stride=stride,
+ pad=pad,
+ no_bias=True,
+ name='%s%s_conv2d' % (name, suffix))
+ return conv
+
+
+def DResidual(data,
+ num_out=1,
+ kernel=(3, 3),
+ stride=(2, 2),
+ pad=(1, 1),
+ num_group=1,
+ name=None,
+ suffix=''):
+ conv = Conv(data=data,
+ num_filter=num_group,
+ kernel=(1, 1),
+ pad=(0, 0),
+ stride=(1, 1),
+ name='%s%s_conv_sep' % (name, suffix))
+ conv_dw = Conv(data=conv,
+ num_filter=num_group,
+ num_group=num_group,
+ kernel=kernel,
+ pad=pad,
+ stride=stride,
+ name='%s%s_conv_dw' % (name, suffix))
+ proj = Linear(data=conv_dw,
+ num_filter=num_out,
+ kernel=(1, 1),
+ pad=(0, 0),
+ stride=(1, 1),
+ name='%s%s_conv_proj' % (name, suffix))
+ return proj
+
+
+def Residual(data,
+ num_block=1,
+ num_out=1,
+ kernel=(3, 3),
+ stride=(1, 1),
+ pad=(1, 1),
+ num_group=1,
+ name=None,
+ suffix=''):
+ identity = data
+ for i in range(num_block):
+ shortcut = identity
+ conv = DResidual(data=identity,
+ num_out=num_out,
+ kernel=kernel,
+ stride=stride,
+ pad=pad,
+ num_group=num_group,
+ name='%s%s_block' % (name, suffix),
+ suffix='%d' % i)
+ identity = conv + shortcut
+ return identity
+
+
+def get_symbol():
+ num_classes = config.emb_size
+ print('in_network', config)
+ fc_type = config.net_output
+ data = mx.symbol.Variable(name="data")
+ data = data - 127.5
+ data = data * 0.0078125
+ blocks = config.net_blocks
+ conv_1 = Conv(data,
+ num_filter=64,
+ kernel=(3, 3),
+ pad=(1, 1),
+ stride=(2, 2),
+ name="conv_1")
+ if blocks[0] == 1:
+ conv_2_dw = Conv(conv_1,
+ num_group=64,
+ num_filter=64,
+ kernel=(3, 3),
+ pad=(1, 1),
+ stride=(1, 1),
+ name="conv_2_dw")
+ else:
+ conv_2_dw = Residual(conv_1,
+ num_block=blocks[0],
+ num_out=64,
+ kernel=(3, 3),
+ stride=(1, 1),
+ pad=(1, 1),
+ num_group=64,
+ name="res_2")
+ conv_23 = DResidual(conv_2_dw,
+ num_out=64,
+ kernel=(3, 3),
+ stride=(2, 2),
+ pad=(1, 1),
+ num_group=128,
+ name="dconv_23")
+ conv_3 = Residual(conv_23,
+ num_block=blocks[1],
+ num_out=64,
+ kernel=(3, 3),
+ stride=(1, 1),
+ pad=(1, 1),
+ num_group=128,
+ name="res_3")
+ conv_34 = DResidual(conv_3,
+ num_out=128,
+ kernel=(3, 3),
+ stride=(2, 2),
+ pad=(1, 1),
+ num_group=256,
+ name="dconv_34")
+ conv_4 = Residual(conv_34,
+ num_block=blocks[2],
+ num_out=128,
+ kernel=(3, 3),
+ stride=(1, 1),
+ pad=(1, 1),
+ num_group=256,
+ name="res_4")
+ conv_45 = DResidual(conv_4,
+ num_out=128,
+ kernel=(3, 3),
+ stride=(2, 2),
+ pad=(1, 1),
+ num_group=512,
+ name="dconv_45")
+ conv_5 = Residual(conv_45,
+ num_block=blocks[3],
+ num_out=128,
+ kernel=(3, 3),
+ stride=(1, 1),
+ pad=(1, 1),
+ num_group=256,
+ name="res_5")
+ conv_6_sep = Conv(conv_5,
+ num_filter=512,
+ kernel=(1, 1),
+ pad=(0, 0),
+ stride=(1, 1),
+ name="conv_6sep")
+
+ fc1 = symbol_utils.get_fc1(conv_6_sep, num_classes, fc_type)
+ return fc1
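+
+# Usage sketch: config.net_blocks sets the residual units per stage
+# (e.g. [1, 4, 6, 2], a hypothetical setting), then:
+#   sym = get_symbol()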
diff --git a/gender-age/fmobilenet.py b/recognition/subcenter_arcface/symbol/fmobilenet.py
similarity index 93%
rename from gender-age/fmobilenet.py
rename to recognition/subcenter_arcface/symbol/fmobilenet.py
index 6bb1c0a..bdbf8a5 100644
--- a/gender-age/fmobilenet.py
+++ b/recognition/subcenter_arcface/symbol/fmobilenet.py
@@ -15,14 +15,20 @@
# specific language governing permissions and limitations
# under the License.
+import sys
+import os
import mxnet as mx
import symbol_utils
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from config import config
def Act(data, act_type, name):
#ignore param act_type, set it in this function
- #body = mx.sym.LeakyReLU(data = data, act_type='prelu', name = name)
- body = mx.sym.Activation(data=data, act_type='relu', name=name)
+ if act_type == 'prelu':
+ body = mx.sym.LeakyReLU(data=data, act_type='prelu', name=name)
+ else:
+ body = mx.sym.Activation(data=data, act_type=act_type, name=name)
return body
@@ -45,7 +51,9 @@ def Conv(data,
bn = mx.sym.BatchNorm(data=conv,
name='%s%s_batchnorm' % (name, suffix),
fix_gamma=True)
- act = Act(data=bn, act_type='relu', name='%s%s_relu' % (name, suffix))
+ act = Act(data=bn,
+ act_type=config.net_act,
+ name='%s%s_relu' % (name, suffix))
return act
@@ -68,20 +76,16 @@ def ConvOnly(data,
return conv
-def get_symbol(num_classes, **kwargs):
+def get_symbol():
+ num_classes = config.emb_size
+ bn_mom = config.bn_mom
+ workspace = config.workspace
data = mx.symbol.Variable(name="data") # 224
data = data - 127.5
data = data * 0.0078125
- version_input = kwargs.get('version_input', 1)
- assert version_input >= 0
- version_output = kwargs.get('version_output', 'E')
- multiplier = kwargs.get('multiplier', 1.0)
- fc_type = version_output
- base_filter = int(32 * multiplier)
- bf = base_filter
- print(version_input, version_output, base_filter)
-
- if version_input == 0:
+ fc_type = config.net_output
+ bf = int(32 * config.net_multiplier)
+ if config.net_input == 0:
conv_1 = Conv(data,
num_filter=bf,
kernel=(3, 3),
diff --git a/gender-age/fresnet.py b/recognition/subcenter_arcface/symbol/fresnet.py
similarity index 94%
rename from gender-age/fresnet.py
rename to recognition/subcenter_arcface/symbol/fresnet.py
index 037a42b..7b17788 100644
--- a/gender-age/fresnet.py
+++ b/recognition/subcenter_arcface/symbol/fresnet.py
@@ -25,10 +25,15 @@ Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun. "Identity Mappings in Deep Re
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import sys
+import os
import mxnet as mx
import numpy as np
import symbol_utils
+import memonger
import sklearn
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from config import config
def Conv(**kwargs):
@@ -965,9 +970,19 @@ def residual_unit(data, num_filter, stride, dim_match, name, bottle_neck,
bottle_neck, **kwargs)
-def resnet(units, num_stages, filter_list, num_classes, bottle_neck, **kwargs):
- bn_mom = kwargs.get('bn_mom', 0.9)
- workspace = kwargs.get('workspace', 256)
+def resnet(units, num_stages, filter_list, num_classes, bottle_neck):
+ bn_mom = config.bn_mom
+ workspace = config.workspace
+ kwargs = {
+ 'version_se': config.net_se,
+ 'version_input': config.net_input,
+ 'version_output': config.net_output,
+ 'version_unit': config.net_unit,
+ 'version_act': config.net_act,
+ 'bn_mom': bn_mom,
+ 'workspace': workspace,
+ 'memonger': config.memonger,
+ }
"""Return ResNet symbol of
Parameters
----------
@@ -984,17 +999,21 @@ def resnet(units, num_stages, filter_list, num_classes, bottle_neck, **kwargs):
workspace : int
Workspace used in convolution operator
"""
- version_se = kwargs.get('version_se', 0)
+ version_se = kwargs.get('version_se', 1)
version_input = kwargs.get('version_input', 1)
assert version_input >= 0
- version_output = kwargs.get('version_output', 'GAP')
+ version_output = kwargs.get('version_output', 'E')
fc_type = version_output
version_unit = kwargs.get('version_unit', 3)
act_type = kwargs.get('version_act', 'prelu')
- print(version_se, version_input, version_output, version_unit, act_type)
+ memonger = kwargs.get('memonger', False)
+ print(version_se, version_input, version_output, version_unit, act_type,
+ memonger)
num_unit = len(units)
assert (num_unit == num_stages)
data = mx.sym.Variable(name='data')
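+    # optionally run the backbone in fp16, casting back to fp32 before the
+    # final stage (see the Cast below)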
+ if config.fp16:
+ data = mx.sym.Cast(data=data, dtype=np.float16)
if version_input == 0:
#data = mx.sym.BatchNorm(data=data, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='bn_data')
data = mx.sym.identity(data=data, name='id')
@@ -1062,6 +1081,8 @@ def resnet(units, num_stages, filter_list, num_classes, bottle_neck, **kwargs):
#else:
# body = residual_unit(body, filter_list[i+1], (2, 2), False,
# name='stage%d_unit%d' % (i + 1, 1), bottle_neck=bottle_neck, **kwargs)
+        if i == num_stages - 1 and config.fp16:
+ body = mx.sym.Cast(data=body, dtype=np.float32)
body = residual_unit(body,
filter_list[i + 1], (2, 2),
False,
@@ -1076,16 +1097,34 @@ def resnet(units, num_stages, filter_list, num_classes, bottle_neck, **kwargs):
bottle_neck=bottle_neck,
**kwargs)
+ if bottle_neck:
+ body = Conv(data=body,
+ num_filter=512,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ no_bias=True,
+ name="convd",
+ workspace=workspace)
+ body = mx.sym.BatchNorm(data=body,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name='bnd')
+ body = Act(data=body, act_type=act_type, name='relud')
+
fc1 = symbol_utils.get_fc1(body, num_classes, fc_type)
return fc1
-def get_symbol(num_classes, num_layers, **kwargs):
+def get_symbol():
"""
Adapted from https://github.com/tornadomeet/ResNet/blob/master/train_resnet.py
Original author Wei Wu
"""
- if num_layers >= 101:
+ num_classes = config.emb_size
+ num_layers = config.num_layers
+ if num_layers >= 500:
filter_list = [64, 256, 512, 1024, 2048]
bottle_neck = True
else:
@@ -1104,10 +1143,22 @@ def get_symbol(num_classes, num_layers, **kwargs):
units = [3, 6, 24, 3]
elif num_layers == 90:
units = [3, 8, 30, 3]
+ elif num_layers == 98:
+ units = [3, 4, 38, 3]
+ elif num_layers == 99:
+ units = [3, 8, 35, 3]
elif num_layers == 100:
units = [3, 13, 30, 3]
+ elif num_layers == 134:
+ units = [3, 10, 50, 3]
+ elif num_layers == 136:
+ units = [3, 13, 48, 3]
+ elif num_layers == 140:
+ units = [3, 15, 48, 3]
elif num_layers == 124:
units = [3, 13, 40, 5]
+ elif num_layers == 160:
+ units = [3, 24, 49, 3]
elif num_layers == 101:
units = [3, 4, 23, 3]
elif num_layers == 152:
@@ -1121,9 +1172,20 @@ def get_symbol(num_classes, num_layers, **kwargs):
"no experiments done on num_layers {}, you can do it yourself".
format(num_layers))
- return resnet(units=units,
- num_stages=num_stages,
- filter_list=filter_list,
- num_classes=num_classes,
- bottle_neck=bottle_neck,
- **kwargs)
+ net = resnet(units=units,
+ num_stages=num_stages,
+ filter_list=filter_list,
+ num_classes=num_classes,
+ bottle_neck=bottle_neck)
+
+ if config.memonger:
+ dshape = (config.per_batch_size, config.image_shape[2],
+ config.image_shape[0], config.image_shape[1])
+ net_mem_planned = memonger.search_plan(net, data=dshape)
+ old_cost = memonger.get_cost(net, data=dshape)
+ new_cost = memonger.get_cost(net_mem_planned, data=dshape)
+
+ print('Old feature map cost=%d MB' % old_cost)
+ print('New feature map cost=%d MB' % new_cost)
+ net = net_mem_planned
+ return net
diff --git a/recognition/subcenter_arcface/symbol/memonger.py b/recognition/subcenter_arcface/symbol/memonger.py
new file mode 100644
index 0000000..8ad610b
--- /dev/null
+++ b/recognition/subcenter_arcface/symbol/memonger.py
@@ -0,0 +1,175 @@
+import mxnet as mx
+import math
+
+
+def prod(shape):
+ """Get product of the shape.
+ """
+ ret = 1
+ for s in shape:
+ ret *= s
+ return ret
+
+
+def is_param(name):
+ """Quick script to check if name is a parameter.
+ """
+ if name == 'data':
+ return False
+ if name.endswith('weight'):
+ return True
+ if name.endswith('bias'):
+ return True
+ if name.endswith('beta'):
+ return True
+ if name.endswith('gamma'):
+ return True
+ return False
+
+
+def make_mirror_plan(sym, threshold, plan_info=None, **kwargs):
+ """Memory allocation planner with a given threshold.
+
+    The user passes in a network configuration, a threshold that limits
+    memory per block, and input shape configurations.
+
+    Parameters
+    ----------
+    sym : symbol
+        Input configuration of symbols.
+        The user needs to pre-mark the attribute "mirror_stage" on the nodes
+        that can be book-kept as stages.
+
+        The algorithm will decide whether to disable mirroring on the stage nodes.
+
+    threshold: integer
+        A tuning parameter controlling the approximate size of each stage block.
+
+ plan_info: dict, optional
+ Used to hold plan information.
+
+ **kwargs:
+ The arguments to infer shape.
+
+ Returns
+ -------
+ alloc_sym: symbol
+ A symbol with force mirror tagged on the nodes for better allocation.
+ """
+ threshold = threshold << 20
+ sym = sym.__copy__()
+ internals = sym.get_internals()
+ _, out_shapes, _ = internals.infer_shape(**kwargs)
+ shape_dict = list(zip(internals.list_outputs(), out_shapes))
+ total_size = 0
+ param_size = 0
+ local_size = 0
+ save_size = 0
+ max_size = 0
+ last_sb = None
+ last_local = 0
+ period = 1
+ last_stage = ''
+ stage_decision = ''
+
+ for idx, item in enumerate(shape_dict):
+ sb = internals[idx]
+ name, shape = item
+ if is_param(name):
+ param_size += prod(shape) * 4
+ continue
+ else:
+ total_size += prod(shape) * 4
+ local_size += prod(shape) * 4
+ sb._set_attr(force_mirroring='True')
+
+ if sb.attr('mirror_stage') is not None:
+ stage = sb.attr('mirror_stage')
+ if stage == 'True' or stage != last_stage:
+ if local_size > threshold:
+ save_size += prod(shape) * 4
+ max_size = max(max_size, local_size)
+ local_size = 0
+ stage_decision = 'False'
+ sb._set_attr(force_mirroring=stage_decision)
+ else:
+ stage_decision = 'True'
+ pass
+ last_stage = stage
+ elif stage == last_stage and stage_decision == 'False':
+ save_size += prod(shape) * 4
+ sb._set_attr(force_mirroring=stage_decision)
+
+ if plan_info is not None:
+ plan_info['max_size'] = max_size
+ plan_info['save_size'] = save_size
+ return sym
+
+
+def get_cost(sym, type_dict=None, **kwargs):
+ """Get the cost of the current symbolic plan by running bind on CPU.
+
+ sym : Symbolic Variable
+
+ """
+ texec = sym.simple_bind(ctx=mx.gpu(),
+ grad_req='write',
+ type_dict=type_dict,
+ **kwargs)
+ return int(texec.debug_str().split('\n')[-3].split()[1])
+
+
+def search_plan(sym, ntrial=6, type_dict=None, **kwargs):
+ """Quickly heurestic search over possible plans to find good memory plan.
+
+ Parameters
+ ----------
+ sym : symbolic
+ Symbolic configurations
+
+ ntrial: integer
+ Additional grid search steps
+ """
+ history = []
+ threshold = 0
+ min_threshold = None
+ min_cost = None
+ nbegin = 3
+
+ for k in range(nbegin):
+ info = {}
+ sym = make_mirror_plan(sym,
+ threshold=threshold,
+ plan_info=info,
+ **kwargs)
+ cost = get_cost(sym, type_dict, **kwargs)
+ save_size = info['save_size'] >> 20
+ local_size = info['max_size'] >> 20
+ guess = int(math.sqrt(save_size * local_size / 2))
+ if min_cost is None or min_cost > cost:
+ min_cost = cost
+ if min_threshold is None or local_size < min_threshold:
+ min_threshold = local_size
+ print("Search threshold=%d MB, cost=%d MB" % (threshold, cost))
+ history.append((cost, threshold, sym))
+ threshold = guess
+
+ max_threshold = threshold * math.sqrt(2)
+ step = int((max_threshold - min_threshold) / ntrial)
+ threshold = min_threshold + step
+ if step > 0:
+ for k in range(ntrial):
+ sym = make_mirror_plan(sym,
+ threshold=threshold,
+ plan_info=info,
+ **kwargs)
+ cost = get_cost(sym, type_dict, **kwargs)
+ print("Search threshold=%d MB, cost=%d MB" % (threshold, cost))
+ history.append((cost, threshold, sym))
+ threshold += step
+
+ history.sort(key=lambda x: x[0])
+ cost, threshold, sym = history[0]
+    print('Found best plan: threshold=%d MB, cost=%d MB' % (threshold, cost))
+ return sym
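+
+# Usage sketch (mirrors the call in fresnet.py; the shape values come from
+# the training config):
+#   dshape = (config.per_batch_size, 3, 112, 112)
+#   net = search_plan(net, data=dshape)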
diff --git a/recognition/subcenter_arcface/symbol/memonger_v2.py b/recognition/subcenter_arcface/symbol/memonger_v2.py
new file mode 100644
index 0000000..92963de
--- /dev/null
+++ b/recognition/subcenter_arcface/symbol/memonger_v2.py
@@ -0,0 +1,300 @@
+import mxnet as mx
+import math
+
+
+def prod(shape):
+ """Get product of the shape.
+ """
+ ret = 1
+ for s in shape:
+ ret *= s
+ return ret
+
+
+def is_param(name):
+ """Quick script to check if name is a parameter.
+ """
+ if name == 'data':
+ return False
+ if name.endswith('weight'):
+ return True
+ if name.endswith('bias'):
+ return True
+ if name.endswith('beta'):
+ return True
+ if name.endswith('gamma'):
+ return True
+ return False
+
+
+def make_mirror_plan(sym, threshold, plan_info=None, **kwargs):
+ """Memory allocation planner with a given threshold.
+
+    The user passes in a network configuration, a threshold that limits
+    memory per block, and input shape configurations.
+
+    Parameters
+    ----------
+    sym : symbol
+        Input configuration of symbols.
+        The user needs to pre-mark the attribute "mirror_stage" on the nodes
+        that can be book-kept as stages.
+
+        The algorithm will decide whether to disable mirroring on the stage nodes.
+
+    threshold: integer
+        A tuning parameter controlling the approximate size of each stage block.
+
+ plan_info: dict, optional
+ Used to hold plan information.
+
+ **kwargs:
+ The arguments to infer shape.
+
+ Returns
+ -------
+ alloc_sym: symbol
+ A symbol with force mirror tagged on the nodes for better allocation.
+ """
+ threshold = threshold << 20
+ sym = sym.__copy__()
+ internals = sym.get_internals()
+ _, out_shapes, _ = internals.infer_shape(**kwargs)
+ shape_dict = list(zip(internals.list_outputs(), out_shapes))
+ total_size = 0
+ param_size = 0
+ local_size = 0
+ save_size = 0
+ max_size = 0
+ last_sb = None
+ last_local = 0
+ period = 1
+ last_stage = ''
+ stage_decision = ''
+
+ for idx, item in enumerate(shape_dict):
+ sb = internals[idx]
+ name, shape = item
+ if is_param(name):
+ param_size += prod(shape) * 4
+ continue
+ else:
+ total_size += prod(shape) * 4
+ local_size += prod(shape) * 4
+ sb._set_attr(force_mirroring='True')
+
+ if sb.attr('mirror_stage') is not None:
+ stage = sb.attr('mirror_stage')
+ if stage == 'True' or stage != last_stage:
+ if local_size > threshold:
+ save_size += prod(shape) * 4
+ max_size = max(max_size, local_size)
+ local_size = 0
+ stage_decision = 'False'
+ sb._set_attr(force_mirroring=stage_decision)
+ else:
+ stage_decision = 'True'
+ pass
+ last_stage = stage
+ elif stage == last_stage and stage_decision == 'False':
+ save_size += prod(shape) * 4
+ sb._set_attr(force_mirroring=stage_decision)
+
+ if plan_info is not None:
+ plan_info['max_size'] = max_size
+ plan_info['save_size'] = save_size
+ return sym
+
+
+def get_cost(sym, type_dict=None, **kwargs):
+ """Get the cost of the current symbolic plan by running bind on CPU.
+
+ sym : Symbolic Variable
+
+ """
+ texec = sym.simple_bind(ctx=mx.gpu(),
+ grad_req='write',
+ type_dict=type_dict,
+ **kwargs)
+ return int(texec.debug_str().split('\n')[-3].split()[1])
+
+
+def search_plan(sym, ntrial=6, type_dict=None, **kwargs):
+ """Quickly heurestic search over possible plans to find good memory plan.
+
+ Parameters
+ ----------
+ sym : symbolic
+ Symbolic configurations
+
+ ntrial: integer
+ Additional grid search steps
+ """
+ history = []
+ threshold = 0
+ min_threshold = None
+ min_cost = None
+ nbegin = 3
+
+ for k in range(nbegin):
+ info = {}
+ sym = make_mirror_plan(sym,
+ threshold=threshold,
+ plan_info=info,
+ **kwargs)
+ cost = get_cost(sym, type_dict, **kwargs)
+ save_size = info['save_size'] >> 20
+ local_size = info['max_size'] >> 20
+ guess = int(math.sqrt(save_size * local_size / 2))
+ if min_cost is None or min_cost > cost:
+ min_cost = cost
+ if min_threshold is None or local_size < min_threshold:
+ min_threshold = local_size
+ print("Search threshold=%d MB, cost=%d MB" % (threshold, cost))
+ history.append((cost, threshold, sym))
+ threshold = guess
+
+ max_threshold = threshold * math.sqrt(2)
+ step = int((max_threshold - min_threshold) / ntrial)
+ threshold = min_threshold + step
+ if step > 0:
+ for k in range(ntrial):
+ sym = make_mirror_plan(sym,
+ threshold=threshold,
+ plan_info=info,
+ **kwargs)
+ cost = get_cost(sym, type_dict, **kwargs)
+ print("Search threshold=%d MB, cost=%d MB" % (threshold, cost))
+ history.append((cost, threshold, sym))
+ threshold += step
+
+ history.sort(key=lambda x: x[0])
+ cost, threshold, sym = history[0]
+    print('Found best plan: threshold=%d MB, cost=%d MB' % (threshold, cost))
+ return sym
+
+
+def make_mirror_plan_to_layer(sym,
+ layer_name,
+ threshold,
+ plan_info=None,
+ **kwargs):
+ """
+    sym is the original symbol.
+    layer_name is the name of the layer up to which mirroring is applied.
+    threshold is the approximate size of each mirror block (in MB).
+ """
+ threshold = threshold << 20
+ sym = sym.__copy__()
+ internals = sym.get_internals()
+ _, out_shapes, _ = internals.infer_shape(**kwargs)
+ shape_dict = list(zip(internals.list_outputs(), out_shapes))
+ total_size = 0
+ param_size = 0
+ local_size = 0
+ save_size = 0
+ max_size = 0
+ last_stage = ''
+ stage_decision = ''
+ switch = True
+
+ for idx, item in enumerate(shape_dict):
+ sb = internals[idx]
+ name, shape = item
+ #print(name, switch)
+ if is_param(name):
+ param_size += prod(shape) * 4
+ continue
+        elif switch and 'bn' not in name:
+ total_size += prod(shape) * 4
+ local_size += prod(shape) * 4
+ sb._set_attr(force_mirroring='True')
+ print('set force_mirroring', name, total_size, local_size)
+ if layer_name != '' and layer_name in name:
+ switch = False
+
+ if sb.attr('mirror_stage') is not None:
+ stage = sb.attr('mirror_stage')
+ #print(name, stage)
+ if stage == 'True' or stage != last_stage:
+ if local_size > threshold:
+ save_size += prod(shape) * 4
+ max_size = max(max_size, local_size)
+ local_size = 0
+ stage_decision = 'False'
+ sb._set_attr(force_mirroring=stage_decision)
+ else:
+ stage_decision = 'True'
+ pass
+ last_stage = stage
+ elif stage == last_stage and stage_decision == 'False':
+ save_size += prod(shape) * 4
+ sb._set_attr(force_mirroring=stage_decision)
+
+ if plan_info is not None:
+ plan_info['max_size'] = max_size
+ plan_info['save_size'] = save_size
+ return sym
+
+
+def search_plan_to_layer(sym,
+ layer_name=None,
+ threshold=500,
+ ntrial=6,
+ type_dict=None,
+ **kwargs):
+ """Quickly heurestic search over possible plans to find good memory plan.
+
+ Parameters
+ ----------
+ sym : symbolic
+ Symbolic configurations
+
+ ntrial: integer
+ Additional grid search steps
+ """
+ history = []
+ min_threshold = None
+ min_cost = None
+ nbegin = 10
+
+ for k in range(nbegin):
+ info = {}
+ sym = make_mirror_plan_to_layer(sym,
+ layer_name=layer_name,
+ threshold=threshold,
+ plan_info=info,
+ **kwargs)
+ cost = get_cost(sym, type_dict, **kwargs)
+ save_size = info['save_size'] >> 20
+ local_size = info['max_size'] >> 20
+ guess = 300 * (k + 1)
+ if min_cost is None or min_cost > cost:
+ min_cost = cost
+ if min_threshold is None or local_size < min_threshold:
+ min_threshold = local_size
+ print("Search threshold=%d MB, cost=%d MB" % (threshold, cost))
+ history.append((cost, threshold, sym))
+ threshold = guess
+
+ max_threshold = threshold * math.sqrt(2)
+ step = int((max_threshold - min_threshold) / ntrial)
+ print(min_threshold, max_threshold, step)
+ threshold = min_threshold + step
+ if step > 0:
+ for k in range(ntrial):
+ sym = make_mirror_plan_to_layer(sym,
+ layer_name=layer_name,
+ threshold=threshold,
+ plan_info=info,
+ **kwargs)
+ cost = get_cost(sym, type_dict, **kwargs)
+ print("Search threshold=%d MB, cost=%d MB" % (threshold, cost))
+ history.append((cost, threshold, sym))
+ threshold += step
+
+ history.sort(key=lambda x: x[0])
+ cost, threshold, sym = history[0]
+    print('Found best plan: threshold=%d MB, cost=%d MB' % (threshold, cost))
+ return sym
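+
+# Usage sketch (the layer name is hypothetical): mirror activations up to the
+# named layer, skipping batch-norm outputs:
+#   net = search_plan_to_layer(sym, layer_name='stage4', data=dshape)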
diff --git a/gender-age/symbol_utils.py b/recognition/subcenter_arcface/symbol/symbol_utils.py
similarity index 53%
rename from gender-age/symbol_utils.py
rename to recognition/subcenter_arcface/symbol/symbol_utils.py
index 0a48697..3eb6f1c 100644
--- a/gender-age/symbol_utils.py
+++ b/recognition/subcenter_arcface/symbol/symbol_utils.py
@@ -1,4 +1,8 @@
+import sys
+import os
import mxnet as mx
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from config import config
def Conv(**kwargs):
@@ -12,11 +16,14 @@ def Conv(**kwargs):
def Act(data, act_type, name):
#ignore param act_type, set it in this function
- body = mx.sym.LeakyReLU(data=data, act_type='prelu', name=name)
+ if act_type == 'prelu':
+ body = mx.sym.LeakyReLU(data=data, act_type='prelu', name=name)
+ else:
+ body = mx.sym.Activation(data=data, act_type=act_type, name=name)
return body
-bn_mom = 0.9
+bn_mom = config.bn_mom
def Linear(data,
@@ -42,8 +49,7 @@ def Linear(data,
return bn
-def get_fc1(last_conv, num_classes, fc_type):
- bn_mom = 0.9
+def get_fc1(last_conv, num_classes, fc_type, input_channel=512):
body = last_conv
if fc_type == 'Z':
body = mx.sym.BatchNorm(data=body,
@@ -68,13 +74,67 @@ def get_fc1(last_conv, num_classes, fc_type):
eps=2e-5,
momentum=bn_mom,
name='fc1')
+ elif fc_type == 'FC':
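+        # BN -> FC -> BN embedding head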
+ body = mx.sym.BatchNorm(data=body,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name='bn1')
+ fc1 = mx.sym.FullyConnected(data=body,
+ num_hidden=num_classes,
+ name='pre_fc1')
+ fc1 = mx.sym.BatchNorm(data=fc1,
+ fix_gamma=True,
+ eps=2e-5,
+ momentum=bn_mom,
+ name='fc1')
+ elif fc_type == 'SFC':
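+        # like 'FC', but with an extra stride-2 depthwise 3x3 conv and a
+        # 1x1 conv before the embedding FC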
+ body = mx.sym.BatchNorm(data=body,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name='bn1')
+ body = Conv(data=body,
+ num_filter=input_channel,
+ kernel=(3, 3),
+ stride=(2, 2),
+ pad=(1, 1),
+ no_bias=True,
+ name="convf",
+ num_group=input_channel)
+ body = mx.sym.BatchNorm(data=body,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name='bnf')
+ body = Act(data=body, act_type=config.net_act, name='reluf')
+ body = Conv(data=body,
+ num_filter=input_channel,
+ kernel=(1, 1),
+ pad=(0, 0),
+ stride=(1, 1),
+ name="convf2")
+ body = mx.sym.BatchNorm(data=body,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name='bnf2')
+ body = Act(data=body, act_type=config.net_act, name='reluf2')
+ fc1 = mx.sym.FullyConnected(data=body,
+ num_hidden=num_classes,
+ name='pre_fc1')
+ fc1 = mx.sym.BatchNorm(data=fc1,
+ fix_gamma=True,
+ eps=2e-5,
+ momentum=bn_mom,
+ name='fc1')
elif fc_type == 'GAP':
bn1 = mx.sym.BatchNorm(data=body,
fix_gamma=False,
eps=2e-5,
momentum=bn_mom,
name='bn1')
- relu1 = Act(data=bn1, act_type='relu', name='relu1')
+ relu1 = Act(data=bn1, act_type=config.net_act, name='relu1')
# Although kernel is not used here when global_pool=True, we should put one
pool1 = mx.sym.Pooling(data=relu1,
global_pool=True,
@@ -105,7 +165,7 @@ def get_fc1(last_conv, num_classes, fc_type):
eps=2e-5,
momentum=0.9,
name='convx_bn')
- body = Act(data=body, act_type='relu', name='convx_relu')
+ body = Act(data=body, act_type=config.net_act, name='convx_relu')
filters_in = num_classes
else:
body = last_conv
@@ -149,8 +209,8 @@ def get_fc1(last_conv, num_classes, fc_type):
name='fc1')
elif fc_type == "GDC": #mobilefacenet_v1
conv_6_dw = Linear(last_conv,
- num_filter=512,
- num_group=512,
+ num_filter=input_channel,
+ num_group=input_channel,
kernel=(7, 7),
pad=(0, 0),
stride=(1, 1),
@@ -209,46 +269,6 @@ def get_fc1(last_conv, num_classes, fc_type):
eps=2e-5,
momentum=bn_mom,
name='fc1')
- else:
- bn1 = mx.sym.BatchNorm(data=body,
- fix_gamma=False,
- eps=2e-5,
- momentum=bn_mom,
- name='bn1')
- relu1 = Act(data=bn1, act_type='relu', name='relu1')
- # Although kernel is not used here when global_pool=True, we should put one
- pool1 = mx.sym.Pooling(data=relu1,
- global_pool=True,
- kernel=(7, 7),
- pool_type='avg',
- name='pool1')
- flat = mx.sym.Flatten(data=pool1)
- if len(fc_type) > 1:
- if fc_type[1] == 'X':
- print('dropout mode')
- flat = mx.symbol.Dropout(data=flat, p=0.2)
- fc_type = fc_type[0]
- if fc_type == 'A':
- fc1 = flat
- else:
- #B-D
- #B
- fc1 = mx.sym.FullyConnected(data=flat,
- num_hidden=num_classes,
- name='pre_fc1')
- if fc_type == 'C':
- fc1 = mx.sym.BatchNorm(data=fc1,
- fix_gamma=True,
- eps=2e-5,
- momentum=bn_mom,
- name='fc1')
- elif fc_type == 'D':
- fc1 = mx.sym.BatchNorm(data=fc1,
- fix_gamma=True,
- eps=2e-5,
- momentum=bn_mom,
- name='fc1')
- fc1 = Act(data=fc1, act_type='relu', name='fc1_relu')
return fc1
@@ -293,7 +313,7 @@ def residual_unit_v3(data, num_filter, stride, dim_match, name, **kwargs):
eps=2e-5,
momentum=bn_mom,
name=name + '_bn2')
- act1 = Act(data=bn2, act_type='relu', name=name + '_relu1')
+ act1 = Act(data=bn2, act_type=config.net_act, name=name + '_relu1')
conv2 = Conv(data=act1,
num_filter=num_filter,
kernel=(3, 3),
@@ -328,9 +348,203 @@ def residual_unit_v3(data, num_filter, stride, dim_match, name, **kwargs):
return bn3 + shortcut
+def residual_unit_v1l(data, num_filter, stride, dim_match, name, bottle_neck):
+ """Return ResNet Unit symbol for building ResNet
+ Parameters
+ ----------
+ data : str
+ Input data
+ num_filter : int
+ Number of output channels
+ bnf : int
+ Bottle neck channels factor with regard to num_filter
+ stride : tuple
+ Stride used in convolution
+ dim_match : Boolean
+ True means channel number between input and output is the same, otherwise means differ
+ name : str
+ Base name of the operators
+ workspace : int
+ Workspace used in convolution operator
+ """
+ workspace = config.workspace
+ bn_mom = config.bn_mom
+ memonger = False
+ use_se = config.net_se
+ act_type = config.net_act
+ #print('in unit1')
+ if bottle_neck:
+ conv1 = Conv(data=data,
+ num_filter=int(num_filter * 0.25),
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ no_bias=True,
+ workspace=workspace,
+ name=name + '_conv1')
+ bn1 = mx.sym.BatchNorm(data=conv1,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name=name + '_bn1')
+ act1 = Act(data=bn1, act_type=act_type, name=name + '_relu1')
+ conv2 = Conv(data=act1,
+ num_filter=int(num_filter * 0.25),
+ kernel=(3, 3),
+ stride=(1, 1),
+ pad=(1, 1),
+ no_bias=True,
+ workspace=workspace,
+ name=name + '_conv2')
+ bn2 = mx.sym.BatchNorm(data=conv2,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name=name + '_bn2')
+ act2 = Act(data=bn2, act_type=act_type, name=name + '_relu2')
+ conv3 = Conv(data=act2,
+ num_filter=num_filter,
+ kernel=(1, 1),
+ stride=stride,
+ pad=(0, 0),
+ no_bias=True,
+ workspace=workspace,
+ name=name + '_conv3')
+ bn3 = mx.sym.BatchNorm(data=conv3,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name=name + '_bn3')
+
+ if use_se:
+ #se begin
+ body = mx.sym.Pooling(data=bn3,
+ global_pool=True,
+ kernel=(7, 7),
+ pool_type='avg',
+ name=name + '_se_pool1')
+ body = Conv(data=body,
+ num_filter=num_filter // 16,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ name=name + "_se_conv1",
+ workspace=workspace)
+ body = Act(data=body, act_type=act_type, name=name + '_se_relu1')
+ body = Conv(data=body,
+ num_filter=num_filter,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ name=name + "_se_conv2",
+ workspace=workspace)
+ body = mx.symbol.Activation(data=body,
+ act_type='sigmoid',
+ name=name + "_se_sigmoid")
+ bn3 = mx.symbol.broadcast_mul(bn3, body)
+ #se end
+
+ if dim_match:
+ shortcut = data
+ else:
+ conv1sc = Conv(data=data,
+ num_filter=num_filter,
+ kernel=(1, 1),
+ stride=stride,
+ no_bias=True,
+ workspace=workspace,
+ name=name + '_conv1sc')
+ shortcut = mx.sym.BatchNorm(data=conv1sc,
+ fix_gamma=False,
+ eps=2e-5,
+ momentum=bn_mom,
+ name=name + '_sc')
+ if memonger:
+ shortcut._set_attr(mirror_stage='True')
+ return Act(data=bn3 + shortcut,
+ act_type=act_type,
+ name=name + '_relu3')
+ else:
+ conv1 = Conv(data=data,
+ num_filter=num_filter,
+ kernel=(3, 3),
+ stride=(1, 1),
+ pad=(1, 1),
+ no_bias=True,
+ workspace=workspace,
+ name=name + '_conv1')
+ bn1 = mx.sym.BatchNorm(data=conv1,
+ fix_gamma=False,
+ momentum=bn_mom,
+ eps=2e-5,
+ name=name + '_bn1')
+ act1 = Act(data=bn1, act_type=act_type, name=name + '_relu1')
+ conv2 = Conv(data=act1,
+ num_filter=num_filter,
+ kernel=(3, 3),
+ stride=stride,
+ pad=(1, 1),
+ no_bias=True,
+ workspace=workspace,
+ name=name + '_conv2')
+ bn2 = mx.sym.BatchNorm(data=conv2,
+ fix_gamma=False,
+ momentum=bn_mom,
+ eps=2e-5,
+ name=name + '_bn2')
+ if use_se:
+ #se begin
+ body = mx.sym.Pooling(data=bn2,
+ global_pool=True,
+ kernel=(7, 7),
+ pool_type='avg',
+ name=name + '_se_pool1')
+ body = Conv(data=body,
+ num_filter=num_filter // 16,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ name=name + "_se_conv1",
+ workspace=workspace)
+ body = Act(data=body, act_type=act_type, name=name + '_se_relu1')
+ body = Conv(data=body,
+ num_filter=num_filter,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ name=name + "_se_conv2",
+ workspace=workspace)
+ body = mx.symbol.Activation(data=body,
+ act_type='sigmoid',
+ name=name + "_se_sigmoid")
+ bn2 = mx.symbol.broadcast_mul(bn2, body)
+ #se end
+
+ if dim_match:
+ shortcut = data
+ else:
+ conv1sc = Conv(data=data,
+ num_filter=num_filter,
+ kernel=(1, 1),
+ stride=stride,
+ no_bias=True,
+ workspace=workspace,
+ name=name + '_conv1sc')
+ shortcut = mx.sym.BatchNorm(data=conv1sc,
+ fix_gamma=False,
+ momentum=bn_mom,
+ eps=2e-5,
+ name=name + '_sc')
+ if memonger:
+ shortcut._set_attr(mirror_stage='True')
+ return Act(data=bn2 + shortcut,
+ act_type=act_type,
+ name=name + '_relu3')
+
+
def get_head(data, version_input, num_filter):
- bn_mom = 0.9
- workspace = 256
+ bn_mom = config.bn_mom
+ workspace = config.workspace
kwargs = {'bn_mom': bn_mom, 'workspace': workspace}
data = data - 127.5
data = data * 0.0078125
@@ -349,7 +563,7 @@ def get_head(data, version_input, num_filter):
eps=2e-5,
momentum=bn_mom,
name='bn0')
- body = Act(data=body, act_type='relu', name='relu0')
+ body = Act(data=body, act_type=config.net_act, name='relu0')
body = mx.sym.Pooling(data=body,
kernel=(3, 3),
stride=(2, 2),
@@ -371,10 +585,11 @@ def get_head(data, version_input, num_filter):
eps=2e-5,
momentum=bn_mom,
name='bn0')
- body = Act(data=body, act_type='relu', name='relu0')
- body = residual_unit_v3(body,
- _num_filter, (2, 2),
- False,
- name='head',
- **kwargs)
+ body = Act(data=body, act_type=config.net_act, name='relu0')
+ #body = residual_unit_v3(body, _num_filter, (2, 2), False, name='head', **kwargs)
+ body = residual_unit_v1l(body,
+ _num_filter, (2, 2),
+ False,
+ name='head',
+ bottle_neck=False)
return body
diff --git a/recognition/subcenter_arcface/symbol/vargfacenet.py b/recognition/subcenter_arcface/symbol/vargfacenet.py
new file mode 100644
index 0000000..434e8be
--- /dev/null
+++ b/recognition/subcenter_arcface/symbol/vargfacenet.py
@@ -0,0 +1,578 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+'''
+Author: Horizon Robotics Inc.
+The company is committed to be the global leader of edge AI platform.
+The model implemented in this scripts runs ~200fps on the Sunrise 2.
+Sunrise 2 is the second generation of an embedded AI chip designed by Horizon Robotics,
+targeting to empower AIoT devices by AI.
+
+Implemented the following paper:
+Mengjia Yan, Mengao Zhao, Zining Xu, Qian Zhang, Guoli Wang, Zhizhong Su. "VarGFaceNet: An Efficient Variable Group Convolutional Neural Network for Lightweight Face Recognition" (https://arxiv.org/abs/1910.04985)
+
+'''
+
+import os
+import sys
+
+import mxnet as mx
+import symbol_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from config import config
+
+
+def Act(data, act_type, name):
+ if act_type == 'prelu':
+ body = mx.sym.LeakyReLU(data=data, act_type='prelu', name=name)
+ else:
+ body = mx.symbol.Activation(data=data, act_type=act_type, name=name)
+ return body
+
+
+def get_setting_params(**kwargs):
+ # bn_params
+ bn_mom = kwargs.get('bn_mom', 0.9)
+ bn_eps = kwargs.get('bn_eps', 2e-5)
+ fix_gamma = kwargs.get('fix_gamma', False)
+ use_global_stats = kwargs.get('use_global_stats', False)
+ # net_setting param
+ workspace = kwargs.get('workspace', 512)
+ act_type = kwargs.get('act_type', 'prelu')
+ use_se = kwargs.get('use_se', True)
+ se_ratio = kwargs.get('se_ratio', 4)
+ group_base = kwargs.get('group_base', 8)
+
+ setting_params = {}
+ setting_params['bn_mom'] = bn_mom
+ setting_params['bn_eps'] = bn_eps
+ setting_params['fix_gamma'] = fix_gamma
+ setting_params['use_global_stats'] = use_global_stats
+ setting_params['workspace'] = workspace
+ setting_params['act_type'] = act_type
+ setting_params['use_se'] = use_se
+ setting_params['se_ratio'] = se_ratio
+ setting_params['group_base'] = group_base
+
+ return setting_params
+
+
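+# Squeeze-and-Excitation: global average pool -> 1x1 conv bottleneck
+# (1/se_ratio of the channels) -> 1x1 conv -> sigmoid gate that is
+# multiplied back onto the input feature map.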
+def se_block(data, num_filter, setting_params, name):
+ se_ratio = setting_params['se_ratio']
+ act_type = setting_params['act_type']
+
+ pool1 = mx.sym.Pooling(data=data,
+ global_pool=True,
+ pool_type='avg',
+ name=name + '_se_pool1')
+ conv1 = mx.sym.Convolution(data=pool1,
+ num_filter=num_filter // se_ratio,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ name=name + "_se_conv1")
+ act1 = Act(data=conv1, act_type=act_type, name=name + '_se_act1')
+
+ conv2 = mx.sym.Convolution(data=act1,
+ num_filter=num_filter,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ name=name + "_se_conv2")
+ act2 = mx.symbol.Activation(data=conv2,
+ act_type='sigmoid',
+ name=name + "_se_sigmoid")
+ out_data = mx.symbol.broadcast_mul(data, act2)
+ return out_data
+
+
+def separable_conv2d(data,
+ in_channels,
+ out_channels,
+ kernel,
+ pad,
+ setting_params,
+ stride=(1, 1),
+ factor=1,
+ bias=False,
+ bn_dw_out=True,
+ act_dw_out=True,
+ bn_pw_out=True,
+ act_pw_out=True,
+ dilate=1,
+ name=None):
+ bn_mom = setting_params['bn_mom']
+ bn_eps = setting_params['bn_eps']
+ fix_gamma = setting_params['fix_gamma']
+ use_global_stats = setting_params['use_global_stats']
+ workspace = setting_params['workspace']
+ group_base = setting_params['group_base']
+ act_type = setting_params['act_type']
+ assert in_channels % group_base == 0
+
+ # depthwise
+ dw_out = mx.sym.Convolution(data=data,
+ num_filter=int(in_channels * factor),
+ kernel=kernel,
+ pad=pad,
+ stride=stride,
+ no_bias=False if bias else True,
+ num_group=int(in_channels / group_base),
+ dilate=(dilate, dilate),
+ workspace=workspace,
+ name=name + '_conv2d_depthwise')
+ if bn_dw_out:
+ dw_out = mx.sym.BatchNorm(data=dw_out,
+ fix_gamma=fix_gamma,
+ eps=bn_eps,
+ momentum=bn_mom,
+ use_global_stats=use_global_stats,
+ name=name + '_conv2d_depthwise_bn')
+ if act_dw_out:
+ dw_out = Act(data=dw_out,
+ act_type=act_type,
+ name=name + '_conv2d_depthwise_act')
+ # pointwise
+ pw_out = mx.sym.Convolution(data=dw_out,
+ num_filter=out_channels,
+ kernel=(1, 1),
+ stride=(1, 1),
+ pad=(0, 0),
+ num_group=1,
+ no_bias=False if bias else True,
+ workspace=workspace,
+ name=name + '_conv2d_pointwise')
+ if bn_pw_out:
+ pw_out = mx.sym.BatchNorm(data=pw_out,
+ fix_gamma=fix_gamma,
+ eps=bn_eps,
+ momentum=bn_mom,
+ use_global_stats=use_global_stats,
+ name=name + '_conv2d_pointwise_bn')
+ if act_pw_out:
+ pw_out = Act(data=pw_out,
+ act_type=act_type,
+ name=name + '_conv2d_pointwise_act')
+ return pw_out
+
+
+def vargnet_block(data,
+ n_out_ch1,
+ n_out_ch2,
+ n_out_ch3,
+ setting_params,
+ factor=2,
+ dim_match=True,
+ multiplier=1,
+ kernel=(3, 3),
+ stride=(1, 1),
+ dilate=1,
+ with_dilate=False,
+ name=None):
+ use_se = setting_params['use_se']
+ act_type = setting_params['act_type']
+
+ out_channels_1 = int(n_out_ch1 * multiplier)
+ out_channels_2 = int(n_out_ch2 * multiplier)
+ out_channels_3 = int(n_out_ch3 * multiplier)
+
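+    # "same" padding for a dilated kernel whose effective size is (k - 1) * dilate + 1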
+ pad = (((kernel[0] - 1) * dilate + 1) // 2,
+ ((kernel[1] - 1) * dilate + 1) // 2)
+
+ if with_dilate:
+ stride = (1, 1)
+ if dim_match:
+ short_cut = data
+ else:
+ short_cut = separable_conv2d(data=data,
+ in_channels=out_channels_1,
+ out_channels=out_channels_3,
+ kernel=kernel,
+ pad=pad,
+ setting_params=setting_params,
+ stride=stride,
+ factor=factor,
+ bias=False,
+ act_pw_out=False,
+ dilate=dilate,
+ name=name + '_shortcut')
+ sep1_data = separable_conv2d(data=data,
+ in_channels=out_channels_1,
+ out_channels=out_channels_2,
+ kernel=kernel,
+ pad=pad,
+ setting_params=setting_params,
+ stride=stride,
+ factor=factor,
+ bias=False,
+ dilate=dilate,
+ name=name + '_sep1_data')
+ sep2_data = separable_conv2d(data=sep1_data,
+ in_channels=out_channels_2,
+ out_channels=out_channels_3,
+ kernel=kernel,
+ pad=pad,
+ setting_params=setting_params,
+ stride=(1, 1),
+ factor=factor,
+ bias=False,
+ dilate=dilate,
+ act_pw_out=False,
+ name=name + '_sep2_data')
+
+ if use_se:
+ sep2_data = se_block(data=sep2_data,
+ num_filter=out_channels_3,
+ setting_params=setting_params,
+ name=name)
+
+ out_data = sep2_data + short_cut
+ out_data = Act(data=out_data,
+ act_type=act_type,
+ name=name + '_out_data_act')
+ return out_data
+
+
+def vargnet_branch_merge_block(data,
+ n_out_ch1,
+ n_out_ch2,
+ n_out_ch3,
+ setting_params,
+ factor=2,
+ dim_match=False,
+ multiplier=1,
+ kernel=(3, 3),
+ stride=(2, 2),
+ dilate=1,
+ with_dilate=False,
+ name=None):
+ act_type = setting_params['act_type']
+
+ out_channels_1 = int(n_out_ch1 * multiplier)
+ out_channels_2 = int(n_out_ch2 * multiplier)
+ out_channels_3 = int(n_out_ch3 * multiplier)
+
+ pad = (((kernel[0] - 1) * dilate + 1) // 2,
+ ((kernel[1] - 1) * dilate + 1) // 2)
+
+ if with_dilate:
+ stride = (1, 1)
+ if dim_match:
+ short_cut = data
+ else:
+ short_cut = separable_conv2d(data=data,
+ in_channels=out_channels_1,
+ out_channels=out_channels_3,
+ kernel=kernel,
+ pad=pad,
+ setting_params=setting_params,
+ stride=stride,
+ factor=factor,
+ bias=False,
+ act_pw_out=False,
+ dilate=dilate,
+ name=name + '_shortcut')
+    sep1_data_branch1 = separable_conv2d(data=data,
+                                         in_channels=out_channels_1,
+                                         out_channels=out_channels_2,
+                                         kernel=kernel,
+                                         pad=pad,
+                                         setting_params=setting_params,
+                                         stride=stride,
+                                         factor=factor,
+                                         bias=False,
+                                         dilate=dilate,
+                                         act_pw_out=False,
+                                         name=name + '_sep1_data_branch')
+    sep1_data_branch2 = separable_conv2d(data=data,
+                                         in_channels=out_channels_1,
+                                         out_channels=out_channels_2,
+                                         kernel=kernel,
+                                         pad=pad,
+                                         setting_params=setting_params,
+                                         stride=stride,
+                                         factor=factor,
+                                         bias=False,
+                                         dilate=dilate,
+                                         act_pw_out=False,
+                                         name=name + '_sep2_data_branch')
+    sep1_data = sep1_data_branch1 + sep1_data_branch2
+    sep1_data = Act(data=sep1_data,
+                    act_type=act_type,
+                    name=name + '_sep1_data_act')
+ sep2_data = separable_conv2d(data=sep1_data,
+ in_channels=out_channels_2,
+ out_channels=out_channels_3,
+ kernel=kernel,
+ pad=pad,
+ setting_params=setting_params,
+ stride=(1, 1),
+ factor=factor,
+ bias=False,
+ dilate=dilate,
+ act_pw_out=False,
+ name=name + '_sep2_data')
+ out_data = sep2_data + short_cut
+ out_data = Act(data=out_data,
+ act_type=act_type,
+ name=name + '_out_data_act')
+ return out_data
+
+
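+# Builds one stage: a branch-merge unit performs the strided downsampling,
+# followed by (units - 1) identity-mapping vargnet blocks.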
+def add_vargnet_conv_block(data,
+ stage,
+ units,
+ in_channels,
+ out_channels,
+ setting_params,
+ kernel=(3, 3),
+ stride=(2, 2),
+ multiplier=1,
+ factor=2,
+ dilate=1,
+ with_dilate=False,
+ name=None):
+    assert stage >= 2, 'stage is {}, stage must be >= 2'.format(stage)
+ data = vargnet_branch_merge_block(data=data,
+ n_out_ch1=in_channels,
+ n_out_ch2=out_channels,
+ n_out_ch3=out_channels,
+ setting_params=setting_params,
+ factor=factor,
+ dim_match=False,
+ multiplier=multiplier,
+ kernel=kernel,
+ stride=stride,
+ dilate=dilate,
+ with_dilate=with_dilate,
+ name=name +
+ '_stage_{}_unit_1'.format(stage))
+ for i in range(units - 1):
+ data = vargnet_block(data=data,
+ n_out_ch1=out_channels,
+ n_out_ch2=out_channels,
+ n_out_ch3=out_channels,
+ setting_params=setting_params,
+ factor=factor,
+ dim_match=True,
+ multiplier=multiplier,
+ kernel=kernel,
+ stride=(1, 1),
+ dilate=dilate,
+ with_dilate=with_dilate,
+ name=name +
+ '_stage_{}_unit_{}'.format(stage, i + 2))
+ return data
+
+
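+# Stem: a 3x3 convolution with BN and activation, then either a 3x3 max
+# pooling or a stride-2 vargnet block for the first spatial reduction.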
+def add_head_block(data,
+ num_filter,
+ setting_params,
+ multiplier,
+ head_pooling=False,
+ kernel=(3, 3),
+ stride=(2, 2),
+ pad=(1, 1),
+ name=None):
+ bn_mom = setting_params['bn_mom']
+ bn_eps = setting_params['bn_eps']
+ fix_gamma = setting_params['fix_gamma']
+ use_global_stats = setting_params['use_global_stats']
+ workspace = setting_params['workspace']
+ act_type = setting_params['act_type']
+ channels = int(num_filter * multiplier)
+
+ conv1 = mx.sym.Convolution(data=data,
+ num_filter=channels,
+ kernel=kernel,
+ pad=pad,
+ stride=stride,
+ no_bias=True,
+ num_group=1,
+ workspace=workspace,
+ name=name + '_conv1')
+ bn1 = mx.sym.BatchNorm(data=conv1,
+ fix_gamma=fix_gamma,
+ eps=bn_eps,
+ momentum=bn_mom,
+ use_global_stats=use_global_stats,
+ name=name + '_conv1_bn')
+
+ act1 = Act(data=bn1, act_type=act_type, name=name + '_conv1_act')
+
+ if head_pooling:
+ head_data = mx.symbol.Pooling(data=act1,
+ kernel=(3, 3),
+ stride=(2, 2),
+ pad=(1, 1),
+ pool_type='max',
+ name=name + '_max_pooling')
+ else:
+ head_data = vargnet_block(data=act1,
+ n_out_ch1=num_filter,
+ n_out_ch2=num_filter,
+ n_out_ch3=num_filter,
+ setting_params=setting_params,
+ factor=1,
+ dim_match=False,
+ multiplier=multiplier,
+ kernel=kernel,
+ stride=(2, 2),
+ dilate=1,
+ with_dilate=False,
+ name=name + '_head_pooling')
+ return head_data
+
+
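+# Embedding head: optionally widens to last_channels with a 1x1 conv, applies
+# an unpadded 7x7 grouped depthwise conv to collapse the final feature map,
+# halves the channels with a pointwise conv, and feeds the result to
+# symbol_utils.get_fc1 to produce the emb_size-dimensional embedding.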
+def add_emb_block(data,
+ input_channels,
+ last_channels,
+ emb_size,
+ fc_type,
+ setting_params,
+ bias=False,
+ name=None):
+ bn_mom = setting_params['bn_mom']
+ bn_eps = setting_params['bn_eps']
+ fix_gamma = setting_params['fix_gamma']
+ use_global_stats = setting_params['use_global_stats']
+ workspace = setting_params['workspace']
+ act_type = setting_params['act_type']
+ group_base = setting_params['group_base']
+    # widen to last_channels with a 1x1 conv if the input width differs
+ if input_channels != last_channels:
+ data = mx.sym.Convolution(data=data,
+ num_filter=last_channels,
+ kernel=(1, 1),
+ pad=(0, 0),
+ stride=(1, 1),
+                                  no_bias=not bias,
+ workspace=workspace,
+ name=name + '_convx')
+ data = mx.sym.BatchNorm(data=data,
+ fix_gamma=fix_gamma,
+ eps=bn_eps,
+ momentum=bn_mom,
+ use_global_stats=use_global_stats,
+ name=name + '_convx_bn')
+ data = Act(data=data, act_type=act_type, name=name + '_convx_act')
+    # depthwise: unpadded 7x7 grouped conv; for a 112x112 input the final
+    # 7x7 feature map collapses to 1x1
+ convx_depthwise = mx.sym.Convolution(data=data,
+ num_filter=last_channels,
+ num_group=int(last_channels /
+ group_base),
+ kernel=(7, 7),
+ pad=(0, 0),
+ stride=(1, 1),
+                                         no_bias=not bias,
+ workspace=workspace,
+ name=name + '_convx_depthwise')
+ convx_depthwise = mx.sym.BatchNorm(data=convx_depthwise,
+ fix_gamma=fix_gamma,
+ eps=bn_eps,
+ momentum=bn_mom,
+ use_global_stats=use_global_stats,
+ name=name + '_convx_depthwise_bn')
+    # pointwise: 1x1 conv halving the channels before the embedding layer
+ convx_pointwise = mx.sym.Convolution(data=convx_depthwise,
+ num_filter=last_channels // 2,
+ kernel=(1, 1),
+ pad=(0, 0),
+ stride=(1, 1),
+                                         no_bias=not bias,
+ workspace=workspace,
+ name=name + '_convx_pointwise')
+ convx_pointwise = mx.sym.BatchNorm(data=convx_pointwise,
+ fix_gamma=fix_gamma,
+ eps=bn_eps,
+ momentum=bn_mom,
+ use_global_stats=use_global_stats,
+ name=name + '_convx_pointwise_bn')
+ convx_pointwise = Act(data=convx_pointwise,
+ act_type=act_type,
+ name=name + '_convx_pointwise_act')
+
+ fc1 = symbol_utils.get_fc1(convx_pointwise, emb_size, fc_type)
+ return fc1
+
+
+def get_symbol():
+ multiplier = config.net_multiplier
+ emb_size = config.emb_size
+ fc_type = config.net_output
+
+ kwargs = {
+ 'use_se': config.net_se,
+ 'act_type': config.net_act,
+ 'bn_mom': config.bn_mom,
+ 'workspace': config.workspace,
+ }
+
+ setting_params = get_setting_params(**kwargs)
+
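+    # VarGFaceNet-style configuration: a 32-filter stem, three stages of
+    # [3, 7, 4] units with widths [64, 128, 256] (each scaled by
+    # `multiplier`), and a fixed 1024-channel embedding head.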
+ factor = 2
+ head_pooling = False
+ num_stage = 3
+ stage_list = [2, 3, 4]
+ units = [3, 7, 4]
+ filter_list = [32, 64, 128, 256]
+ last_channels = 1024
+ dilate_list = [1, 1, 1]
+ with_dilate_list = [False, False, False]
+
+ data = mx.sym.Variable(name='data')
+ data = mx.sym.identity(data=data, name='id')
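+    # normalize uint8 pixels from [0, 255] to roughly [-1, 1]
+    # (0.0078125 == 1/128)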
+ data = data - 127.5
+ data = data * 0.0078125
+
+ body = add_head_block(data=data,
+ num_filter=filter_list[0],
+ setting_params=setting_params,
+ multiplier=multiplier,
+ head_pooling=head_pooling,
+ kernel=(3, 3),
+ stride=(1, 1),
+ pad=(1, 1),
+ name="vargface_head")
+
+ for i in range(num_stage):
+ body = add_vargnet_conv_block(data=body,
+ stage=stage_list[i],
+ units=units[i],
+ in_channels=filter_list[i],
+ out_channels=filter_list[i + 1],
+ setting_params=setting_params,
+ kernel=(3, 3),
+ stride=(2, 2),
+ multiplier=multiplier,
+ factor=factor,
+ dilate=dilate_list[i],
+ with_dilate=with_dilate_list[i],
+ name="vargface")
+ emb_feat = add_emb_block(data=body,
+ input_channels=filter_list[3],
+ last_channels=last_channels,
+ emb_size=emb_size,
+ fc_type=fc_type,
+ setting_params=setting_params,
+ bias=False,
+ name='embed')
+ return emb_feat
+
+
+if __name__ == '__main__':
+ get_symbol()
diff --git a/recognition/SubCenter-ArcFace/train_parall.py b/recognition/subcenter_arcface/train_parall.py
similarity index 99%
rename from recognition/SubCenter-ArcFace/train_parall.py
rename to recognition/subcenter_arcface/train_parall.py
index d444f6f..72b1ff7 100644
--- a/recognition/SubCenter-ArcFace/train_parall.py
+++ b/recognition/subcenter_arcface/train_parall.py
@@ -16,8 +16,10 @@ from mxnet import ndarray as nd
import argparse
import mxnet.optimizer as optimizer
from config import config, default, generate_config
+sys.path.append(os.path.join(os.path.dirname(__file__), 'symbol'))
+sys.path.append(os.path.join(os.path.dirname(__file__), 'common'))
import verification
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'symbol'))
+
import fresnet
import fmobilefacenet
import fmobilenet
diff --git a/recognition/tools/face2rec2.py b/recognition/tools/face2rec2.py
deleted file mode 100644
index a6af12c..0000000
--- a/recognition/tools/face2rec2.py
+++ /dev/null
@@ -1,320 +0,0 @@
-import os
-import sys
-import mxnet as mx
-import random
-import argparse
-import cv2
-import time
-import traceback
-from easydict import EasyDict as edict
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'common'))
-import face_align
-
-try:
- import multiprocessing
-except ImportError:
- multiprocessing = None
-
-
-def parse_lst_line(line):
- vec = line.strip().split("\t")
- assert len(vec) >= 3
- aligned = int(vec[0])
- image_path = vec[1]
- label = int(vec[2])
- bbox = None
- landmark = None
- #print(vec)
- if len(vec) > 3:
- bbox = np.zeros((4, ), dtype=np.int32)
- for i in xrange(3, 7):
- bbox[i - 3] = int(vec[i])
- landmark = None
- if len(vec) > 7:
- _l = []
- for i in xrange(7, 17):
- _l.append(float(vec[i]))
- landmark = np.array(_l).reshape((2, 5)).T
- #print(aligned)
- return image_path, label, bbox, landmark, aligned
-
-
-def read_list(path_in):
- with open(path_in) as fin:
- identities = []
- last = [-1, -1]
- _id = 1
- while True:
- line = fin.readline()
- if not line:
- break
- item = edict()
- item.flag = 0
- item.image_path, label, item.bbox, item.landmark, item.aligned = parse_lst_line(
- line)
- if not item.aligned and item.landmark is None:
- #print('ignore line', line)
- continue
- item.id = _id
- item.label = [label, item.aligned]
- yield item
- if label != last[0]:
- if last[1] >= 0:
- identities.append((last[1], _id))
- last[0] = label
- last[1] = _id
- _id += 1
- identities.append((last[1], _id))
- item = edict()
- item.flag = 2
- item.id = 0
- item.label = [float(_id), float(_id + len(identities))]
- yield item
- for identity in identities:
- item = edict()
- item.flag = 2
- item.id = _id
- _id += 1
- item.label = [float(identity[0]), float(identity[1])]
- yield item
-
-
-def image_encode(args, i, item, q_out):
- oitem = [item.id]
- #print('flag', item.flag)
- if item.flag == 0:
- fullpath = item.image_path
- header = mx.recordio.IRHeader(item.flag, item.label, item.id, 0)
- #print('write', item.flag, item.id, item.label)
- if item.aligned:
- with open(fullpath, 'rb') as fin:
- img = fin.read()
- s = mx.recordio.pack(header, img)
- q_out.put((i, s, oitem))
- else:
- img = cv2.imread(fullpath, args.color)
- assert item.landmark is not None
- img = face_align.norm_crop(img, item.landmark)
- s = mx.recordio.pack_img(header,
- img,
- quality=args.quality,
- img_fmt=args.encoding)
- q_out.put((i, s, oitem))
- else:
- header = mx.recordio.IRHeader(item.flag, item.label, item.id, 0)
- #print('write', item.flag, item.id, item.label)
- s = mx.recordio.pack(header, '')
- q_out.put((i, s, oitem))
-
-
-def read_worker(args, q_in, q_out):
- while True:
- deq = q_in.get()
- if deq is None:
- break
- i, item = deq
- image_encode(args, i, item, q_out)
-
-
-def write_worker(q_out, fname, working_dir):
- pre_time = time.time()
- count = 0
- fname = os.path.basename(fname)
- fname_rec = os.path.splitext(fname)[0] + '.rec'
- fname_idx = os.path.splitext(fname)[0] + '.idx'
- record = mx.recordio.MXIndexedRecordIO(
- os.path.join(working_dir, fname_idx),
- os.path.join(working_dir, fname_rec), 'w')
- buf = {}
- more = True
- while more:
- deq = q_out.get()
- if deq is not None:
- i, s, item = deq
- buf[i] = (s, item)
- else:
- more = False
- while count in buf:
- s, item = buf[count]
- del buf[count]
- if s is not None:
- #print('write idx', item[0])
- record.write_idx(item[0], s)
-
- if count % 1000 == 0:
- cur_time = time.time()
- print('time:', cur_time - pre_time, ' count:', count)
- pre_time = cur_time
- count += 1
-
-
-def parse_args():
- parser = argparse.ArgumentParser(
- formatter_class=argparse.ArgumentDefaultsHelpFormatter,
- description='Create an image list or \
- make a record database by reading from an image list')
- parser.add_argument('prefix',
- help='prefix of input/output lst and rec files.')
- #parser.add_argument('root', help='path to folder containing images.')
-
- cgroup = parser.add_argument_group('Options for creating image lists')
- cgroup.add_argument(
- '--list',
- type=bool,
- default=False,
- help=
- 'If this is set im2rec will create image list(s) by traversing root folder\
- and output to .lst.\
- Otherwise im2rec will read .lst and create a database at .rec'
- )
- cgroup.add_argument('--exts',
- nargs='+',
- default=['.jpeg', '.jpg'],
- help='list of acceptable image extensions.')
- cgroup.add_argument('--chunks',
- type=int,
- default=1,
- help='number of chunks.')
- cgroup.add_argument('--train-ratio',
- type=float,
- default=1.0,
- help='Ratio of images to use for training.')
- cgroup.add_argument('--test-ratio',
- type=float,
- default=0,
- help='Ratio of images to use for testing.')
- cgroup.add_argument(
- '--recursive',
- type=bool,
- default=False,
- help=
- 'If true recursively walk through subdirs and assign an unique label\
- to images in each folder. Otherwise only include images in the root folder\
- and give them label 0.')
- cgroup.add_argument('--shuffle',
- type=bool,
- default=True,
- help='If this is set as True, \
- im2rec will randomize the image order in .lst')
-
- rgroup = parser.add_argument_group('Options for creating database')
- rgroup.add_argument(
- '--quality',
- type=int,
- default=95,
- help=
- 'JPEG quality for encoding, 1-100; or PNG compression for encoding, 1-9'
- )
- rgroup.add_argument(
- '--num-thread',
- type=int,
- default=1,
- help=
- 'number of thread to use for encoding. order of images will be different\
- from the input list if >1. the input list will be modified to match the\
- resulting order.')
- rgroup.add_argument('--color',
- type=int,
- default=1,
- choices=[-1, 0, 1],
- help='specify the color mode of the loaded image.\
- 1: Loads a color image. Any transparency of image will be neglected. It is the default flag.\
- 0: Loads image in grayscale mode.\
- -1:Loads image as such including alpha channel.')
- rgroup.add_argument('--encoding',
- type=str,
- default='.jpg',
- choices=['.jpg', '.png'],
- help='specify the encoding of the images.')
- rgroup.add_argument(
- '--pack-label',
- type=bool,
- default=False,
- help='Whether to also pack multi dimensional label in the record file')
- args = parser.parse_args()
- args.prefix = os.path.abspath(args.prefix)
- #args.root = os.path.abspath(args.root)
- return args
-
-
-if __name__ == '__main__':
- args = parse_args()
- if args.list:
- pass
- #make_list(args)
- else:
- if os.path.isdir(args.prefix):
- working_dir = args.prefix
- else:
- working_dir = os.path.dirname(args.prefix)
- image_size = (112, 112)
- print('image_size', image_size)
- args.image_h = image_size[0]
- args.image_w = image_size[1]
- files = [
- os.path.join(working_dir, fname)
- for fname in os.listdir(working_dir)
- if os.path.isfile(os.path.join(working_dir, fname))
- ]
- count = 0
- for fname in files:
- if fname.startswith(args.prefix) and fname.endswith('.lst'):
- print('Creating .rec file from', fname, 'in', working_dir)
- count += 1
- image_list = read_list(fname)
- # -- write_record -- #
- if args.num_thread > 1 and multiprocessing is not None:
- q_in = [
- multiprocessing.Queue(1024)
- for i in range(args.num_thread)
- ]
- q_out = multiprocessing.Queue(1024)
- read_process = [multiprocessing.Process(target=read_worker, args=(args, q_in[i], q_out)) \
- for i in range(args.num_thread)]
- for p in read_process:
- p.start()
- write_process = multiprocessing.Process(
- target=write_worker, args=(q_out, fname, working_dir))
- write_process.start()
-
- for i, item in enumerate(image_list):
- q_in[i % len(q_in)].put((i, item))
- for q in q_in:
- q.put(None)
- for p in read_process:
- p.join()
-
- q_out.put(None)
- write_process.join()
- else:
- print(
- 'multiprocessing not available, fall back to single threaded encoding'
- )
- try:
- import Queue as queue
- except ImportError:
- import queue
- q_out = queue.Queue()
- fname = os.path.basename(fname)
- fname_rec = os.path.splitext(fname)[0] + '.rec'
- fname_idx = os.path.splitext(fname)[0] + '.idx'
- record = mx.recordio.MXIndexedRecordIO(
- os.path.join(working_dir, fname_idx),
- os.path.join(working_dir, fname_rec), 'w')
- cnt = 0
- pre_time = time.time()
- for i, item in enumerate(image_list):
- image_encode(args, i, item, q_out)
- if q_out.empty():
- continue
- _, s, item = q_out.get()
- #header, _ = mx.recordio.unpack(s)
- #print('write header label', header.label)
- record.write_idx(item[0], s)
- if cnt % 1000 == 0:
- cur_time = time.time()
- print('time:', cur_time - pre_time, ' count:', cnt)
- pre_time = cur_time
- cnt += 1
- if not count:
- print('Did not find and list file with prefix %s' % args.prefix)
diff --git a/resources/11513D05.jpg b/resources/11513D05.jpg
deleted file mode 100644
index c38bb0d..0000000
Binary files a/resources/11513D05.jpg and /dev/null differ
diff --git a/resources/arcface.png b/resources/arcface.png
deleted file mode 100644
index fa43f9e..0000000
Binary files a/resources/arcface.png and /dev/null differ
diff --git a/resources/cov_test.jpg b/resources/cov_test.jpg
deleted file mode 100644
index 8d5bbe5..0000000
Binary files a/resources/cov_test.jpg and /dev/null differ
diff --git a/resources/facerecognitionfromvideo.PNG b/resources/facerecognitionfromvideo.PNG
deleted file mode 100644
index 96f7724..0000000
Binary files a/resources/facerecognitionfromvideo.PNG and /dev/null differ
diff --git a/resources/lfr19_wechat1.jpg b/resources/lfr19_wechat1.jpg
deleted file mode 100644
index 3ba0dad..0000000
Binary files a/resources/lfr19_wechat1.jpg and /dev/null differ
diff --git a/resources/mainsteps.png b/resources/mainsteps.png
deleted file mode 100644
index 1d3d1ff..0000000
Binary files a/resources/mainsteps.png and /dev/null differ
diff --git a/resources/memoryspeed.png b/resources/memoryspeed.png
deleted file mode 100644
index d4e1a44..0000000
Binary files a/resources/memoryspeed.png and /dev/null differ
diff --git a/resources/mfrlogo.jpg b/resources/mfrlogo.jpg
deleted file mode 100644
index 112896c..0000000
Binary files a/resources/mfrlogo.jpg and /dev/null differ
diff --git a/resources/retina_R50_ex1.jpg b/resources/retina_R50_ex1.jpg
deleted file mode 100644
index 603cda3..0000000
Binary files a/resources/retina_R50_ex1.jpg and /dev/null differ
diff --git a/resources/retina_R50_ex2.jpg b/resources/retina_R50_ex2.jpg
deleted file mode 100644
index ec2a606..0000000
Binary files a/resources/retina_R50_ex2.jpg and /dev/null differ
diff --git a/resources/subcenterarcfacediff.png b/resources/subcenterarcfacediff.png
deleted file mode 100644
index 541fa76..0000000
Binary files a/resources/subcenterarcfacediff.png and /dev/null differ
diff --git a/resources/subcenterarcfaceframework.png b/resources/subcenterarcfaceframework.png
deleted file mode 100644
index b7be824..0000000
Binary files a/resources/subcenterarcfaceframework.png and /dev/null differ
diff --git a/resources/widerfacevaltest.png b/resources/widerfacevaltest.png
deleted file mode 100644
index 65807de..0000000
Binary files a/resources/widerfacevaltest.png and /dev/null differ
diff --git a/sample-images/t2.jpg b/sample-images/t2.jpg
deleted file mode 100644
index dcca930..0000000
Binary files a/sample-images/t2.jpg and /dev/null differ