feat: compile the cython evaluation module automatically

pull/600/head
l1aoxingyu 2021-10-21 14:41:38 +08:00
parent 10a5f38aaa
commit 43da387b77
5 changed files with 42 additions and 10 deletions

View File

@ -1,8 +1,6 @@
from .evaluator import DatasetEvaluator, inference_context, inference_on_dataset
from .rank import evaluate_rank
from .reid_evaluation import ReidEvaluator
from .clas_evaluator import ClasEvaluator
from .roc import evaluate_roc
from .testing import print_csv_format, verify_results
__all__ = [k for k in globals().keys() if not k.startswith("_")]

View File

@ -1,7 +1,6 @@
all:
python3 setup.py build_ext --inplace
rm -rf build
python3 test_cython.py
clean:
rm -rf build
rm -f rank_cy.c *.so

View File

@ -2,4 +2,19 @@
"""
@author: liaoxingyu
@contact: sherlockliao01@gmail.com
"""
"""
def compile_helper():
    """Compile the cython reid-evaluation extension at runtime.

    Runs ``make`` in this package's directory to build the cython module
    in place. Make sure this is invoked on a single process only (e.g.
    the main/rank-0 process) to avoid concurrent builds.

    Exits the interpreter with status 1 if the build fails.
    """
    # Function-scope imports keep module import cheap; grouped here
    # instead of scattering `import sys` inside the failure branch.
    import os
    import subprocess
    import sys

    path = os.path.abspath(os.path.dirname(__file__))
    ret = subprocess.run(["make", "-C", path])
    if ret.returncode != 0:
        print("Making cython reid evaluation module failed, exiting.")
        sys.exit(1)

View File

@ -5,8 +5,8 @@ import os.path as osp
sys.path.insert(0, osp.dirname(osp.abspath(__file__)) + '/../../..')
from fastreid.evaluation import evaluate_rank
from fastreid.evaluation import evaluate_roc
from fastreid.evaluation.rank import evaluate_rank
from fastreid.evaluation.roc import evaluate_roc
"""
Test the speed of cython-based evaluation code. The speed improvements
@ -24,8 +24,8 @@ import sys
import os.path as osp
import numpy as np
sys.path.insert(0, osp.dirname(osp.abspath(__file__)) + '/../../..')
from fastreid.evaluation import evaluate_rank
from fastreid.evaluation import evaluate_roc
from fastreid.evaluation.rank import evaluate_rank
from fastreid.evaluation.roc import evaluate_roc
num_q = 30
num_g = 300
dim = 512

View File

@ -5,6 +5,7 @@
"""
import copy
import logging
import time
import itertools
from collections import OrderedDict
@ -17,8 +18,7 @@ from fastreid.utils import comm
from fastreid.utils.compute_dist import build_dist
from .evaluator import DatasetEvaluator
from .query_expansion import aqe
from .rank import evaluate_rank
from .roc import evaluate_roc
from .rank_cylib import compile_helper
logger = logging.getLogger(__name__)
@ -32,6 +32,7 @@ class ReidEvaluator(DatasetEvaluator):
self._cpu_device = torch.device('cpu')
self._predictions = []
self._compile_dependencies()
def reset(self):
self._predictions = []
@ -102,6 +103,7 @@ class ReidEvaluator(DatasetEvaluator):
rerank_dist = build_dist(query_features, gallery_features, metric="jaccard", k1=k1, k2=k2)
dist = rerank_dist * (1 - lambda_value) + dist * lambda_value
from .rank import evaluate_rank
cmc, all_AP, all_INP = evaluate_rank(dist, query_pids, gallery_pids, query_camids, gallery_camids)
mAP = np.mean(all_AP)
@ -113,6 +115,7 @@ class ReidEvaluator(DatasetEvaluator):
self._results["metric"] = (mAP + cmc[0]) / 2 * 100
if self.cfg.TEST.ROC.ENABLED:
from .roc import evaluate_roc
scores, labels = evaluate_roc(dist, query_pids, gallery_pids, query_camids, gallery_camids)
fprs, tprs, thres = metrics.roc_curve(labels, scores)
@ -121,3 +124,20 @@ class ReidEvaluator(DatasetEvaluator):
self._results["TPR@FPR={:.0e}".format(fpr)] = tprs[ind]
return copy.deepcopy(self._results)
def _compile_dependencies(self):
    """Make sure the cython rank evaluation extension is importable.

    Only the main process attempts the compilation (results are only
    evaluated on rank 0); every process then waits at the barrier so
    no rank proceeds before the extension exists.
    """
    if comm.is_main_process():
        try:
            from .rank_cylib.rank_cy import evaluate_cy  # noqa: F401
        except ImportError:
            # Extension missing: build it once, timing the compilation.
            logger.info("> compiling reid evaluation cython tool")
            tic = time.time()
            compile_helper()
            elapsed = time.time() - tic
            logger.info(
                ">>> done with reid evaluation cython tool. Compilation time: {:.3f} "
                "seconds".format(elapsed))
    comm.synchronize()