fast-reid/tools/test.py


# encoding: utf-8
"""
@author: l1aoxingyu
@contact: sherlockliao01@gmail.com
"""
import argparse
import os
import sys

import torch
from torch.backends import cudnn

# make the project-level packages (config, data, engine, modeling, utils)
# importable when the script is run from the repository root
sys.path.append('.')
from config import cfg
from data import get_data_bunch
from engine.inference import inference
from utils.logger import setup_logger
from modeling import build_model


def main():
    parser = argparse.ArgumentParser(description="ReID Baseline Inference")
    parser.add_argument(
        '-cfg', "--config_file", default="", help="path to config file", type=str
    )
    parser.add_argument("opts", help="Modify config options using the command-line",
                        default=None, nargs=argparse.REMAINDER)
    args = parser.parse_args()
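
    # Illustrative invocation (the config file name and the OUTPUT_DIR value below are
    # placeholders, not guaranteed paths in this repository):
    #   python tools/test.py -cfg configs/softmax_triplet.yml OUTPUT_DIR logs/test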

    num_gpus = int(os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1
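    # WORLD_SIZE is usually exported by torch.distributed launch utilities; when it is
    # absent, the line above falls back to assuming a single GPU.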

    if args.config_file != "":
        cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()

    if not os.path.exists(cfg.OUTPUT_DIR):
        os.makedirs(cfg.OUTPUT_DIR)

    logger = setup_logger("reid_baseline", cfg.OUTPUT_DIR, 0)

    logger.info("Using {} GPUS".format(num_gpus))
    logger.info(args)

    if args.config_file != "":
        logger.info("Loaded configuration file {}".format(args.config_file))
    logger.info("Running with config:\n{}".format(cfg))

    cudnn.benchmark = True
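    # cudnn.benchmark lets cuDNN auto-tune its convolution algorithms, which generally
    # speeds up inference when the input sizes stay fixed.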

    data_bunch, test_labels, num_query = get_data_bunch(cfg)
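    # get_data_bunch presumably returns the bundled data loaders together with the
    # ground-truth labels of the test set and the number of query images; inference()
    # likely uses num_query to split the extracted features into query and gallery sets.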

    # model = build_model(cfg, data_bunch.c)
    # state_dict = torch.load(cfg.TEST.WEIGHT)
    # model.load_state_dict(state_dict['model'])
    # model.cuda()
    model = torch.jit.load("/export/home/lxy/reid_baseline/pcb_model_v0.2.pt")
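    # NOTE: the model is loaded from a TorchScript archive at a hard-coded path; the
    # commented-out block above is the config-driven alternative that builds the network
    # with build_model and restores the weights from cfg.TEST.WEIGHT.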

    inference(cfg, model, data_bunch, test_labels, num_query)


if __name__ == '__main__':
    main()