Minor changes

Some minor changes, such as class name changes, removal of an extra blank line, etc.
pull/504/head
liaoxingyu 2021-05-31 17:27:14 +08:00
parent 8ab3554958
commit 91ff631184
18 changed files with 137 additions and 136 deletions

View File

@ -2,18 +2,9 @@
We provide a command line tool to run a simple demo of builtin models.
You can run this command to get rank visualization results by cosine similarities between different images.
You can run this command to get cosine similarities between different images
```shell script
python3 demo/visualize_result.py --config-file logs/dukemtmc/mgn_R50-ibn/config.yaml \
--parallel --vis-label --dataset-name 'DukeMTMC' --output logs/mgn_duke_vis \
--opts MODEL.WEIGHTS logs/dukemtmc/mgn_R50-ibn/model_final.pth
```
You can also run this command to extract image features.
```shell script
python3 demo/demo.py --config-file logs/dukemtmc/sbs_R50/config.yaml \
--parallel --input tools/deploy/test_data/*.jpg --output sbs_R50_feat \
--opts MODEL.WEIGHTS logs/dukemtmc/sbs_R50/model_final.pth
```bash
cd demo/
sh run_demo.sh
```

View File

@ -9,6 +9,7 @@ import glob
import os
import sys
import torch.nn.functional as F
import cv2
import numpy as np
import tqdm
@ -23,7 +24,7 @@ from fastreid.utils.file_io import PathManager
from predictor import FeatureExtractionDemo
# import some modules added in project like this below
# sys.path.append('../projects/PartialReID')
# sys.path.append("projects/PartialReID")
# from partialreid import *
cudnn.benchmark = True
@ -72,6 +73,13 @@ def get_parser():
return parser
def postprocess(features):
    """L2-normalize a batch of feature vectors and convert it to NumPy.

    Args:
        features (torch.Tensor): feature batch of shape (N, D).

    Returns:
        numpy.ndarray: row-wise L2-normalized features, so the dot product
        of any two rows is their cosine similarity.
    """
    # Normalize feature to compute cosine distance
    features = F.normalize(features)
    # `.detach()` replaces the deprecated `.data` attribute: it breaks the
    # autograd graph safely before the host copy and NumPy conversion.
    features = features.detach().cpu().numpy()
    return features
if __name__ == '__main__':
args = get_parser().parse_args()
cfg = setup_cfg(args)
@ -85,5 +93,5 @@ if __name__ == '__main__':
for path in tqdm.tqdm(args.input):
img = cv2.imread(path)
feat = demo.run_on_image(img)
feat = feat.numpy()
feat = postprocess(feat)
np.save(os.path.join(args.output, os.path.basename(path).split('.')[0] + '.npy'), feat)

View File

@ -78,8 +78,8 @@ def build_transforms(cfg, is_train=True):
if do_cj:
res.append(T.RandomApply([T.ColorJitter(cj_brightness, cj_contrast, cj_saturation, cj_hue)], p=cj_prob))
if do_affine:
res.append(T.RandomAffine(degrees=0, translate=None, scale=[0.9, 1.1], shear=None, resample=False,
fillcolor=128))
res.append(T.RandomAffine(degrees=10, translate=None, scale=[0.9, 1.1], shear=0.1, resample=False,
fillcolor=0))
if do_augmix:
res.append(AugMix(prob=augmix_prob))
res.append(ToTensor())

View File

@ -5,11 +5,15 @@
"""
from .activation import *
from .batch_drop import BatchDrop
from .batch_norm import *
from .context_block import ContextBlock
from .drop import DropPath, DropBlock2d, drop_block_2d, drop_path
from .frn import FRN, TLU
from .gather_layer import GatherLayer
from .helpers import to_ntuple, to_2tuple, to_3tuple, to_4tuple, make_divisible
from .non_local import Non_local
from .se_layer import SELayer
from .splat import SplAtConv2d, DropBlock2D
from .gather_layer import GatherLayer
from .weight_init import (
trunc_normal_, variance_scaling_, lecun_normal_, weights_init_kaiming, weights_init_classifier
)

View File

@ -23,7 +23,7 @@ class Linear(nn.Module):
self.m = margin
def forward(self, logits, targets):
return logits
return logits.mul_(self.s)
def extra_repr(self):
return f"num_classes={self.num_classes}, scale={self.s}, margin={self.m}"

View File

@ -1,32 +0,0 @@
# encoding: utf-8
"""
@author: liaoxingyu
@contact: sherlockliao01@gmail.com
"""
import random
from torch import nn
class BatchDrop(nn.Module):
    """Zero out one random rectangular region, shared across the batch.

    ref: https://github.com/daizuozhuo/batch-dropblock-network/blob/master/models/networks.py

    The dropped block covers ``h_ratio`` of the height and ``w_ratio`` of
    the width; its position is sampled uniformly each forward pass.
    """

    def __init__(self, h_ratio, w_ratio):
        super(BatchDrop, self).__init__()
        self.h_ratio = h_ratio
        self.w_ratio = w_ratio

    def forward(self, x):
        # Identity during evaluation; drop a block only while training.
        if not self.training:
            return x
        height, width = x.size()[-2:]
        drop_h = round(self.h_ratio * height)
        drop_w = round(self.w_ratio * width)
        top = random.randint(0, height - drop_h)
        left = random.randint(0, width - drop_w)
        mask = x.new_ones(x.size())
        mask[:, :, top:top + drop_h, left:left + drop_w] = 0
        return x * mask

View File

@ -61,7 +61,7 @@ class GeneralizedMeanPooling(nn.Module):
be the same as that of the input.
"""
def __init__(self, norm=3, output_size=1, eps=1e-6, *args, **kwargs):
def __init__(self, norm=3, output_size=(1, 1), eps=1e-6, *args, **kwargs):
super(GeneralizedMeanPooling, self).__init__()
assert norm > 0
self.p = float(norm)
@ -82,7 +82,7 @@ class GeneralizedMeanPoolingP(GeneralizedMeanPooling):
""" Same, but norm is trainable
"""
def __init__(self, norm=3, output_size=1, eps=1e-6, *args, **kwargs):
def __init__(self, norm=3, output_size=(1, 1), eps=1e-6, *args, **kwargs):
super(GeneralizedMeanPoolingP, self).__init__(norm, output_size, eps)
self.p = nn.Parameter(torch.ones(1) * norm)

View File

@ -42,7 +42,7 @@ def hard_example_mining(dist_mat, is_pos, is_neg):
dist_ap, _ = torch.max(dist_mat * is_pos, dim=1)
# `dist_an` means distance(anchor, negative)
# both `dist_an` and `relative_n_inds` with shape [N]
dist_an, _ = torch.min(dist_mat * is_neg + is_pos * 99999999., dim=1)
dist_an, _ = torch.min(dist_mat * is_neg + is_pos * 1e9, dim=1)
return dist_ap, dist_an

View File

@ -10,7 +10,7 @@ from torch import nn
from fastreid.modeling.heads import EmbeddingHead
from fastreid.modeling.heads.build import REID_HEADS_REGISTRY
from fastreid.utils.weight_init import weights_init_kaiming
from fastreid.layers.weight_init import weights_init_kaiming
@REID_HEADS_REGISTRY.register()

View File

@ -5,4 +5,6 @@
"""
from .bee_ant import *
from .distracted_driver import *
from .dataset import ClasDataset
from .trainer import ClasTrainer

View File

@ -10,6 +10,7 @@ import os
from fastreid.data.datasets import DATASET_REGISTRY
from fastreid.data.datasets.bases import ImageDataset
__all__ = ["Hymenoptera"]

View File

@ -12,18 +12,22 @@ from fastreid.data.data_utils import read_image
class ClasDataset(Dataset):
"""Image Person ReID Dataset"""
def __init__(self, img_items, transform=None):
def __init__(self, img_items, transform=None, idx_to_class=None):
self.img_items = img_items
self.transform = transform
classes = set()
for i in img_items:
classes.add(i[1])
if idx_to_class is not None:
self.idx_to_class = idx_to_class
self.class_to_idx = {clas_name: int(i) for i, clas_name in self.idx_to_class.items()}
self.classes = sorted(list(self.idx_to_class.values()))
else:
classes = set()
for i in img_items:
classes.add(i[1])
self.classes = list(classes)
self.classes.sort()
self.class_to_idx = {cls_name: i for i, cls_name in enumerate(self.classes)}
self.idx_to_class = {idx: clas for clas, idx in self.class_to_idx.items()}
self.classes = sorted(list(classes))
self.class_to_idx = {cls_name: i for i, cls_name in enumerate(self.classes)}
self.idx_to_class = {idx: clas for clas, idx in self.class_to_idx.items()}
def __len__(self):
return len(self.img_items)

View File

@ -0,0 +1,82 @@
# encoding: utf-8
"""
@author: xingyu liao
@contact: sherlockliao01@gmail.com
"""
import json
import logging
import os
from fastreid.data.build import _root
from fastreid.data.build import build_reid_train_loader, build_reid_test_loader
from fastreid.data.datasets import DATASET_REGISTRY
from fastreid.data.transforms import build_transforms
from fastreid.engine import DefaultTrainer
from fastreid.evaluation.clas_evaluator import ClasEvaluator
from fastreid.utils import comm
from fastreid.utils.checkpoint import PathManager
from .dataset import ClasDataset
class ClasTrainer(DefaultTrainer):
    """DefaultTrainer specialized for image classification.

    Caches the index -> class-name mapping produced by the training set so
    the test loader and checkpoint metadata share the same label space.
    """

    # Filled in by build_train_loader; consumed by build_test_loader and
    # auto_scale_hyperparams.
    idx2class = None

    @classmethod
    def build_train_loader(cls, cfg):
        """
        Returns:
            iterable

        It now calls :func:`fastreid.data.build_reid_train_loader`.
        Overwrite it if you'd like a different data loader.
        """
        logger = logging.getLogger("fastreid.clas_dataset")
        logger.info("Prepare training set")

        items = list()
        for name in cfg.DATASETS.NAMES:
            dataset = DATASET_REGISTRY.get(name)(root=_root)
            if comm.is_main_process():
                dataset.show_train()
            items.extend(dataset.train)

        train_set = ClasDataset(items, build_transforms(cfg, is_train=True))
        # Remember the label mapping before handing the set to the loader.
        cls.idx2class = train_set.idx_to_class
        return build_reid_train_loader(cfg, train_set=train_set)

    @classmethod
    def build_test_loader(cls, cfg, dataset_name):
        """
        Returns:
            iterable

        It now calls :func:`fastreid.data.build_reid_test_loader`.
        Overwrite it if you'd like a different data loader.
        """
        dataset = DATASET_REGISTRY.get(dataset_name)(root=_root)
        if comm.is_main_process():
            dataset.show_test()
        # Reuse the training label mapping so indices stay consistent.
        test_set = ClasDataset(dataset.query, build_transforms(cfg, is_train=False), cls.idx2class)
        loader, _ = build_reid_test_loader(cfg, test_set=test_set)
        return loader

    @classmethod
    def build_evaluator(cls, cfg, dataset_name, output_dir=None):
        # Pair the loader with a classification evaluator.
        return cls.build_test_loader(cfg, dataset_name), ClasEvaluator(cfg, output_dir)

    @staticmethod
    def auto_scale_hyperparams(cfg, num_classes):
        cfg = DefaultTrainer.auto_scale_hyperparams(cfg, num_classes)
        # Save index to class dictionary
        output_dir = cfg.OUTPUT_DIR
        if comm.is_main_process() and output_dir:
            path = os.path.join(output_dir, "idx2class.json")
            with PathManager.open(path, "w") as f:
                json.dump(ClasTrainer.idx2class, f)
        return cfg

View File

@ -14,75 +14,11 @@ sys.path.append('.')
from fastreid.config import get_cfg
from fastreid.engine import default_argument_parser, default_setup, launch
from fastreid.data.build import build_reid_train_loader, build_reid_test_loader
from fastreid.evaluation.clas_evaluator import ClasEvaluator
from fastreid.utils.checkpoint import Checkpointer, PathManager
from fastreid.utils import comm
from fastreid.engine import DefaultTrainer
from fastreid.data.datasets import DATASET_REGISTRY
from fastreid.data.transforms import build_transforms
from fastreid.data.build import _root
from fastclas import *
class ClasTrainer(DefaultTrainer):
    """Classification trainer (in-script variant).

    Builds train/test loaders from the registered datasets and writes the
    index -> class-name mapping next to the training outputs.
    """

    @classmethod
    def build_train_loader(cls, cfg):
        """
        Returns:
            iterable

        It now calls :func:`fastreid.data.build_reid_train_loader`.
        Overwrite it if you'd like a different data loader.
        """
        logger = logging.getLogger("fastreid.clas_dataset")
        logger.info("Prepare training set")

        items = list()
        for name in cfg.DATASETS.NAMES:
            dataset = DATASET_REGISTRY.get(name)(root=_root)
            if comm.is_main_process():
                dataset.show_train()
            items.extend(dataset.train)

        train_set = ClasDataset(items, build_transforms(cfg, is_train=True))
        loader = build_reid_train_loader(cfg, train_set=train_set)

        # Save index to class dictionary
        output_dir = cfg.OUTPUT_DIR
        if comm.is_main_process() and output_dir:
            path = os.path.join(output_dir, "idx2class.json")
            with PathManager.open(path, "w") as f:
                json.dump(train_set.idx_to_class, f)
        return loader

    @classmethod
    def build_test_loader(cls, cfg, dataset_name):
        """
        Returns:
            iterable

        It now calls :func:`fastreid.data.build_reid_test_loader`.
        Overwrite it if you'd like a different data loader.
        """
        dataset = DATASET_REGISTRY.get(dataset_name)(root=_root)
        if comm.is_main_process():
            dataset.show_test()
        test_set = ClasDataset(dataset.query, build_transforms(cfg, is_train=False))
        loader, _ = build_reid_test_loader(cfg, test_set=test_set)
        return loader

    @classmethod
    def build_evaluator(cls, cfg, dataset_name, output_dir=None):
        # Pair the loader with a classification evaluator.
        return cls.build_test_loader(cfg, dataset_name), ClasEvaluator(cfg, output_dir)
def setup(args):
"""
Create configs and perform basic setups.
@ -105,6 +41,16 @@ def main(args):
Checkpointer(model).load(cfg.MODEL.WEIGHTS) # load trained model
try:
output_dir = os.path.dirname(cfg.MODEL.WEIGHTS)
path = os.path.join(output_dir, "idx2class.json")
with PathManager.open(path, 'r') as f:
idx2class = json.load(f)
ClasTrainer.idx2class = idx2class
except:
logger = logging.getLogger("fastreid.fastclas")
logger.info(f"Cannot find idx2class dict in {os.path.dirname(cfg.MODEL.WEIGHTS)}")
res = ClasTrainer.test(cfg, model)
return res

View File

@ -55,9 +55,6 @@ INPUT:
PADDING:
ENABLED: True
FLIP:
ENABLED: True
DATALOADER:
SAMPLER_TRAIN: NaiveIdentitySampler
NUM_INSTANCE: 16

View File

@ -26,7 +26,7 @@ MODEL:
TRI:
MARGIN: 0.3
SCALE: 1.0
HARD_MINING: True
HARD_MINING: False
DATASETS:
NAMES: ("Market1501",)
@ -44,7 +44,6 @@ DATALOADER:
NUM_INSTANCE: 4
NUM_WORKERS: 8
SOLVER:
AMP:
ENABLED: False
@ -71,4 +70,4 @@ TEST:
CUDNN_BENCHMARK: True
OUTPUT_DIR: "projects/PartialReID/logs/test_partial"
OUTPUT_DIR: projects/PartialReID/logs/test_partial

View File

@ -11,7 +11,7 @@ from torch import nn
from fastreid.layers import *
from fastreid.modeling.heads import EmbeddingHead
from fastreid.modeling.heads.build import REID_HEADS_REGISTRY
from fastreid.utils.weight_init import weights_init_kaiming
from fastreid.layers.weight_init import weights_init_kaiming
class OcclusionUnit(nn.Module):

View File

@ -28,7 +28,6 @@ from fastreid.utils.logger import setup_logger
# sys.path.append("projects/FastDistill")
# from fastdistill import *
setup_logger(name="fastreid")
logger = logging.getLogger("fastreid.onnx_export")