Update stat to classify papers (#139)
* update stat to classify papers
* modify tag of papers

branch: pull/140/head
parent: 63f38988eb
commit: c8109d1eca
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @ARTICLE{6795724,
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @INPROCEEDINGS{8578572,
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @inproceedings{he2016deep,
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @inproceedings{xie2017aggregated,
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @inproceedings{hu2018squeeze,
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @inproceedings{hu2018squeeze,
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @inproceedings{zhang2018shufflenet,
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @inproceedings{ma2018shufflenet,
@@ -2,7 +2,7 @@
 ## Introduction

-[BACKBONE]
+[ALGORITHM]

 ```latex
 @article{simonyan2014very,
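The bracketed tag lines changed above are exactly what `docs/stat.py` keys on when classifying papers. A minimal sketch of that lookup, using an illustrative README header rather than text copied from any real config:

```python
import re

# Illustrative README header; real files live under configs/*/README.md.
readme = ("# Deep Residual Learning for Image Recognition\n"
          "## Introduction\n\n"
          "[ALGORITHM]\n")

# Same pattern as the updated stat.py: the first all-caps bracketed tag
# found in the file is taken as the paper type.
_papertype = re.findall(r'\[([A-Z]+)\]', readme)
assert len(_papertype) > 0
papertype = _papertype[0]
print(papertype)  # -> ALGORITHM
```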
docs/stat.py (23 changed lines)
@@ -1,8 +1,11 @@
 #!/usr/bin/env python
+import functools as func
 import glob
 import os.path as osp
 import re

+import numpy as np
+
 url_prefix = 'https://github.com/open-mmlab/mmclassification/blob/master/'

 files = sorted(glob.glob('../configs/*/README.md'))
@@ -18,7 +21,7 @@ for f in files:
     with open(f, 'r') as content_file:
         content = content_file.read()

-    title = content.split('\n')[0].replace('# ', '')
+    title = content.split('\n')[0].replace('# ', '').strip()

     ckpts = set(x.lower().strip()
                 for x in re.findall(r'\[model\]\((https?.*)\)', content))
@@ -26,20 +29,34 @@ for f in files:
     if len(ckpts) == 0:
         continue

+    _papertype = [x for x in re.findall(r'\[([A-Z]+)\]', content)]
+    assert len(_papertype) > 0
+    papertype = _papertype[0]
+
+    paper = set([(papertype, title)])
+
     num_ckpts += len(ckpts)
     titles.append(title)

     statsmsg = f"""
-\t* [{title}]({url}) ({len(ckpts)} ckpts)
+\t* [{papertype}] [{title}]({url}) ({len(ckpts)} ckpts)
 """
-    stats.append((title, ckpts, statsmsg))
+    stats.append((paper, ckpts, statsmsg))

+allpapers = func.reduce(lambda a, b: a.union(b), [p for p, _, _ in stats])
 msglist = '\n'.join(x for _, _, x in stats)

+papertypes, papercounts = np.unique([t for t, _ in allpapers],
+                                    return_counts=True)
+countstr = '\n'.join(
+    [f' - {t}: {c}' for t, c in zip(papertypes, papercounts)])
+
 modelzoo = f"""
 # Model Zoo Statistics

 * Number of papers: {len(set(titles))}
+{countstr}

 * Number of checkpoints: {num_ckpts}
 {msglist}
 """
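For reference, the aggregation added at the bottom of stat.py can be exercised on its own. Below is a minimal, self-contained sketch with made-up `(papertype, title)` data, simplified to `(paper_set, ckpt_count)` pairs rather than the real three-element tuples:

```python
import functools as func

import numpy as np

# Made-up per-README entries mirroring what the loop in stat.py builds
# as `paper = set([(papertype, title)])`; counts are illustrative only.
stats = [
    ({('ALGORITHM', 'ResNet')}, 3),
    ({('ALGORITHM', 'VGG')}, 2),
    ({('BACKBONE', 'Some backbone paper')}, 1),
]

# Union the per-paper sets, then count papers per type, as stat.py does.
allpapers = func.reduce(lambda a, b: a.union(b), [p for p, _ in stats])
papertypes, papercounts = np.unique([t for t, _ in allpapers],
                                    return_counts=True)
countstr = '\n'.join(f' - {t}: {c}' for t, c in zip(papertypes, papercounts))
print(countstr)
# Expected output:
#  - ALGORITHM: 2
#  - BACKBONE: 1
```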