<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Torchreid — torchreid 1.0.6 documentation</title>

<script type="text/javascript" src="_static/js/modernizr.min.js"></script>

<script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<script type="text/javascript" src="_static/language_data.js"></script>
<script async="async" type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>

<script type="text/javascript" src="_static/js/theme.js"></script>

<link rel="stylesheet" href="_static/css/theme.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="search.html" />
<link rel="next" title="How-to" href="user_guide.html" />
</head>
<body class="wy-body-for-nav">

<div class="wy-grid-for-nav">

<nav data-toggle="wy-nav-shift" class="wy-nav-side">
<div class="wy-side-scroll">
<div class="wy-side-nav-search">

<a href="#" class="icon icon-home"> torchreid
</a>

<div class="version">
1.0.6
</div>

<div role="search">
<form id="rtd-search-form" class="wy-form" action="search.html" method="get">
<input type="text" name="q" placeholder="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>

</div>

<div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">

<ul>
<li class="toctree-l1"><a class="reference internal" href="user_guide.html">How-to</a></li>
<li class="toctree-l1"><a class="reference internal" href="datasets.html">Datasets</a></li>
<li class="toctree-l1"><a class="reference internal" href="evaluation.html">Evaluation</a></li>
</ul>
<p class="caption"><span class="caption-text">Package Reference</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="pkg/data.html">torchreid.data</a></li>
<li class="toctree-l1"><a class="reference internal" href="pkg/engine.html">torchreid.engine</a></li>
<li class="toctree-l1"><a class="reference internal" href="pkg/losses.html">torchreid.losses</a></li>
<li class="toctree-l1"><a class="reference internal" href="pkg/metrics.html">torchreid.metrics</a></li>
<li class="toctree-l1"><a class="reference internal" href="pkg/models.html">torchreid.models</a></li>
<li class="toctree-l1"><a class="reference internal" href="pkg/optim.html">torchreid.optim</a></li>
<li class="toctree-l1"><a class="reference internal" href="pkg/utils.html">torchreid.utils</a></li>
</ul>
<p class="caption"><span class="caption-text">Resources</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="AWESOME_REID.html">Awesome-ReID</a></li>
<li class="toctree-l1"><a class="reference internal" href="MODEL_ZOO.html">Model Zoo</a></li>
</ul>

</div>
</div>
</nav>
<section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">

<nav class="wy-nav-top" aria-label="top navigation">
<i data-toggle="wy-nav-top" class="fa fa-bars"></i>
<a href="#">torchreid</a>
</nav>

<div class="wy-nav-content">
<div class="rst-content">

<div role="navigation" aria-label="breadcrumbs navigation">
<ul class="wy-breadcrumbs">
<li><a href="#">Docs</a> »</li>
<li>Torchreid</li>
<li class="wy-breadcrumbs-aside">
<a href="_sources/index.rst.txt" rel="nofollow"> View page source</a>
</li>
</ul>
<hr/>
</div>
<div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
<div itemprop="articleBody">
<div class="section" id="torchreid">
|
||
<h1>Torchreid<a class="headerlink" href="#torchreid" title="Permalink to this headline">¶</a></h1>
|
||
<a class="reference external image-reference" href="https://github.com/KaiyangZhou/deep-person-reid/blob/master/LICENSE"><img alt="GitHub license" src="https://img.shields.io/github/license/KaiyangZhou/deep-person-reid" /></a>
|
||
<p>Torchreid is a library for deep-learning person re-identification in <a class="reference external" href="https://pytorch.org/">PyTorch</a>.</p>
|
||
<p>It features:</p>
|
||
<ul class="simple">
|
||
<li>multi-GPU training</li>
|
||
<li>support both image- and video-reid</li>
|
||
<li>end-to-end training and evaluation</li>
|
||
<li>incredibly easy preparation of reid datasets</li>
|
||
<li>multi-dataset training</li>
|
||
<li>cross-dataset evaluation</li>
|
||
<li>standard protocol used by most research papers</li>
|
||
<li>highly extensible (easy to add models, datasets, training methods, etc.)</li>
|
||
<li>implementations of state-of-the-art deep reid models</li>
|
||
<li>access to pretrained reid models</li>
|
||
<li>advanced training techniques</li>
|
||
<li>visualization tools (tensorboard, ranks, etc.)</li>
|
||
</ul>
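<p>For example, multi-dataset training and cross-dataset evaluation come down to the choice of <code class="docutils literal notranslate"><span class="pre">sources</span></code> and <code class="docutils literal notranslate"><span class="pre">targets</span></code> in the data manager. Below is a minimal sketch; the list-valued arguments and dataset keys follow the <a class="reference internal" href="pkg/data.html">torchreid.data</a> reference and are meant as an illustration rather than a tuned recipe.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>import torchreid

# train on Market1501 + DukeMTMC-reID, evaluate on MSMT17 (cross-dataset)
datamanager = torchreid.data.ImageDataManager(
    root='reid-data',
    sources=['market1501', 'dukemtmcreid'],
    targets='msmt17',
    height=256,
    width=128,
    batch_size_train=32,
    batch_size_test=100,
    transforms=['random_flip', 'random_crop']
)
</pre></div>
</div>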
<p>Code: <a class="reference external" href="https://github.com/KaiyangZhou/deep-person-reid">https://github.com/KaiyangZhou/deep-person-reid</a>.</p>
<p>Documentation: <a class="reference external" href="https://kaiyangzhou.github.io/deep-person-reid/">https://kaiyangzhou.github.io/deep-person-reid/</a>.</p>
<p>How-to instructions: <a class="reference external" href="https://kaiyangzhou.github.io/deep-person-reid/user_guide">https://kaiyangzhou.github.io/deep-person-reid/user_guide</a>.</p>
<p>Model zoo: <a class="reference external" href="https://kaiyangzhou.github.io/deep-person-reid/MODEL_ZOO">https://kaiyangzhou.github.io/deep-person-reid/MODEL_ZOO</a>.</p>
<p>Tech report: <a class="reference external" href="https://arxiv.org/abs/1910.10093">https://arxiv.org/abs/1910.10093</a>.</p>
<p>You can find some research projects that are built on top of Torchreid <a class="reference external" href="https://github.com/KaiyangZhou/deep-person-reid/tree/master/projects">here</a>.</p>
<div class="section" id="installation">
<h2>Installation<a class="headerlink" href="#installation" title="Permalink to this headline">¶</a></h2>
<p>Make sure <a class="reference external" href="https://www.anaconda.com/distribution/">conda</a> is installed.</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="c1"># cd to your preferred directory and clone this repo</span>
git clone https://github.com/KaiyangZhou/deep-person-reid.git

<span class="c1"># create environment</span>
<span class="nb">cd</span> deep-person-reid/
conda create --name torchreid <span class="nv">python</span><span class="o">=</span><span class="m">3</span>.7
conda activate torchreid

<span class="c1"># install dependencies</span>
<span class="c1"># make sure `which python` and `which pip` point to the correct path</span>
pip install -r requirements.txt

<span class="c1"># install torch and torchvision (select the proper cuda version to suit your machine)</span>
conda install pytorch torchvision <span class="nv">cudatoolkit</span><span class="o">=</span><span class="m">9</span>.0 -c pytorch

<span class="c1"># install torchreid (no need to re-build it if you modify the source code)</span>
python setup.py develop
</pre></div>
</div>
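<p>A quick way to check the installation is to import the package and print its version (a minimal sanity check; the exact version string depends on the checkout you installed):</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>import torchreid

# should print the installed version, e.g. 1.0.6 for the release these docs describe
print(torchreid.__version__)
</pre></div>
</div>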
</div>
<div class="section" id="get-started-30-seconds-to-torchreid">
<h2>Get started: 30 seconds to Torchreid<a class="headerlink" href="#get-started-30-seconds-to-torchreid" title="Permalink to this headline">¶</a></h2>
<ol class="arabic simple">
<li>Import <code class="docutils literal notranslate"><span class="pre">torchreid</span></code></li>
</ol>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">import</span> <span class="nn">torchreid</span>
</pre></div>
</div>
<ol class="arabic simple" start="2">
<li>Load data manager</li>
</ol>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">datamanager</span> <span class="o">=</span> <span class="n">torchreid</span><span class="o">.</span><span class="n">data</span><span class="o">.</span><span class="n">ImageDataManager</span><span class="p">(</span>
    <span class="n">root</span><span class="o">=</span><span class="s1">'reid-data'</span><span class="p">,</span>
    <span class="n">sources</span><span class="o">=</span><span class="s1">'market1501'</span><span class="p">,</span>
    <span class="n">targets</span><span class="o">=</span><span class="s1">'market1501'</span><span class="p">,</span>
    <span class="n">height</span><span class="o">=</span><span class="mi">256</span><span class="p">,</span>
    <span class="n">width</span><span class="o">=</span><span class="mi">128</span><span class="p">,</span>
    <span class="n">batch_size_train</span><span class="o">=</span><span class="mi">32</span><span class="p">,</span>
    <span class="n">batch_size_test</span><span class="o">=</span><span class="mi">100</span><span class="p">,</span>
    <span class="n">transforms</span><span class="o">=</span><span class="p">[</span><span class="s1">'random_flip'</span><span class="p">,</span> <span class="s1">'random_crop'</span><span class="p">]</span>
<span class="p">)</span>
</pre></div>
</div>
<ol class="arabic simple" start="3">
<li>Build model, optimizer and lr_scheduler</li>
</ol>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">model</span> <span class="o">=</span> <span class="n">torchreid</span><span class="o">.</span><span class="n">models</span><span class="o">.</span><span class="n">build_model</span><span class="p">(</span>
|
||
<span class="n">name</span><span class="o">=</span><span class="s1">'resnet50'</span><span class="p">,</span>
|
||
<span class="n">num_classes</span><span class="o">=</span><span class="n">datamanager</span><span class="o">.</span><span class="n">num_train_pids</span><span class="p">,</span>
|
||
<span class="n">loss</span><span class="o">=</span><span class="s1">'softmax'</span><span class="p">,</span>
|
||
<span class="n">pretrained</span><span class="o">=</span><span class="bp">True</span>
|
||
<span class="p">)</span>
|
||
|
||
<span class="n">model</span> <span class="o">=</span> <span class="n">model</span><span class="o">.</span><span class="n">cuda</span><span class="p">()</span>
|
||
|
||
<span class="n">optimizer</span> <span class="o">=</span> <span class="n">torchreid</span><span class="o">.</span><span class="n">optim</span><span class="o">.</span><span class="n">build_optimizer</span><span class="p">(</span>
|
||
<span class="n">model</span><span class="p">,</span>
|
||
<span class="n">optim</span><span class="o">=</span><span class="s1">'adam'</span><span class="p">,</span>
|
||
<span class="n">lr</span><span class="o">=</span><span class="mf">0.0003</span>
|
||
<span class="p">)</span>
|
||
|
||
<span class="n">scheduler</span> <span class="o">=</span> <span class="n">torchreid</span><span class="o">.</span><span class="n">optim</span><span class="o">.</span><span class="n">build_lr_scheduler</span><span class="p">(</span>
|
||
<span class="n">optimizer</span><span class="p">,</span>
|
||
<span class="n">lr_scheduler</span><span class="o">=</span><span class="s1">'single_step'</span><span class="p">,</span>
|
||
<span class="n">stepsize</span><span class="o">=</span><span class="mi">20</span>
|
||
<span class="p">)</span>
|
||
</pre></div>
|
||
</div>
|
||
<ol class="arabic simple" start="4">
|
||
<li>Build engine</li>
|
||
</ol>
|
||
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">engine</span> <span class="o">=</span> <span class="n">torchreid</span><span class="o">.</span><span class="n">engine</span><span class="o">.</span><span class="n">ImageSoftmaxEngine</span><span class="p">(</span>
|
||
<span class="n">datamanager</span><span class="p">,</span>
|
||
<span class="n">model</span><span class="p">,</span>
|
||
<span class="n">optimizer</span><span class="o">=</span><span class="n">optimizer</span><span class="p">,</span>
|
||
<span class="n">scheduler</span><span class="o">=</span><span class="n">scheduler</span><span class="p">,</span>
|
||
<span class="n">label_smooth</span><span class="o">=</span><span class="bp">True</span>
|
||
<span class="p">)</span>
|
||
</pre></div>
|
||
</div>
|
||
<ol class="arabic simple" start="5">
|
||
<li>Run training and test</li>
|
||
</ol>
|
||
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">engine</span><span class="o">.</span><span class="n">run</span><span class="p">(</span>
|
||
<span class="n">save_dir</span><span class="o">=</span><span class="s1">'log/resnet50'</span><span class="p">,</span>
|
||
<span class="n">max_epoch</span><span class="o">=</span><span class="mi">60</span><span class="p">,</span>
|
||
<span class="n">eval_freq</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span>
|
||
<span class="n">print_freq</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span>
|
||
<span class="n">test_only</span><span class="o">=</span><span class="bp">False</span>
|
||
<span class="p">)</span>
|
||
</pre></div>
|
||
</div>
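<p>Once training has finished, the same engine can be reused for evaluation only. The snippet below is a sketch: the checkpoint name is hypothetical (it depends on <code class="docutils literal notranslate"><span class="pre">save_dir</span></code> and <code class="docutils literal notranslate"><span class="pre">max_epoch</span></code>), and <code class="docutils literal notranslate"><span class="pre">load_pretrained_weights</span></code> is documented in <a class="reference internal" href="pkg/utils.html">torchreid.utils</a>.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre># hypothetical checkpoint written by the training run above
weight_path = 'log/resnet50/model.pth.tar-60'
torchreid.utils.load_pretrained_weights(model, weight_path)

# evaluate on the target dataset without training
engine.run(
    save_dir='log/resnet50',
    test_only=True
)
</pre></div>
</div>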
</div>
<div class="section" id="a-unified-interface">
<h2>A unified interface<a class="headerlink" href="#a-unified-interface" title="Permalink to this headline">¶</a></h2>
<p>In “deep-person-reid/scripts/”, we provide a unified interface to train and test a model. See “scripts/main.py” and “scripts/default_config.py” for more details. “configs/” contains some predefined configs which you can use as a starting point.</p>
<p>Below we provide an example to train and test <a class="reference external" href="https://arxiv.org/abs/1905.00953">OSNet (Zhou et al. ICCV’19)</a>. Assume <code class="code docutils literal notranslate"><span class="pre">PATH_TO_DATA</span></code> is the directory containing reid datasets.</p>
<div class="section" id="conventional-setting">
<h3>Conventional setting<a class="headerlink" href="#conventional-setting" title="Permalink to this headline">¶</a></h3>
<p>To train OSNet on Market1501, do</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>python scripts/main.py <span class="se">\</span>
--config-file configs/im_osnet_x1_0_softmax_256x128_amsgrad_cosine.yaml <span class="se">\</span>
--transforms random_flip random_erase <span class="se">\</span>
--root <span class="nv">$PATH_TO_DATA</span> <span class="se">\</span>
--gpu-devices <span class="m">0</span>
</pre></div>
</div>
<p>The config file sets Market1501 as the default dataset. If you want to use DukeMTMC-reID instead, do</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>python scripts/main.py <span class="se">\</span>
--config-file configs/im_osnet_x1_0_softmax_256x128_amsgrad_cosine.yaml <span class="se">\</span>
-s dukemtmcreid <span class="se">\</span>
-t dukemtmcreid <span class="se">\</span>
--transforms random_flip random_erase <span class="se">\</span>
--root <span class="nv">$PATH_TO_DATA</span> <span class="se">\</span>
--gpu-devices <span class="m">0</span> <span class="se">\</span>
data.save_dir log/osnet_x1_0_dukemtmcreid_softmax_cosinelr
</pre></div>
</div>
<p>The code will automatically (download and) load the ImageNet pretrained weights. After the training is done, the model will be saved as “log/osnet_x1_0_market1501_softmax_cosinelr/model.pth.tar-250”. Under the same folder, you can find the <a class="reference external" href="https://pytorch.org/docs/stable/tensorboard.html">tensorboard</a> file. To visualize the learning curves using tensorboard, you can run <code class="code docutils literal notranslate"><span class="pre">tensorboard</span> <span class="pre">--logdir=log/osnet_x1_0_market1501_softmax_cosinelr</span></code> in the terminal and visit <code class="code docutils literal notranslate"><span class="pre">http://localhost:6006/</span></code> in your web browser.</p>
<p>Evaluation is automatically performed at the end of training. To run the test again using the trained model, do</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>python scripts/main.py <span class="se">\</span>
--config-file configs/im_osnet_x1_0_softmax_256x128_amsgrad_cosine.yaml <span class="se">\</span>
--root <span class="nv">$PATH_TO_DATA</span> <span class="se">\</span>
--gpu-devices <span class="m">0</span> <span class="se">\</span>
model.load_weights log/osnet_x1_0_market1501_softmax_cosinelr/model.pth.tar-250 <span class="se">\</span>
test.evaluate True
</pre></div>
</div>
</div>
<div class="section" id="cross-domain-setting">
<h3>Cross-domain setting<a class="headerlink" href="#cross-domain-setting" title="Permalink to this headline">¶</a></h3>
<p>Suppose you want to train OSNet on DukeMTMC-reID and test its performance on Market1501. You can do</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>python scripts/main.py <span class="se">\</span>
--config-file configs/im_osnet_x1_0_softmax_256x128_amsgrad.yaml <span class="se">\</span>
-s dukemtmcreid <span class="se">\</span>
-t market1501 <span class="se">\</span>
--transforms random_flip color_jitter <span class="se">\</span>
--root <span class="nv">$PATH_TO_DATA</span> <span class="se">\</span>
--gpu-devices <span class="m">0</span>
</pre></div>
</div>
<p>Here we only test the cross-domain performance. However, if you also want to test the performance on the source dataset, i.e. DukeMTMC-reID, you can set <code class="code docutils literal notranslate"><span class="pre">-t</span> <span class="pre">dukemtmcreid</span> <span class="pre">market1501</span></code>, which will evaluate the model on the two datasets separately.</p>
<p>Different from the same-domain setting, here we replace <code class="code docutils literal notranslate"><span class="pre">random_erase</span></code> with <code class="code docutils literal notranslate"><span class="pre">color_jitter</span></code>. This can improve the generalization performance on the unseen target dataset.</p>
<p>Pretrained models are available in the <a class="reference external" href="https://kaiyangzhou.github.io/deep-person-reid/MODEL_ZOO.html">Model Zoo</a>.</p>
</div>
</div>
<div class="section" id="datasets">
|
||
<h2>Datasets<a class="headerlink" href="#datasets" title="Permalink to this headline">¶</a></h2>
|
||
<div class="section" id="image-reid-datasets">
|
||
<h3>Image-reid datasets<a class="headerlink" href="#image-reid-datasets" title="Permalink to this headline">¶</a></h3>
|
||
<ul class="simple">
|
||
<li><a class="reference external" href="https://www.cv-foundation.org/openaccess/content_iccv_2015/papers/Zheng_Scalable_Person_Re-Identification_ICCV_2015_paper.pdf">Market1501</a></li>
|
||
<li><a class="reference external" href="https://www.cv-foundation.org/openaccess/content_cvpr_2014/papers/Li_DeepReID_Deep_Filter_2014_CVPR_paper.pdf">CUHK03</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1701.07717">DukeMTMC-reID</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1711.08565">MSMT17</a></li>
|
||
<li><a class="reference external" href="http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.331.7285&rep=rep1&type=pdf">VIPeR</a></li>
|
||
<li><a class="reference external" href="http://www.eecs.qmul.ac.uk/~txiang/publications/LoyXiangGong_cvpr_2009.pdf">GRID</a></li>
|
||
<li><a class="reference external" href="http://www.ee.cuhk.edu.hk/~xgwang/papers/liZWaccv12.pdf">CUHK01</a></li>
|
||
<li><a class="reference external" href="http://openaccess.thecvf.com/content_cvpr_2017/papers/Zhao_Spindle_Net_Person_CVPR_2017_paper.pdf">SenseReID</a></li>
|
||
<li><a class="reference external" href="http://www.eecs.qmul.ac.uk/~sgg/papers/ZhengGongXiang_BMVC09.pdf">QMUL-iLIDS</a></li>
|
||
<li><a class="reference external" href="https://pdfs.semanticscholar.org/4c1b/f0592be3e535faf256c95e27982db9b3d3d3.pdf">PRID</a></li>
|
||
</ul>
|
||
</div>
|
||
<div class="section" id="video-reid-datasets">
|
||
<h3>Video-reid datasets<a class="headerlink" href="#video-reid-datasets" title="Permalink to this headline">¶</a></h3>
|
||
<ul class="simple">
|
||
<li><a class="reference external" href="http://www.liangzheng.org/1320.pdf">MARS</a></li>
|
||
<li><a class="reference external" href="https://www.eecs.qmul.ac.uk/~sgg/papers/WangEtAl_ECCV14.pdf">iLIDS-VID</a></li>
|
||
<li><a class="reference external" href="https://pdfs.semanticscholar.org/4c1b/f0592be3e535faf256c95e27982db9b3d3d3.pdf">PRID2011</a></li>
|
||
<li><a class="reference external" href="http://openaccess.thecvf.com/content_cvpr_2018/papers/Wu_Exploit_the_Unknown_CVPR_2018_paper.pdf">DukeMTMC-VideoReID</a></li>
|
||
</ul>
|
||
</div>
|
||
</div>
|
||
<div class="section" id="models">
|
||
<h2>Models<a class="headerlink" href="#models" title="Permalink to this headline">¶</a></h2>
|
||
<div class="section" id="imagenet-classification-models">
|
||
<h3>ImageNet classification models<a class="headerlink" href="#imagenet-classification-models" title="Permalink to this headline">¶</a></h3>
|
||
<ul class="simple">
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1512.03385">ResNet</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1611.05431">ResNeXt</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1709.01507">SENet</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1608.06993">DenseNet</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1602.07261">Inception-ResNet-V2</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1602.07261">Inception-V4</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1610.02357">Xception</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1807.09441">IBN-Net</a></li>
|
||
</ul>
|
||
</div>
|
||
<div class="section" id="lightweight-models">
|
||
<h3>Lightweight models<a class="headerlink" href="#lightweight-models" title="Permalink to this headline">¶</a></h3>
|
||
<ul class="simple">
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1707.07012">NASNet</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1801.04381">MobileNetV2</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1707.01083">ShuffleNet</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1807.11164">ShuffleNetV2</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1602.07360">SqueezeNet</a></li>
|
||
</ul>
|
||
</div>
|
||
<div class="section" id="reid-specific-models">
|
||
<h3>ReID-specific models<a class="headerlink" href="#reid-specific-models" title="Permalink to this headline">¶</a></h3>
|
||
<ul class="simple">
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1709.05165">MuDeep</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1711.08106">ResNet-mid</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1802.08122">HACNN</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1711.09349">PCB</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1803.09132">MLFN</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1905.00953">OSNet</a></li>
|
||
<li><a class="reference external" href="https://arxiv.org/abs/1910.06827">OSNet-AIN</a></li>
|
||
</ul>
|
||
</div>
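<p>Any of the models above can be built by name with <code class="docutils literal notranslate"><span class="pre">torchreid.models.build_model</span></code>, in the same way as <code class="docutils literal notranslate"><span class="pre">resnet50</span></code> in the example earlier. A short sketch (the key <code class="docutils literal notranslate"><span class="pre">osnet_x1_0</span></code> and <code class="docutils literal notranslate"><span class="pre">show_avai_models()</span></code> follow the <a class="reference internal" href="pkg/models.html">torchreid.models</a> reference; 751 is only an example identity count):</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>import torchreid

# print every registered model name
torchreid.models.show_avai_models()

model = torchreid.models.build_model(
    name='osnet_x1_0',
    num_classes=751,  # e.g. the number of training identities in Market1501
    loss='softmax',
    pretrained=True
)
</pre></div>
</div>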
</div>
<div class="section" id="losses">
<h2>Losses<a class="headerlink" href="#losses" title="Permalink to this headline">¶</a></h2>
<ul class="simple">
<li><a class="reference external" href="https://www.cv-foundation.org/openaccess/content_cvpr_2016/papers/Szegedy_Rethinking_the_Inception_CVPR_2016_paper.pdf">Softmax (cross entropy loss with label smoothing)</a></li>
<li><a class="reference external" href="https://arxiv.org/abs/1703.07737">Triplet (hard example mining triplet loss)</a> (see the sketch after this list)</li>
</ul>
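<p>Both losses are selected when the model and engine are built. The sketch below shows a triplet-loss setup; it assumes the <code class="docutils literal notranslate"><span class="pre">RandomIdentitySampler</span></code> option of the data manager and the <code class="docutils literal notranslate"><span class="pre">ImageTripletEngine</span></code> described in the <a class="reference internal" href="pkg/engine.html">torchreid.engine</a> reference, so double-check the argument names against your installed version.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>import torchreid

# the triplet loss needs several images per identity in each batch
datamanager = torchreid.data.ImageDataManager(
    root='reid-data',
    sources='market1501',
    height=256,
    width=128,
    batch_size_train=32,
    batch_size_test=100,
    train_sampler='RandomIdentitySampler',  # sample P identities x K instances
    num_instances=4,
    transforms=['random_flip', 'random_crop']
)

model = torchreid.models.build_model(
    name='resnet50',
    num_classes=datamanager.num_train_pids,
    loss='triplet',
    pretrained=True
).cuda()

optimizer = torchreid.optim.build_optimizer(model, optim='adam', lr=0.0003)

engine = torchreid.engine.ImageTripletEngine(
    datamanager,
    model,
    optimizer=optimizer,
    margin=0.3,    # triplet margin
    weight_t=1,    # weight of the triplet loss
    weight_x=1,    # weight of the softmax loss
    label_smooth=True
)
</pre></div>
</div>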
</div>
<div class="section" id="useful-links">
<h2>Useful links<a class="headerlink" href="#useful-links" title="Permalink to this headline">¶</a></h2>
<ul class="simple">
<li><a class="reference external" href="https://github.com/RodMech/OSNet-IBN1-Lite">OSNet-IBN1-Lite (test-only code with lite docker container)</a></li>
</ul>
</div>
<div class="section" id="citation">
<h2>Citation<a class="headerlink" href="#citation" title="Permalink to this headline">¶</a></h2>
<p>If you find this code useful to your research, please cite the following papers.</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span>@article<span class="o">{</span>torchreid,
    <span class="nv">title</span><span class="o">={</span>Torchreid: A Library <span class="k">for</span> Deep Learning Person Re-Identification in Pytorch<span class="o">}</span>,
    <span class="nv">author</span><span class="o">={</span>Zhou, Kaiyang and Xiang, Tao<span class="o">}</span>,
    <span class="nv">journal</span><span class="o">={</span>arXiv preprint arXiv:1910.10093<span class="o">}</span>,
    <span class="nv">year</span><span class="o">={</span><span class="m">2019</span><span class="o">}</span>
<span class="o">}</span>

@inproceedings<span class="o">{</span>zhou2019osnet,
    <span class="nv">title</span><span class="o">={</span>Omni-Scale Feature Learning <span class="k">for</span> Person Re-Identification<span class="o">}</span>,
    <span class="nv">author</span><span class="o">={</span>Zhou, Kaiyang and Yang, Yongxin and Cavallaro, Andrea and Xiang, Tao<span class="o">}</span>,
    <span class="nv">booktitle</span><span class="o">={</span>ICCV<span class="o">}</span>,
    <span class="nv">year</span><span class="o">={</span><span class="m">2019</span><span class="o">}</span>
<span class="o">}</span>

@article<span class="o">{</span>zhou2019learning,
    <span class="nv">title</span><span class="o">={</span>Learning Generalisable Omni-Scale Representations <span class="k">for</span> Person Re-Identification<span class="o">}</span>,
    <span class="nv">author</span><span class="o">={</span>Zhou, Kaiyang and Yang, Yongxin and Cavallaro, Andrea and Xiang, Tao<span class="o">}</span>,
    <span class="nv">journal</span><span class="o">={</span>arXiv preprint arXiv:1910.06827<span class="o">}</span>,
    <span class="nv">year</span><span class="o">={</span><span class="m">2019</span><span class="o">}</span>
<span class="o">}</span>
</pre></div>
</div>
<div class="toctree-wrapper compound">
</div>
<div class="toctree-wrapper compound">
</div>
<div class="toctree-wrapper compound">
</div>
</div>
</div>
<div class="section" id="indices-and-tables">
|
||
<h1>Indices and tables<a class="headerlink" href="#indices-and-tables" title="Permalink to this headline">¶</a></h1>
|
||
<ul class="simple">
|
||
<li><a class="reference internal" href="genindex.html"><span class="std std-ref">Index</span></a></li>
|
||
<li><a class="reference internal" href="py-modindex.html"><span class="std std-ref">Module Index</span></a></li>
|
||
</ul>
|
||
</div>
|
||
|
||
|
||
</div>
|
||
|
||
</div>
|
||
<footer>
|
||
|
||
<div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
|
||
|
||
<a href="user_guide.html" class="btn btn-neutral float-right" title="How-to" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a>
|
||
|
||
|
||
</div>
|
||
|
||
|
||
<hr/>
|
||
|
||
<div role="contentinfo">
|
||
<p>
|
||
© Copyright 2019, Kaiyang Zhou
|
||
|
||
</p>
|
||
</div>
|
||
Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
|
||
|
||
</footer>
|
||
|
||
</div>
|
||
</div>
|
||
|
||
</section>
|
||
|
||
</div>
|
||
|
||
|
||
|
||
<script type="text/javascript">
|
||
jQuery(function () {
|
||
SphinxRtdTheme.Navigation.enable(true);
|
||
});
|
||
</script>
|
||
|
||
|
||
|
||
|
||
|
||
|
||
</body>
|
||
</html> |