mmpretrain/docs/en/tutorials/MMClassification_python.ipynb

2038 lines
1019 KiB
Plaintext
Raw Normal View History

{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"accelerator": "GPU",
"colab": {
"name": "MMClassification_python.ipynb",
"provenance": [],
"collapsed_sections": [],
"toc_visible": true
},
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.11"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"badf240bbb7d442fbd214e837edbffe2": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_520112917e0f4844995d418c5041d23a",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_9f3f6b72b4d14e2a96b9185331c8081b",
"IPY_MODEL_a275bef3584b49ab9b680b528420d461",
"IPY_MODEL_c4b2c6914a05497b8d2b691bd6dda6da"
]
}
},
"520112917e0f4844995d418c5041d23a": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"9f3f6b72b4d14e2a96b9185331c8081b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_863d2a8cc4074f2e890ba6aea7c54384",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": "100%",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_be55ab36267d4dcab1d83dfaa8540270"
}
},
"a275bef3584b49ab9b680b528420d461": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_31475aa888da4c8d844ba99a0b3397f5",
"_dom_classes": [],
"description": "",
"_model_name": "FloatProgressModel",
"bar_style": "success",
"max": 14206911,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 14206911,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_e310c50e610248dd897fbbf5dd09dd7a"
}
},
"c4b2c6914a05497b8d2b691bd6dda6da": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_8a8ab7c27e404459951cffe7a32b8faa",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 13.5M/13.5M [00:01<00:00, 9.60MB/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_e1a3dce90c1a4804a9ef0c687a9c0703"
}
},
"863d2a8cc4074f2e890ba6aea7c54384": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"be55ab36267d4dcab1d83dfaa8540270": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"31475aa888da4c8d844ba99a0b3397f5": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"e310c50e610248dd897fbbf5dd09dd7a": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"8a8ab7c27e404459951cffe7a32b8faa": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"e1a3dce90c1a4804a9ef0c687a9c0703": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
}
}
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "XjQxmm04iTx4"
},
"source": [
"<a href=\"https://colab.research.google.com/github/open-mmlab/mmclassification/blob/master/docs/tutorials/MMClassification_python.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "UdMfIsMpiODD"
},
"source": [
"# MMClassification Python API tutorial on Colab\n",
"\n",
"In this tutorial, we will introduce the following content:\n",
"\n",
"* How to install MMCls\n",
"* Inference a model with Python API\n",
"* Fine-tune a model with Python API"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "iOl0X9UEiRvE"
},
"source": [
"## Install MMClassification\n",
"\n",
"Before using MMClassification, we need to prepare the environment with the following steps:\n",
"\n",
"1. Install Python, CUDA, C/C++ compiler and git\n",
"2. Install PyTorch (CUDA version)\n",
"3. Install mmcv\n",
"4. Clone mmcls source code from GitHub and install it\n",
"\n",
"Because this tutorial is on Google Colab, and the basic environment has been completed, we can skip the first two steps."
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "_i7cjqS_LtoP"
},
"source": [
"### Check environment"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "c6MbAw10iUJI",
"outputId": "dd37cdf5-7bcf-4a03-f5b5-4b17c3ca16de"
},
"source": [
"%cd /content"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"/content\n"
]
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "4IyFL3MaiYRu",
"outputId": "5008efdf-0356-4d93-ba9d-e51787036213"
},
"source": [
"!pwd"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"/content\n"
]
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "DMw7QwvpiiUO",
"outputId": "33fa5eb8-d083-4a1f-d094-ab0f59e2818e"
},
"source": [
"# Check nvcc version\n",
"!nvcc -V"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"nvcc: NVIDIA (R) Cuda compiler driver\n",
"Copyright (c) 2005-2020 NVIDIA Corporation\n",
"Built on Mon_Oct_12_20:09:46_PDT_2020\n",
"Cuda compilation tools, release 11.1, V11.1.105\n",
"Build cuda_11.1.TC455_06.29190527_0\n"
]
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "4VIBU7Fain4D",
"outputId": "ec20652d-ca24-4b82-b407-e90354d728f8"
},
"source": [
"# Check GCC version\n",
"!gcc --version"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n",
"Copyright (C) 2017 Free Software Foundation, Inc.\n",
"This is free software; see the source for copying conditions. There is NO\n",
"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n",
"\n"
]
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "24lDLCqFisZ9",
"outputId": "30ec9a1c-cdb3-436c-cdc8-f2a22afe254f"
},
"source": [
"# Check PyTorch installation\n",
"import torch, torchvision\n",
"print(torch.__version__)\n",
"print(torch.cuda.is_available())"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"1.9.0+cu111\n",
"True\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "R2aZNLUwizBs"
},
"source": [
"### Install MMCV\n",
"\n",
"MMCV is the basic package of all OpenMMLab packages. We have pre-built wheels on Linux, so we can download and install them directly.\n",
"\n",
"Please pay attention to PyTorch and CUDA versions to match the wheel.\n",
"\n",
"In the above steps, we have checked the version of PyTorch and CUDA, and they are 1.9.0 and 11.1 respectively, so we need to choose the corresponding wheel.\n",
"\n",
"In addition, we can also install the full version of mmcv (mmcv-full). It includes full features and various CUDA ops out of the box, but needs a longer time to build."
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "nla40LrLi7oo",
"outputId": "162bf14d-0d3e-4540-e85e-a46084a786b1"
},
"source": [
"# Install mmcv\n",
"!pip install mmcv -f https://download.openmmlab.com/mmcv/dist/cu111/torch1.9.0/index.html\n",
"# !pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu110/torch1.9.0/index.html"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Looking in links: https://download.openmmlab.com/mmcv/dist/cu111/torch1.9.0/index.html\n",
"Collecting mmcv\n",
" Downloading mmcv-1.3.15.tar.gz (352 kB)\n",
"\u001b[K |████████████████████████████████| 352 kB 5.2 MB/s \n",
"\u001b[?25hCollecting addict\n",
" Downloading addict-2.4.0-py3-none-any.whl (3.8 kB)\n",
"Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv) (1.19.5)\n",
"Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from mmcv) (21.0)\n",
"Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv) (7.1.2)\n",
"Requirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv) (3.13)\n",
"Collecting yapf\n",
" Downloading yapf-0.31.0-py2.py3-none-any.whl (185 kB)\n",
"\u001b[K |████████████████████████████████| 185 kB 49.9 MB/s \n",
"\u001b[?25hRequirement already satisfied: pyparsing>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from packaging->mmcv) (2.4.7)\n",
"Building wheels for collected packages: mmcv\n",
" Building wheel for mmcv (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
" Created wheel for mmcv: filename=mmcv-1.3.15-py2.py3-none-any.whl size=509835 sha256=793fe3796421336ca7a7740a1397a54016ba71ce95fd80cb80a116644adb4070\n",
" Stored in directory: /root/.cache/pip/wheels/b2/f4/4e/8f6d2dd2bef6b7eb8c89aa0e5d61acd7bff60aaf3d4d4b29b0\n",
"Successfully built mmcv\n",
"Installing collected packages: yapf, addict, mmcv\n",
"Successfully installed addict-2.4.0 mmcv-1.3.15 yapf-0.31.0\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "GDTUrYvXjlRb"
},
"source": [
"### Clone and install MMClassification\n",
"\n",
"Next, we clone the latest mmcls repository from GitHub and install it."
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "Bwme6tWHjl5s",
"outputId": "eae20624-4695-4cd9-c3e5-9c59596d150a"
},
"source": [
"# Clone mmcls repository\n",
"!git clone https://github.com/open-mmlab/mmclassification.git\n",
"%cd mmclassification/\n",
"\n",
"# Install MMClassification from source\n",
"!pip install -e . "
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Cloning into 'mmclassification'...\n",
"remote: Enumerating objects: 4152, done.\u001b[K\n",
"remote: Counting objects: 100% (994/994), done.\u001b[K\n",
"remote: Compressing objects: 100% (576/576), done.\u001b[K\n",
"remote: Total 4152 (delta 476), reused 765 (delta 401), pack-reused 3158\u001b[K\n",
"Receiving objects: 100% (4152/4152), 8.20 MiB | 21.00 MiB/s, done.\n",
"Resolving deltas: 100% (2524/2524), done.\n"
]
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "hFg_oSG4j3zB",
"outputId": "05a91f9b-d41c-4ae7-d4fe-c30a30d3f639"
},
"source": [
"# Check MMClassification installation\n",
"import mmcls\n",
"print(mmcls.__version__)"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"0.16.0\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "4Mi3g6yzj96L"
},
"source": [
"## Inference a model with Python API\n",
"\n",
"MMClassification provides many pre-trained models, and you can check them by the link of [model zoo](https://mmclassification.readthedocs.io/en/latest/model_zoo.html). Almost all models can reproduce the results in original papers or reach higher metrics. And we can use these models directly.\n",
"\n",
"To use the pre-trained model, we need to do the following steps:\n",
"\n",
"- Prepare the model\n",
" - Prepare the config file\n",
" - Prepare the checkpoint file\n",
"- Build the model\n",
"- Inference with the model"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "nDQchz8CkJaT",
"outputId": "9805bd7d-cc2a-4269-b43d-257412f1df93"
},
"source": [
"# Get the demo image\n",
"!wget https://www.dropbox.com/s/k5fsqi6qha09l1v/banana.png?dl=0 -O demo/banana.png"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"--2021-10-21 03:52:36-- https://www.dropbox.com/s/k5fsqi6qha09l1v/banana.png?dl=0\n",
"Resolving www.dropbox.com (www.dropbox.com)... 162.125.3.18, 2620:100:601b:18::a27d:812\n",
"Connecting to www.dropbox.com (www.dropbox.com)|162.125.3.18|:443... connected.\n",
"HTTP request sent, awaiting response... 301 Moved Permanently\n",
"Location: /s/raw/k5fsqi6qha09l1v/banana.png [following]\n",
"--2021-10-21 03:52:36-- https://www.dropbox.com/s/raw/k5fsqi6qha09l1v/banana.png\n",
"Reusing existing connection to www.dropbox.com:443.\n",
"HTTP request sent, awaiting response... 302 Found\n",
"Location: https://uc10f85c3c33c4b5233bac4d074e.dl.dropboxusercontent.com/cd/0/inline/BYYklQk6LNPXNm7o5xE_fxE2GA9reePyNajQgoe9roPlSrtsJd4WN6RVww7zrtNZWFq8iZv349MNQJlm7vVaqRBxTcd0ufxkqbcJYJvOrORpxOPV7mHmhMjKYUncez8YNqELGwDd-aeZqLGKBC8spSnx/file# [following]\n",
"--2021-10-21 03:52:36-- https://uc10f85c3c33c4b5233bac4d074e.dl.dropboxusercontent.com/cd/0/inline/BYYklQk6LNPXNm7o5xE_fxE2GA9reePyNajQgoe9roPlSrtsJd4WN6RVww7zrtNZWFq8iZv349MNQJlm7vVaqRBxTcd0ufxkqbcJYJvOrORpxOPV7mHmhMjKYUncez8YNqELGwDd-aeZqLGKBC8spSnx/file\n",
"Resolving uc10f85c3c33c4b5233bac4d074e.dl.dropboxusercontent.com (uc10f85c3c33c4b5233bac4d074e.dl.dropboxusercontent.com)... 162.125.3.15, 2620:100:601b:15::a27d:80f\n",
"Connecting to uc10f85c3c33c4b5233bac4d074e.dl.dropboxusercontent.com (uc10f85c3c33c4b5233bac4d074e.dl.dropboxusercontent.com)|162.125.3.15|:443... connected.\n",
"HTTP request sent, awaiting response... 200 OK\n",
"Length: 297299 (290K) [image/png]\n",
"Saving to: demo/banana.png\n",
"\n",
"demo/banana.png 100%[===================>] 290.33K --.-KB/s in 0.08s \n",
"\n",
"2021-10-21 03:52:36 (3.47 MB/s) - demo/banana.png saved [297299/297299]\n",
"\n"
]
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 420
},
"id": "o2eiitWnkQq_",
"outputId": "192b3ebb-202b-4d6e-e178-561223024318"
},
"source": [
"from PIL import Image\n",
"Image.open('demo/banana.png')"
],
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYkAAAGTCAYAAADdkO5AAAABd2lDQ1BJQ0MgUHJvZmlsZQAAeJx1kc0rRFEYxn8zaDA0CxaTqLsYsqAmSpaMhc0kDcpgM3PdmVF3Zm733kmTrbKxUBZi42vhP2CrbCmlSEkW/gJfG+l6j6tG4tzOfX895zxv5zwHgklTLzr1cSiWXDs1ntBm03Na6JEwUZroojmjO9bo5GSSf8fbNQFVr/pVr//3/TnCi4ajQ6BReEi3bFd4RDi57FqK14Xb9UJmUXhPuM+WAwqfKz3r84PivM8viu3p1BgEVU8t/4OzP1gv2EXhXuFY0azo3+dRN2kxSjNTUjtkduKQYpwEGlkqLGHi0i+1JJn97Yt/+SYoi0eXv0UVWxx5CuLtE7UiXQ2pOdEN+UyqKvffeTq5wQG/e0sCGu4977kbQpvwseF57/ue93EAdXdwWqr5y5LT8KvoGzUttguRVTg+q2nZLThZg+itlbEzX1KdzGAuB09H0JqGtktonvez+l7n8AamV+SJLmB7B3pkf2ThE7z6Z+tvc+SlAAEAAElEQVR4nLz9S6ws25aeh31jPiIiM9dae+/zuHWqWHy4YViCDD1sqCfZXUvuGLDbBqG2AUN9wbYoWDbctiULBGSxRcA2BJMQIIsPyZRokiWbpkULIGmKZJGsB+s+zjl7r7UyI2I+hhtjzMi1q27de9lxAvfuc85eKzMyYs4xx/jH//9D/jf/xr+hqkpKiQCoKACqQMf+WUDU/mOIgS7Cy7YSEHJKQEPVfi+lhKrSWiOmBEDvnVYrMQZEAhICqoqqMsWJLopqp/d+/PecM/teuF2fmeYTKUYQEASAnDO9d2rvpBgRkeMaemt24SLUUvFvgYggQYgx0fZKRzlfztRaj2vf953q12r3QUHv/zzPma56/HsIgVorH58/Mk0zT49P3K43WqucTjMxZlQ727bbJSHEmBEBbY2YJ0IM7PuKANu2Mc9nUk60WgkxECSw7zv7euPp/XtEOkqA1thrI4To96OiQK2V1hopJfseChICQkCCECTY8xUopYAqIoHeO3my+xpCIIRIErvq0hoShPM8232UYD8TE7V3tHVOy4k4TYjA+Xzi6emJx8cHPnz5FX/gV/8AMUZ+kdfLyws//OEP+fjtt3z/7Sdu2yu9d24vK6VvtNYQVVTEn2xn3wt9b4QYEHv09CC0pkgMBIRt3UDU/q53YhRyTrTW6N3eqew7L88vnE4nnh4utNrRCEFhb50kgZgSVZSgQkqzr/8GCAElxsS+7dz2wnle2PaNT8/P/PKv/AohCrVX1utKq0rKiXmaeb4+Q+vMy4k8Z6RDKZWQo312Kcwpk1Km9krviiSh10qtnXma7P6KEHx9AlRRtm1HBKaY6QLSlZQzYl8aiYkQ7vtnmiZCsDXfWiP6/sohoCH4v0NtHREhpYiqEkNkmjJVO1o7RHvPJJE8TYgIKASUbz9+ZF1XHh8fOJ8eUNrxmfu2IyIsp4uvNaH3SmudEKB3ECBPEzEkgtj6jjEiQWi+n0O0ddpRtFZASFMmIKgIOWdCgADUrrTWOU0TIWcC2PX4GhP/c7yCx7DeGhJsL4x7FWOk+71trR2/O2KUqvp36mhrNBS6stdCKYWy7fz4xz+m0ZnzBAp9L7R+fxbX2xVBOJ1OlFKP+LauKwpHTOwiKIB2QIgx0Hujq1LKTlCIOUFXeu+klECh9QaqpJhI+HpSVZq/mQQBxA4HEQsi/iWbKoIwe5DOObGu5biZtd0ftmCbcUSk1pQYleSBtTelR+zvVQkiEGyT11oRgdP5kRiE1hVRqN0OgJSz/emHx7j5IxD11izYRVs8tTZUO9SOhk6jMeXpeHghhCM4igRyTpTS6K2Rc0aCHThiEcYCRa120E0TX3/1tR8oSi0bwb9vSoLfPcS/ateGNqXWjTkKMU7HwpvnxT67FnpT2/QRTqcz0zSjKvSuxASSM4lAFL
Gg6QtYxD6rtUYIgiBI78QY0K4Q/XF1u167Z4Jqp1U7DLR3tCuaMzEEcgwExTdDp/dCnjKIIBIJMYAnByEEAtC126NHWdeVy+Xy+x4Mb1/zPJOmTMoTec6UNrOuK1vbLUV4s05UlUAgp0xtSggRkW5rTUFipGoHDwqt24bKORGzBTchEIOfmjGxLDMxRvbWqa0yS7aNJ4GYImoLGySi2u3AR2z9KpRW+fj8id6V05Rp2ii90nslhUQvtra72MZWIBDpMTDlcU8tNwlqz3PKmTBltlJopZCiHSSEyDRFJAWkC006SMBCXyeMRdGgS7cgE4M9X18nQe4JzzRN94TPA5+qkv0AEt/bdj+TPWsRamsQLQaklOihUWtjSokQoq89SzgrcDmfuTxejmRDgiAKEoR0udi+ShEloCgpzah2SinkHEgpEmMiSLDEIIgnNoERzlOKdJSMIPNClICI0rvSaYjfj94VEWXOCUmRI3r5ATViytsgf+yzEMgeiwA6SgxCwOLJ+J0RY1qzmDIS6AYEAk06np8TUuR0PlNKsaghEKbE9z/8no+fPpJy5osPX7CcFkiJFAPURmsdBcq2sXVlOp8s6LdGo5FiomsjihDBf14JmmiiMBKDbrEzxUjrjYRXCdKVLrahA4Hu2SVd7QEKiAqtFWKyLxzxgBmD3TAFDQLaUcQybhFbaKidbiHQ+ng4FvBH4vP2xA3BFnKIdi0528kYWgDtnu1Hu/G9WUDzhzEWPP6nVUqRsnfLpFq3xRWCZR1iQbG2RoqWDanagdE9EARJLMtC1+pZOV5FQfPDJYZIa42cZ6aUaKgdhl1JMdimUyXEiPaOyIygKN2Cut8XO1ACKVmWFrBDwLI9UKzyWm83luVCTMk2/V4s4IRAmCZSjHRt9m4x2e+OHeQxMQW7v6qdrkqyiGEb5E01NaVkvwBe8XkV0hp5ORE1oNIptxshRup5prXOuq2s68qnlxeWZfmFq4kUoh1wggcEYZkXRJR9345n7Q8A1YaqBdXeGnutlgSIoq2xdft3W/gV6bboguLr3tZvj5GH0wUJQhdlSQmNYs/AK5KO0iVTS6XrTkqJ2pU5+d/XwjTPjFiTc+KyLERVmidMp2lBZCdmC3Q5JcSfd0OprSAdJNoKERG0Nsq2U1qzA4lAium4B10gqPgeVjqdqp0lT4gq120jq11Po5M12GGrSsz5ntyJJSIShRCjrXM/mHNKxx7r3RLKFCIpRCQmJDia0DM59iOIxxgtadRKa/D4/j2N7vtlonvFP00JkWjLUwK9dVpvVi1IYmTMy3Lye2CVW0qRkKPVcqlZYA2Rpt0qJrH9CZ3elYb4+hFLgPLs+0g9CbOKQN/EkLfVxPjvIwapWsWaQ7DqXSCKUN/8zqiKYkpH8hxHzOuWbEwp00OEp0e2dePj80dondO0MJ9O9O+/Y982QxhipvRqh4uv52VeALhdb0zR9tBeO1EC6/XK7bbyxRcfrIqaJiJehXUhJqErdI9HXazCSSEE1m0jiHA6LfSuHoTUHogHHXonADkm9t6JKYJ2WnW4B8sCqnZqV6IHectqhRADeZqOzAQRew8gBL84r1haa3S1Babw5jSGoMpWu5W4Djd1L6VSjJ+Vd+PU772RUmaaJ3qzDMc+0wLv2yqiqxIk0NWuK6ZALQZvpCURiKgotTXaXphO9lD2fSPniSSB0zwRQ2TvhVobMQRSytAbI08JDsuV0uhajmxERCyb9w1gSaEvQvW9KoHrdeO7n3zLV19HnvIDm9o15ZT84LRyN4gQgm1yBVopiESmnGxdxUjbi8cz8ViQoFtFYRvbNlaM90xKVdn2SoiRNC8QFNFITEKKgmL3rU/Ktq7k65Wy7cTz6ecfENEO5NNy4iW9UEolxkzOndY2C8p1VHXNn2MgCKz7yt52kMgpBnqzZCZKQAKUplxfrrxcX/ilH3xFSonmGzQS7CBuDUKi1U5R5ZwSEgy2sIDQvXy3QJZiIkhHgx0mOcBTzl
Zpdlub87IQUoLxXEJgipMd3L2ybiuP+QGRCLWgfr/lgFjF4JYUueRpFO50UQIGdFntYBUC2gkhEVs9Mt7JD
"text/plain": [
"<PIL.PngImagePlugin.PngImageFile image mode=RGBA size=393x403 at 0x7FD3A038A490>"
]
},
"metadata": {},
"execution_count": 20
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "sRfAui8EkTDX"
},
"source": [
"### Prepare the config file and checkpoint file\n",
"\n",
"We configure a model with a config file and save weights with a checkpoint file.\n",
"\n",
"On GitHub, you can find all these pre-trained models in the config folder of MMClassification. For example, you can find the config files and checkpoints of Mobilenet V2 in [this link](https://github.com/open-mmlab/mmclassification/tree/master/configs/mobilenet_v2).\n",
"\n",
"We have integrated many config files for various models in the MMClassification repository. As for the checkpoint, we can download it in advance, or just pass an URL to API, and MMClassification will download it before load weights."
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "VvRoZpBGkgpC",
"outputId": "68282782-015e-4f5c-cef2-79be3bf6a9b7"
},
"source": [
"# Confirm the config file exists\n",
"!ls configs/mobilenet_v2/mobilenet-v2_8xb32_in1k.py\n",
"\n",
"# Specify the path of the config file and checkpoint file.\n",
"config_file = 'configs/mobilenet_v2/mobilenet-v2_8xb32_in1k.py'\n",
"checkpoint_file = 'https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth'"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"configs/mobilenet_v2/mobilenet-v2_8xb32_in1k.py\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "eiYdsHoIkpD1"
},
"source": [
"### Inference the model\n",
"\n",
"MMClassification provides high-level Python API to inference models.\n",
"\n",
"At first, we build the MobilenetV2 model and load the checkpoint."
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 323,
"referenced_widgets": [
"badf240bbb7d442fbd214e837edbffe2",
"520112917e0f4844995d418c5041d23a",
"9f3f6b72b4d14e2a96b9185331c8081b",
"a275bef3584b49ab9b680b528420d461",
"c4b2c6914a05497b8d2b691bd6dda6da",
"863d2a8cc4074f2e890ba6aea7c54384",
"be55ab36267d4dcab1d83dfaa8540270",
"31475aa888da4c8d844ba99a0b3397f5",
"e310c50e610248dd897fbbf5dd09dd7a",
"8a8ab7c27e404459951cffe7a32b8faa",
"e1a3dce90c1a4804a9ef0c687a9c0703"
]
},
"id": "KwJWlR2QkpiV",
"outputId": "982b365e-d3be-4e3d-dee7-c507a8020292"
},
"source": [
"import mmcv\n",
"from mmcls.apis import inference_model, init_model, show_result_pyplot\n",
"\n",
"# Specify the device, if you cannot use GPU, you can also use CPU \n",
"# by specifying `device='cpu'`.\n",
"device = 'cuda:0'\n",
"# device = 'cpu'\n",
"\n",
"# Build the model according to the config file and load the checkpoint.\n",
"model = init_model(config_file, checkpoint_file, device=device)"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stderr",
"text": [
"/usr/local/lib/python3.7/dist-packages/mmcv/cnn/bricks/transformer.py:28: UserWarning: Fail to import ``MultiScaleDeformableAttention`` from ``mmcv.ops.multi_scale_deform_attn``, You should install ``mmcv-full`` if you need this module. \n",
" warnings.warn('Fail to import ``MultiScaleDeformableAttention`` from '\n",
"/usr/lib/python3.7/importlib/_bootstrap.py:219: RuntimeWarning: numpy.ufunc size changed, may indicate binary incompatibility. Expected 192 from C header, got 216 from PyObject\n",
" return f(*args, **kwds)\n",
"/usr/lib/python3.7/importlib/_bootstrap.py:219: RuntimeWarning: numpy.ufunc size changed, may indicate binary incompatibility. Expected 192 from C header, got 216 from PyObject\n",
" return f(*args, **kwds)\n",
"/usr/lib/python3.7/importlib/_bootstrap.py:219: RuntimeWarning: numpy.ufunc size changed, may indicate binary incompatibility. Expected 192 from C header, got 216 from PyObject\n",
" return f(*args, **kwds)\n",
"/usr/local/lib/python3.7/dist-packages/yaml/constructor.py:126: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3,and in 3.9 it will stop working\n",
" if not isinstance(key, collections.Hashable):\n"
]
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"Use load_from_http loader\n"
]
},
{
"output_type": "stream",
"name": "stderr",
"text": [
"Downloading: \"https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth\" to /root/.cache/torch/hub/checkpoints/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth\n"
]
},
{
"output_type": "display_data",
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "badf240bbb7d442fbd214e837edbffe2",
"version_minor": 0,
"version_major": 2
},
"text/plain": [
" 0%| | 0.00/13.5M [00:00<?, ?B/s]"
]
},
"metadata": {}
},
{
"output_type": "stream",
"name": "stderr",
"text": [
"/content/mmclassification/mmcls/apis/inference.py:44: UserWarning: Class names are not saved in the checkpoint's meta data, use imagenet by default.\n",
" warnings.warn('Class names are not saved in the checkpoint\\'s '\n"
]
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "GiSACYFgkvNE",
"outputId": "252ae93d-a4fd-4581-f98e-6dadfde6c078"
},
"source": [
"# The model's inheritance relationship\n",
"model.__class__.__mro__"
],
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"(mmcls.models.classifiers.image.ImageClassifier,\n",
" mmcls.models.classifiers.base.BaseClassifier,\n",
" mmcv.runner.base_module.BaseModule,\n",
" torch.nn.modules.module.Module,\n",
" object)"
]
},
"metadata": {},
"execution_count": 23
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "FyjY7hP9k0_D",
"outputId": "6cc4f9aa-5d25-46ae-ff21-4f24e68760c9"
},
"source": [
"# The inference result in a single image\n",
"img = 'demo/banana.png'\n",
"img_array = mmcv.imread(img)\n",
"result = inference_model(model, img_array)\n",
"result"
],
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"{'pred_class': 'banana', 'pred_label': 954, 'pred_score': 0.9999284744262695}"
]
},
"metadata": {},
"execution_count": 24
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 427
},
"id": "ndwdD8eUk96g",
"outputId": "5cf3639c-a857-4e92-dc09-21ea0ec474f9"
},
"source": [
"%matplotlib inline\n",
"# Visualize the inference result\n",
"show_result_pyplot(model, img, result)"
],
"execution_count": null,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAZAAAAGaCAYAAAAhJBWqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAFiQAABYkBbWid+gAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9e7BvW1Xf+RlzzrXW77f3OffJQ0QJpbQiaFX+ADRRELVVoFOKCna1gXSI1amIhHSSrga7uruqG7pb0FQUtapTrZ2YRKPy8kV83YsBBaNcBFEBeYlg0Mg5wD3nnr1/v7XmnKP/GGPOtfY5F6TzT1dX3XXr1Dl3799vrbnmHHM8vuM7xhRV5aHroeuh66Hroeuh6//tFf6/HsBD10PXQ9dD10PX/z+vhwzIQ9dD10PXQ9dD13/SlW7+wStf9jIFCCEgIkAFAUFQlf45URBVqgiIkKnUWhmHgePx4B8SROxeWisxRlQVVaWUij1HGIaBUgqlKMMwIqKoVlTV7gGUWvt3gwhVAZRa7T7TNIEqCj5u7LMhoKrUUqj+/RgjpVRUK1QlDpGcM0MaGMaRWmu/R62VZcmMYyTnSsmZlEYkCCUXxjFRayXESCkFgBgjtKlSeOD6NVBlmHZM04SiHA+z/VIFCQIKOR8Zx4lxnJiXmSCCKqQ0kEumFiXEQIqRGFN/dyiEGBAJlFwJPgcF+30phZKzjS0lQhACQgwREAjB/lYl52zjQai1ADY+ESEAMSZSjKjYKw4poWrzFFMkDiMQQGEIgZASEiMn+4nTS5e4fPkyd951J3c97GHcdtttpHSLCN5y5Zz55Kc+ydUrV7n/k5/iges3OBwOPHDjOssy2zpuLzX5yXMGVURMFgBUAsWkBLpcVELA5jAIWipaFVC0VubjTKmVYRwQCYhCjPaOIoLEQEVRhRgSIQT7LuoemnJ+OHB+ds44jIzjyAPnZ4QQuPvOOwE4LjNLLqgK4zDaz+YjMUZ2uwkRqEUptZKC3VWCrd2yLFArMSSbelVb4xAJKtSgBPU1plJqYV6yr2c0ed3sGxH7roj9vq1R8L24lEIMgSEl+06M9rsmJyH0PRtTZIgJDSZPtVSGlAghuj7A5Edtb0kURAJDSIgogoDLI8AQB9QUkq9rJedMCIGUbF8ECX0tmx7Lc0ZEiDGg7V1iJIq4HlFUKiEEYgz2/6pEicRhIEhAEN9nstGPdL0kIv3fbc5KKaj4Pqmmr9q+bfcppVBLIcRIrZVSK6JQtJLnmTkvlFK4fu06eck2FQqqhU/8xRWuXb9GGgbuuvMu9vsdpGR7IhdKqZwfzlmOR1RhPNnbvNdCpRJjMh2ttm8Oh3Ob53GkqO2TKGLvoUp02XvWs79N4EEMiI8NcWUMSpBAS5WI6TzMbgRKyQQXoBCiCXIM/V60BQKq6moMsMFICJRqC9cmtj/LjZOwGiEJJgBJTFhrNUNQSrYNEwJVTQFs8zsiYkLjix5jIC8VBWqpJnQhUN0IqC9+DIFxSDYPQagSUa0ESW4Mim0cIPlGtPeUPpaURoYYqZgyNEUTSEGogITgis4UhVJtq7sBNPMdiBFSigQEcUOq/jnVynw8MgyTKXFVynK0sYiQhsEUPxUUQkwX59rnKfqYVCvVBUb8gxJj3yQpRlOmfQ0g50KpB+I4kVyZLfMRJDAM0RXX7Mr/BtNuRwjB3/EzX0ECMQSCK4EYTdHGEChl6ca7vVTNaotoE0QuC4h9D63kUl1eIuJazJyidSMIQpXANE62L8JmriQQgim7Nne5KEvOxGhyLbiSr0qUwDhNpJQIUZjGgSi27hVIIRGHSFElpghVGYeBNER7x1qppZhCbYNQKEtmOc423gRRIkGCv3dF3eCB7z2tqMBuGKilMOcMCDEFUyi4gpTmbIS+5lUhxUAEU5AiSAhdWaoqWm2fxBAYojkPEsSV1oAm7Qq+KaOqGdXEdHpKoVKrKe
C6mNIfxwQE1yVCLZWqhSAChO6kTtPO5lzc0UmRkMxJ0qGYsRfbk7bfgs2VP7NK9c8IYxJCSIQYXZdATLHL6qeT2eh7pM1Z8O+EYEYvipA3hrr9bc+xcYUmf+7Ijmmghgi3XeZ4OHLt+v1Qld0wMe331Ps/xXw8utFM5FpMf5uXzW7aAXA4PzD6eixFCQiHGzc4HI7cddedpJSIw2hGVYSE+ZZVzR0SQEVh46/dakBCYJ5nigi73eRKWFl1sVtfVYLCEBKzmgGQagvRlI55wWqb1dwF9/3MW2neflP0MYU+oW4QbYPVStVqm0dkM1olqHLMZrS0VoqqCQgQQ7xg7bsXqmZ5x2mklkLOxTw8Vcypi2v0Qou+TBHHFMhLQXQh7RJCRFFKLZR5YdhNACzLTEoDSQL7aSRIYNFMzjYXaUigleh6TmIkqrIshaoLwzD0cdfS3gEk+Krai5g3HAKHw8z9n/wUl2+7jdtToihkN4BmyNRXz4S1zWMpxaKjlMzTk0hdcp8r8flAqzsNJky2ydswzIAtuUAQdmmAqEAgxEQI7mxkpeTKPC8cz8+ZD0d247i+z6e5YohM48R+t+PGMLjXnEhpwKR5jWzp3qDddpkzc56pqmYIJBAqJDHlXkU5HBduPHCdaRq4fPmSe8XaHQ6ViiLuVFRzKmJoXpQpAq2Ay4h719X/P4TAPgamquZtVpO/wRU0pVD9e1ItKl6WhVwL0zgiBKgtul2doDbOYfRND6iYompuh7jzZqtBf4YEc6gGRwnwfattn2gl9ShaiR7tSAgMITCMo+2/EBkGi8xqzZRcGMaJEGOXlxbRhCFsUIKN0hexCNkjAn8FtHnHMfq7uAEJhZwFkUpKiVQHQhCP1II/Wy7ukxDd4NhVVBFtzqJ0Y9T0g8ZosisBiXQD0vakiPQ5X9w7FxEGR1mKqkWlIu5oiO/dyFhD/8w2cml7UlzrhJCIg0CtlCLsw2SRSbnEMs8Eidx15x1cvnyJvCyoCFmzGTxW+RURduxIoSEjlWGw6DEvRx44e4DT0z0iuwu6mwBaTA+kkMxJl4CG1TG/xYDkZeH8xnV2J6f+EwvHYwxoaVEJrtTtpWOTZjFPUUUd4pBuvSXafZolCpvFNY/QhDFEdY88+Ka0TaFLJYwmKO1ry1LIeSHPMyenp4QYyXkBEfcs6N5OdggniDAvlVoXUkruQWEeSKnkrMRYmcbRn6Pm4bs1j9GkaV4WmGdbiCBQYamFWAqIUCtorWQx7zOXwrws5Jw5OTkhIBznbEZUC5oN4gsO9bWrCVjz8kuxRWtOQPOqQgjsT3bsT/eIBLRkkohFHSFQc/YIbmNIq0E4KNCNijKk0IXQYAk8EpS+5gGbMxFIMVGl0uCuiC1dDZXJIZthSMQgFK0c55nxeLQxfTYkQIFhGBjGiTQMxNS84gaDBmI0GLIZENSUpUaTv7ahihbzBENECWheqHmmlozWRKlKrQWtyjAMFr47dNuMdXLopkOIQZCqJIlIDB3qiZvoxDxlh0hCYNpARyElQlVytYg4xMgQAlIWPEY2OfV7N2NZ/b8YozkFqv4c8QDFvH1R5fxwTs6Z3TSRPPpsjmBlddpqKW7UlLIUpikiIRAbXIUg0aGtabDIKZq5qiXBZBFii/jtHT0KTglx56yhCsMwEGNgWQwWbc8Rn+9cs8On0mVdGZiXhSHGrggRM+hNFrRW2yNB0FoJdTVmiEAptq4iBCz6anpRJBLFIwl3wMQdDnG5h6bThCGYnmuWz/aFG5iNv9tg3wYRSy48mPjbFrO9FQnUKCSTKMqkXAKOKZmTsZhDomBzMgxIFHN4lnV/DEPi/HCOZjMeIRmMJTHyOZ/zSEQiSymIR68thVC1ksRhPYmIOIrh1y0GpIqyOznh8qUTy3ko5gEhaPBIAoUg7ukbXFPtg927aAIpQUh4HqJWalFiMsHRaqtairLMs79s5PT0pLkoHhZXqiiDKvPRIIJhtLxJC3NjtM2q6oIbAm
1/1+JhczCBGEYXJDcO9n2HtnSF25p3CKYkVd24pUTIBRHtApJidEjLJCAl29BLzmR/DxqMMQz2u1rQZUZps
"text/plain": [
"<Figure size 393.01x403.01 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
}
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "oDMr3Bx_lESy"
},
"source": [
"## Fine-tune a model with Python API\n",
"\n",
"Fine-tuning is to re-train a model which has been trained on another dataset (like ImageNet) to fit our target dataset. Compared with training from scratch, fine-tuning is much faster and can avoid over-fitting problems when training on a small dataset.\n",
"\n",
"The basic steps of fine-tuning are as below:\n",
"\n",
"1. Prepare the target dataset so that it meets MMClassification's requirements.\n",
"2. Modify the training config.\n",
"3. Start training and validation.\n",
"\n",
"More details are in [the docs](https://mmclassification.readthedocs.io/en/latest/tutorials/finetune.html)."
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "TJtKKwAvlHX_"
},
"source": [
"### Prepare the target dataset\n",
"\n",
"Here we download the cats & dogs dataset directly. You can find more introduction about the dataset in the [tools tutorial](https://colab.research.google.com/github/open-mmlab/mmclassification/blob/master/docs/tutorials/MMClassification_tools.ipynb)."
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "3vBfU8GGlFPS",
"outputId": "b12dadb4-ccbc-45b4-bb08-3d24977ed93c"
},
"source": [
"# Download the cats & dogs dataset\n",
"!wget https://www.dropbox.com/s/wml49yrtdo53mie/cats_dogs_dataset_reorg.zip?dl=0 -O cats_dogs_dataset.zip\n",
"!mkdir -p data\n",
"!unzip -qo cats_dogs_dataset.zip -d ./data/"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"--2021-10-21 03:57:58-- https://www.dropbox.com/s/wml49yrtdo53mie/cats_dogs_dataset_reorg.zip?dl=0\n",
"Resolving www.dropbox.com (www.dropbox.com)... 162.125.80.18, 2620:100:6018:18::a27d:312\n",
"Connecting to www.dropbox.com (www.dropbox.com)|162.125.80.18|:443... connected.\n",
"HTTP request sent, awaiting response... 301 Moved Permanently\n",
"Location: /s/raw/wml49yrtdo53mie/cats_dogs_dataset_reorg.zip [following]\n",
"--2021-10-21 03:57:58-- https://www.dropbox.com/s/raw/wml49yrtdo53mie/cats_dogs_dataset_reorg.zip\n",
"Reusing existing connection to www.dropbox.com:443.\n",
"HTTP request sent, awaiting response... 302 Found\n",
"Location: https://ucfd8157272a6270e100392293da.dl.dropboxusercontent.com/cd/0/inline/BYbFG6Zo1S3l2kJtqLrJIne9lTLgQn-uoJxmUjhLSkp36V7AoiwlyR2gP0XVoUQt9WzF2ZsmeERagMy7rpsNoIYG4MjsYA90i_JsarFDs9PHhXHw9qwHpHqBvgd4YU_mwDQHuouJ_oCU1kft04QgCVRg/file# [following]\n",
"--2021-10-21 03:57:59-- https://ucfd8157272a6270e100392293da.dl.dropboxusercontent.com/cd/0/inline/BYbFG6Zo1S3l2kJtqLrJIne9lTLgQn-uoJxmUjhLSkp36V7AoiwlyR2gP0XVoUQt9WzF2ZsmeERagMy7rpsNoIYG4MjsYA90i_JsarFDs9PHhXHw9qwHpHqBvgd4YU_mwDQHuouJ_oCU1kft04QgCVRg/file\n",
"Resolving ucfd8157272a6270e100392293da.dl.dropboxusercontent.com (ucfd8157272a6270e100392293da.dl.dropboxusercontent.com)... 162.125.3.15, 2620:100:6018:15::a27d:30f\n",
"Connecting to ucfd8157272a6270e100392293da.dl.dropboxusercontent.com (ucfd8157272a6270e100392293da.dl.dropboxusercontent.com)|162.125.3.15|:443... connected.\n",
"HTTP request sent, awaiting response... 302 Found\n",
"Location: /cd/0/inline2/BYYSXb-0kWS7Lpk-cdrgBGzcOBfsvy7KjhqWEgjI5L9xfcaXohKlVeFMNFVyqvCwZLym2kWCD0nwURRpQ2mnHICrNsrvTvavbn24hk1Bd3_lXX08LBBe3C6YvD2U_iP8UMXROqm-B3JtnBjeMpk1R4YZ0O6aVLgKu0eET9RXsRaNCczD2lTK_i72zmbYhGmBvlRWmf_yQnnS5WKpGhSAobznIqKzw78yPzo5FsgGiEj5VXb91AElrKVAW8HFC9EhdUs7RrL3q9f0mQ9TbQpauoAp32TL3YQcuAp891Rv-EmDVxzfMwKVTGU8hxR2SiIWkse4u2QGhliqhdha7qBu7sIPcIoeI5-DdSoc6XG77vTYTRhrs_cf7rQuTPH2gTIUwTY/file [following]\n",
"--2021-10-21 03:57:59-- https://ucfd8157272a6270e100392293da.dl.dropboxusercontent.com/cd/0/inline2/BYYSXb-0kWS7Lpk-cdrgBGzcOBfsvy7KjhqWEgjI5L9xfcaXohKlVeFMNFVyqvCwZLym2kWCD0nwURRpQ2mnHICrNsrvTvavbn24hk1Bd3_lXX08LBBe3C6YvD2U_iP8UMXROqm-B3JtnBjeMpk1R4YZ0O6aVLgKu0eET9RXsRaNCczD2lTK_i72zmbYhGmBvlRWmf_yQnnS5WKpGhSAobznIqKzw78yPzo5FsgGiEj5VXb91AElrKVAW8HFC9EhdUs7RrL3q9f0mQ9TbQpauoAp32TL3YQcuAp891Rv-EmDVxzfMwKVTGU8hxR2SiIWkse4u2QGhliqhdha7qBu7sIPcIoeI5-DdSoc6XG77vTYTRhrs_cf7rQuTPH2gTIUwTY/file\n",
"Reusing existing connection to ucfd8157272a6270e100392293da.dl.dropboxusercontent.com:443.\n",
"HTTP request sent, awaiting response... 200 OK\n",
"Length: 228802825 (218M) [application/zip]\n",
"Saving to: cats_dogs_dataset.zip\n",
"\n",
"cats_dogs_dataset.z 100%[===================>] 218.20M 86.3MB/s in 2.5s \n",
"\n",
"2021-10-21 03:58:02 (86.3 MB/s) - cats_dogs_dataset.zip saved [228802825/228802825]\n",
"\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "15iKNG0SlV9y"
},
"source": [
"### Read the config file and modify the config\n",
"\n",
"In the [tools tutorial](https://colab.research.google.com/github/open-mmlab/mmclassification/blob/master/docs/tutorials/MMClassification_tools.ipynb), we have introduced all parts of the config file, and here we can modify the loaded config by Python code."
]
},
{
"cell_type": "code",
"metadata": {
"id": "WCfnDavFlWrK"
},
"source": [
"# Load the base config file\n",
"from mmcv import Config\n",
"cfg = Config.fromfile('configs/mobilenet_v2/mobilenet-v2_8xb32_in1k.py')\n",
"\n",
"# Modify the number of classes in the head.\n",
"cfg.model.head.num_classes = 2\n",
"cfg.model.head.topk = (1, )\n",
"\n",
"# Load the pre-trained model's checkpoint.\n",
"cfg.model.backbone.init_cfg = dict(type='Pretrained', checkpoint=checkpoint_file, prefix='backbone')\n",
"\n",
"# Specify sample size and number of workers.\n",
"cfg.data.samples_per_gpu = 32\n",
"cfg.data.workers_per_gpu = 2\n",
"\n",
"# Specify the path and meta files of training dataset\n",
"cfg.data.train.data_prefix = 'data/cats_dogs_dataset/training_set/training_set'\n",
"cfg.data.train.classes = 'data/cats_dogs_dataset/classes.txt'\n",
"\n",
"# Specify the path and meta files of validation dataset\n",
"cfg.data.val.data_prefix = 'data/cats_dogs_dataset/val_set/val_set'\n",
"cfg.data.val.ann_file = 'data/cats_dogs_dataset/val.txt'\n",
"cfg.data.val.classes = 'data/cats_dogs_dataset/classes.txt'\n",
"\n",
"# Specify the path and meta files of test dataset\n",
"cfg.data.test.data_prefix = 'data/cats_dogs_dataset/test_set/test_set'\n",
"cfg.data.test.ann_file = 'data/cats_dogs_dataset/test.txt'\n",
"cfg.data.test.classes = 'data/cats_dogs_dataset/classes.txt'\n",
"\n",
"# Specify the normalization parameters in data pipeline\n",
"normalize_cfg = dict(type='Normalize', mean=[124.508, 116.050, 106.438], std=[58.577, 57.310, 57.437], to_rgb=True)\n",
"cfg.data.train.pipeline[3] = normalize_cfg\n",
"cfg.data.val.pipeline[3] = normalize_cfg\n",
"cfg.data.test.pipeline[3] = normalize_cfg\n",
"\n",
"# Modify the evaluation metric\n",
"cfg.evaluation['metric_options']={'topk': (1, )}\n",
"\n",
"# Specify the optimizer\n",
"cfg.optimizer = dict(type='SGD', lr=0.005, momentum=0.9, weight_decay=0.0001)\n",
"cfg.optimizer_config = dict(grad_clip=None)\n",
"\n",
"# Specify the learning rate scheduler\n",
"cfg.lr_config = dict(policy='step', step=1, gamma=0.1)\n",
"cfg.runner = dict(type='EpochBasedRunner', max_epochs=2)\n",
"\n",
"# Specify the work directory\n",
"cfg.work_dir = './work_dirs/cats_dogs_dataset'\n",
"\n",
"# Output logs for every 10 iterations\n",
"cfg.log_config.interval = 10\n",
"\n",
"# Set the random seed and enable the deterministic option of cuDNN\n",
"# to keep the results reproducible.\n",
"from mmcls.apis import set_random_seed\n",
"cfg.seed = 0\n",
"set_random_seed(0, deterministic=True)\n",
"\n",
"cfg.gpu_ids = range(1)"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
"id": "HDerVUPFmNR0"
},
"source": [
"### Fine-tune the model\n",
"\n",
"Use the API `train_model` to fine-tune our model on the cats & dogs dataset."
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "P7unq5cNmN8G",
"outputId": "bf32711b-7bdf-45ee-8db5-e8699d3eff91"
},
"source": [
"import time\n",
"import mmcv\n",
"import os.path as osp\n",
"\n",
"from mmcls.datasets import build_dataset\n",
"from mmcls.models import build_classifier\n",
"from mmcls.apis import train_model\n",
"\n",
"# Create the work directory\n",
"mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n",
"# Build the classifier\n",
"model = build_classifier(cfg.model)\n",
"model.init_weights()\n",
"# Build the dataset\n",
"datasets = [build_dataset(cfg.data.train)]\n",
"# Add `CLASSES` attributes to help visualization\n",
"model.CLASSES = datasets[0].CLASSES\n",
"# Start fine-tuning\n",
"train_model(\n",
" model,\n",
" datasets,\n",
" cfg,\n",
" distributed=False,\n",
" validate=True,\n",
" timestamp=time.strftime('%Y%m%d_%H%M%S', time.localtime()),\n",
" meta=dict())"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stderr",
"text": [
"2021-10-21 04:04:12,758 - mmcv - INFO - initialize MobileNetV2 with init_cfg {'type': 'Pretrained', 'checkpoint': 'https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth', 'prefix': 'backbone'}\n",
"2021-10-21 04:04:12,759 - mmcv - INFO - load backbone in model from: https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth\n",
"2021-10-21 04:04:12,815 - mmcv - INFO - initialize LinearClsHead with init_cfg {'type': 'Normal', 'layer': 'Linear', 'std': 0.01}\n",
"2021-10-21 04:04:12,818 - mmcv - INFO - \n",
"backbone.conv1.conv.weight - torch.Size([32, 3, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,821 - mmcv - INFO - \n",
"backbone.conv1.bn.weight - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,823 - mmcv - INFO - \n",
"backbone.conv1.bn.bias - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,824 - mmcv - INFO - \n",
"backbone.layer1.0.conv.0.conv.weight - torch.Size([32, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,826 - mmcv - INFO - \n",
"backbone.layer1.0.conv.0.bn.weight - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,827 - mmcv - INFO - \n",
"backbone.layer1.0.conv.0.bn.bias - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,829 - mmcv - INFO - \n",
"backbone.layer1.0.conv.1.conv.weight - torch.Size([16, 32, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,830 - mmcv - INFO - \n",
"backbone.layer1.0.conv.1.bn.weight - torch.Size([16]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,832 - mmcv - INFO - \n",
"backbone.layer1.0.conv.1.bn.bias - torch.Size([16]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,833 - mmcv - INFO - \n",
"backbone.layer2.0.conv.0.conv.weight - torch.Size([96, 16, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,835 - mmcv - INFO - \n",
"backbone.layer2.0.conv.0.bn.weight - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,836 - mmcv - INFO - \n",
"backbone.layer2.0.conv.0.bn.bias - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,838 - mmcv - INFO - \n",
"backbone.layer2.0.conv.1.conv.weight - torch.Size([96, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,839 - mmcv - INFO - \n",
"backbone.layer2.0.conv.1.bn.weight - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,841 - mmcv - INFO - \n",
"backbone.layer2.0.conv.1.bn.bias - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,842 - mmcv - INFO - \n",
"backbone.layer2.0.conv.2.conv.weight - torch.Size([24, 96, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,844 - mmcv - INFO - \n",
"backbone.layer2.0.conv.2.bn.weight - torch.Size([24]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,845 - mmcv - INFO - \n",
"backbone.layer2.0.conv.2.bn.bias - torch.Size([24]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,847 - mmcv - INFO - \n",
"backbone.layer2.1.conv.0.conv.weight - torch.Size([144, 24, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,848 - mmcv - INFO - \n",
"backbone.layer2.1.conv.0.bn.weight - torch.Size([144]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,850 - mmcv - INFO - \n",
"backbone.layer2.1.conv.0.bn.bias - torch.Size([144]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,851 - mmcv - INFO - \n",
"backbone.layer2.1.conv.1.conv.weight - torch.Size([144, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,853 - mmcv - INFO - \n",
"backbone.layer2.1.conv.1.bn.weight - torch.Size([144]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,854 - mmcv - INFO - \n",
"backbone.layer2.1.conv.1.bn.bias - torch.Size([144]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,856 - mmcv - INFO - \n",
"backbone.layer2.1.conv.2.conv.weight - torch.Size([24, 144, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,857 - mmcv - INFO - \n",
"backbone.layer2.1.conv.2.bn.weight - torch.Size([24]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,858 - mmcv - INFO - \n",
"backbone.layer2.1.conv.2.bn.bias - torch.Size([24]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,860 - mmcv - INFO - \n",
"backbone.layer3.0.conv.0.conv.weight - torch.Size([144, 24, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,861 - mmcv - INFO - \n",
"backbone.layer3.0.conv.0.bn.weight - torch.Size([144]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,863 - mmcv - INFO - \n",
"backbone.layer3.0.conv.0.bn.bias - torch.Size([144]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,864 - mmcv - INFO - \n",
"backbone.layer3.0.conv.1.conv.weight - torch.Size([144, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,866 - mmcv - INFO - \n",
"backbone.layer3.0.conv.1.bn.weight - torch.Size([144]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,867 - mmcv - INFO - \n",
"backbone.layer3.0.conv.1.bn.bias - torch.Size([144]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,869 - mmcv - INFO - \n",
"backbone.layer3.0.conv.2.conv.weight - torch.Size([32, 144, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,870 - mmcv - INFO - \n",
"backbone.layer3.0.conv.2.bn.weight - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,872 - mmcv - INFO - \n",
"backbone.layer3.0.conv.2.bn.bias - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,873 - mmcv - INFO - \n",
"backbone.layer3.1.conv.0.conv.weight - torch.Size([192, 32, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,875 - mmcv - INFO - \n",
"backbone.layer3.1.conv.0.bn.weight - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,876 - mmcv - INFO - \n",
"backbone.layer3.1.conv.0.bn.bias - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,878 - mmcv - INFO - \n",
"backbone.layer3.1.conv.1.conv.weight - torch.Size([192, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,879 - mmcv - INFO - \n",
"backbone.layer3.1.conv.1.bn.weight - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,882 - mmcv - INFO - \n",
"backbone.layer3.1.conv.1.bn.bias - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,883 - mmcv - INFO - \n",
"backbone.layer3.1.conv.2.conv.weight - torch.Size([32, 192, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,885 - mmcv - INFO - \n",
"backbone.layer3.1.conv.2.bn.weight - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,886 - mmcv - INFO - \n",
"backbone.layer3.1.conv.2.bn.bias - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,887 - mmcv - INFO - \n",
"backbone.layer3.2.conv.0.conv.weight - torch.Size([192, 32, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,889 - mmcv - INFO - \n",
"backbone.layer3.2.conv.0.bn.weight - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,890 - mmcv - INFO - \n",
"backbone.layer3.2.conv.0.bn.bias - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,892 - mmcv - INFO - \n",
"backbone.layer3.2.conv.1.conv.weight - torch.Size([192, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,894 - mmcv - INFO - \n",
"backbone.layer3.2.conv.1.bn.weight - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,895 - mmcv - INFO - \n",
"backbone.layer3.2.conv.1.bn.bias - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,896 - mmcv - INFO - \n",
"backbone.layer3.2.conv.2.conv.weight - torch.Size([32, 192, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,898 - mmcv - INFO - \n",
"backbone.layer3.2.conv.2.bn.weight - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,899 - mmcv - INFO - \n",
"backbone.layer3.2.conv.2.bn.bias - torch.Size([32]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,901 - mmcv - INFO - \n",
"backbone.layer4.0.conv.0.conv.weight - torch.Size([192, 32, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,903 - mmcv - INFO - \n",
"backbone.layer4.0.conv.0.bn.weight - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,907 - mmcv - INFO - \n",
"backbone.layer4.0.conv.0.bn.bias - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,908 - mmcv - INFO - \n",
"backbone.layer4.0.conv.1.conv.weight - torch.Size([192, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,910 - mmcv - INFO - \n",
"backbone.layer4.0.conv.1.bn.weight - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,911 - mmcv - INFO - \n",
"backbone.layer4.0.conv.1.bn.bias - torch.Size([192]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,913 - mmcv - INFO - \n",
"backbone.layer4.0.conv.2.conv.weight - torch.Size([64, 192, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,914 - mmcv - INFO - \n",
"backbone.layer4.0.conv.2.bn.weight - torch.Size([64]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,915 - mmcv - INFO - \n",
"backbone.layer4.0.conv.2.bn.bias - torch.Size([64]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,917 - mmcv - INFO - \n",
"backbone.layer4.1.conv.0.conv.weight - torch.Size([384, 64, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,918 - mmcv - INFO - \n",
"backbone.layer4.1.conv.0.bn.weight - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,920 - mmcv - INFO - \n",
"backbone.layer4.1.conv.0.bn.bias - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,921 - mmcv - INFO - \n",
"backbone.layer4.1.conv.1.conv.weight - torch.Size([384, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,923 - mmcv - INFO - \n",
"backbone.layer4.1.conv.1.bn.weight - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,924 - mmcv - INFO - \n",
"backbone.layer4.1.conv.1.bn.bias - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,925 - mmcv - INFO - \n",
"backbone.layer4.1.conv.2.conv.weight - torch.Size([64, 384, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,927 - mmcv - INFO - \n",
"backbone.layer4.1.conv.2.bn.weight - torch.Size([64]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,928 - mmcv - INFO - \n",
"backbone.layer4.1.conv.2.bn.bias - torch.Size([64]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,930 - mmcv - INFO - \n",
"backbone.layer4.2.conv.0.conv.weight - torch.Size([384, 64, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,932 - mmcv - INFO - \n",
"backbone.layer4.2.conv.0.bn.weight - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,933 - mmcv - INFO - \n",
"backbone.layer4.2.conv.0.bn.bias - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,935 - mmcv - INFO - \n",
"backbone.layer4.2.conv.1.conv.weight - torch.Size([384, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,936 - mmcv - INFO - \n",
"backbone.layer4.2.conv.1.bn.weight - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,938 - mmcv - INFO - \n",
"backbone.layer4.2.conv.1.bn.bias - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,939 - mmcv - INFO - \n",
"backbone.layer4.2.conv.2.conv.weight - torch.Size([64, 384, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,941 - mmcv - INFO - \n",
"backbone.layer4.2.conv.2.bn.weight - torch.Size([64]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,942 - mmcv - INFO - \n",
"backbone.layer4.2.conv.2.bn.bias - torch.Size([64]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,944 - mmcv - INFO - \n",
"backbone.layer4.3.conv.0.conv.weight - torch.Size([384, 64, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,945 - mmcv - INFO - \n",
"backbone.layer4.3.conv.0.bn.weight - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,946 - mmcv - INFO - \n",
"backbone.layer4.3.conv.0.bn.bias - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,948 - mmcv - INFO - \n",
"backbone.layer4.3.conv.1.conv.weight - torch.Size([384, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,949 - mmcv - INFO - \n",
"backbone.layer4.3.conv.1.bn.weight - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,951 - mmcv - INFO - \n",
"backbone.layer4.3.conv.1.bn.bias - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,952 - mmcv - INFO - \n",
"backbone.layer4.3.conv.2.conv.weight - torch.Size([64, 384, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,954 - mmcv - INFO - \n",
"backbone.layer4.3.conv.2.bn.weight - torch.Size([64]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,955 - mmcv - INFO - \n",
"backbone.layer4.3.conv.2.bn.bias - torch.Size([64]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,957 - mmcv - INFO - \n",
"backbone.layer5.0.conv.0.conv.weight - torch.Size([384, 64, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,958 - mmcv - INFO - \n",
"backbone.layer5.0.conv.0.bn.weight - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,959 - mmcv - INFO - \n",
"backbone.layer5.0.conv.0.bn.bias - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,961 - mmcv - INFO - \n",
"backbone.layer5.0.conv.1.conv.weight - torch.Size([384, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,963 - mmcv - INFO - \n",
"backbone.layer5.0.conv.1.bn.weight - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,964 - mmcv - INFO - \n",
"backbone.layer5.0.conv.1.bn.bias - torch.Size([384]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n"
]
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"Use load_from_http loader\n"
]
},
{
"output_type": "stream",
"name": "stderr",
"text": [
"2021-10-21 04:04:12,965 - mmcv - INFO - \n",
"backbone.layer5.0.conv.2.conv.weight - torch.Size([96, 384, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,967 - mmcv - INFO - \n",
"backbone.layer5.0.conv.2.bn.weight - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,969 - mmcv - INFO - \n",
"backbone.layer5.0.conv.2.bn.bias - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,970 - mmcv - INFO - \n",
"backbone.layer5.1.conv.0.conv.weight - torch.Size([576, 96, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,972 - mmcv - INFO - \n",
"backbone.layer5.1.conv.0.bn.weight - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,973 - mmcv - INFO - \n",
"backbone.layer5.1.conv.0.bn.bias - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,975 - mmcv - INFO - \n",
"backbone.layer5.1.conv.1.conv.weight - torch.Size([576, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,976 - mmcv - INFO - \n",
"backbone.layer5.1.conv.1.bn.weight - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,978 - mmcv - INFO - \n",
"backbone.layer5.1.conv.1.bn.bias - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,979 - mmcv - INFO - \n",
"backbone.layer5.1.conv.2.conv.weight - torch.Size([96, 576, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,981 - mmcv - INFO - \n",
"backbone.layer5.1.conv.2.bn.weight - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,982 - mmcv - INFO - \n",
"backbone.layer5.1.conv.2.bn.bias - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,984 - mmcv - INFO - \n",
"backbone.layer5.2.conv.0.conv.weight - torch.Size([576, 96, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,985 - mmcv - INFO - \n",
"backbone.layer5.2.conv.0.bn.weight - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,986 - mmcv - INFO - \n",
"backbone.layer5.2.conv.0.bn.bias - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,988 - mmcv - INFO - \n",
"backbone.layer5.2.conv.1.conv.weight - torch.Size([576, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,989 - mmcv - INFO - \n",
"backbone.layer5.2.conv.1.bn.weight - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,991 - mmcv - INFO - \n",
"backbone.layer5.2.conv.1.bn.bias - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,992 - mmcv - INFO - \n",
"backbone.layer5.2.conv.2.conv.weight - torch.Size([96, 576, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,994 - mmcv - INFO - \n",
"backbone.layer5.2.conv.2.bn.weight - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,995 - mmcv - INFO - \n",
"backbone.layer5.2.conv.2.bn.bias - torch.Size([96]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,997 - mmcv - INFO - \n",
"backbone.layer6.0.conv.0.conv.weight - torch.Size([576, 96, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,998 - mmcv - INFO - \n",
"backbone.layer6.0.conv.0.bn.weight - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:12,999 - mmcv - INFO - \n",
"backbone.layer6.0.conv.0.bn.bias - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,001 - mmcv - INFO - \n",
"backbone.layer6.0.conv.1.conv.weight - torch.Size([576, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,002 - mmcv - INFO - \n",
"backbone.layer6.0.conv.1.bn.weight - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,004 - mmcv - INFO - \n",
"backbone.layer6.0.conv.1.bn.bias - torch.Size([576]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,005 - mmcv - INFO - \n",
"backbone.layer6.0.conv.2.conv.weight - torch.Size([160, 576, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,007 - mmcv - INFO - \n",
"backbone.layer6.0.conv.2.bn.weight - torch.Size([160]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,008 - mmcv - INFO - \n",
"backbone.layer6.0.conv.2.bn.bias - torch.Size([160]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,010 - mmcv - INFO - \n",
"backbone.layer6.1.conv.0.conv.weight - torch.Size([960, 160, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,011 - mmcv - INFO - \n",
"backbone.layer6.1.conv.0.bn.weight - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,013 - mmcv - INFO - \n",
"backbone.layer6.1.conv.0.bn.bias - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,014 - mmcv - INFO - \n",
"backbone.layer6.1.conv.1.conv.weight - torch.Size([960, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,015 - mmcv - INFO - \n",
"backbone.layer6.1.conv.1.bn.weight - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,017 - mmcv - INFO - \n",
"backbone.layer6.1.conv.1.bn.bias - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,018 - mmcv - INFO - \n",
"backbone.layer6.1.conv.2.conv.weight - torch.Size([160, 960, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,021 - mmcv - INFO - \n",
"backbone.layer6.1.conv.2.bn.weight - torch.Size([160]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,022 - mmcv - INFO - \n",
"backbone.layer6.1.conv.2.bn.bias - torch.Size([160]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,024 - mmcv - INFO - \n",
"backbone.layer6.2.conv.0.conv.weight - torch.Size([960, 160, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,025 - mmcv - INFO - \n",
"backbone.layer6.2.conv.0.bn.weight - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,027 - mmcv - INFO - \n",
"backbone.layer6.2.conv.0.bn.bias - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,028 - mmcv - INFO - \n",
"backbone.layer6.2.conv.1.conv.weight - torch.Size([960, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,030 - mmcv - INFO - \n",
"backbone.layer6.2.conv.1.bn.weight - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,031 - mmcv - INFO - \n",
"backbone.layer6.2.conv.1.bn.bias - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,033 - mmcv - INFO - \n",
"backbone.layer6.2.conv.2.conv.weight - torch.Size([160, 960, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,034 - mmcv - INFO - \n",
"backbone.layer6.2.conv.2.bn.weight - torch.Size([160]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,036 - mmcv - INFO - \n",
"backbone.layer6.2.conv.2.bn.bias - torch.Size([160]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,037 - mmcv - INFO - \n",
"backbone.layer7.0.conv.0.conv.weight - torch.Size([960, 160, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,039 - mmcv - INFO - \n",
"backbone.layer7.0.conv.0.bn.weight - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,040 - mmcv - INFO - \n",
"backbone.layer7.0.conv.0.bn.bias - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,041 - mmcv - INFO - \n",
"backbone.layer7.0.conv.1.conv.weight - torch.Size([960, 1, 3, 3]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,043 - mmcv - INFO - \n",
"backbone.layer7.0.conv.1.bn.weight - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,045 - mmcv - INFO - \n",
"backbone.layer7.0.conv.1.bn.bias - torch.Size([960]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,046 - mmcv - INFO - \n",
"backbone.layer7.0.conv.2.conv.weight - torch.Size([320, 960, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,048 - mmcv - INFO - \n",
"backbone.layer7.0.conv.2.bn.weight - torch.Size([320]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,049 - mmcv - INFO - \n",
"backbone.layer7.0.conv.2.bn.bias - torch.Size([320]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,051 - mmcv - INFO - \n",
"backbone.conv2.conv.weight - torch.Size([1280, 320, 1, 1]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,052 - mmcv - INFO - \n",
"backbone.conv2.bn.weight - torch.Size([1280]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,054 - mmcv - INFO - \n",
"backbone.conv2.bn.bias - torch.Size([1280]): \n",
"PretrainedInit: load from https://download.openmmlab.com/mmclassification/v0/mobilenet_v2/mobilenet_v2_batch256_imagenet_20200708-3b2dc3af.pth \n",
" \n",
"2021-10-21 04:04:13,055 - mmcv - INFO - \n",
"head.fc.weight - torch.Size([2, 1280]): \n",
"NormalInit: mean=0, std=0.01, bias=0 \n",
" \n",
"2021-10-21 04:04:13,057 - mmcv - INFO - \n",
"head.fc.bias - torch.Size([2]): \n",
"NormalInit: mean=0, std=0.01, bias=0 \n",
" \n",
"2021-10-21 04:04:13,408 - mmcls - INFO - Start running, host: root@cc5b42005207, work_dir: /content/mmclassification/work_dirs/cats_dogs_dataset\n",
"2021-10-21 04:04:13,412 - mmcls - INFO - Hooks will be executed in the following order:\n",
"before_run:\n",
"(VERY_HIGH ) StepLrUpdaterHook \n",
"(NORMAL ) CheckpointHook \n",
"(LOW ) EvalHook \n",
"(VERY_LOW ) TextLoggerHook \n",
" -------------------- \n",
"before_train_epoch:\n",
"(VERY_HIGH ) StepLrUpdaterHook \n",
"(LOW ) IterTimerHook \n",
"(LOW ) EvalHook \n",
"(VERY_LOW ) TextLoggerHook \n",
" -------------------- \n",
"before_train_iter:\n",
"(VERY_HIGH ) StepLrUpdaterHook \n",
"(LOW ) IterTimerHook \n",
"(LOW ) EvalHook \n",
" -------------------- \n",
"after_train_iter:\n",
"(ABOVE_NORMAL) OptimizerHook \n",
"(NORMAL ) CheckpointHook \n",
"(LOW ) IterTimerHook \n",
"(LOW ) EvalHook \n",
"(VERY_LOW ) TextLoggerHook \n",
" -------------------- \n",
"after_train_epoch:\n",
"(NORMAL ) CheckpointHook \n",
"(LOW ) EvalHook \n",
"(VERY_LOW ) TextLoggerHook \n",
" -------------------- \n",
"before_val_epoch:\n",
"(LOW ) IterTimerHook \n",
"(VERY_LOW ) TextLoggerHook \n",
" -------------------- \n",
"before_val_iter:\n",
"(LOW ) IterTimerHook \n",
" -------------------- \n",
"after_val_iter:\n",
"(LOW ) IterTimerHook \n",
" -------------------- \n",
"after_val_epoch:\n",
"(VERY_LOW ) TextLoggerHook \n",
" -------------------- \n",
"2021-10-21 04:04:13,417 - mmcls - INFO - workflow: [('train', 1)], max: 2 epochs\n",
"2021-10-21 04:04:18,924 - mmcls - INFO - Epoch [1][10/201]\tlr: 5.000e-03, eta: 0:03:29, time: 0.535, data_time: 0.259, memory: 1709, loss: 0.3917\n",
"2021-10-21 04:04:21,743 - mmcls - INFO - Epoch [1][20/201]\tlr: 5.000e-03, eta: 0:02:35, time: 0.281, data_time: 0.019, memory: 1709, loss: 0.3508\n",
"2021-10-21 04:04:24,552 - mmcls - INFO - Epoch [1][30/201]\tlr: 5.000e-03, eta: 0:02:15, time: 0.280, data_time: 0.020, memory: 1709, loss: 0.3955\n",
"2021-10-21 04:04:27,371 - mmcls - INFO - Epoch [1][40/201]\tlr: 5.000e-03, eta: 0:02:04, time: 0.282, data_time: 0.021, memory: 1709, loss: 0.2485\n",
"2021-10-21 04:04:30,202 - mmcls - INFO - Epoch [1][50/201]\tlr: 5.000e-03, eta: 0:01:56, time: 0.283, data_time: 0.021, memory: 1709, loss: 0.4196\n",
"2021-10-21 04:04:33,021 - mmcls - INFO - Epoch [1][60/201]\tlr: 5.000e-03, eta: 0:01:50, time: 0.282, data_time: 0.023, memory: 1709, loss: 0.4994\n",
"2021-10-21 04:04:35,800 - mmcls - INFO - Epoch [1][70/201]\tlr: 5.000e-03, eta: 0:01:45, time: 0.278, data_time: 0.020, memory: 1709, loss: 0.4372\n",
"2021-10-21 04:04:38,595 - mmcls - INFO - Epoch [1][80/201]\tlr: 5.000e-03, eta: 0:01:40, time: 0.280, data_time: 0.019, memory: 1709, loss: 0.3179\n",
"2021-10-21 04:04:41,351 - mmcls - INFO - Epoch [1][90/201]\tlr: 5.000e-03, eta: 0:01:36, time: 0.276, data_time: 0.018, memory: 1709, loss: 0.3175\n",
"2021-10-21 04:04:44,157 - mmcls - INFO - Epoch [1][100/201]\tlr: 5.000e-03, eta: 0:01:32, time: 0.280, data_time: 0.021, memory: 1709, loss: 0.3412\n",
"2021-10-21 04:04:46,974 - mmcls - INFO - Epoch [1][110/201]\tlr: 5.000e-03, eta: 0:01:28, time: 0.282, data_time: 0.019, memory: 1709, loss: 0.2985\n",
"2021-10-21 04:04:49,767 - mmcls - INFO - Epoch [1][120/201]\tlr: 5.000e-03, eta: 0:01:25, time: 0.280, data_time: 0.021, memory: 1709, loss: 0.2778\n",
"2021-10-21 04:04:52,553 - mmcls - INFO - Epoch [1][130/201]\tlr: 5.000e-03, eta: 0:01:21, time: 0.278, data_time: 0.021, memory: 1709, loss: 0.2229\n",
"2021-10-21 04:04:55,356 - mmcls - INFO - Epoch [1][140/201]\tlr: 5.000e-03, eta: 0:01:18, time: 0.280, data_time: 0.021, memory: 1709, loss: 0.2318\n",
"2021-10-21 04:04:58,177 - mmcls - INFO - Epoch [1][150/201]\tlr: 5.000e-03, eta: 0:01:14, time: 0.282, data_time: 0.022, memory: 1709, loss: 0.2333\n",
"2021-10-21 04:05:01,025 - mmcls - INFO - Epoch [1][160/201]\tlr: 5.000e-03, eta: 0:01:11, time: 0.285, data_time: 0.020, memory: 1709, loss: 0.2783\n",
"2021-10-21 04:05:03,833 - mmcls - INFO - Epoch [1][170/201]\tlr: 5.000e-03, eta: 0:01:08, time: 0.281, data_time: 0.022, memory: 1709, loss: 0.2132\n",
"2021-10-21 04:05:06,648 - mmcls - INFO - Epoch [1][180/201]\tlr: 5.000e-03, eta: 0:01:05, time: 0.281, data_time: 0.019, memory: 1709, loss: 0.2096\n",
"2021-10-21 04:05:09,472 - mmcls - INFO - Epoch [1][190/201]\tlr: 5.000e-03, eta: 0:01:02, time: 0.282, data_time: 0.020, memory: 1709, loss: 0.1729\n",
"2021-10-21 04:05:12,229 - mmcls - INFO - Epoch [1][200/201]\tlr: 5.000e-03, eta: 0:00:59, time: 0.275, data_time: 0.018, memory: 1709, loss: 0.1969\n",
"2021-10-21 04:05:12,275 - mmcls - INFO - Saving checkpoint at 1 epochs\n"
]
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"[>>>>>>>>>>>>>>>>>>>>>>>>>>] 1601/1601, 104.1 task/s, elapsed: 15s, ETA: 0s"
]
},
{
"output_type": "stream",
"name": "stderr",
"text": [
"2021-10-21 04:05:27,767 - mmcls - INFO - Epoch(val) [1][51]\taccuracy_top-1: 95.6277\n",
"2021-10-21 04:05:32,987 - mmcls - INFO - Epoch [2][10/201]\tlr: 5.000e-04, eta: 0:00:57, time: 0.505, data_time: 0.238, memory: 1709, loss: 0.1764\n",
"2021-10-21 04:05:35,779 - mmcls - INFO - Epoch [2][20/201]\tlr: 5.000e-04, eta: 0:00:54, time: 0.278, data_time: 0.020, memory: 1709, loss: 0.1514\n",
"2021-10-21 04:05:38,537 - mmcls - INFO - Epoch [2][30/201]\tlr: 5.000e-04, eta: 0:00:51, time: 0.276, data_time: 0.020, memory: 1709, loss: 0.1395\n",
"2021-10-21 04:05:41,283 - mmcls - INFO - Epoch [2][40/201]\tlr: 5.000e-04, eta: 0:00:48, time: 0.275, data_time: 0.020, memory: 1709, loss: 0.1508\n",
"2021-10-21 04:05:44,017 - mmcls - INFO - Epoch [2][50/201]\tlr: 5.000e-04, eta: 0:00:44, time: 0.274, data_time: 0.021, memory: 1709, loss: 0.1771\n",
"2021-10-21 04:05:46,800 - mmcls - INFO - Epoch [2][60/201]\tlr: 5.000e-04, eta: 0:00:41, time: 0.278, data_time: 0.020, memory: 1709, loss: 0.1438\n",
"2021-10-21 04:05:49,570 - mmcls - INFO - Epoch [2][70/201]\tlr: 5.000e-04, eta: 0:00:38, time: 0.277, data_time: 0.020, memory: 1709, loss: 0.1321\n",
"2021-10-21 04:05:52,314 - mmcls - INFO - Epoch [2][80/201]\tlr: 5.000e-04, eta: 0:00:35, time: 0.275, data_time: 0.021, memory: 1709, loss: 0.1629\n",
"2021-10-21 04:05:55,052 - mmcls - INFO - Epoch [2][90/201]\tlr: 5.000e-04, eta: 0:00:32, time: 0.273, data_time: 0.021, memory: 1709, loss: 0.1574\n",
"2021-10-21 04:05:57,791 - mmcls - INFO - Epoch [2][100/201]\tlr: 5.000e-04, eta: 0:00:29, time: 0.274, data_time: 0.019, memory: 1709, loss: 0.1220\n",
"2021-10-21 04:06:00,534 - mmcls - INFO - Epoch [2][110/201]\tlr: 5.000e-04, eta: 0:00:26, time: 0.274, data_time: 0.021, memory: 1709, loss: 0.2550\n",
"2021-10-21 04:06:03,295 - mmcls - INFO - Epoch [2][120/201]\tlr: 5.000e-04, eta: 0:00:23, time: 0.276, data_time: 0.019, memory: 1709, loss: 0.1528\n",
"2021-10-21 04:06:06,048 - mmcls - INFO - Epoch [2][130/201]\tlr: 5.000e-04, eta: 0:00:20, time: 0.275, data_time: 0.022, memory: 1709, loss: 0.1223\n",
"2021-10-21 04:06:08,811 - mmcls - INFO - Epoch [2][140/201]\tlr: 5.000e-04, eta: 0:00:17, time: 0.276, data_time: 0.021, memory: 1709, loss: 0.1734\n",
"2021-10-21 04:06:11,576 - mmcls - INFO - Epoch [2][150/201]\tlr: 5.000e-04, eta: 0:00:14, time: 0.277, data_time: 0.020, memory: 1709, loss: 0.1527\n",
"2021-10-21 04:06:14,330 - mmcls - INFO - Epoch [2][160/201]\tlr: 5.000e-04, eta: 0:00:11, time: 0.276, data_time: 0.020, memory: 1709, loss: 0.1910\n",
"2021-10-21 04:06:17,106 - mmcls - INFO - Epoch [2][170/201]\tlr: 5.000e-04, eta: 0:00:09, time: 0.277, data_time: 0.019, memory: 1709, loss: 0.1922\n",
"2021-10-21 04:06:19,855 - mmcls - INFO - Epoch [2][180/201]\tlr: 5.000e-04, eta: 0:00:06, time: 0.274, data_time: 0.023, memory: 1709, loss: 0.1760\n",
"2021-10-21 04:06:22,638 - mmcls - INFO - Epoch [2][190/201]\tlr: 5.000e-04, eta: 0:00:03, time: 0.278, data_time: 0.019, memory: 1709, loss: 0.1739\n",
"2021-10-21 04:06:25,367 - mmcls - INFO - Epoch [2][200/201]\tlr: 5.000e-04, eta: 0:00:00, time: 0.272, data_time: 0.020, memory: 1709, loss: 0.1654\n",
"2021-10-21 04:06:25,410 - mmcls - INFO - Saving checkpoint at 2 epochs\n"
]
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"[>>>>>>>>>>>>>>>>>>>>>>>>>>] 1601/1601, 105.5 task/s, elapsed: 15s, ETA: 0s"
]
},
{
"output_type": "stream",
"name": "stderr",
"text": [
"2021-10-21 04:06:40,694 - mmcls - INFO - Epoch(val) [2][51]\taccuracy_top-1: 97.5016\n"
]
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 304
},
"id": "HsoGBZA3miui",
"outputId": "eb2e09f5-55ce-4165-b754-3b75dbc829ab"
},
"source": [
"%matplotlib inline\n",
"# Validate the fine-tuned model\n",
"\n",
"img = mmcv.imread('data/cats_dogs_dataset/training_set/training_set/cats/cat.1.jpg')\n",
"\n",
"model.cfg = cfg\n",
"result = inference_model(model, img)\n",
"\n",
"show_result_pyplot(model, img, result)"
],
"execution_count": null,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAATMAAAEfCAYAAAAtNiETAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAFiQAABYkBbWid+gAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9ebRt2VXe91vNbk5zz+3vq3rVqaoQqpKKAmF6JGUQED226ZIQJzYYQ4IFIXEDdmC4ATuQ4dgYJMAjjQcgMKFzUIdBIAE2rQTCkkpt6dWrV69ee9/tTrO71eWPtfa+51VJ/JUxMpKhXeOM++o255y991rf/OY3vzmPCCHwyeOTxyePTx7/Xz/k/9tv4JPHJ49PHp88/p84Pglmnzw+eXzy+P/FoV/4jcc+7fEAIJCAQAgNCIIXgGBvb4+uazg7O6OqF+R5znQ6xgfLfLHAOcjKkhACGxsbXL9+nd3dXay1eO8RQgAwnU5ZrVYcHBzQdR1d17FYLAidBUAphdaaLMuQUuI9dF0HCLTSSKlxztF1HSEE8jynKDKMMSgt8d7Sti3WWoQQKKXiCescJTVZVpBnY7TOUCpDiAxBQAYYlTld12GMoSgLhJQY0xFEwKTnE0oghCCEQAgBpEBLBV4ggyDPc5qmIcuK+J6kxlqHUorNzU2Oj0/IsozP+ZzPw1rLe97zHsrJmFtnJ9z/8ENcuXwZERx/4ZWfyfHtm5wcHjI/OWakFBf397iwv8fOxgYSKLQC73DWUYzGeEAoQdc1fOqnvpS2bTg7OybLM2azGUdHR1y9eo2yKMnzkqpqKPIy3ucAs9mMhx56iBACz155lrppQEAA6rZlurGBkIKXPPIw+wcHPH/tGicnJ7RtS1c3qABCCHwI5HlBUYxwnnT+GXkxQkpN11kQAus8q2VFkRc401JoyUsffZSmaTidnzGeTDg5OaGzhrwoWNUVOzs7XLj3Ho6Ojzk6OuLs7AwhBPdduJcMQaYFITh0pvHecXR0SNs2FEWB9x6t4xryPmCNx1pHXPceKR3OW0QArTVKZfgg6YzDCUU+mtEFydGi5vZ8QW1BFiMcCmMdSoKUEiklhADBA5E5hODx3uOcxVmD955MKvI8pyxziqJAFzlVVdG2LcYYjG2HdS6lpOviut7e3mS5XOK9Z29vl6at4z5zEELA+fj8udJorfHW0LYNVV1x4cIFxkXJYrHAmpbxeIwQgrpuCMS1LaXEOcfZ2RmLRdzr29vb7O/vs1qt8N4TQsBai5QSpRTWWup6hRCCrutwzjGbzdjZ2aFtW+7cucNkMiHPczY3NxFCUFXVsJcWiwUBx+bmBC0DzizQ2vFFr/lcplPN9uaIve0p1tVY2xGC4wte/c3i44JZDzYkKa3X1HpprQeUoigwtiGEQNu2BBzee4zxjKYaay2TyQSAyWTCarUCwFpLCAGlFCEElsslW1tb3HvvvTz99NMYF3DO4ZyLiyG9B2sdxhgyneOcw3uGmxtCwHtP13XDRenPQ4hz0Fk/xxc90s+ctVgrMc5inEVahZTx/wPxdZACFSI4+viESCnTBobOWXCSRbViMo7nUBQFXnSczefITLOoVoxGI45OjnnFK57gox/7GEdnJwgh2N3dpcxzTo4OyfOMLMsYjUas5vE1i7Ik+Aiis9mMpl4xHU84Pj5mb7aBkIrFco5SitFoRFHkLFdzqqoarmdZFEgp0TqjKALBn1+zqqqGBWatRSlFZwyBgBSC5XLJ5tYmTdMwnU55+CUvQUnJ/OyM07ZFhIASEdSXiwV13TKdznDWIVBIBM66BCoZoyJDCkHXGcpRSb2Yc/XqVe677z688xweHqK0GgLchQsXsM5x5dkrFEVBU9UYY9jZ3mY+PyMXAi0Eo1FJnmV4H9JrWlamRUqFGk3IMo3MFB0Wb2uapkNITzFWBB/wIeCNQXkARQjgg6duGtogaE2LsRbjIHQtNg
i88xR5jvduWGsyXfPgYiAcjUo64/GuX3UeiMDQdi3Xbt6gruM5SSnJ8njuWZahtQbivfLeD2vaWjcAqLUdRVGQySLtDTsE/rbt2BhPECFQ1yus7RKQllTVkrOzUzrj2N7eZjqd0nUdUkqm0ylbW1vs7OwMe61tW4qiIM/zBNBuICI9kPX7whiDEIKNjQ2Wy+Ww1mJQiaDZ71vvI6aQCTKdUxTpHE38na7rCN4Rd+15cvkiMBuYRpCE0F8sASEyNWstZZkzmUxwPp6Qcx4h+ygWwUVrjTGG6XQ6MCdrLVmW0bYtbduyWCyoqort7W0efvhhzs7OOD08ommagclF8IxgqpRaAzgPnDMuAO9dXDzpewMwr51bD179jT9/TgnBE6SAjwd2/cXPs+EGAfh0E5z1BAnWWKSQdMZERucsZVmi8wyVaYyzWO+YzjbIdMYHP/xhXvWa1/CZn/UXeOfv/DbT8Yijo2M+8zOe5PbNLa4//zz1Yo63Fq0UwnvyLEsL2IAQdG3HdG8/LijrcMHGDddWHB0dEYJjPp/jveP4+JjJZMLW1harVUVdVyiVofOM4ANN07BYLJjNZkwmE4qypCgKltWKuq7RUkYG1rTcvnGT3e0dJhvxHmdZxnQ8oVoucYn9xKDlESIQb51HyIBpO5rWMB5LdnZ2yPKc6zeug3NoLTk+OWI0KrHO0JmWnJwABO/YmExwwXN2dkpVrajqilFRUOY585MTXPzFCBA42rbBOkOWKbrO0TQ13keGVJYTdKYpwwgQGNfgnMG6CLYKiRAKlWUR/FC0DgQCrTVFURCcwEmNCGClAxGBs9/MudYoKUFoECCVQDmJVwopI/AqpYZ1eeHChYENuQSA/b7s13iWRRAoyzLeD9MN2chkMolBTGdYa2nbeL5KKfIiY3Nzk9VqRQiOyWRCpvUQwIoiR2fx/RhjcM6xsbHBZDJhNpuR5zmXLl2iqiqcc2itGY/HA3jleY6U5UBI+syq3/vT6ZSzs7P4nrtu2H8hnJOYQKDrLMEL8olGKTEQmuDAdBapAloplDqHsE8IZv0m7QmNQA1gJkQxRIq2beNiTSgrRMbZYsHm5ianp6dMp9Pha9u2lGU5nJz3nrZtqapquAGnRIreX4SeHUC8gQK5xrLOGVHPztaPdRA6BzyPFz5RZJ/+xkNww3kgzoEwCAhSJOot0SqLP5MxjVLI+FwChJSgAjrLcM4xmW3QdRaVZ9jgKfKCvCzjhZeRmR6fnXC6mCO0QipFY1rE/Iy6jinD6ekpXbUkGIt1FmcsnemYlCXWOlarmGZMp1O2t7dZrCrK8Zj9C3vcuHGN27dvs1jMkQoeeeRhFovFEG3jNQvkuUrnLdI9hM60zLIZ+/t7jMdjTk5POLzj4mYZj1hWSzqbc+nSx7hw4QLVYpE20pi2qmjbDq01o6LEGEtbNwQh4/VQGQKBEgFrWpQS7O5sY9qGWzeuMS4Likxx7fpVHnzwQR588H4uXX6Guq4oxznPX73C5uYmD95/kSvPPcfu9ibee65fe56tjSkaiXdgnaOua7quQSnBdHMTaw137tyJKVznsDYwnW6QFxlCgjAB42pQUVYJCIIQCCmQWiNUhhQZMkhs5rB5SeYFVigcAmsdbVXhnYib01qCM5FVSYX3ntPTUwgBQSDTkXlnmUZIUEqytbMT1zRuCOxxPcZgHFmMo2k8CLCuw1XxNay1TMpRCngBaztsZxA+UGSaXMdAn+caKTK0UnhrMG2DRDAejzi4cB/z+ZyTkxO892xtbTGdTrHWcnJywvHxMWVZkmUZSqkhy4pEp0TKmNb2GZhzLuFEoEzB0XtP08S0v3+enomaxPIE0Booch1lLhczO2tBhxDviTgnMx8XzO4GtsiA+rwzAlJOlmVr1NLHjSyiVtS2LXke08G9vT3m8zl5nt+VtvTUdbVacXp6OqC1MSZSTGA8HqOUSulrTDOjttO/v7vfb9TW/IvSzPXzMcZAECgVo4AQMWUlna
sSDMB4Hg3P/91fcKHPI6mUEqnPI0xjOwSQ6YJSaRrTQdvROcvZYo7Wmq7tsM4hlOLP3vdeDm/dZlXXVF3Dx
"text/plain": [
"<Figure size 300.01x280.01 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
}
}
]
}
]
}