Created using Colaboratory

pull/9743/merge
Glenn Jocher 2022-11-18 16:32:29 +01:00
parent 241d798bb4
commit 946765bbc3
1 changed file with 589 additions and 945 deletions

segment/tutorial.ipynb

@@ -36,27 +36,27 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 1,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "wbvMlHd_QwMG",
"outputId": "0f9ee467-cea4-48e8-9050-7a76ae1b6141"
"outputId": "d1e33dfc-9ad4-436e-f1e5-01acee40c029"
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"name": "stderr",
"text": [
"YOLOv5 🚀 v6.2-225-gf223cb2 Python-3.7.12 torch-1.12.1+cu102 CUDA:0 (Tesla V100-SXM2-16GB, 16161MiB)\n"
"YOLOv5 🚀 v6.2-251-g241d798 Python-3.7.15 torch-1.12.1+cu113 CUDA:0 (Tesla T4, 15110MiB)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"name": "stdout",
"text": [
"Setup complete ✅ (4 CPUs, 14.7 GB RAM, 107.3/196.6 GB disk)\n"
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 22.6/78.2 GB disk)\n"
]
}
],
@@ -94,27 +94,30 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 2,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "zR9ZbuQCH7FX",
"outputId": "60647b99-e8d4-402c-f444-331bf6746da4"
"outputId": "e206fcec-cf42-4754-8a42-39bc3603eba8"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"name": "stdout",
"text": [
"\u001b[34m\u001b[1msegment/predict: \u001b[0mweights=['yolov5s-seg.pt'], source=data/images, data=data/coco128.yaml, imgsz=[640, 640], conf_thres=0.25, iou_thres=0.45, max_det=1000, device=, view_img=False, save_txt=False, save_conf=False, save_crop=False, nosave=False, classes=None, agnostic_nms=False, augment=False, visualize=False, update=False, project=runs/predict-seg, name=exp, exist_ok=False, line_thickness=3, hide_labels=False, hide_conf=False, half=False, dnn=False, vid_stride=1, retina_masks=False\n",
"YOLOv5 🚀 v6.2-225-gf223cb2 Python-3.7.12 torch-1.12.1+cu102 CUDA:0 (Tesla V100-SXM2-16GB, 16161MiB)\n",
"YOLOv5 🚀 v6.2-251-g241d798 Python-3.7.15 torch-1.12.1+cu113 CUDA:0 (Tesla T4, 15110MiB)\n",
"\n",
"Downloading https://github.com/ultralytics/yolov5/releases/download/v6.2/yolov5s-seg.pt to yolov5s-seg.pt...\n",
"100% 14.9M/14.9M [00:03<00:00, 3.93MB/s]\n",
"\n",
"Fusing layers... \n",
"YOLOv5s-seg summary: 224 layers, 7611485 parameters, 0 gradients, 26.4 GFLOPs\n",
"image 1/2 /home/paguerrie/yolov5/data/images/bus.jpg: 640x480 4 persons, 1 bus, 5.6ms\n",
"image 2/2 /home/paguerrie/yolov5/data/images/zidane.jpg: 384x640 2 persons, 1 tie, 5.5ms\n",
"Speed: 0.4ms pre-process, 5.6ms inference, 1.1ms NMS per image at shape (1, 3, 640, 640)\n",
"image 1/2 /content/yolov5/data/images/bus.jpg: 640x480 4 persons, 1 bus, 17.2ms\n",
"image 2/2 /content/yolov5/data/images/zidane.jpg: 384x640 2 persons, 1 tie, 13.7ms\n",
"Speed: 0.4ms pre-process, 15.5ms inference, 22.2ms NMS per image at shape (1, 3, 640, 640)\n",
"Results saved to \u001b[1mruns/predict-seg/exp\u001b[0m\n"
]
}
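The predicted images land in runs/predict-seg/exp, as the last log line above notes. A minimal sketch for viewing them inline in the notebook, assuming the saved files keep the input image names (bus.jpg, zidane.jpg):

    # Display whatever segment/predict.py saved; the filenames inside
    # runs/predict-seg/exp are assumed to mirror the input image names.
    from pathlib import Path
    from IPython.display import Image, display

    for img_path in sorted(Path('runs/predict-seg/exp').glob('*.jpg')):
        display(Image(filename=str(img_path), width=600))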
@@ -146,82 +149,66 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 3,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 49,
"referenced_widgets": [
"9b8caa3522fc4cbab31e13b5dfc7808d",
"574140e4c4bc48c9a171541a02cd0211",
"35e03ce5090346c9ae602891470fc555",
"c942c208e72d46568b476bb0f2d75496",
"65881db1db8a4e9c930fab9172d45143",
"60b913d755b34d638478e30705a2dde1",
"0856bea36ec148b68522ff9c9eb258d8",
"76879f6f2aa54637a7a07faeea2bd684",
"0ace3934ec6f4d36a1b3a9e086390926",
"d6b7a2243e0c4beca714d99dceec23d6",
"5966ba6e6f114d8c9d8d1d6b1bd4f4c7"
]
"base_uri": "https://localhost:8080/"
},
"id": "WQPtK1QYVaD_",
"outputId": "102dabed-bc31-42fe-9133-d9ce28a2c01e"
"outputId": "f7eba0ae-49d1-405b-a1cf-169212fadc2c"
},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "89f5f0a84ca642378724f1bf05f17e0d",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
" 0%| | 0.00/6.79M [00:00<?, ?B/s]"
"output_type": "stream",
"name": "stdout",
"text": [
"Downloading https://github.com/ultralytics/yolov5/releases/download/v1.0/coco2017labels-segments.zip ...\n",
"Downloading http://images.cocodataset.org/zips/val2017.zip ...\n",
"######################################################################## 100.0%\n",
"######################################################################## 100.0%\n"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Download COCO val\n",
"torch.hub.download_url_to_file('https://ultralytics.com/assets/coco128-seg.zip', 'tmp.zip') # download (780M - 5000 images)\n",
"!unzip -q tmp.zip -d ../datasets && rm tmp.zip # unzip"
"!bash data/scripts/get_coco.sh --val --segments # download (780M - 5000 images)"
]
},
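The commit swaps the old torch.hub.download_url_to_file call for data/scripts/get_coco.sh. For reference, a pure-Python sketch that mirrors the two downloads printed in the cell output; the ../datasets/coco folder layout is an assumption, since the exact structure produced by get_coco.sh is not shown here:

    import zipfile
    from pathlib import Path
    import torch

    datasets = Path('../datasets')
    datasets.mkdir(parents=True, exist_ok=True)

    # Segment-label archive (URL taken from the cell output); extraction
    # target is an assumed layout.
    torch.hub.download_url_to_file(
        'https://github.com/ultralytics/yolov5/releases/download/v1.0/coco2017labels-segments.zip',
        str(datasets / 'labels.zip'))
    with zipfile.ZipFile(datasets / 'labels.zip') as zf:
        zf.extractall(datasets)

    # COCO val2017 images (URL taken from the cell output); again an assumed layout.
    torch.hub.download_url_to_file(
        'http://images.cocodataset.org/zips/val2017.zip',
        str(datasets / 'val2017.zip'))
    with zipfile.ZipFile(datasets / 'val2017.zip') as zf:
        zf.extractall(datasets / 'coco' / 'images')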
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 4,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "X58w8JLpMnjH",
"outputId": "daf60b1b-b098-4657-c863-584f4c9cf078"
"outputId": "73533135-6995-4f2d-adb0-3acb5ef9b300"
},
"outputs": [
{
"metadata": {
"tags": null
},
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[34m\u001b[1msegment/val: \u001b[0mdata=/home/paguerrie/yolov5/data/coco128-seg.yaml, weights=['yolov5s-seg.pt'], batch_size=32, imgsz=640, conf_thres=0.001, iou_thres=0.6, max_det=300, task=val, device=, workers=8, single_cls=False, augment=False, verbose=False, save_txt=False, save_hybrid=False, save_conf=False, save_json=False, project=runs/val-seg, name=exp, exist_ok=False, half=True, dnn=False\n",
"YOLOv5 🚀 v6.2-225-gf223cb2 Python-3.7.12 torch-1.12.1+cu102 CUDA:0 (Tesla V100-SXM2-16GB, 16161MiB)\n",
"\u001b[34m\u001b[1msegment/val: \u001b[0mdata=/content/yolov5/data/coco.yaml, weights=['yolov5s-seg.pt'], batch_size=32, imgsz=640, conf_thres=0.001, iou_thres=0.6, max_det=300, task=val, device=, workers=8, single_cls=False, augment=False, verbose=False, save_txt=False, save_hybrid=False, save_conf=False, save_json=False, project=runs/val-seg, name=exp, exist_ok=False, half=True, dnn=False\n",
"YOLOv5 🚀 v6.2-251-g241d798 Python-3.7.15 torch-1.12.1+cu113 CUDA:0 (Tesla T4, 15110MiB)\n",
"\n",
"Fusing layers... \n",
"YOLOv5s-seg summary: 224 layers, 7611485 parameters, 0 gradients, 26.4 GFLOPs\n",
"\u001b[34m\u001b[1mval: \u001b[0mScanning '/home/paguerrie/datasets/coco128-seg/labels/train2017' images and\u001b[0m\n",
"\u001b[34m\u001b[1mval: \u001b[0mNew cache created: /home/paguerrie/datasets/coco128-seg/labels/train2017.cache\n",
" Class Images Instances Box(P R mAP50 m\n",
" all 128 929 0.711 0.651 0.711 0.488 0.678 0.628 0.66 0.403\n",
"Speed: 3.2ms pre-process, 2.7ms inference, 6.5ms NMS per image at shape (32, 3, 640, 640)\n",
"\u001b[34m\u001b[1mval: \u001b[0mScanning '/content/datasets/coco/val2017' images and labels...4952 found, 48 missing, 0 empty, 0 corrupt: 100% 5000/5000 [00:03<00:00, 1420.92it/s]\n",
"\u001b[34m\u001b[1mval: \u001b[0mNew cache created: /content/datasets/coco/val2017.cache\n",
" Class Images Instances Box(P R mAP50 mAP50-95) Mask(P R mAP50 mAP50-95): 100% 157/157 [01:54<00:00, 1.37it/s]\n",
" all 5000 36335 0.673 0.517 0.566 0.373 0.672 0.49 0.532 0.319\n",
"Speed: 0.9ms pre-process, 3.9ms inference, 3.0ms NMS per image at shape (32, 3, 640, 640)\n",
"Results saved to \u001b[1mruns/val-seg/exp\u001b[0m\n"
]
}
],
"source": [
"# Validate YOLOv5s-seg on COCO val\n",
"!python segment/val.py --weights yolov5s-seg.pt --data coco128-seg.yaml --img 640 --half"
"!python segment/val.py --weights yolov5s-seg.pt --data coco.yaml --img 640 --half"
]
},
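The argparse dump above lists a save_json option alongside the flags this cell uses. A hedged sketch of re-running the same validation with COCO-format JSON export enabled, assuming the flag follows the usual --save-json spelling (not verified against segment/val.py's parser):

    import subprocess

    # Same validation as the cell above, plus COCO JSON export of the predictions.
    # Flag spellings are inferred from the logged argument names.
    subprocess.run(
        ['python', 'segment/val.py',
         '--weights', 'yolov5s-seg.pt',
         '--data', 'coco.yaml',
         '--img', '640',
         '--half',
         '--save-json'],
        check=True)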
{
@@ -275,31 +262,39 @@
" %pip install -q comet_ml\n",
" import comet_ml; comet_ml.init()\n",
"elif logger == 'ClearML':\n",
" %pip install -q clearml && clearml-init"
" import clearml; clearml.browser_login()"
]
},
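Whichever third-party logger is picked, the training run below still writes TensorBoard logs and suggests tensorboard --logdir runs/train-seg. A small sketch for viewing them inline in Colab, assuming the default runs/train-seg directory shown in the training output:

    # Launch TensorBoard inside the notebook (standard Jupyter/Colab magics);
    # the log directory matches the path printed by segment/train.py below.
    %load_ext tensorboard
    %tensorboard --logdir runs/train-seg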
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 5,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "1NcFxRcFdJ_O",
"outputId": "baa6d4be-3379-4aab-844a-d5a5396c0e49"
"outputId": "8e349df5-9910-4a91-a845-748def15d3d7"
},
"outputs": [
{
"metadata": {
"tags": null
},
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[34m\u001b[1msegment/train: \u001b[0mweights=yolov5s-seg.pt, cfg=, data=coco128-seg.yaml, hyp=data/hyps/hyp.scratch-low.yaml, epochs=3, batch_size=16, imgsz=640, rect=False, resume=False, nosave=False, noval=False, noautoanchor=False, noplots=False, evolve=None, bucket=, cache=ram, image_weights=False, device=, multi_scale=False, single_cls=False, optimizer=SGD, sync_bn=False, workers=8, project=runs/train-seg, name=exp, exist_ok=False, quad=False, cos_lr=False, label_smoothing=0.0, patience=100, freeze=[0], save_period=-1, seed=0, local_rank=-1, mask_ratio=4, no_overlap=False\n",
"\u001b[34m\u001b[1mgithub: \u001b[0mup to date with https://github.com/ultralytics/yolov5 ✅\n",
"YOLOv5 🚀 v6.2-225-gf223cb2 Python-3.7.12 torch-1.12.1+cu102 CUDA:0 (Tesla V100-SXM2-16GB, 16161MiB)\n",
"YOLOv5 🚀 v6.2-251-g241d798 Python-3.7.15 torch-1.12.1+cu113 CUDA:0 (Tesla T4, 15110MiB)\n",
"\n",
"\u001b[34m\u001b[1mhyperparameters: \u001b[0mlr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=0.05, cls=0.5, cls_pw=1.0, obj=1.0, obj_pw=1.0, iou_t=0.2, anchor_t=4.0, fl_gamma=0.0, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0\n",
"\u001b[34m\u001b[1mTensorBoard: \u001b[0mStart with 'tensorboard --logdir runs/train-seg', view at http://localhost:6006/\n",
"\n",
"Dataset not found ⚠️, missing paths ['/content/datasets/coco128-seg/images/train2017']\n",
"Downloading https://ultralytics.com/assets/coco128-seg.zip to coco128-seg.zip...\n",
"100% 6.79M/6.79M [00:01<00:00, 4.42MB/s]\n",
"Dataset download success ✅ (2.8s), saved to \u001b[1m/content/datasets\u001b[0m\n",
"\n",
" from n params module arguments \n",
" 0 -1 1 3520 models.common.Conv [3, 32, 6, 2, 2] \n",
" 1 -1 1 18560 models.common.Conv [32, 64, 3, 2] \n",
@@ -331,119 +326,115 @@
"Transferred 367/367 items from yolov5s-seg.pt\n",
"\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n",
"\u001b[34m\u001b[1moptimizer:\u001b[0m SGD(lr=0.01) with parameter groups 60 weight(decay=0.0), 63 weight(decay=0.0005), 63 bias\n",
"\u001b[34m\u001b[1mtrain: \u001b[0mScanning '/home/paguerrie/datasets/coco128-seg/labels/train2017.cache' im\u001b[0m\n",
"\u001b[34m\u001b[1mtrain: \u001b[0mCaching images (0.1GB ram): 100%|██████████| 128/128 [00:00<00:00, 544.41\u001b[0m\n",
"\u001b[34m\u001b[1mval: \u001b[0mScanning '/home/paguerrie/datasets/coco128-seg/labels/train2017.cache' imag\u001b[0m\n",
"\u001b[34m\u001b[1mval: \u001b[0mCaching images (0.1GB ram): 100%|██████████| 128/128 [00:00<00:00, 138.66it\u001b[0m\n",
"\u001b[34m\u001b[1malbumentations: \u001b[0mBlur(p=0.01, blur_limit=(3, 7)), MedianBlur(p=0.01, blur_limit=(3, 7)), ToGray(p=0.01), CLAHE(p=0.01, clip_limit=(1, 4.0), tile_grid_size=(8, 8))\n",
"\u001b[34m\u001b[1mtrain: \u001b[0mScanning '/content/datasets/coco128-seg/labels/train2017' images and labels...126 found, 2 missing, 0 empty, 0 corrupt: 100% 128/128 [00:00<00:00, 1383.68it/s]\n",
"\u001b[34m\u001b[1mtrain: \u001b[0mNew cache created: /content/datasets/coco128-seg/labels/train2017.cache\n",
"\u001b[34m\u001b[1mtrain: \u001b[0mCaching images (0.1GB ram): 100% 128/128 [00:00<00:00, 241.77it/s]\n",
"\u001b[34m\u001b[1mval: \u001b[0mScanning '/content/datasets/coco128-seg/labels/train2017.cache' images and labels... 126 found, 2 missing, 0 empty, 0 corrupt: 100% 128/128 [00:00<?, ?it/s]\n",
"\u001b[34m\u001b[1mval: \u001b[0mCaching images (0.1GB ram): 100% 128/128 [00:01<00:00, 92.38it/s]\n",
"\n",
"\u001b[34m\u001b[1mAutoAnchor: \u001b[0m4.27 anchors/target, 0.994 Best Possible Recall (BPR). Current anchors are a good fit to dataset ✅\n",
"Plotting labels to runs/train-seg/exp/labels.jpg... \n",
"Image sizes 640 train, 640 val\n",
"Using 4 dataloader workers\n",
"Using 2 dataloader workers\n",
"Logging results to \u001b[1mruns/train-seg/exp\u001b[0m\n",
"Starting training for 3 epochs...\n",
"\n",
" Epoch GPU_mem box_loss seg_loss obj_loss cls_loss Instances Size\n",
" 0/2 4.67G 0.04464 0.05134 0.06548 0.01895 219 \n",
" Class Images Instances Box(P R mAP50 m\n",
" all 128 929 0.727 0.661 0.725 0.496 0.688 0.629 0.673 0.413\n",
" 0/2 4.92G 0.0417 0.04646 0.06066 0.02126 192 640: 100% 8/8 [00:07<00:00, 1.13it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95) Mask(P R mAP50 mAP50-95): 100% 4/4 [00:02<00:00, 1.79it/s]\n",
" all 128 929 0.737 0.649 0.715 0.492 0.719 0.617 0.658 0.408\n",
"\n",
" Epoch GPU_mem box_loss seg_loss obj_loss cls_loss Instances Size\n",
" 1/2 6.36G 0.04102 0.04702 0.06873 0.01734 263 \n",
" Class Images Instances Box(P R mAP50 m\n",
" all 128 929 0.752 0.676 0.743 0.51 0.704 0.64 0.682 0.425\n",
" 1/2 6.29G 0.04157 0.04503 0.05772 0.01777 208 640: 100% 8/8 [00:09<00:00, 1.18s/it]\n",
" Class Images Instances Box(P R mAP50 mAP50-95) Mask(P R mAP50 mAP50-95): 100% 4/4 [00:02<00:00, 1.85it/s]\n",
" all 128 929 0.756 0.674 0.738 0.506 0.725 0.64 0.68 0.422\n",
"\n",
" Epoch GPU_mem box_loss seg_loss obj_loss cls_loss Instances Size\n",
" 2/2 6.36G 0.0421 0.04463 0.05951 0.01746 245 \n",
" Class Images Instances Box(P R mAP50 m\n",
" all 128 929 0.776 0.674 0.757 0.514 0.72 0.632 0.684 0.429\n",
" 2/2 6.29G 0.0425 0.04793 0.06784 0.01863 161 640: 100% 8/8 [00:03<00:00, 2.04it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95) Mask(P R mAP50 mAP50-95): 100% 4/4 [00:02<00:00, 1.55it/s]\n",
" all 128 929 0.736 0.694 0.747 0.522 0.769 0.622 0.683 0.427\n",
"\n",
"3 epochs completed in 0.006 hours.\n",
"3 epochs completed in 0.008 hours.\n",
"Optimizer stripped from runs/train-seg/exp/weights/last.pt, 15.6MB\n",
"Optimizer stripped from runs/train-seg/exp/weights/best.pt, 15.6MB\n",
"\n",
"Validating runs/train-seg/exp/weights/best.pt...\n",
"Fusing layers... \n",
"Model summary: 165 layers, 7611485 parameters, 0 gradients, 26.4 GFLOPs\n",
" Class Images Instances Box(P R mAP50 m\n",
" all 128 929 0.775 0.673 0.758 0.515 0.72 0.632 0.684 0.427\n",
" person 128 254 0.829 0.745 0.833 0.545 0.776 0.697 0.764 0.406\n",
" bicycle 128 6 0.614 0.333 0.539 0.331 0.614 0.333 0.531 0.308\n",
" car 128 46 0.774 0.413 0.571 0.266 0.693 0.37 0.493 0.204\n",
" motorcycle 128 5 0.817 0.901 0.895 0.678 0.817 0.901 0.895 0.47\n",
" airplane 128 6 1 0.951 0.995 0.71 0.882 0.833 0.839 0.515\n",
" bus 128 7 0.695 0.714 0.757 0.661 0.695 0.714 0.757 0.627\n",
" train 128 3 1 0.935 0.995 0.566 1 0.935 0.995 0.731\n",
" truck 128 12 0.741 0.417 0.463 0.283 0.741 0.417 0.4 0.27\n",
" boat 128 6 0.653 0.32 0.452 0.17 0.653 0.32 0.328 0.149\n",
" traffic light 128 14 0.627 0.36 0.527 0.234 0.503 0.289 0.409 0.293\n",
" stop sign 128 2 0.829 1 0.995 0.747 0.829 1 0.995 0.821\n",
" bench 128 9 0.822 0.667 0.76 0.414 0.685 0.556 0.678 0.228\n",
" bird 128 16 0.967 1 0.995 0.675 0.906 0.938 0.909 0.516\n",
" cat 128 4 0.778 0.89 0.945 0.728 0.778 0.89 0.945 0.69\n",
" dog 128 9 1 0.65 0.973 0.697 1 0.65 0.939 0.615\n",
" horse 128 2 0.727 1 0.995 0.672 0.727 1 0.995 0.2\n",
" elephant 128 17 1 0.912 0.946 0.704 0.871 0.794 0.822 0.565\n",
" bear 128 1 0.626 1 0.995 0.895 0.626 1 0.995 0.895\n",
" zebra 128 4 0.865 1 0.995 0.934 0.865 1 0.995 0.822\n",
" giraffe 128 9 0.975 1 0.995 0.672 0.866 0.889 0.876 0.473\n",
" backpack 128 6 1 0.573 0.707 0.38 0.891 0.5 0.524 0.249\n",
" umbrella 128 18 0.744 0.889 0.926 0.552 0.465 0.556 0.483 0.262\n",
" handbag 128 19 0.799 0.209 0.432 0.225 0.799 0.209 0.403 0.201\n",
" tie 128 7 0.968 0.857 0.857 0.53 0.968 0.857 0.857 0.519\n",
" suitcase 128 4 0.821 1 0.995 0.696 0.821 1 0.995 0.665\n",
" frisbee 128 5 0.777 0.8 0.761 0.613 0.777 0.8 0.761 0.558\n",
" skis 128 1 0.721 1 0.995 0.497 0.721 1 0.995 0.398\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" snowboard 128 7 0.851 0.857 0.887 0.599 0.284 0.286 0.253 0.151\n",
" sports ball 128 6 0.961 0.667 0.687 0.429 0.721 0.5 0.481 0.476\n",
" kite 128 10 0.508 0.312 0.48 0.238 0.508 0.312 0.406 0.122\n",
" baseball bat 128 4 0.331 0.5 0.526 0.249 0.331 0.5 0.376 0.102\n",
" baseball glove 128 7 0.876 0.571 0.579 0.282 0.657 0.429 0.429 0.343\n",
" skateboard 128 5 1 0.697 0.824 0.471 0.707 0.497 0.552 0.299\n",
" tennis racket 128 7 0.524 0.714 0.646 0.426 0.524 0.714 0.646 0.452\n",
" bottle 128 18 0.657 0.389 0.531 0.359 0.657 0.389 0.569 0.362\n",
" wine glass 128 16 0.752 0.938 0.924 0.435 0.451 0.562 0.568 0.341\n",
" cup 128 36 0.859 0.676 0.848 0.503 0.823 0.648 0.793 0.496\n",
" fork 128 6 0.904 0.333 0.462 0.309 0.452 0.167 0.195 0.107\n",
" knife 128 16 0.749 0.5 0.665 0.413 0.655 0.438 0.523 0.314\n",
" spoon 128 22 0.787 0.409 0.577 0.275 0.787 0.409 0.528 0.236\n",
" bowl 128 28 0.793 0.679 0.744 0.577 0.751 0.643 0.688 0.366\n",
" banana 128 1 0.931 1 0.995 0.398 0.931 1 0.995 0.497\n",
" sandwich 128 2 1 0 0.828 0.713 1 0 0.498 0.449\n",
" orange 128 4 0.588 1 0.995 0.666 0.588 1 0.995 0.672\n",
" broccoli 128 11 0.563 0.455 0.356 0.258 0.563 0.455 0.362 0.259\n",
" carrot 128 24 0.683 0.75 0.753 0.489 0.758 0.833 0.835 0.451\n",
" hot dog 128 2 0.583 1 0.995 0.995 0.583 1 0.995 0.796\n",
" pizza 128 5 0.801 0.8 0.962 0.644 0.801 0.8 0.962 0.583\n",
" donut 128 14 0.704 1 0.889 0.759 0.704 1 0.889 0.683\n",
" cake 128 4 0.904 1 0.995 0.896 0.904 1 0.995 0.838\n",
" chair 128 35 0.672 0.543 0.629 0.333 0.708 0.571 0.583 0.284\n",
" couch 128 6 0.827 0.5 0.821 0.583 0.827 0.5 0.681 0.352\n",
" potted plant 128 14 0.809 0.908 0.884 0.584 0.809 0.908 0.884 0.474\n",
" bed 128 3 1 0.654 0.913 0.36 1 0.654 0.913 0.418\n",
" dining table 128 13 0.803 0.385 0.557 0.361 0.321 0.154 0.126 0.0487\n",
" toilet 128 2 0.802 1 0.995 0.921 0.802 1 0.995 0.698\n",
" tv 128 2 0.59 1 0.995 0.846 0.59 1 0.995 0.846\n",
" laptop 128 3 1 0 0.451 0.324 1 0 0.372 0.157\n",
" Class Images Instances Box(P R mAP50 mAP50-95) Mask(P R mAP50 mAP50-95): 100% 4/4 [00:06<00:00, 1.55s/it]\n",
" all 128 929 0.738 0.694 0.746 0.522 0.759 0.625 0.682 0.426\n",
" person 128 254 0.845 0.756 0.836 0.55 0.861 0.669 0.759 0.407\n",
" bicycle 128 6 0.475 0.333 0.549 0.341 0.711 0.333 0.526 0.322\n",
" car 128 46 0.612 0.565 0.539 0.257 0.555 0.435 0.477 0.171\n",
" motorcycle 128 5 0.73 0.8 0.752 0.571 0.747 0.8 0.752 0.42\n",
" airplane 128 6 1 0.943 0.995 0.732 0.92 0.833 0.839 0.555\n",
" bus 128 7 0.677 0.714 0.722 0.653 0.711 0.714 0.722 0.593\n",
" train 128 3 1 0.951 0.995 0.551 1 0.884 0.995 0.781\n",
" truck 128 12 0.555 0.417 0.457 0.285 0.624 0.417 0.397 0.277\n",
" boat 128 6 0.624 0.5 0.584 0.186 1 0.326 0.412 0.133\n",
" traffic light 128 14 0.513 0.302 0.411 0.247 0.435 0.214 0.376 0.251\n",
" stop sign 128 2 0.824 1 0.995 0.796 0.906 1 0.995 0.747\n",
" bench 128 9 0.75 0.667 0.763 0.367 0.724 0.585 0.698 0.209\n",
" bird 128 16 0.961 1 0.995 0.686 0.918 0.938 0.91 0.525\n",
" cat 128 4 0.771 0.857 0.945 0.752 0.76 0.8 0.945 0.728\n",
" dog 128 9 0.987 0.778 0.963 0.681 1 0.705 0.89 0.574\n",
" horse 128 2 0.703 1 0.995 0.697 0.759 1 0.995 0.249\n",
" elephant 128 17 0.916 0.882 0.93 0.691 0.811 0.765 0.829 0.537\n",
" bear 128 1 0.664 1 0.995 0.995 0.701 1 0.995 0.895\n",
" zebra 128 4 0.864 1 0.995 0.921 0.879 1 0.995 0.804\n",
" giraffe 128 9 0.883 0.889 0.94 0.683 0.845 0.778 0.78 0.463\n",
" backpack 128 6 1 0.59 0.701 0.372 1 0.474 0.52 0.252\n",
" umbrella 128 18 0.654 0.839 0.887 0.52 0.517 0.556 0.427 0.229\n",
" handbag 128 19 0.54 0.211 0.408 0.221 0.796 0.206 0.396 0.196\n",
" tie 128 7 0.864 0.857 0.857 0.577 0.925 0.857 0.857 0.534\n",
" suitcase 128 4 0.716 1 0.945 0.647 0.767 1 0.945 0.634\n",
" frisbee 128 5 0.708 0.8 0.761 0.643 0.737 0.8 0.761 0.501\n",
" skis 128 1 0.691 1 0.995 0.796 0.761 1 0.995 0.199\n",
" snowboard 128 7 0.918 0.857 0.904 0.604 0.32 0.286 0.235 0.137\n",
" sports ball 128 6 0.902 0.667 0.701 0.466 0.727 0.5 0.497 0.471\n",
" kite 128 10 0.586 0.4 0.511 0.231 0.663 0.394 0.417 0.139\n",
" baseball bat 128 4 0.359 0.5 0.401 0.169 0.631 0.5 0.526 0.133\n",
" baseball glove 128 7 1 0.519 0.58 0.327 0.687 0.286 0.455 0.328\n",
" skateboard 128 5 0.729 0.8 0.862 0.631 0.599 0.6 0.604 0.379\n",
" tennis racket 128 7 0.57 0.714 0.645 0.448 0.608 0.714 0.645 0.412\n",
" bottle 128 18 0.469 0.393 0.537 0.357 0.661 0.389 0.543 0.349\n",
" wine glass 128 16 0.677 0.938 0.866 0.441 0.53 0.625 0.67 0.334\n",
" cup 128 36 0.777 0.722 0.812 0.466 0.725 0.583 0.762 0.467\n",
" fork 128 6 0.948 0.333 0.425 0.27 0.527 0.167 0.18 0.102\n",
" knife 128 16 0.757 0.587 0.669 0.458 0.79 0.5 0.552 0.34\n",
" spoon 128 22 0.74 0.364 0.559 0.269 0.925 0.364 0.513 0.213\n",
" bowl 128 28 0.766 0.714 0.725 0.559 0.803 0.584 0.665 0.353\n",
" banana 128 1 0.408 1 0.995 0.398 0.539 1 0.995 0.497\n",
" sandwich 128 2 1 0 0.695 0.536 1 0 0.498 0.448\n",
" orange 128 4 0.467 1 0.995 0.693 0.518 1 0.995 0.663\n",
" broccoli 128 11 0.462 0.455 0.383 0.259 0.548 0.455 0.384 0.256\n",
" carrot 128 24 0.631 0.875 0.77 0.533 0.757 0.909 0.853 0.499\n",
" hot dog 128 2 0.555 1 0.995 0.995 0.578 1 0.995 0.796\n",
" pizza 128 5 0.89 0.8 0.962 0.796 1 0.778 0.962 0.766\n",
" donut 128 14 0.695 1 0.893 0.772 0.704 1 0.893 0.696\n",
" cake 128 4 0.826 1 0.995 0.92 0.862 1 0.995 0.846\n",
" chair 128 35 0.53 0.571 0.613 0.336 0.67 0.6 0.538 0.271\n",
" couch 128 6 0.972 0.667 0.833 0.627 1 0.62 0.696 0.394\n",
" potted plant 128 14 0.7 0.857 0.883 0.552 0.836 0.857 0.883 0.473\n",
" bed 128 3 0.979 0.667 0.83 0.366 1 0 0.83 0.373\n",
" dining table 128 13 0.775 0.308 0.505 0.364 0.644 0.231 0.25 0.0804\n",
" toilet 128 2 0.836 1 0.995 0.846 0.887 1 0.995 0.797\n",
" tv 128 2 0.6 1 0.995 0.846 0.655 1 0.995 0.896\n",
" laptop 128 3 0.822 0.333 0.445 0.307 1 0 0.392 0.12\n",
" mouse 128 2 1 0 0 0 1 0 0 0\n",
" remote 128 8 0.831 0.5 0.625 0.495 0.831 0.5 0.629 0.436\n",
" cell phone 128 8 0.867 0.375 0.482 0.26 0.578 0.25 0.302 0.127\n",
" microwave 128 3 0.782 1 0.995 0.695 0.782 1 0.995 0.585\n",
" oven 128 5 0.389 0.4 0.432 0.299 0.584 0.6 0.642 0.411\n",
" sink 128 6 0.657 0.5 0.491 0.373 0.657 0.5 0.436 0.303\n",
" refrigerator 128 5 0.729 0.8 0.778 0.547 0.729 0.8 0.778 0.496\n",
" book 128 29 0.77 0.231 0.451 0.186 0.77 0.231 0.399 0.136\n",
" clock 128 9 0.798 0.889 0.956 0.747 0.798 0.889 0.926 0.68\n",
" vase 128 2 0.437 1 0.995 0.895 0.437 1 0.995 0.796\n",
" scissors 128 1 0 0 0.0226 0.0113 0 0 0 0\n",
" teddy bear 128 21 0.815 0.629 0.877 0.521 0.753 0.582 0.793 0.435\n",
" toothbrush 128 5 1 0.719 0.995 0.737 1 0.719 0.995 0.606\n",
" remote 128 8 0.745 0.5 0.62 0.459 0.821 0.5 0.624 0.449\n",
" cell phone 128 8 0.686 0.375 0.502 0.272 0.488 0.25 0.28 0.132\n",
" microwave 128 3 0.831 1 0.995 0.722 0.867 1 0.995 0.592\n",
" oven 128 5 0.439 0.4 0.435 0.294 0.823 0.6 0.645 0.418\n",
" sink 128 6 0.677 0.5 0.565 0.448 0.722 0.5 0.46 0.362\n",
" refrigerator 128 5 0.533 0.8 0.783 0.524 0.558 0.8 0.783 0.527\n",
" book 128 29 0.732 0.379 0.423 0.196 0.69 0.207 0.38 0.131\n",
" clock 128 9 0.889 0.778 0.917 0.677 0.908 0.778 0.875 0.604\n",
" vase 128 2 0.375 1 0.995 0.995 0.455 1 0.995 0.796\n",
" scissors 128 1 1 0 0.0166 0.00166 1 0 0 0\n",
" teddy bear 128 21 0.813 0.829 0.841 0.457 0.826 0.678 0.786 0.422\n",
" toothbrush 128 5 0.806 1 0.995 0.733 0.991 1 0.995 0.628\n",
"Results saved to \u001b[1mruns/train-seg/exp\u001b[0m\n"
]
}
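Training leaves stripped checkpoints at runs/train-seg/exp/weights/last.pt and best.pt, per the log above. A sketch for inspecting best.pt, assuming the notebook's working directory is the yolov5 repo root (the pickled model classes must be importable) and that the checkpoint is a dict with a 'model' entry:

    import torch

    # Load the freshly trained checkpoint on CPU and report its size.
    ckpt = torch.load('runs/train-seg/exp/weights/best.pt', map_location='cpu')
    print(sorted(ckpt.keys()))                     # dict keys are assumptions
    if 'model' in ckpt:                            # 'model' key is an assumption
        n_params = sum(p.numel() for p in ckpt['model'].parameters())
        print(f'best.pt parameters: {n_params:,}')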
@@ -581,7 +572,6 @@
"metadata": {
"accelerator": "GPU",
"colab": {
"collapsed_sections": [],
"machine_shape": "hm",
"name": "YOLOv5 Tutorial",
"provenance": [],
@@ -603,354 +593,8 @@
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.12"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"0856bea36ec148b68522ff9c9eb258d8": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.5.0",
"model_name": "DescriptionStyleModel",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"0ace3934ec6f4d36a1b3a9e086390926": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.5.0",
"model_name": "ProgressStyleModel",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"35e03ce5090346c9ae602891470fc555": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.5.0",
"model_name": "FloatProgressModel",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_76879f6f2aa54637a7a07faeea2bd684",
"max": 818322941,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_0ace3934ec6f4d36a1b3a9e086390926",
"value": 818322941
}
},
"574140e4c4bc48c9a171541a02cd0211": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.5.0",
"model_name": "HTMLModel",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_60b913d755b34d638478e30705a2dde1",
"placeholder": "",
"style": "IPY_MODEL_0856bea36ec148b68522ff9c9eb258d8",
"value": "100%"
}
},
"5966ba6e6f114d8c9d8d1d6b1bd4f4c7": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.5.0",
"model_name": "DescriptionStyleModel",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"60b913d755b34d638478e30705a2dde1": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"65881db1db8a4e9c930fab9172d45143": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"76879f6f2aa54637a7a07faeea2bd684": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"9b8caa3522fc4cbab31e13b5dfc7808d": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.5.0",
"model_name": "HBoxModel",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_574140e4c4bc48c9a171541a02cd0211",
"IPY_MODEL_35e03ce5090346c9ae602891470fc555",
"IPY_MODEL_c942c208e72d46568b476bb0f2d75496"
],
"layout": "IPY_MODEL_65881db1db8a4e9c930fab9172d45143"
}
},
"c942c208e72d46568b476bb0f2d75496": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.5.0",
"model_name": "HTMLModel",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_d6b7a2243e0c4beca714d99dceec23d6",
"placeholder": "",
"style": "IPY_MODEL_5966ba6e6f114d8c9d8d1d6b1bd4f4c7",
"value": " 780M/780M [02:19&lt;00:00, 6.24MB/s]"
}
},
"d6b7a2243e0c4beca714d99dceec23d6": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
}
}
}
},
"nbformat": 4,
"nbformat_minor": 1
"nbformat_minor": 0
}