From db44d16e023fd248280943c0bac79b9b5ce11042 Mon Sep 17 00:00:00 2001
From: Ziyi Wu
Date: Thu, 6 May 2021 23:16:46 +0800
Subject: [PATCH] infer batch size using len(result) in test function (#532)

---
 mmseg/apis/test.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mmseg/apis/test.py b/mmseg/apis/test.py
index 1597df6aa..9728de4c6 100644
--- a/mmseg/apis/test.py
+++ b/mmseg/apis/test.py
@@ -149,7 +149,7 @@ def multi_gpu_test(model,
             results.append(result)
 
         if rank == 0:
-            batch_size = data['img'][0].size(0)
+            batch_size = len(result)
             for _ in range(batch_size * world_size):
                 prog_bar.update()
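Note for readers outside the patch context: below is a minimal, self-contained sketch of the progress-bar bookkeeping this one-line change touches. It is not the actual mmseg code; the names fake_model and count_progress_updates are hypothetical stand-ins. The point it illustrates is that the test-time forward pass returns one prediction per image, so len(result) gives the per-GPU batch size directly, without assuming the input dict is laid out as data['img'][0].

# A hypothetical sketch, not the real mmseg multi_gpu_test.
def fake_model(batch):
    # Stand-in for `model(return_loss=False, **data)`: returns one
    # placeholder prediction per input image in the batch.
    return [{'pred': None} for _ in batch['img']]

def count_progress_updates(data_loader, world_size):
    # Count progress-bar ticks the way rank 0 does in the patched loop:
    # each rank is assumed to have processed a batch of the same size.
    updates = 0
    for data in data_loader:
        result = fake_model(data)
        batch_size = len(result)             # one result per sample
        updates += batch_size * world_size   # one tick per sample per rank
    return updates

if __name__ == '__main__':
    # Two batches of 4 images each, spread over 8 ranks -> 64 ticks total.
    loader = [{'img': list(range(4))}, {'img': list(range(4))}]
    print(count_progress_updates(loader, world_size=8))  # prints 64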