mmcv/tests/test_device/test_mlu/test_mlu_parallel.py
Zaida Zhou 6a03918f55
[Feature] Add support for mps (#2092)
* [Feature] Add support for MPS

* fix import error

* update ut

* fix error

* trigger CI

* use a unique basename for test file modules

* avoid bc-breaking
2022-07-07 16:05:49 +08:00

38 lines
996 B
Python

# Copyright (c) OpenMMLab. All rights reserved.
from unittest.mock import MagicMock, patch
import torch.nn as nn
from mmcv.device.mlu import MLUDataParallel, MLUDistributedDataParallel
from mmcv.parallel import is_module_wrapper
from mmcv.utils import IS_MLU_AVAILABLE
def mock(*args, **kwargs):
    """No-op stand-in that accepts any arguments and returns None.

    Used below to patch out ``torch.distributed`` collectives and the DDP
    init helper so the wrapper classes can be built without a real
    process group.
    """
    return None
@patch('torch.distributed._broadcast_coalesced', mock)
@patch('torch.distributed.broadcast', mock)
@patch('torch.nn.parallel.DistributedDataParallel._ddp_init_helper', mock)
def test_is_module_wrapper():
    """Verify ``is_module_wrapper`` recognizes the MLU parallel wrappers.

    The three patches replace distributed collectives and the DDP init
    helper with no-ops so ``MLUDistributedDataParallel`` can be
    constructed without an initialized process group.
    """

    class Model(nn.Module):
        """Minimal one-layer model to wrap in the tests below."""

        def __init__(self):
            super().__init__()
            self.conv = nn.Conv2d(2, 2, 1)

        def forward(self, x):
            return self.conv(x)

    net = Model()
    # A plain nn.Module must not be reported as a wrapper.
    assert not is_module_wrapper(net)

    # The MLU wrappers can only be instantiated on MLU hardware.
    if IS_MLU_AVAILABLE:
        dp_model = MLUDataParallel(net)
        assert is_module_wrapper(dp_model)

        ddp_model = MLUDistributedDataParallel(net, process_group=MagicMock())
        assert is_module_wrapper(ddp_model)