Mirror of https://github.com/open-mmlab/mmcv.git
* init npu
* add npu extension and focal loss adapter
* clean code
* clean code
* clean code
* clean code
* fix autocast bugs on npu (#2273)
* code format
* code format
* code format
* bug fix
* pytorch_npu_helper.hpp clean code
* Npu dev (#2306)
* fix autocast bugs on npu
* using scatter_kwargs in mmcv.device.scatter_gather
* raise ImportError when compile with npu
* add npu test case (#2307)
* add npu test case
* Update focal_loss.py
* add comment
* clean lint
* update dtype assert
* update DDP forward and comment
* fix bug

Co-authored-by: Zaida Zhou <58739961+zhouzaida@users.noreply.github.com>
Co-authored-by: ckirchhoff <515629648@qq.com>
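The commits above add Ascend NPU support: a compiled NPU extension with a focal-loss adapter, device wrappers in mmcv.device.npu, and the test case shown below. As a minimal sketch only (not code from this repository), wrapping a model for single-device NPU data parallelism with the new wrapper might look like the following; the module is a placeholder, and torch_npu is assumed to be installed so that .npu() is available.

import torch.nn as nn

from mmcv.device.npu import NPUDataParallel
from mmcv.utils import IS_NPU_AVAILABLE

model = nn.Conv2d(3, 8, 3)  # placeholder module
if IS_NPU_AVAILABLE:
    # Move parameters to the Ascend device and wrap the module,
    # mirroring how MMDataParallel is used on GPU.
    model = NPUDataParallel(model.npu())

NPUDistributedDataParallel follows the same pattern for multi-process training; the test below checks that both wrappers are recognized by is_module_wrapper.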
38 lines · 996 B · Python
# Copyright (c) OpenMMLab. All rights reserved.
from unittest.mock import MagicMock, patch

import torch.nn as nn

from mmcv.device.npu import NPUDataParallel, NPUDistributedDataParallel
from mmcv.parallel import is_module_wrapper
from mmcv.utils import IS_NPU_AVAILABLE


def mock(*args, **kwargs):
    pass


# Patch out the distributed collectives and the DDP init helper so that
# NPUDistributedDataParallel can be constructed in this test without an
# initialized process group or real device communication.
@patch('torch.distributed._broadcast_coalesced', mock)
@patch('torch.distributed.broadcast', mock)
@patch('torch.nn.parallel.DistributedDataParallel._ddp_init_helper', mock)
def test_is_module_wrapper():

    class Model(nn.Module):

        def __init__(self):
            super().__init__()
            self.conv = nn.Conv2d(2, 2, 1)

        def forward(self, x):
            return self.conv(x)

    # A bare nn.Module is not a module wrapper.
    model = Model()
    assert not is_module_wrapper(model)

    if IS_NPU_AVAILABLE:
        # Both NPU wrappers should be recognized as module wrappers.
        npudp = NPUDataParallel(model)
        assert is_module_wrapper(npudp)

        npuddp = NPUDistributedDataParallel(model, process_group=MagicMock())
        assert is_module_wrapper(npuddp)
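Note: on hosts without an Ascend NPU, IS_NPU_AVAILABLE is False, so only the plain-module assertion runs and the test still passes. The process group handed to NPUDistributedDataParallel is a MagicMock and the patched collectives never reach a real backend, so the wrapper check does not require torch.distributed to be initialized.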