Remove DDP MultiHeadAttention fix (#3768)

pull/3770/head
Glenn Jocher 2021-06-25 12:52:05 +02:00 committed by GitHub
parent 374957317a
commit f2d97ebb25
1 changed file with 1 addition and 3 deletions


@@ -252,9 +252,7 @@ def train(hyp,  # path/to/hyp.yaml or hyp dictionary
     # DDP mode
     if cuda and RANK != -1:
-        model = DDP(model, device_ids=[LOCAL_RANK], output_device=LOCAL_RANK,
-                    # nn.MultiheadAttention incompatibility with DDP https://github.com/pytorch/pytorch/issues/26698
-                    find_unused_parameters=any(isinstance(layer, nn.MultiheadAttention) for layer in model.modules()))
+        model = DDP(model, device_ids=[LOCAL_RANK], output_device=LOCAL_RANK)
     # Model parameters
     hyp['box'] *= 3. / nl  # scale to layers
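
For context, the removed lines implemented a workaround for nn.MultiheadAttention tripping DDP's gradient-readiness checks (pytorch/pytorch#26698): enable find_unused_parameters only when such a layer is present, since that flag adds per-iteration overhead. A minimal standalone sketch of that pattern follows; the wrap_ddp helper name is hypothetical, and it assumes a distributed process group has already been initialized.

import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP

def wrap_ddp(model: nn.Module, local_rank: int) -> DDP:
    # Scan submodules for nn.MultiheadAttention; only then pay the cost of
    # find_unused_parameters, which makes DDP tolerate parameters that
    # receive no gradient in a given backward pass.
    needs_unused = any(isinstance(m, nn.MultiheadAttention) for m in model.modules())
    return DDP(model,
               device_ids=[local_rank],
               output_device=local_rank,
               find_unused_parameters=needs_unused)

This commit drops that conditional and wraps the model with DDP's defaults (find_unused_parameters=False), presumably because the models being trained here no longer contain a nn.MultiheadAttention layer.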