From 841a078e69e71e3d8462aa8f1e6aa8984d95aba8 Mon Sep 17 00:00:00 2001
From: Rui Xu
Date: Sat, 24 Apr 2021 19:10:13 +0800
Subject: [PATCH] [Fix]: fix data type in fused-bias-leakyrelu for apex fp16
 training (#981)

---
 mmcv/ops/fused_bias_leakyrelu.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/mmcv/ops/fused_bias_leakyrelu.py b/mmcv/ops/fused_bias_leakyrelu.py
index eefaf74da..367e66179 100644
--- a/mmcv/ops/fused_bias_leakyrelu.py
+++ b/mmcv/ops/fused_bias_leakyrelu.py
@@ -45,10 +45,9 @@ class FusedBiasLeakyReLUFunctionBackward(Function):
         # The second order deviation, in fact, contains two parts, while the
         # the first part is zero. Thus, we direct consider the second part
         # which is similar with the first order deviation in implementation.
-        gradgrad_out = ext_module.fused_bias_leakyrelu(gradgrad_input,
-                                                       gradgrad_bias, out, 3,
-                                                       1, ctx.negative_slope,
-                                                       ctx.scale)
+        gradgrad_out = ext_module.fused_bias_leakyrelu(
+            gradgrad_input, gradgrad_bias.to(out.dtype), out, 3, 1,
+            ctx.negative_slope, ctx.scale)
 
         return gradgrad_out, None, None, None
 
@@ -139,7 +138,8 @@ def fused_bias_leakyrelu(input, bias, negative_slope=0.2, scale=2**0.5):
     if not input.is_cuda:
         return bias_leakyrelu_ref(input, bias, negative_slope, scale)
 
-    return FusedBiasLeakyReLUFunction.apply(input, bias, negative_slope, scale)
+    return FusedBiasLeakyReLUFunction.apply(input, bias.to(input.dtype),
+                                            negative_slope, scale)
 
 
 def bias_leakyrelu_ref(x, bias, negative_slope=0.2, scale=2**0.5):
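
For context, a minimal sketch of the situation this patch targets, assuming mmcv is built with its CUDA ops and a GPU is available: under apex-style fp16 training the activations are cast to float16 while the bias parameter can remain float32, so the fused CUDA op previously received mismatched dtypes. The tensor shapes and values below are illustrative only, not taken from the original report.

    import torch
    from mmcv.ops import fused_bias_leakyrelu

    # Illustrative mixed-precision setup: half-precision activation,
    # float32 bias parameter, as apex fp16 training can produce.
    x = torch.randn(2, 8, 16, 16, device='cuda').half()
    bias = torch.zeros(8, device='cuda', requires_grad=True)  # stays float32

    # With this patch, bias is cast to x.dtype before the fused CUDA kernel
    # runs, so the kernel sees a single consistent dtype.
    out = fused_bias_leakyrelu(x, bias, negative_slope=0.2, scale=2**0.5)
    print(out.dtype)  # torch.float16

The same cast is applied in the double-backward path (gradgrad_bias.to(out.dtype)), so both the forward call and the second-order gradient call hand the extension tensors of one dtype.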