diff --git a/utils/activations.py b/utils/activations.py
index 954d2e198..aa3ddf071 100644
--- a/utils/activations.py
+++ b/utils/activations.py
@@ -5,7 +5,7 @@
 import torch.nn as nn
 import torch.nn.functional as F
 
-# SiLU https://arxiv.org/pdf/1905.02244.pdf ----------------------------------------------------------------------------
+# SiLU https://arxiv.org/pdf/1606.08415.pdf ----------------------------------------------------------------------------
 class SiLU(nn.Module):  # export-friendly version of nn.SiLU()
     @staticmethod
     def forward(x):
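
Note (not part of the patch): the hunk context above cuts off at the forward signature. For reference, a minimal sketch of the complete export-friendly SiLU module, assuming the standard x * sigmoid(x) formulation used as a drop-in replacement for nn.SiLU():

    import torch
    import torch.nn as nn

    class SiLU(nn.Module):  # export-friendly version of nn.SiLU()
        @staticmethod
        def forward(x):
            # SiLU (a.k.a. Swish): x * sigmoid(x); written out explicitly so ONNX/TorchScript
            # export does not depend on nn.SiLU() support in the target runtime
            return x * torch.sigmoid(x)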