glenn-jocher committed
Commit 131782a
1 Parent(s): 4b074d9
Funnel ReLU (FReLU) (#556)
Signed-off-by: Glenn Jocher <[email protected]>

- utils/activations.py  +4 -6

utils/activations.py CHANGED
@@ -1,7 +1,6 @@
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-import torch.nn as nn
 
 
 # Swish ------------------------------------------------------------------------
@@ -61,16 +60,15 @@ class Mish(nn.Module):  # https://github.com/digantamisra98/Mish
     @staticmethod
     def forward(x):
         return x * F.softplus(x).tanh()
-
-
+
+
 # FReLU https://arxiv.org/abs/2007.11824 --------------------------------------
 class FReLU(nn.Module):
     def __init__(self, c1, k=3):  # ch_in, kernel
-        super().__init__()
+        super(FReLU, self).__init__()
         self.conv = nn.Conv2d(c1, c1, k, 1, 1, groups=c1)
         self.bn = nn.BatchNorm2d(c1)
-
+
     @staticmethod
     def forward(self, x):
         return torch.max(x, self.bn(self.conv(x)))
-
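For context, FReLU (https://arxiv.org/abs/2007.11824) replaces ReLU's fixed zero threshold with a spatial condition T(x), computed per channel by a depthwise k x k convolution followed by batch norm, and the activation becomes max(x, T(x)). Below is a minimal standalone sketch mirroring the class added in this diff; the only deviation is that the @staticmethod decorator on forward is left off here so the module can be called on a tensor directly, and the toy tensor sizes are purely illustrative.

import torch
import torch.nn as nn


class FReLU(nn.Module):
    # Funnel activation: y = max(x, T(x)), where T(x) is a depthwise conv + BN
    # producing a per-pixel, per-channel threshold from the input itself.
    def __init__(self, c1, k=3):  # ch_in, kernel
        super(FReLU, self).__init__()
        self.conv = nn.Conv2d(c1, c1, k, 1, 1, groups=c1)  # groups=c1 -> depthwise
        self.bn = nn.BatchNorm2d(c1)

    def forward(self, x):
        return torch.max(x, self.bn(self.conv(x)))


# Usage sketch on a dummy feature map
act = FReLU(c1=64)
x = torch.randn(2, 64, 32, 32)  # batch, channels, height, width
print(act(x).shape)  # torch.Size([2, 64, 32, 32])

With the default k=3 and padding 1, the depthwise convolution preserves the spatial size, so the element-wise max against the input is well defined.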