Abdullah-Nazhat committed
Update activator_only_GEGLU.py
activator_only_GEGLU.py +2 -22
activator_only_GEGLU.py CHANGED
@@ -2,23 +2,6 @@ import torch
 from torch import nn
 
 
-
-
-
-"""class FeedForward(nn.Module):
-    def __init__(self, dim, hidden_dim, dropout):
-        super().__init__()
-        self.net = nn.Sequential(
-            nn.Linear(dim, hidden_dim),
-            nn.GELU(),
-            nn.Dropout(dropout),
-            nn.Linear(hidden_dim, dim),
-            nn.Dropout(dropout)
-        )
-    def forward(self, x):
-        return self.net(x)"""
-
-
 
 
 
@@ -54,16 +37,13 @@ class ActivatorBlock(nn.Module):
 
         self.norm = nn.LayerNorm(d_model)
         self.actgu = ActivatorGatingUnit(d_model, d_ffn)
-
+
     def forward(self, x):
         residual = x
         x = self.norm(x)
         x = self.actgu(x)
         x = x + residual
-
-        #x = self.norm(x)
-        #x = self.ffn(x)
-        #out = x + residual
+
         out = x
         return out
 
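After this cleanup the file keeps only a pre-norm residual block around the gating unit. The diff never shows the body of ActivatorGatingUnit, so the sketch below is a minimal reconstruction that fills it in with the standard GEGLU gate, GELU(xW) * (xV) (Shazeer, 2020), which the filename suggests but the hunks do not confirm; treat the gating-unit internals and the demo sizes as assumptions, not the author's code.

import torch
from torch import nn

class ActivatorGatingUnit(nn.Module):
    # Assumed GEGLU-style gate: the diff only shows the constructor call
    # ActivatorGatingUnit(d_model, d_ffn), not this body.
    def __init__(self, d_model, d_ffn):
        super().__init__()
        self.proj_in = nn.Linear(d_model, 2 * d_ffn)  # value and gate branches in one matmul
        self.act = nn.GELU()
        self.proj_out = nn.Linear(d_ffn, d_model)

    def forward(self, x):
        u, v = self.proj_in(x).chunk(2, dim=-1)  # split into value u and gate v
        return self.proj_out(self.act(u) * v)    # GELU-gated elementwise product

class ActivatorBlock(nn.Module):
    # Pre-norm residual wrapper, as the file reads after this commit.
    def __init__(self, d_model, d_ffn):
        super().__init__()
        self.norm = nn.LayerNorm(d_model)
        self.actgu = ActivatorGatingUnit(d_model, d_ffn)

    def forward(self, x):
        residual = x
        x = self.norm(x)
        x = self.actgu(x)
        x = x + residual
        out = x
        return out

# Shape check with illustrative sizes (not taken from the diff).
x = torch.randn(1, 16, 64)
assert ActivatorBlock(d_model=64, d_ffn=128)(x).shape == x.shape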