Abdullah-Nazhat committed on
Commit 85560b0 · verified · 1 Parent(s): 75a44ab

Update activator_only_GEGLU.py

Files changed (1)
  1. activator_only_GEGLU.py +2 -22
activator_only_GEGLU.py CHANGED
@@ -2,23 +2,6 @@ import torch
 from torch import nn
 
 
-
-
-
-"""class FeedForward(nn.Module):
-    def __init__(self, dim, hidden_dim, dropout):
-        super().__init__()
-        self.net = nn.Sequential(
-            nn.Linear(dim, hidden_dim),
-            nn.GELU(),
-            nn.Dropout(dropout),
-            nn.Linear(hidden_dim, dim),
-            nn.Dropout(dropout)
-        )
-    def forward(self, x):
-        return self.net(x)"""
-
-
 
 
 
@@ -54,16 +37,13 @@ class ActivatorBlock(nn.Module):
 
         self.norm = nn.LayerNorm(d_model)
         self.actgu = ActivatorGatingUnit(d_model, d_ffn)
-        #self.ffn = FeedForward(d_model,d_ffn,dropout)
+
     def forward(self, x):
         residual = x
         x = self.norm(x)
         x = self.actgu(x)
         x = x + residual
-        #residual = x
-        #x = self.norm(x)
-        #x = self.ffn(x)
-        #out = x + residual
+
         out = x
         return out
 
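For reference, a minimal sketch of how ActivatorBlock reads after this cleanup. It is not taken from the commit: the constructor signature and the ActivatorGatingUnit definition are assumptions (the real gating unit is defined elsewhere in activator_only_GEGLU.py and may differ), included only so the sketch runs standalone.

import torch
from torch import nn
import torch.nn.functional as F


class ActivatorGatingUnit(nn.Module):
    # Hypothetical GEGLU-style gate standing in for the real definition in the file:
    # project up, gate with GELU, project back down.
    def __init__(self, d_model, d_ffn):
        super().__init__()
        self.proj_in = nn.Linear(d_model, d_ffn)
        self.gate = nn.Linear(d_model, d_ffn)
        self.proj_out = nn.Linear(d_ffn, d_model)

    def forward(self, x):
        return self.proj_out(self.proj_in(x) * F.gelu(self.gate(x)))


class ActivatorBlock(nn.Module):
    # Mirrors the post-commit structure: a single pre-norm residual branch
    # through the gating unit; the commented-out FeedForward path is gone.
    def __init__(self, d_model, d_ffn):
        super().__init__()
        self.norm = nn.LayerNorm(d_model)
        self.actgu = ActivatorGatingUnit(d_model, d_ffn)

    def forward(self, x):
        residual = x
        x = self.norm(x)
        x = self.actgu(x)
        x = x + residual
        out = x
        return out


# Usage sketch (shapes are illustrative only):
# block = ActivatorBlock(d_model=512, d_ffn=2048)
# y = block(torch.randn(2, 16, 512))   # -> torch.Size([2, 16, 512])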