import torch
from torch import nn


class ActivatorGatingUnit(nn.Module):
    """Gating unit: two parallel linear projections of the input,
    one passed through GELU, combined by elementwise multiplication
    and mapped back through a third projection."""

    def __init__(self, dim):
        super().__init__()
        self.proj_1 = nn.Linear(dim, dim)
        self.proj_2 = nn.Linear(dim, dim)
        self.proj_3 = nn.Linear(dim, dim)
        self.gelu = nn.GELU()

    def forward(self, x):
        # Gate branch: project and activate.
        u = self.gelu(self.proj_1(x))
        # Value branch: plain linear projection.
        v = self.proj_2(x)
        # Elementwise gating, then output projection.
        return self.proj_3(u * v)



class ActivatorBlock(nn.Module):
    """Pre-norm residual block wrapping the gating unit."""

    def __init__(self, d_model):
        super().__init__()
        self.norm = nn.LayerNorm(d_model)
        self.actgu = ActivatorGatingUnit(d_model)

    def forward(self, x):
        # Pre-norm residual: x + ActGU(LayerNorm(x)).
        return x + self.actgu(self.norm(x))



class ACTIVATOR(nn.Module):
    """Stack of num_layers ActivatorBlocks applied in sequence."""

    def __init__(self, d_model, num_layers):
        super().__init__()
        self.model = nn.Sequential(
            *[ActivatorBlock(d_model) for _ in range(num_layers)]
        )

    def forward(self, x):
        return self.model(x)
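

# Minimal usage sketch. The shapes below (batch=2, seq_len=16,
# d_model=128, num_layers=4) are illustrative assumptions, not values
# taken from the original file.
if __name__ == "__main__":
    model = ACTIVATOR(d_model=128, num_layers=4)
    x = torch.randn(2, 16, 128)  # (batch, seq_len, d_model)
    out = model(x)
    assert out.shape == x.shape  # every block is shape-preserving
    print(out.shape)  # torch.Size([2, 16, 128])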