import torch
from torch import nn


class GatingUnit(nn.Module):
    """SiLU-gated linear unit: two parallel projections of the input are
    multiplied elementwise (one passed through SiLU as the gate), then
    projected back to the model dimension."""

    def __init__(self, dim):
        super().__init__()
        self.proj_1 = nn.Linear(dim, dim)
        self.proj_2 = nn.Linear(dim, dim)
        self.proj_3 = nn.Linear(dim, dim)
        self.silu = nn.SiLU()

    def forward(self, x):
        u = self.silu(self.proj_1(x))  # gate branch
        v = self.proj_2(x)             # value branch
        return self.proj_3(u * v)      # gated product, projected back



class NormalizerBlock(nn.Module):
    """Residual block with two normalization stages: a global LayerNorm over
    the flattened (tokens * channels) representation, then a per-token
    LayerNorm followed by the gating unit, each wrapped in a skip
    connection."""

    def __init__(self, d_model, num_tokens):
        super().__init__()
        self.norm_global = nn.LayerNorm(d_model * num_tokens)
        self.norm_local = nn.LayerNorm(d_model)
        self.gating = GatingUnit(d_model)

    def forward(self, x):
        # Global stage: normalize across all tokens and channels at once.
        residual = x
        batch, tokens, channels = x.shape
        x = x.reshape(batch, tokens * channels)
        x = self.norm_global(x)
        x = x.reshape(batch, tokens, channels)
        x = x + residual

        # Local stage: per-token normalization followed by the gated unit.
        residual = x
        x = self.norm_local(x)
        x = self.gating(x)
        return x + residual



class Normalizer(nn.Module):
    """Stack of num_layers NormalizerBlocks applied sequentially."""

    def __init__(self, d_model, num_tokens, num_layers):
        super().__init__()
        self.model = nn.Sequential(
            *[NormalizerBlock(d_model, num_tokens) for _ in range(num_layers)]
        )

    def forward(self, x):
        return self.model(x)
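

# Usage sketch (not part of the original file): a minimal smoke test showing
# the expected input shape (batch, num_tokens, d_model). Note that num_tokens
# must match the token dimension of the input, since norm_global is a
# LayerNorm over d_model * num_tokens features. The concrete sizes below are
# illustrative assumptions, not values taken from the original code.
if __name__ == "__main__":
    model = Normalizer(d_model=64, num_tokens=16, num_layers=4)
    x = torch.randn(2, 16, 64)  # (batch, num_tokens, d_model)
    out = model(x)
    print(out.shape)  # expected: torch.Size([2, 16, 64])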