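"""NiNformer building blocks in PyTorch.

Inferred from the code below: a FeedForward MLP and an MLP-Mixer block
are combined into a MixerGatingUnit, in which the mixer output gates a
linear projection of the input element-wise; NiNBlock wraps the gating
unit and a feed-forward sublayer with pre-norm residual connections,
and NiNformer stacks num_layers such blocks.
"""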
import torch
from torch import nn
from einops.layers.torch import Rearrange


class FeedForward(nn.Module):
    """Two-layer MLP: Linear -> GELU -> Dropout -> Linear -> Dropout."""

    def __init__(self, dim, hidden_dim, dropout):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(dim, hidden_dim),
            nn.GELU(),
            nn.Dropout(dropout),
            nn.Linear(hidden_dim, dim),
            nn.Dropout(dropout),
        )

    def forward(self, x):
        return self.net(x)


class MixerBlock(nn.Module):
    """MLP-Mixer block: token mixing across patches, then channel mixing,
    each with a residual connection."""

    def __init__(self, dim, num_patch, token_dim, channel_dim, dropout):
        super().__init__()
        # Token mixing: transpose so the feed-forward acts over the
        # sequence (patch) axis, then transpose back.
        self.token_mix = nn.Sequential(
            nn.LayerNorm(dim),
            Rearrange('b n d -> b d n'),
            FeedForward(num_patch, token_dim, dropout),
            Rearrange('b d n -> b n d'),
        )
        # Channel mixing: feed-forward over the feature axis.
        self.channel_mix = nn.Sequential(
            nn.LayerNorm(dim),
            FeedForward(dim, channel_dim, dropout),
        )

    def forward(self, x):
        x = x + self.token_mix(x)
        x = x + self.channel_mix(x)
        return x


class MixerGatingUnit(nn.Module):
    """Gating unit: a linear projection of the input is modulated
    element-wise by the output of a MixerBlock over the same input."""

    def __init__(self, dim, seq_len, token_dim, channel_dim, dropout):
        super().__init__()
        self.mixer = MixerBlock(dim, seq_len, token_dim, channel_dim, dropout)
        self.proj = nn.Linear(dim, dim)

    def forward(self, x):
        u = self.proj(x)   # linear branch
        v = self.mixer(x)  # mixer branch acts as a learned gate
        return u * v


class NiNBlock(nn.Module):
    """Gated block: a pre-norm MixerGatingUnit sublayer followed by a
    pre-norm feed-forward sublayer, each with a residual connection.
    Note that the same LayerNorm instance (shared affine parameters)
    normalizes the input of both sublayers."""

    def __init__(self, d_model, d_ffn, seq_len, dropout):
        super().__init__()
        self.norm = nn.LayerNorm(d_model)
        self.mgu = MixerGatingUnit(d_model, seq_len, d_ffn, d_ffn, dropout)
        self.ffn = FeedForward(d_model, d_ffn, dropout)

    def forward(self, x):
        residual = x
        x = self.mgu(self.norm(x))
        x = x + residual

        residual = x
        x = self.ffn(self.norm(x))
        return x + residual


class NiNformer(nn.Module):
    """Stack of NiNBlocks; expects input of shape (batch, seq_len, d_model)."""

    def __init__(self, d_model, d_ffn, seq_len, num_layers, dropout):
        super().__init__()
        self.model = nn.Sequential(
            *[NiNBlock(d_model, d_ffn, seq_len, dropout) for _ in range(num_layers)]
        )

    def forward(self, x):
        return self.model(x)
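

if __name__ == "__main__":
    # Minimal shape sanity check; an illustrative assumption, not part
    # of the original file, and the hyperparameters below are arbitrary
    # rather than values prescribed by the original authors.
    model = NiNformer(d_model=64, d_ffn=128, seq_len=16, num_layers=2, dropout=0.1)
    x = torch.randn(2, 16, 64)  # (batch, seq_len, d_model); seq_len must match
    y = model(x)
    print(y.shape)  # expected: torch.Size([2, 16, 64])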