Add custom embedding (#53)
- Add custom embedding (11ef03e9e6dbfd4f118f904a57e726d99d022c14)
Co-authored-by: Brandon Cui <bcui19@users.noreply.huggingface.co>
- custom_embedding.py +12 -0
custom_embedding.py
ADDED
@@ -0,0 +1,12 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch import Tensor
+
+
+class SharedEmbedding(nn.Embedding):
+
+    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
+        if unembed:
+            return F.linear(input, self.weight)
+        return super().forward(input)
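
The added SharedEmbedding class ties the input embedding and the output projection to a single weight matrix: called normally it behaves like nn.Embedding, and with unembed=True it reuses the same weights as a linear map from hidden states back to vocabulary logits. Below is a minimal usage sketch, not part of the commit; the sizes and variable names (vocab_size, d_model, token_ids) are illustrative assumptions.

import torch

from custom_embedding import SharedEmbedding

# Illustrative sizes; any (vocab_size, d_model) pair works.
vocab_size, d_model = 32, 8
emb = SharedEmbedding(vocab_size, d_model)

token_ids = torch.randint(0, vocab_size, (2, 5))  # (batch, seq_len)
hidden = emb(token_ids)                # embed:   (2, 5, d_model)
logits = emb(hidden, unembed=True)     # unembed: (2, 5, vocab_size), same weight matrix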