Add custom embedding (#42)
Browse files — Add custom embedding (216226e780056807fd082a6e5f41a4ece987b49c)
Co-authored-by: Brandon Cui <[email protected]>
- custom_embedding.py +12 -0
custom_embedding.py
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
import torch.nn as nn
|
3 |
+
import torch.nn.functional as F
|
4 |
+
from torch import Tensor
|
5 |
+
|
6 |
+
|
7 |
+
class SharedEmbedding(nn.Embedding):
    """An embedding table whose weight is tied to the output projection.

    With ``unembed=False`` this is a plain ``nn.Embedding``: token ids are
    looked up in the weight matrix. With ``unembed=True`` the very same
    weight is applied as a bias-free linear layer, projecting hidden states
    back to vocabulary-sized logits (weight tying).
    """

    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
        """Embed token ids, or unembed hidden states with the tied weight.

        Args:
            input: Integer token ids when embedding, or hidden-state
                vectors when ``unembed`` is True.
            unembed: If True, compute ``input @ weight.T`` instead of an
                embedding lookup.

        Returns:
            Embedding vectors, or vocabulary logits when ``unembed`` is True.
        """
        if not unembed:
            # Ordinary lookup path, delegated to nn.Embedding.
            return super().forward(input)
        # Tied unembedding: same matrix, used as a linear map with no bias.
        return F.linear(input, self.weight)