# SUPIR/llava/model/language_model/mpt/custom_embedding.py
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
class SharedEmbedding(nn.Embedding):
    """An embedding layer whose weight matrix doubles as the output projection.

    With the default call the module performs a standard embedding lookup.
    When called with ``unembed=True`` it instead multiplies the input by the
    (tied) embedding weight, projecting hidden states back to vocabulary
    logits — the classic weight-tying trick for language models.
    """

    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
        """Embed token ids, or project hidden states to vocab-sized logits.

        Args:
            input: Integer token indices when ``unembed`` is ``False``;
                hidden states of shape ``(..., embedding_dim)`` when ``True``.
            unembed: If ``True``, compute ``input @ weight.T`` instead of a
                lookup.

        Returns:
            The embedded tokens, or the unembedded logits.
        """
        if not unembed:
            return super().forward(input)
        # Weight tying: reuse the embedding table as the unembedding matrix.
        return F.linear(input, self.weight)