🐛 positional encoding buffer

Varuna Jayasiri
2020-09-27 16:40:22 +05:30
parent b2b305ff4d
commit 06e68d012c


@@ -28,7 +28,7 @@ class PositionalEncoding(Module):
         super().__init__()
         self.dropout = nn.Dropout(dropout_prob)
-        self.register_buffer('positional_encodings', get_positional_encoding(d_model, max_len))
+        self.register_buffer('positional_encodings', get_positional_encoding(d_model, max_len), False)

     def __call__(self, x: torch.Tensor):
         pe = self.positional_encodings[:x.shape[0]].detach().requires_grad_(False)
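
The change passes `False` as the third argument to `register_buffer`, i.e. `persistent=False`: the precomputed positional encodings stay attached to the module (and move with it across devices) but are no longer written to the `state_dict`, so checkpoints do not carry a large constant tensor tied to `max_len`. The sketch below illustrates the fixed module in plain PyTorch; the body of `get_positional_encoding` and the use of `nn.Module`/`forward` instead of the repository's `Module`/`__call__` are assumptions, not the repository's exact code.

```python
import math

import torch
import torch.nn as nn


def get_positional_encoding(d_model: int, max_len: int = 5000):
    # Assumed sinusoidal implementation; returns shape [max_len, 1, d_model].
    encodings = torch.zeros(max_len, d_model)
    position = torch.arange(0, max_len, dtype=torch.float32).unsqueeze(1)
    div_term = torch.exp(torch.arange(0, d_model, 2).float() * -(math.log(10000.0) / d_model))
    encodings[:, 0::2] = torch.sin(position * div_term)
    encodings[:, 1::2] = torch.cos(position * div_term)
    return encodings.unsqueeze(1)


class PositionalEncoding(nn.Module):
    def __init__(self, d_model: int, dropout_prob: float, max_len: int = 5000):
        super().__init__()
        self.dropout = nn.Dropout(dropout_prob)
        # persistent=False: the buffer follows .to()/.cuda() but is excluded
        # from state_dict, which is the fix this commit makes.
        self.register_buffer('positional_encodings',
                             get_positional_encoding(d_model, max_len),
                             persistent=False)

    def forward(self, x: torch.Tensor):
        # Slice to the input sequence length and add without tracking gradients.
        pe = self.positional_encodings[:x.shape[0]].detach().requires_grad_(False)
        return self.dropout(x + pe)
```

A quick check of the effect: `PositionalEncoding(512, 0.1).state_dict()` is empty after this change, whereas with a persistent buffer it would contain the full `[max_len, 1, d_model]` encoding tensor.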