math comment fix

This commit is contained in:
Varuna Jayasiri
2021-10-21 13:25:11 +05:30
parent 77bf55e03a
commit 14fb057045
2 changed files with 2 additions and 2 deletions

View File

@@ -181,7 +181,7 @@ class FastWeightsAttention(Module):
The model first retrieves the current value
$\bar{v}^{(i)}$ paired with the key $k^{(i)}$.
Then stores a combination $v^{(i)}_{new}$
of the retrieved value $\bar{v}^{̄(i)}$ and the input $v^{(i)}$.
of the retrieved value $\bar{v}^{(i)}$ and the input $v^{(i)}$.
\begin{align}
k^{(i)}, v^{(i)}, q^{(i)} &=

View File

@@ -50,7 +50,7 @@ def get_positional_encoding(d_model: int, max_len: int = 5000):
position = torch.arange(0, max_len, dtype=torch.float32).unsqueeze(1)
# $2 * i$
two_i = torch.arange(0, d_model, 2, dtype=torch.float32)
# $10000^{\frac{2i}{d_{model}}$
# $10000^{\frac{2i}{d_{model}}}$
div_term = torch.exp(two_i * -(math.log(10000.0) / d_model))
# $PE_{p,2i} = sin\Bigg(\frac{p}{10000^{\frac{2i}{d_{model}}}}\Bigg)$
encodings[:, 0::2] = torch.sin(position * div_term)