Sifal committed
Commit 4d8256e (1 parent: 1126f36)

Update app.py

Files changed (1): app.py (+2, -2)
app.py CHANGED
@@ -2,7 +2,7 @@ import torch
 import torch.nn as nn
 from torch import Tensor
 from torch.nn import Transformer
-
+import math
 # helper Module that adds positional encoding to the token embedding to introduce a notion of word order.
 class PositionalEncoding(nn.Module):
     def __init__(self,
@@ -10,7 +10,7 @@ class PositionalEncoding(nn.Module):
                  dropout: float,
                  maxlen: int = 5000):
         super(PositionalEncoding, self).__init__()
-        den = torch.exp(- torch.arange(0, emb_size, 2)* torch.log(10000) / emb_size)
+        den = torch.exp(- torch.arange(0, emb_size, 2)* math.log(10000) / emb_size)
         pos = torch.arange(0, maxlen).reshape(maxlen, 1)
         pos_embedding = torch.zeros((maxlen, emb_size))
         pos_embedding[:, 0::2] = torch.sin(pos * den)
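
The change is a one-line bug fix: torch.log expects a Tensor argument, so calling it on the plain Python int 10000 raises a TypeError, while math.log(10000) returns the float scalar the denominator formula needs. For reference, a minimal sketch of the patched module is shown below; the cosine half, buffer registration, and forward method are not part of this diff and are assumed here from the usual sinusoidal positional-encoding formulation.

    import math

    import torch
    import torch.nn as nn
    from torch import Tensor


    class PositionalEncoding(nn.Module):
        """Adds sinusoidal positional encodings to token embeddings (sketch)."""

        def __init__(self, emb_size: int, dropout: float, maxlen: int = 5000):
            super(PositionalEncoding, self).__init__()
            # math.log works on a plain Python number; torch.log would require a Tensor.
            den = torch.exp(-torch.arange(0, emb_size, 2) * math.log(10000) / emb_size)
            pos = torch.arange(0, maxlen).reshape(maxlen, 1)
            pos_embedding = torch.zeros((maxlen, emb_size))
            pos_embedding[:, 0::2] = torch.sin(pos * den)
            # Assumed continuation (not shown in this diff): cosine on odd indices,
            # a batch dimension for broadcasting, dropout, and a persistent buffer.
            pos_embedding[:, 1::2] = torch.cos(pos * den)
            pos_embedding = pos_embedding.unsqueeze(-2)

            self.dropout = nn.Dropout(dropout)
            self.register_buffer('pos_embedding', pos_embedding)

        def forward(self, token_embedding: Tensor) -> Tensor:
            # Slice the precomputed table to the sequence length and add it to the embeddings.
            return self.dropout(token_embedding + self.pos_embedding[:token_embedding.size(0), :])

As a quick sanity check, PositionalEncoding(emb_size=512, dropout=0.1)(torch.zeros(10, 32, 512)) should return a tensor of the same shape, (10, 32, 512), without the TypeError the pre-fix torch.log(10000) call would have raised.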