positional_encoding.py
# Prefer the GPU-accelerated CuPy backend; fall back to NumPy if it is unavailable.
try:
    import cupy as np
    is_cupy_available = True
except ImportError:
    import numpy as np
    is_cupy_available = False


class PositionalEncoding():
    """Implements the sinusoidal positional encoding:

        PE(pos, 2i)   = sin(pos / 10000^(2i / d_model))
        PE(pos, 2i+1) = cos(pos / 10000^(2i / d_model))
    """

    def __init__(self, max_len, d_model, dropout_rate=0.1, data_type=np.float32):
        self.d_model = d_model
        self.dropout_rate = dropout_rate  # stored but not applied in this class
        self.max_len = max_len
        self.data_type = data_type

        # Precompute the encoding table once for all positions up to max_len.
        pe = np.zeros((max_len, d_model))                                           # (max_len, d_model)
        position = np.arange(0, max_len)[:, np.newaxis]                             # (max_len, 1)
        div_term = np.exp(np.arange(0, d_model, 2) * (-np.log(10000.0) / d_model))  # (d_model // 2,)
        pe[:, 0::2] = np.sin(position * div_term)  # even feature indices
        pe[:, 1::2] = np.cos(position * div_term)  # odd feature indices
        self.pe = pe[np.newaxis, :, :].astype(self.data_type)                       # (1, max_len, d_model)

    def forward(self, x):
        """x: (batch_size, seq_len, d_model)"""
        # Slice the table to the current sequence length and broadcast over the batch.
        x = x + self.pe[:, :x.shape[1], :]  # (batch_size, seq_len, d_model)
        return x

    def backward(self, error):
        """error: (batch_size, seq_len, d_model)"""
        # The encoding is a constant additive term, so the gradient passes through unchanged.
        return error

# An alternative (currently unused) formulation that computes the embedding for a
# 1-D array of positions on the fly instead of precomputing a table:
# class PositionalEncoding():
#     """ Implements the sinusoidal positional encoding.
#     """
#     def __init__(self, max_len, d_model, data_type=np.float32):
#         self.d_model = d_model
#         self.data_type = data_type
#
#     def forward(self, x):
#         half_dim = self.d_model // 2
#         embeddings = np.log(10000) / (half_dim - 1)
#         embeddings = np.exp(np.arange(half_dim) * -embeddings)
#         embeddings = x[:, None] * embeddings[None, :]
#         embeddings = np.concatenate((np.sin(embeddings), np.cos(embeddings)), axis=-1)
#         return embeddings.astype(self.data_type)
#
#     def backward(self, error):
#         """ error: (batch_size, seq_len, d_model)
#         """
#         return error
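

# A minimal usage sketch of the PositionalEncoding class above. The shapes and
# hyperparameters below (max_len=512, d_model=64, batch of 2, seq_len of 10)
# are illustrative assumptions, not values taken from this repository.
if __name__ == "__main__":
    pos_enc = PositionalEncoding(max_len=512, d_model=64)
    x = np.zeros((2, 10, 64), dtype=np.float32)  # (batch_size, seq_len, d_model)
    out = pos_enc.forward(x)                     # adds pe[:, :10, :] via broadcasting
    print(out.shape)                             # (2, 10, 64)
    grad = pos_enc.backward(np.ones_like(out))   # identity on the incoming gradient
    print(grad.shape)                            # (2, 10, 64)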