
Commit 1e70f2e

Default to postnorm
1 parent d41fd99

File tree

1 file changed (+2, -2 lines)

supar/modules/transformer.py

Lines changed: 2 additions & 2 deletions
@@ -126,7 +126,7 @@ def forward(self, x, mask):
 
 class RelativePositionTransformerEncoder(nn.Module):
 
-    def __init__(self, n_layers, n_heads=8, n_model=1024, n_inner=2048, pre_norm=True, dropout=0.1):
+    def __init__(self, n_layers, n_heads=8, n_model=1024, n_inner=2048, pre_norm=False, dropout=0.1):
         super(RelativePositionTransformerEncoder, self).__init__()
 
         self.n_layers = n_layers
@@ -212,7 +212,7 @@ def forward(self, q, k, v, mask):
 
 class RelativePositionTransformerEncoderLayer(nn.Module):
 
-    def __init__(self, n_heads, n_model, n_inner, activation='relu', pre_norm=True, dropout=0.1):
+    def __init__(self, n_heads, n_model, n_inner, activation='relu', pre_norm=False, dropout=0.1):
         super(RelativePositionTransformerEncoderLayer, self).__init__()
 
         self.pre_norm = pre_norm
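
The commit only flips the default from pre_norm=True to pre_norm=False; the forward pass that consumes the flag lies outside the diff. As a rough sketch of what such a flag conventionally selects, assuming a standard attention + feed-forward layer (hypothetical names, not supar's actual RelativePositionTransformerEncoderLayer):

import torch.nn as nn

class EncoderLayerSketch(nn.Module):
    # Minimal illustration of the pre-norm/post-norm switch; an assumption,
    # not supar's actual code.

    def __init__(self, n_model=1024, n_heads=8, n_inner=2048, pre_norm=False, dropout=0.1):
        super().__init__()
        self.pre_norm = pre_norm
        self.attn = nn.MultiheadAttention(n_model, n_heads, dropout=dropout)
        self.attn_norm = nn.LayerNorm(n_model)
        self.ffn = nn.Sequential(nn.Linear(n_model, n_inner),
                                 nn.ReLU(),
                                 nn.Linear(n_inner, n_model))
        self.ffn_norm = nn.LayerNorm(n_model)
        self.dropout = nn.Dropout(dropout)

    def forward(self, x):
        # x: (seq_len, batch, n_model), the default layout for nn.MultiheadAttention
        if self.pre_norm:
            # pre-norm: LayerNorm inside the residual branch, before each sublayer
            y = self.attn_norm(x)
            x = x + self.dropout(self.attn(y, y, y)[0])
            x = x + self.dropout(self.ffn(self.ffn_norm(x)))
        else:
            # post-norm (the new default): residual addition first, LayerNorm after
            x = self.attn_norm(x + self.dropout(self.attn(x, x, x)[0]))
            x = self.ffn_norm(x + self.dropout(self.ffn(x)))
        return x

Post-norm matches the original Transformer formulation, while pre-norm is often preferred for very deep stacks because the un-normalized residual path eases gradient flow; which default works better here is an empirical choice the commit message does not elaborate on.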
