Skip to content

Commit 86b8316

Browse files
committed
move dropout to be faithful to the paper
1 parent 48954bb commit 86b8316

File tree

2 files changed

+3
-3
lines changed

2 files changed

+3
-3
lines changed

conformer/conformer.py

+2-2
Original file line number | Diff line number | Diff line change
@@ -118,11 +118,11 @@ def forward(self, x, context = None, mask = None, context_mask = None):
118118
dots.masked_fill_(~mask, mask_value)
119119

120120
attn = dots.softmax(dim = -1)
121-
attn = self.dropout(attn)
122121

123122
out = einsum('b h i j, b h j d -> b h i d', attn, v)
124123
out = rearrange(out, 'b h n d -> b n (h d)')
125-
return self.to_out(out)
124+
out = self.to_out(out)
125+
return self.dropout(out)
126126

127127
class FeedForward(nn.Module):
128128
def __init__(

setup.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -3,7 +3,7 @@
33
setup(
44
name = 'conformer',
55
packages = find_packages(),
6-
version = '0.2.2',
6+
version = '0.2.3',
77
license='MIT',
88
description = 'The convolutional module from the Conformer paper',
99
author = 'Phil Wang',

0 commit comments

Comments
 (0)