From 86b8316b0f9f7336b0f2fede74659dd184b3dd9b Mon Sep 17 00:00:00 2001
From: Phil Wang
Date: Sun, 3 Jan 2021 18:59:05 -0800
Subject: [PATCH] move dropout to be faithful to paper

---
 conformer/conformer.py | 4 ++--
 setup.py               | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/conformer/conformer.py b/conformer/conformer.py
index f8b2e70..ef27685 100644
--- a/conformer/conformer.py
+++ b/conformer/conformer.py
@@ -118,11 +118,11 @@ def forward(self, x, context = None, mask = None, context_mask = None):
             dots.masked_fill_(~mask, mask_value)
 
         attn = dots.softmax(dim = -1)
-        attn = self.dropout(attn)
 
         out = einsum('b h i j, b h j d -> b h i d', attn, v)
         out = rearrange(out, 'b h n d -> b n (h d)')
-        return self.to_out(out)
+        out = self.to_out(out)
+        return self.dropout(out)
 
 class FeedForward(nn.Module):
     def __init__(
diff --git a/setup.py b/setup.py
index f4e3a0c..527361e 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'conformer',
   packages = find_packages(),
-  version = '0.2.2',
+  version = '0.2.3',
   license='MIT',
   description = 'The convolutional module from the Conformer paper',
   author = 'Phil Wang',
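
For reference, a minimal sketch of the attention block as it reads after this patch. This is not the repository's full Attention class (which also handles relative positional embeddings, masking, and cross-attention context); the AttentionSketch name and hyperparameters are illustrative only. Per the commit subject, dropout now falls on the projected output rather than on the attention matrix:

import torch
from torch import nn, einsum
from einops import rearrange

class AttentionSketch(nn.Module):
    def __init__(self, dim, heads = 8, dim_head = 64, dropout = 0.):
        super().__init__()
        inner_dim = dim_head * heads
        self.heads = heads
        self.scale = dim_head ** -0.5
        self.to_qkv = nn.Linear(dim, inner_dim * 3, bias = False)
        self.to_out = nn.Linear(inner_dim, dim)
        self.dropout = nn.Dropout(dropout)

    def forward(self, x):
        h = self.heads
        # project to queries, keys, values and split out the heads
        q, k, v = self.to_qkv(x).chunk(3, dim = -1)
        q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = h), (q, k, v))

        dots = einsum('b h i d, b h j d -> b h i j', q, k) * self.scale
        attn = dots.softmax(dim = -1)   # no dropout on the attention matrix anymore

        out = einsum('b h i j, b h j d -> b h i d', attn, v)
        out = rearrange(out, 'b h n d -> b n (h d)')
        out = self.to_out(out)
        return self.dropout(out)        # dropout moved here, after the output projection

Usage:

x = torch.randn(1, 1024, 512)
attn = AttentionSketch(dim = 512, dropout = 0.1)
out = attn(x)   # (1, 1024, 512)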