
Commit

fix positional embedding for sequence lengths that are not powers of two in the causal case
lucidrains committed Aug 8, 2021
1 parent 826eb98 commit 4e94794
Showing 2 changed files with 2 additions and 2 deletions.
h_transformer_1d/h_transformer_1d.py (1 addition & 1 deletion)
@@ -294,7 +294,7 @@ def forward(self, x, **kwargs):
         # rotary embedding

         if exists(self.pos_emb):
-            freqs = self.pos_emb(torch.arange(n, device = device), cache_key = n)
+            freqs = self.pos_emb(torch.arange(pad_to_len, device = device), cache_key = pad_to_len)
             freqs = rearrange(freqs, 'n d -> () n d')
             q, k, v = map(lambda t: apply_rotary_emb(freqs, t), (q, k, v))
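For context, the causal path pads the sequence up to a power-of-two length (`pad_to_len`) before the hierarchical attention is applied, so the rotary frequency table must cover the padded length rather than the original `n`; otherwise the frequencies do not line up with the padded queries and keys whenever `n` is not already a power of two. Below is a minimal, self-contained sketch of that idea; `next_power_of_two` and `rotary_freqs` are illustrative stand-ins, not the RotaryEmbedding module the repository actually uses.

```python
# Illustrative sketch only: standalone helpers approximating the rotary setup,
# not the library's actual code.
import torch
import torch.nn.functional as F

def next_power_of_two(n):
    # smallest power of two >= n
    return 1 << (n - 1).bit_length()

def rotary_freqs(seq_len, dim, device=None, base=10000):
    # standard rotary frequency table, shape (seq_len, dim)
    inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, device=device).float() / dim))
    positions = torch.arange(seq_len, device=device).float()
    freqs = torch.einsum('i,j->ij', positions, inv_freq)
    return torch.cat((freqs, freqs), dim=-1)

n, dim = 300, 64                          # 300 is not a power of two
pad_to_len = next_power_of_two(n)         # 512

q = torch.randn(1, n, dim)
q = F.pad(q, (0, 0, 0, pad_to_len - n))   # pad the sequence dimension

freqs = rotary_freqs(pad_to_len, dim)     # the fix: frequencies for the padded length
assert freqs.shape[0] == q.shape[1]       # with rotary_freqs(n, dim) this would mismatch
```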
setup.py (1 addition & 1 deletion)
@@ -3,7 +3,7 @@
 setup(
   name = 'h-transformer-1d',
   packages = find_packages(),
-  version = '0.0.9',
+  version = '0.1.0',
   license='MIT',
   description = 'H-Transformer 1D - Pytorch',
   author = 'Phil Wang',
