Skip to content

Commit

Permalink
Fix multi-head point transformer layer
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Jan 14, 2022
1 parent d2ef57a commit ac51d0b
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -57,9 +57,9 @@ def __init__(
attn_inner_dim = inner_dim * attn_mlp_hidden_mult

self.attn_mlp = nn.Sequential(
nn.Conv2d(inner_dim, attn_inner_dim, 1),
nn.Conv2d(inner_dim, attn_inner_dim, 1, groups = heads),
nn.ReLU(),
nn.Conv2d(attn_inner_dim, heads, 1),
nn.Conv2d(attn_inner_dim, heads, 1, groups = heads),
)

def forward(self, x, pos, mask = None):
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'point-transformer-pytorch',
packages = find_packages(),
version = '0.1.1',
version = '0.1.2',
license='MIT',
description = 'Point Transformer - Pytorch',
author = 'Phil Wang',
Expand Down

0 comments on commit ac51d0b

Please sign in to comment.