Remove patch dropout layer as it should be integrated into packing
rwightman committed Sep 13, 2023
1 parent 6461405 commit d81f75b
Showing 1 changed file with 0 additions and 6 deletions.
timm/models/vision_transformer_packed.py: 6 changes (0 additions, 6 deletions)
@@ -603,12 +603,6 @@ def __init__(
         self.pos_embed_h = nn.Parameter(torch.randn(grid_h, embed_dim) * .02)
         self.pos_embed_w = nn.Parameter(torch.randn(grid_w, embed_dim) * .02)
         self.pos_drop = nn.Dropout(p=pos_drop_rate)
-        if patch_drop_rate > 0:
-            self.patch_drop = PatchDropout(
-                patch_drop_rate,
-            )
-        else:
-            self.patch_drop = nn.Identity()
         self.norm_pre = norm_layer(embed_dim) if pre_norm else nn.Identity()

         dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)]  # stochastic depth decay rule
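The commit message gives the rationale: once variable-length patch sequences from several images are packed into a single sequence, dropping a fraction of each image's tokens can happen at pack time, so a standalone PatchDropout module in the packed model is redundant. The following is a minimal sketch of that idea, not the timm implementation; the helper name pack_with_token_drop and its interface are hypothetical and may differ from whatever packing code vision_transformer_packed.py ends up using.

# Hypothetical sketch: fold per-image token dropping into the packing step,
# so no separate PatchDropout layer is needed on the packed sequence.
import torch


def pack_with_token_drop(
        patch_tokens: list,          # per-image tokens, each (num_patches_i, embed_dim)
        drop_rate: float = 0.0,
        training: bool = True,
):
    """Drop a fraction of each image's patch tokens, then pack the survivors
    into one concatenated sequence with per-token sequence ids."""
    kept, seq_ids = [], []
    for i, tokens in enumerate(patch_tokens):
        n = tokens.shape[0]
        if training and drop_rate > 0.:
            num_keep = max(1, int(n * (1. - drop_rate)))
            keep_idx = torch.randperm(n, device=tokens.device)[:num_keep]
            tokens = tokens[keep_idx]
        kept.append(tokens)
        seq_ids.append(torch.full((tokens.shape[0],), i, dtype=torch.long, device=tokens.device))
    # Packed sequence of shape (total_kept_tokens, embed_dim); seq_ids records
    # which image each token came from, for attention masking / pooling.
    return torch.cat(kept, dim=0), torch.cat(seq_ids, dim=0)


# Usage: two images with different patch counts, 25% of tokens dropped at pack time.
imgs = [torch.randn(196, 384), torch.randn(144, 384)]
packed, ids = pack_with_token_drop(imgs, drop_rate=0.25)
print(packed.shape, ids.shape)

Because the dropping happens before concatenation, the packed sequence simply comes out shorter; nothing downstream needs a dedicated dropout-over-patches layer.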
