account for standalone layernorm
rsomani95 committed Jun 18, 2024
1 parent 38bcad6 · commit f065ee6
Showing 1 changed file with 7 additions and 2 deletions: src/open_clip/transformer.py
@@ -820,16 +820,21 @@ def lock_text_transformer(
     ]
 
     def _unlock(x):
+        ln_status = False if freeze_layer_norm else True
         if isinstance(x, Sequence):
             for g in x:
                 _unlock(g)
         else:
             if isinstance(x, torch.nn.Parameter):
                 x.requires_grad = True
+            elif isinstance(x, torch.nn.LayerNorm):
+                for p in x.parameters():
+                    p.requires_grad = ln_status
             else:
                 for n,p in x.named_parameters():
-                    if n.startswith("ln_"): # If LayerNorm layer
-                        p.requires_grad = False if freeze_layer_norm else True
+                    # This should grab LayerNorm inside `ResidualAttentionBlock` blocks
+                    if n.startswith("ln_"):
+                        p.requires_grad = ln_status
+                    else:
+                        p.requires_grad = True

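For context: before this commit, a standalone torch.nn.LayerNorm module placed in one of the unlock groups fell through to the named_parameters() branch, where its parameter names ("weight", "bias") never match the "ln_" prefix, so it was unfrozen even with freeze_layer_norm=True. The new elif branch handles that case directly. Below is a minimal runnable sketch of the post-commit logic; the _unlock_demo wrapper and the explicit freeze_layer_norm argument are illustrative only — in the commit, _unlock closes over freeze_layer_norm inside lock_text_transformer.

    import torch
    from collections.abc import Sequence

    def _unlock_demo(x, freeze_layer_norm=True):
        # Same logic as the committed _unlock, with freeze_layer_norm passed
        # explicitly instead of closed over from the enclosing function.
        ln_status = False if freeze_layer_norm else True
        if isinstance(x, Sequence):
            for g in x:
                _unlock_demo(g, freeze_layer_norm)
        else:
            if isinstance(x, torch.nn.Parameter):
                x.requires_grad = True
            elif isinstance(x, torch.nn.LayerNorm):
                # Standalone LayerNorm: its own parameter names are just
                # "weight"/"bias", so the "ln_" name check below never matches.
                for p in x.parameters():
                    p.requires_grad = ln_status
            else:
                for n, p in x.named_parameters():
                    if n.startswith("ln_"):  # e.g. ln_1/ln_2 inside ResidualAttentionBlock
                        p.requires_grad = ln_status
                    else:
                        p.requires_grad = True

    ln = torch.nn.LayerNorm(8)
    _unlock_demo([ln], freeze_layer_norm=True)
    assert not ln.weight.requires_grad  # stays frozen, as the commit intends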
