1 parent b9c7df7 commit b23a412
ding/model/template/language_transformer.py
@@ -50,7 +50,9 @@ def __init__(
         self.model = AutoModelForTokenClassification.from_pretrained(model_name)
         in_channel = hidden_dim if not add_linear else embedding_size
         self.value_head = nn.Linear(in_channel, 1)
-        self.norm = nn.Identity() if not norm_embedding else nn.LayerNorm(normalized_shape=in_channel)
+        self.norm = nn.Identity() if not norm_embedding else nn.LayerNorm(
+            normalized_shape=in_channel, elementwise_affine=False
+        )
 
         # Freeze transformer encoder and only train the linear layer
         if freeze_encoder:
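For context, here is a minimal standalone sketch of what the changed lines construct; it is not taken from the repository, and the `in_channel` value and input tensor are illustrative. With `elementwise_affine=False`, the `LayerNorm` still standardizes the embedding but carries no learnable scale/shift parameters, which fits a setup where the encoder is frozen and only the linear head is trained.

```python
import torch
import torch.nn as nn

in_channel = 768       # illustrative embedding size, not the repo's value
norm_embedding = True

# Before the change: an affine LayerNorm with learnable weight/bias.
# After the change:  elementwise_affine=False removes those parameters,
#                    so the norm only standardizes the features.
norm = nn.Identity() if not norm_embedding else nn.LayerNorm(
    normalized_shape=in_channel, elementwise_affine=False
)
value_head = nn.Linear(in_channel, 1)

x = torch.randn(4, in_channel)   # e.g. pooled encoder outputs
value = value_head(norm(x))      # shape: (4, 1)

# The norm now contributes zero trainable parameters.
print(sum(p.numel() for p in norm.parameters()))  # -> 0
```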