Skip to content

Commit 37bdd92

Browse files
committed
Remove duplicated attn_qkv tensors
1 parent 204548b commit 37bdd92

File tree

1 file changed (+0, −2 lines changed)

1 file changed (+0, −2 lines changed)

examples/mtmd/clip.cpp

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2958,7 +2958,6 @@ struct clip_model_loader {
2958 2958
2959 2959      // other attention tensors (output / norms / ln) left as-is
2960 2960      layer.o_w = get_tensor(string_format(TN_ATTN_OUTPUT, prefix, il, "weight"));
2961      -    layer.qkv_w = get_tensor(string_format(TN_ATTN_QKV, prefix, il, "weight"), false);
2962 2961      layer.k_norm = get_tensor(string_format(TN_ATTN_K_NORM, prefix, il, "weight"), false);
2963 2962      layer.q_norm = get_tensor(string_format(TN_ATTN_Q_NORM, prefix, il, "weight"), false);
2964 2963      layer.ln_1_w = get_tensor(string_format(TN_LN_1, prefix, il, "weight"), false);
@@ -2977,7 +2976,6 @@ struct clip_model_loader {
2977 2976
2978 2977      // keep other optional biases as before
2979 2978      layer.o_b = get_tensor(string_format(TN_ATTN_OUTPUT, prefix, il, "bias"), false);
2980      -    layer.qkv_b = get_tensor(string_format(TN_ATTN_QKV, prefix, il, "bias"), false);
2981 2979      layer.ln_1_b = get_tensor(string_format(TN_LN_1, prefix, il, "bias"), false);
2982 2980      layer.ln_2_b = get_tensor(string_format(TN_LN_2, prefix, il, "bias"), false);

0 commit comments

Comments (0)