From 1bd314dca12fe81bb3f0cfa278f94baccabf25d1 Mon Sep 17 00:00:00 2001
From: Mr-Neutr0n <64578610+Mr-Neutr0n@users.noreply.github.com>
Date: Thu, 12 Feb 2026 00:03:12 +0530
Subject: [PATCH] Fix typo in absolute_pos_embed dimension check: C1 != C1 ->
 C1 != C2

In `load_pretrained()`, the dimension mismatch guard for
`absolute_pos_embed` compares `C1` with itself (`C1 != C1`), which is
always False. This means a dimension mismatch between pretrained and
current model embedding channels is never detected, leading to a
confusing runtime error instead of a clear warning.

The fix changes the comparison to `C1 != C2` so the check works as
intended, matching the analogous check for `relative_position_bias_table`
(which correctly uses `nH1 != nH2`).
---
 utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils.py b/utils.py
index 328ad09d..9a994328 100644
--- a/utils.py
+++ b/utils.py
@@ -89,7 +89,7 @@ def load_pretrained(config, model, logger):
         absolute_pos_embed_current = model.state_dict()[k]
         _, L1, C1 = absolute_pos_embed_pretrained.size()
         _, L2, C2 = absolute_pos_embed_current.size()
-        if C1 != C1:
+        if C1 != C2:
             logger.warning(f"Error in loading {k}, passing......")
         else:
             if L1 != L2:
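
For context, a minimal repro sketch (not part of the patch itself) of why the
original guard is inert: `C1 != C1` compares a value with itself, so it can
never be True, and a channel mismatch slips through to a later shape error.
The tensor shapes below are hypothetical examples chosen only to make the
mismatch visible.

```python
import torch

# Hypothetical shapes: (batch, L, C) as unpacked in load_pretrained().
absolute_pos_embed_pretrained = torch.zeros(1, 196, 768)    # C1 = 768
absolute_pos_embed_current = torch.zeros(1, 196, 1024)      # C2 = 1024

_, L1, C1 = absolute_pos_embed_pretrained.size()
_, L2, C2 = absolute_pos_embed_current.size()

print(C1 != C1)  # False -- the buggy check; never fires, mismatch goes unreported
print(C1 != C2)  # True  -- the fixed check; correctly flags the channel mismatch
```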