diff --git a/internvl_chat/internvl/model/internvl_chat/modeling_internvl_chat.py b/internvl_chat/internvl/model/internvl_chat/modeling_internvl_chat.py
index 53f97396..997494d6 100644
--- a/internvl_chat/internvl/model/internvl_chat/modeling_internvl_chat.py
+++ b/internvl_chat/internvl/model/internvl_chat/modeling_internvl_chat.py
@@ -128,7 +128,7 @@ def wrap_llm_lora(self, r=128, lora_alpha=256, lora_dropout=0.05):
             target_modules = ['self_attn.q_proj', 'self_attn.k_proj', 'self_attn.v_proj', 'self_attn.o_proj',
                               'mlp.gate_proj', 'mlp.down_proj', 'mlp.up_proj']
         else:
-            raise NotImplemented
+            raise NotImplementedError(f'Unsupported architecture: {self.llm_arch_name}')
         lora_config = LoraConfig(
             r=r, target_modules=target_modules,