From 41c050de7661763df95aca8d7e6601481e33d058 Mon Sep 17 00:00:00 2001
From: ZhuDeyao
Date: Mon, 23 Oct 2023 21:57:25 +0300
Subject: [PATCH] Update modeling_llama.py for transformers package
 compatibility

---
 minigpt4/models/modeling_llama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/minigpt4/models/modeling_llama.py b/minigpt4/models/modeling_llama.py
index 6d28020..5d59a53 100644
--- a/minigpt4/models/modeling_llama.py
+++ b/minigpt4/models/modeling_llama.py
@@ -75,7 +75,7 @@ class LlamaForCausalLM(LlamaForCausalLMOrig):
         )
 
         hidden_states = outputs[0]
-        if self.config.pretraining_tp > 1:
+        if hasattr(self.config, 'pretraining_tp') and self.config.pretraining_tp > 1:
             lm_head_slices = self.lm_head.weight.split(self.vocab_size // self.config.pretraining_tp, dim=0)
             logits = [F.linear(hidden_states, lm_head_slices[i]) for i in range(self.config.pretraining_tp)]
             logits = torch.cat(logits, dim=-1)
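Note: the `hasattr` guard exists because older transformers releases predate the `pretraining_tp` config field, so accessing it unconditionally raises `AttributeError`. For context, a minimal standalone sketch of the same guarded tensor-parallel lm_head path, written with an equivalent `getattr` default (the function name `compute_logits` and its signature are hypothetical, for illustration only):

```python
import torch
import torch.nn.functional as F

def compute_logits(hidden_states, lm_head, config, vocab_size):
    # getattr with a default of 1 falls back to the plain (non-parallel)
    # branch on transformers versions that lack `config.pretraining_tp`,
    # mirroring the hasattr guard in the patch above.
    pretraining_tp = getattr(config, "pretraining_tp", 1)
    if pretraining_tp > 1:
        # Split the lm_head weight into `pretraining_tp` row slices,
        # project hidden states through each slice separately, then
        # concatenate the partial logits along the vocabulary dimension.
        slices = lm_head.weight.split(vocab_size // pretraining_tp, dim=0)
        logits = [F.linear(hidden_states, s) for s in slices]
        return torch.cat(logits, dim=-1)
    # Default path: a single projection over the full vocabulary.
    return lm_head(hidden_states)
```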