czczup committed on
Commit edaa746
1 Parent(s): 94d38c0

fix compatibility issue for transformers 4.46+

Files changed (1)
  1. configuration_internvl_chat.py +3 -3
configuration_internvl_chat.py CHANGED
@@ -47,12 +47,12 @@ class InternVLChatConfig(PretrainedConfig):
             logger.info('llm_config is None. Initializing the LlamaConfig config with default values (`LlamaConfig`).')
 
         self.vision_config = InternVisionConfig(**vision_config)
-        if llm_config.get(['architectures'])[0] == 'LlamaForCausalLM':
+        if llm_config.get('architectures')[0] == 'LlamaForCausalLM':
             self.llm_config = LlamaConfig(**llm_config)
-        elif llm_config.get(['architectures'])[0] == 'InternLM2ForCausalLM':
+        elif llm_config.get('architectures')[0] == 'InternLM2ForCausalLM':
             self.llm_config = InternLM2Config(**llm_config)
         else:
-            raise ValueError('Unsupported architecture: {}'.format(llm_config.get(['architectures'])[0]))
+            raise ValueError('Unsupported architecture: {}'.format(llm_config.get('architectures')[0]))
         self.use_backbone_lora = use_backbone_lora
         self.use_llm_lora = use_llm_lora
         self.select_layer = select_layer
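
For context (not part of the commit): dict.get() requires a hashable key, so the old call llm_config.get(['architectures']) raises TypeError: unhashable type: 'list' whenever llm_config is a plain dict, which is the code path this commit targets for transformers 4.46+. A minimal sketch of the before/after behavior, using a hypothetical llm_config dict:

    # Minimal sketch (not from the repo): why the original .get() call breaks.
    llm_config = {'architectures': ['LlamaForCausalLM']}  # hypothetical example dict

    # Old call: a list is not a hashable dict key, so this line raises
    # TypeError: unhashable type: 'list'
    # llm_config.get(['architectures'])[0]

    # Fixed call: look up the string key, then take the first architecture name.
    arch = llm_config.get('architectures')[0]
    print(arch)  # LlamaForCausalLM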