Fix: llm_config bug

#15
by yroslavcr - opened
Files changed (1)
  1. configuration_internvl_chat.py +2 -1
configuration_internvl_chat.py CHANGED
@@ -47,6 +47,7 @@ class InternVLChatConfig(PretrainedConfig):
             logger.info('llm_config is None. Initializing the LlamaConfig config with default values (`LlamaConfig`).')
 
         self.vision_config = InternVisionConfig(**vision_config)
+        self.llm_config = None
         if llm_config.get('architectures', None) is not None:
             if llm_config.get('architectures')[0] == 'LlamaForCausalLM':
                 self.llm_config = LlamaConfig(**llm_config)
@@ -82,7 +83,7 @@ class InternVLChatConfig(PretrainedConfig):
         """
         output = copy.deepcopy(self.__dict__)
         output['vision_config'] = self.vision_config.to_dict()
-        output['llm_config'] = self.llm_config.to_dict()
+        output['llm_config'] = self.llm_config.to_dict() if self.llm_config is not None else {}
         output['model_type'] = self.__class__.model_type
         output['use_backbone_lora'] = self.use_backbone_lora
         output['use_llm_lora'] = self.use_llm_lora
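
For context, a minimal sketch of the failure this diff guards against. `ToyConfig` below is a hypothetical stand-in, not the repository's class; only the two changed lines and `LlamaConfig` from `transformers` mirror the actual code.

```python
import copy

from transformers import LlamaConfig


class ToyConfig:
    """Hypothetical stand-in for InternVLChatConfig, illustrating the fix."""

    def __init__(self, llm_config=None):
        llm_config = llm_config or {}
        # Added by this PR: default to None so the attribute always exists,
        # even when `architectures` is missing or names an unsupported type.
        self.llm_config = None
        if llm_config.get('architectures', None) is not None:
            if llm_config.get('architectures')[0] == 'LlamaForCausalLM':
                self.llm_config = LlamaConfig(**llm_config)

    def to_dict(self):
        output = copy.deepcopy(self.__dict__)
        # Changed by this PR: previously this called .to_dict() unconditionally
        # and raised AttributeError when self.llm_config had never been set.
        output['llm_config'] = self.llm_config.to_dict() if self.llm_config is not None else {}
        return output


# Without an llm_config (or with an unrecognized architecture), serialization
# now yields {} for 'llm_config' instead of raising.
print(ToyConfig().to_dict())
print(ToyConfig({'architectures': ['LlamaForCausalLM']}).to_dict()['llm_config']['model_type'])  # 'llama'
```

With the `{}` fallback, serializing a config that was created without an LLM sub-config no longer crashes, while configs built from a Llama `architectures` entry keep their full `llm_config` dict.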