from transformers import PretrainedConfig, AutoConfig, CONFIG_MAPPING
from transformers import Qwen2_5_VLConfig, Qwen2_5_VLForConditionalGeneration
from transformers.dynamic_module_utils import get_class_from_dynamic_module
class QQMMConfig(PretrainedConfig):
    model_type = "qqmm"
    is_composition = True

    def __init__(self, model_config=None, **kwargs):
        super().__init__(**kwargs)
        # Avoid a mutable default argument: fall back to an empty dict here.
        model_config = model_config or {}
        model_type = model_config.get('model_type', '')
        # Configs shipped via trust_remote_code point their AutoConfig entry
        # at a dotted module path such as "configuration_foo.FooConfig".
        is_remote_code = '.' in model_config.get('auto_map', {}).get('AutoConfig', '')
        if model_type in CONFIG_MAPPING and not is_remote_code:
            # Known built-in model type: materialize a full config object.
            self.model_config = AutoConfig.for_model(**model_config)
        else:
            # Remote-code or unknown model type: keep the raw dict as-is.
            self.model_config = model_config
    @property
    def hidden_size(self):
        # Assumes model_config was materialized as a config object above.
        return self.model_config.hidden_size
    @classmethod
    def from_dict(cls, config_dict, **kwargs):
        # Allow callers to pass name_or_path; PretrainedConfig stores it
        # under the private _name_or_path field.
        if 'name_or_path' in kwargs:
            config_dict['_name_or_path'] = kwargs.pop('name_or_path')
        return super().from_dict(config_dict, **kwargs)
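

# A minimal usage sketch of the two branches in __init__, not part of the
# class above. The "my_custom_model" model_type and the auto_map path below
# are illustrative assumptions, not names taken from the original code.
if __name__ == "__main__":
    # "qwen2_5_vl" is registered in CONFIG_MAPPING and carries no remote-code
    # auto_map, so the dict is materialized into a Qwen2_5_VLConfig instance.
    cfg = QQMMConfig(model_config={"model_type": "qwen2_5_vl"})
    assert isinstance(cfg.model_config, Qwen2_5_VLConfig)

    # A dotted auto_map entry marks a trust_remote_code config, so the raw
    # dict is kept instead of being resolved locally.
    remote_cfg = QQMMConfig(model_config={
        "model_type": "my_custom_model",  # hypothetical remote-code model
        "auto_map": {"AutoConfig": "configuration_custom.CustomConfig"},
    })
    assert isinstance(remote_cfg.model_config, dict)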