{
  "_name_or_path": "/data/checkpoints/hugging_face/colqwen2-1.0-hf-internal/",
  "architectures": [
    "ColQwen2ForRetrieval"
  ],
  "embedding_dim": 128,
  "hidden_size": 1536,
  "is_composition": false,
  "model_type": "colqwen2",
  "num_attention_heads": 12,
  "num_hidden_layers": 28,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.50.0.dev0",
  "vlm_config": {
    "_name_or_path": "Qwen/Qwen2-VL-2B",
    "bos_token_id": 151643,
    "eos_token_id": 151645,
    "hidden_size": 1536,
    "image_token_id": 151655,
    "intermediate_size": 8960,
    "max_window_layers": 28,
    "model_type": "qwen2_vl",
    "num_attention_heads": 12,
    "num_hidden_layers": 28,
    "num_key_value_heads": 2,
    "rms_norm_eps": 1e-06,
    "rope_scaling": {
      "mrope_section": [
        16,
        24,
        24
      ],
      "rope_type": "default",
      "type": "default"
    },
    "sliding_window": 32768,
    "tie_word_embeddings": true,
    "torch_dtype": "bfloat16",
    "video_token_id": 151656,
    "vision_config": {
      "hidden_size": 1536,
      "in_chans": 3,
      "spatial_patch_size": 14
    },
    "vision_end_token_id": 151653,
    "vision_start_token_id": 151652,
    "vision_token_id": 151654,
    "vocab_size": 151936
  }
}