Upload folder using huggingface_hub
Browse files
- README.md +1 -0
- text_encoder/config.json +1 -1
README.md
CHANGED
|
@@ -117,6 +117,7 @@ with open(hf_hub_download(source_model_id, filename='text_encoder/config.json',
|
|
| 117 |
'num_key_value_heads': 1,
|
| 118 |
'sliding_window': 64,
|
| 119 |
'tie_word_embeddings': True,
|
|
|
|
| 120 |
'use_sliding_window': True,
|
| 121 |
'layer_types': ['full_attention', 'sliding_attention']
|
| 122 |
})
|
|
|
|
| 117 |
'num_key_value_heads': 1,
|
| 118 |
'sliding_window': 64,
|
| 119 |
'tie_word_embeddings': True,
|
| 120 |
+ 'max_window_layers': 1,
|
| 121 |
'use_sliding_window': True,
|
| 122 |
'layer_types': ['full_attention', 'sliding_attention']
|
| 123 |
})
|
text_encoder/config.json
CHANGED
|
@@ -45,7 +45,7 @@
|
|
| 45 |
"sliding_attention"
|
| 46 |
],
|
| 47 |
"max_position_embeddings": 128000,
|
| 48 |
- "max_window_layers":
|
| 49 |
"model_type": "qwen2_5_vl_text",
|
| 50 |
"num_attention_heads": 2,
|
| 51 |
"num_hidden_layers": 2,
|
|
|
|
| 45 |
"sliding_attention"
|
| 46 |
],
|
| 47 |
"max_position_embeddings": 128000,
|
| 48 |
+ "max_window_layers": 1,
|
| 49 |
"model_type": "qwen2_5_vl_text",
|
| 50 |
"num_attention_heads": 2,
|
| 51 |
"num_hidden_layers": 2,
|