Upload config.json with huggingface_hub
config.json +3 -2

@@ -27,7 +27,7 @@
   "num_hidden_layers": 92,
   "num_key_value_heads": 8,
   "num_nextn_predict_layers": 1,
-  "pad_token_id":
+  "pad_token_id": 151330,
   "partial_rotary_factor": 0.5,
   "quantization": {
     "group_size": 64,
@@ -44,7 +44,8 @@
   "tie_word_embeddings": false,
   "topk_group": 1,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.56.2",
+  "unsloth_fixed": true,
   "use_cache": true,
   "use_qk_norm": true,
   "vocab_size": 151552
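For reference, a commit like this is typically produced with HfApi.upload_file from the huggingface_hub library, as the commit title suggests. A minimal sketch follows; the repo_id is a placeholder (the target repository is not named in this commit view), and authentication is assumed to come from a saved token:

# Minimal sketch: push an updated config.json with huggingface_hub.
from huggingface_hub import HfApi

api = HfApi()  # reads the token saved by `huggingface-cli login` or from HF_TOKEN

api.upload_file(
    path_or_fileobj="config.json",    # local file containing the edited config
    path_in_repo="config.json",       # destination path inside the repo
    repo_id="your-org/your-model",    # placeholder; substitute the actual model repo
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)

upload_file creates a single commit containing just this file, which matches the one-file, +3/-2 change shown above.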