Upload LlamaForCausalLM
config.json CHANGED (+1 -1)
@@ -18,7 +18,7 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 22,
   "num_key_value_heads": 4,
-  "pad_token_id":
+  "pad_token_id": 0,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
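For context, the pad_token_id field in config.json is what the transformers library reads when this checkpoint is loaded, so after this commit the pad token is explicitly token id 0. A minimal sketch of verifying the change, assuming the repository is available locally (the path below is a placeholder, not part of this commit):

from transformers import AutoConfig

# Load the updated config.json; "./llama-checkpoint" stands in for
# wherever this repository has been downloaded or cloned.
config = AutoConfig.from_pretrained("./llama-checkpoint")

# After this commit the pad token id is set explicitly, so padded
# batches no longer depend on an unset or implicit value.
print(config.pad_token_id)  # -> 0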