johanobandoc committed
Commit 598e4a2 (verified) · Parent: 4573b7b

Upload LlamaForCausalLM

Files changed (1): config.json (+1, -1)
config.json CHANGED
@@ -18,7 +18,7 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 22,
   "num_key_value_heads": 4,
-  "pad_token_id": 32000,
+  "pad_token_id": 0,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,