Upload tokenizer
tokenizer_config.json CHANGED (+2 -0)
@@ -317,8 +317,10 @@
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
+  "max_lenght": 8192,
   "model_max_length": 2048,
   "pad_token": "<|endoftext|>",
+  "padding": true,
   "tokenizer_class": "CodeGenTokenizer",
   "unk_token": "<|endoftext|>"
 }
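For reference, a minimal Python sketch (not part of this commit) of how the updated config behaves when the tokenizer is loaded with the Hugging Face transformers library; "<repo-id>" is a placeholder for the model repository this commit belongs to. Note that "max_lenght" appears to be a misspelling of "max_length", so it is most likely ignored by CodeGenTokenizer, while the special tokens and "model_max_length" are picked up as usual.

from transformers import AutoTokenizer

# "<repo-id>" is a placeholder for the repository that carries this tokenizer_config.json.
tok = AutoTokenizer.from_pretrained("<repo-id>")

# Values read from tokenizer_config.json:
print(tok.model_max_length)            # 2048, from "model_max_length"
print(tok.eos_token, tok.pad_token)    # both "<|endoftext|>"

# Because a pad token is defined, padding can be requested per call:
batch = tok(["def foo():", "print('hi')"], padding=True, return_tensors="pt")
print(batch["input_ids"].shape)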