glaurung-large-001 / tokenizer_config.json
Upload Glaurung Large 001 - RoBERTa large model for binary analysis
{
"tokenizer_class": "PreTrainedTokenizerFast",
"model_max_length": 512,
"padding_side": "right",
"truncation_side": "right",
"clean_up_tokenization_spaces": false,
"bos_token": "<|start|>",
"eos_token": "<|sep|>",
"sep_token": "<|sep|>",
"cls_token": "<|cls|>",
"unk_token": "<|unk|>",
"pad_token": "<|pad|>",
"mask_token": "<|mask|>",
"add_prefix_space": false
}
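
A minimal usage sketch of loading this tokenizer with the transformers AutoTokenizer, assuming the Hub repo id is "mjbommar/glaurung-large-001" (inferred from the page header) and using a hypothetical hex-byte string as input; the model's actual binary preprocessing may differ.

from transformers import AutoTokenizer

# Repo id is assumed from the page header and may not match the actual Hub path.
tokenizer = AutoTokenizer.from_pretrained("mjbommar/glaurung-large-001")

# The config above caps sequences at 512 tokens and pads/truncates on the right.
encoded = tokenizer(
    "48 89 e5 53 48 83 ec 08",  # hypothetical hex-byte input, for illustration only
    max_length=512,
    padding="max_length",
    truncation=True,
)

print(len(encoded["input_ids"]))     # 512 after right-padding
print(tokenizer.special_tokens_map)  # shows <|start|>, <|sep|>, <|cls|>, <|unk|>, <|pad|>, <|mask|>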