{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [
    {
      "id": 0,
      "content": "<s>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": true,
      "special": true
    },
    {
      "id": 1,
      "content": "<pad>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": true,
      "special": true
    },
    {
      "id": 2,
      "content": "</s>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": true,
      "special": true
    },
    {
      "id": 3,
      "content": "<unk>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": true,
      "special": true
    }
  ],
  "normalizer": null,
  "pre_tokenizer": null,
  "post_processor": null,
  "decoder": null,
  "model": {
    "type": "BPE",
    "vocab": {},
    "merges": []
  }
}