mathugo committed
Commit 12dfae9
1 Parent(s): 5ea2830

Upload tokenizer

Files changed (4)
  1. merges.txt +0 -0
  2. tokenizer.json +0 -0
  3. tokenizer_config.json +1 -2
  4. vocab.json +0 -0
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -48,8 +48,7 @@
   "eos_token": "</s>",
   "errors": "replace",
   "mask_token": "<mask>",
-  "max_len": 712,
-  "model_max_length": 712,
+  "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "tokenizer_class": "RobertaTokenizer",
vocab.json CHANGED
The diff for this file is too large to render. See raw diff