{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "<|endoftext|>",
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}