{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "!",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}