bibidentuhanoi committed on
Commit
bee7755
1 Parent(s): e8f0bbe

Upload tokenizer

Browse files
special_tokens_map.json CHANGED
@@ -14,7 +14,7 @@
14
  "single_word": false
15
  },
16
  "pad_token": {
17
- "content": "</s>",
18
  "lstrip": false,
19
  "normalized": false,
20
  "rstrip": false,
 
14
  "single_word": false
15
  },
16
  "pad_token": {
17
+ "content": "<|im_end|>",
18
  "lstrip": false,
19
  "normalized": false,
20
  "rstrip": false,
tokenizer.json CHANGED
@@ -23,15 +23,6 @@
23
  },
24
  {
25
  "id": 2,
26
- "content": "</s>",
27
- "single_word": false,
28
- "lstrip": false,
29
- "rstrip": false,
30
- "normalized": false,
31
- "special": true
32
- },
33
- {
34
- "id": 32000,
35
  "content": "<|im_end|>",
36
  "single_word": false,
37
  "lstrip": false,
@@ -40,7 +31,7 @@
40
  "special": true
41
  },
42
  {
43
- "id": 32001,
44
  "content": "<|im_start|>",
45
  "single_word": false,
46
  "lstrip": false,
@@ -152,10 +143,11 @@
152
  "end_of_word_suffix": null,
153
  "fuse_unk": true,
154
  "byte_fallback": true,
 
155
  "vocab": {
156
  "<unk>": 0,
157
  "<s>": 1,
158
- "</s>": 2,
159
  "<0x00>": 3,
160
  "<0x01>": 4,
161
  "<0x02>": 5,
 
23
  },
24
  {
25
  "id": 2,
 
 
 
 
 
 
 
 
 
26
  "content": "<|im_end|>",
27
  "single_word": false,
28
  "lstrip": false,
 
31
  "special": true
32
  },
33
  {
34
+ "id": 32000,
35
  "content": "<|im_start|>",
36
  "single_word": false,
37
  "lstrip": false,
 
143
  "end_of_word_suffix": null,
144
  "fuse_unk": true,
145
  "byte_fallback": true,
146
+ "ignore_merges": false,
147
  "vocab": {
148
  "<unk>": 0,
149
  "<s>": 1,
150
+ "<|im_end|>": 2,
151
  "<0x00>": 3,
152
  "<0x01>": 4,
153
  "<0x02>": 5,
tokenizer_config.json CHANGED
The diff for this file is too large to render. See raw diff