xiechunyu committed
Commit 13fc8c2
1 Parent(s): 1878da4

first commit
config.json ADDED
@@ -0,0 +1,48 @@
+ {
+   "_name_or_path": "qh360_vl-70B",
+   "architectures": [
+     "QH360_VL_LlamaForCausalLM"
+   ],
+   "auto_map": {
+     "AutoConfig": "modeling_360vl.QH360_VLConfig",
+     "AutoModelForCausalLM": "modeling_360vl.QH360_VL_LlamaForCausalLM"
+   },
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": 128001,
+   "freeze_mm_mlp_adapter": false,
+   "hidden_act": "silu",
+   "hidden_size": 8192,
+   "image_aspect_ratio": "pad",
+   "image_grid_pinpoints": null,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 8192,
+   "mm_hidden_size": 1024,
+   "mm_num_tokens": 577,
+   "mm_projector_config": "qh360_vl-70B/proj_config.json",
+   "mm_projector_lr": null,
+   "mm_projector_type": "c-abs",
+   "mm_use_im_patch_token": false,
+   "mm_use_im_start_end": false,
+   "mm_vision_select_feature": "patch",
+   "mm_vision_select_layer": -2,
+   "mm_vision_tower": "openai/clip-vit-large-patch14-336",
+   "model_type": "QH_360VL",
+   "num_attention_heads": 64,
+   "num_hidden_layers": 80,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 8,
+   "proj_2": true,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.37.2",
+   "tune_mm_mlp_adapter": false,
+   "use_cache": true,
+   "use_mm_proj": true,
+   "vocab_size": 128256
+ }
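The auto_map above routes AutoConfig and AutoModelForCausalLM to the custom classes in modeling_360vl.py, so the checkpoint must be loaded with trust_remote_code=True. A minimal loading sketch (the checkpoint path, dtype, and device_map below are illustrative assumptions, not part of this commit):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

checkpoint = "qh360_vl-70B"  # hypothetical local path or hub id for this repo
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(
    checkpoint,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
    trust_remote_code=True,      # lets auto_map resolve modeling_360vl.QH360_VL_LlamaForCausalLM
    device_map="auto",
)
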
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 128000,
+   "eos_token_id": 128001,
+   "transformers_version": "4.37.2"
+ }
model-00003-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5613841c134ad634ff0c5ded7501e0c2a31c833f4be6ff511118ca592d79e86f
+ size 4999711704
model-00004-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d305ecd99eee88e384a7f06e8fdbc5b1b2d50ca5841558876619ebebdb760964
+ size 4966157032
model-00005-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16bd577cd11dbb62bd90cb4939a2d995a0cf0ffc3158be0056d176e459235e96
+ size 4664134408
model-00006-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00716b4edd9f822507248e0cabb0d5b05081df9ea5d74f3162c02b38aad1d0c1
+ size 4664167408
model-00007-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b65f0b0ae70d23b78953121b53a0cdf6b0d23de5eea1c7b62a67c226f333aa6
+ size 4664167408
model-00008-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e376efb76740c1302d07fb3425cb8bf0275b5adcf40eed2695fa510335abf0c
+ size 4999711728
model-00009-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f900337af6a2af02578e49c9c63f77dcdb50bea11ab9979a041c1ace9d4f9764
+ size 4966157056
model-00010-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dca6e7b8b4cb12de7a37cf1783126ce0cd9bddf90f133da4355e040ecb61a811
+ size 4664134408
model-00011-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dbc2fd5dca05f3e1eebf60dc930317acaa4108d81ef6b565571dd0578e80c9a2
+ size 4664167408
model-00012-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:326ff96c985b9bf6f71545761a56fb9bec31967926c56a6f2dd1495d4c38e298
+ size 4664167408
model-00013-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08229222ad1c088edd2a5608c6fad7cc3acc1bd7cc02085811c464b6c9fb7990
+ size 4999711728
model-00014-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26137c2510c961ea49e68969c96a2262669a311da5b57bbd725c34cf1e8125d9
+ size 4966157056
model-00015-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd398c596343fe40fcef2fd8e79a4f4623fa163f79caaa08bf690977475aed4d
+ size 4664134408
model-00016-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d0b8b12f45da4f8feb2d55d85506d17a78ee23f92aa04a5459d634f7dcfee79
+ size 4664167408
model-00017-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd618b7d05ba47e2cb75f4f611fa6c0c2dfef97f7a402fdc93c7184fecc6837b
+ size 4664167408
model-00018-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d37d4b4e017abe933fe587318f893871379925957c64648ad8fb627428b55f8
+ size 4999711728
model-00019-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5982151e0a9dac021972aa74569c950d1c4fba88f082ca734be0d9ea254c930d
+ size 4966157056
model-00020-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a73d67c622d960eac0955bb9712fb84ff47e5b082edc99620bffc504211c9c84
+ size 4664134408
model-00021-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c9bb9f47a36025cf33ca94ff6fcae0767a03612e70acd226368e2fecd0e7a68
+ size 4664167408
model-00022-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5683d81cbf0d05b7622be60518d0ec3c87f532de6a7772f32cf9724d6f9c9064
+ size 4664167408
model-00023-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:000f9da748c43225d776fce439942b3b201787dc114e34802f2d8e3defd35bb0
+ size 4999711728
model-00024-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3db8b31f09aa2bdb5de2f035b5dc48c1d714370da552292cd0a4d5ed15eebc42
+ size 4966157056
model-00025-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31cad2a103e1ec0bff98992e46b428b2a4b004916dbdbcb421fba61ffa98ecbb
+ size 4664134408
model-00026-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68c4cba8d7e4616369ade222e8aa646d538bf9086248731db8d78593f673a525
+ size 4664167408
model-00027-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:660ae334f2422175848192cb909a87a7cbd4dc347bc74466eb713d508fd2ed0a
+ size 4664167408
model-00028-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52c9c5883c9651eb915e58a0189a44a77ca7e1f93860696e0e5978b0d91e683f
+ size 4999711728
model-00029-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16a5a143a1e8ecf40af7b92288bedbb710ebce84f7c26244ddcdecb62a3a1e62
+ size 4997960336
model-00030-of-00030.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a56c2d3ba6a36e65548842f8d0d3d45b0731f801e929bed3706cfe3f0b754a2d
+ size 3044389000
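The shard entries above are Git LFS pointer files: each records only the spec version, the sha256 of the real payload, and its byte size. After pulling the actual shards (for example with git lfs pull), a downloaded file can be checked against the recorded digest. A small sketch, with the path and expected hash taken from the last pointer above:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # stream the file so multi-GB shards do not need to fit in memory
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk_size), b""):
            h.update(block)
    return h.hexdigest()

expected = "a56c2d3ba6a36e65548842f8d0d3d45b0731f801e929bed3706cfe3f0b754a2d"
assert sha256_of("model-00030-of-00030.safetensors") == expected
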
model.safetensors.index.json ADDED
The diff for this file is too large to render.
 
modeling_360vl.py ADDED
@@ -0,0 +1,809 @@
+ from typing import List, Optional, Tuple, Union
+ 
+ import torch
+ import torch.nn as nn
+ 
+ from torch.nn import CrossEntropyLoss
+ 
+ from transformers import AutoConfig, AutoModelForCausalLM, \
+     LlamaConfig, LlamaModel, LlamaForCausalLM
+ 
+ from transformers.modeling_outputs import CausalLMOutputWithPast
+ 
+ from PIL import Image
+ 
+ from abc import ABC, abstractmethod
+ import os
+ import re
+ import math
+ import logging
+ 
+ from transformers import CLIPVisionModel, CLIPImageProcessor, CLIPVisionConfig
+ from functools import partial
+ from transformers.configuration_utils import PretrainedConfig
+ 
+ from timm.models.layers import LayerNorm, LayerNorm2d
+ from timm.models.regnet import RegStage
+ from torch.nn import functional as F
+ from einops import rearrange
+ 
+ logger = logging.getLogger(__name__)
+ 
+ CONTROLLER_HEART_BEAT_EXPIRATION = 30
+ WORKER_HEART_BEAT_INTERVAL = 15
+ 
+ LOGDIR = "."
+ 
+ # Model Constants
+ IGNORE_INDEX = -100
+ IMAGE_TOKEN_INDEX = -200
+ DEFAULT_IMAGE_TOKEN = "<image>"
+ DEFAULT_IMAGE_PATCH_TOKEN = "<im_patch>"
+ DEFAULT_IM_START_TOKEN = "<im_start>"
+ DEFAULT_IM_END_TOKEN = "<im_end>"
+ 
+ 
+ class CLIPVisionTower(nn.Module):
+     def __init__(self, vision_tower, args, delay_load=False):
+         super().__init__()
+ 
+         self.is_loaded = False
+ 
+         self.vision_tower_name = vision_tower
+         self.select_layer = args.mm_vision_select_layer
+         self.select_feature = getattr(args, 'mm_vision_select_feature', 'patch')
+ 
+         if not delay_load:
+             self.load_model()
+         else:
+             self.cfg_only = CLIPVisionConfig.from_pretrained(self.vision_tower_name)
+ 
+     def load_model(self):
+         self.image_processor = CLIPImageProcessor.from_pretrained(self.vision_tower_name)
+         self.vision_tower = CLIPVisionModel.from_pretrained(self.vision_tower_name)
+         self.vision_tower.requires_grad_(False)
+ 
+         self.is_loaded = True
+ 
+     def feature_select(self, image_forward_outs):
+         image_features = image_forward_outs.hidden_states[self.select_layer]
+         if self.select_feature == 'patch':
+             image_features = image_features[:, 1:]
+         elif self.select_feature == 'cls_patch':
+             image_features = image_features
+         else:
+             raise ValueError(f'Unexpected select feature: {self.select_feature}')
+         return image_features
+ 
+     @torch.no_grad()
+     def forward(self, images):
+         if type(images) is list:
+             image_features = []
+             for image in images:
+                 image_forward_out = self.vision_tower(image.to(device=self.device, dtype=self.dtype).unsqueeze(0), output_hidden_states=True)
+                 image_feature = self.feature_select(image_forward_out).to(image.dtype)
+                 image_features.append(image_feature)
+         else:
+             image_forward_outs = self.vision_tower(images.to(device=self.device, dtype=self.dtype), output_hidden_states=True)
+             image_features = self.feature_select(image_forward_outs).to(images.dtype)
+ 
+         return image_features
+ 
+     @property
+     def dummy_feature(self):
+         return torch.zeros(1, self.hidden_size, device=self.device, dtype=self.dtype)
+ 
+     @property
+     def dtype(self):
+         return self.vision_tower.dtype
+ 
+     @property
+     def device(self):
+         return self.vision_tower.device
+ 
+     @property
+     def config(self):
+         if self.is_loaded:
+             return self.vision_tower.config
+         else:
+             return self.cfg_only
+ 
+     @property
+     def hidden_size(self):
+         return self.config.hidden_size
+ 
+     @property
+     def num_patches(self):
+         return (self.config.image_size // self.config.patch_size) ** 2
+ 
+ 
+ def build_vision_tower(vision_tower_cfg, **kwargs):
+     vision_tower = getattr(vision_tower_cfg, 'mm_vision_tower', getattr(vision_tower_cfg, 'vision_tower', None))
+     is_absolute_path_exists = os.path.exists(vision_tower)
+ 
+     if is_absolute_path_exists or vision_tower.startswith("openai") or vision_tower.startswith("laion"):
+         return CLIPVisionTower(vision_tower, args=vision_tower_cfg, **kwargs)
+ 
+     raise ValueError(f'Unknown vision tower: {vision_tower}')
+ 
+ 
+ class HoneybeeVisualProjectorConfig(PretrainedConfig):
+     model_type = "mllm_visual_projector"
+ 
+     def __init__(
+         self,
+         projector_type: str = "resampler",
+         hidden_size: int = 1024,
+         num_hidden_layers: int = 6,
+         num_attention_heads: int = 16,
+         intermediate_size: int = 4096,
+         attention_probs_dropout_prob: float = 0.1,
+         initializer_range: float = 0.02,
+         layer_norm_eps: float = 1e-6,
+         encoder_hidden_size: int = 1024,  # This will be overwritten by vision_model's hidden_size
+         pos_emb=False,
+         feature_layer_index=-1,  # vision feature layer index; -1: last layer
+         num_eos_tokens=1,
+         use_cls=True,
+         prenorm=False,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.projector_type = projector_type
+         self.hidden_size = hidden_size
+         self.num_hidden_layers = num_hidden_layers
+         self.num_attention_heads = num_attention_heads
+         self.intermediate_size = intermediate_size
+         self.attention_probs_dropout_prob = attention_probs_dropout_prob
+         self.initializer_range = initializer_range
+         self.layer_norm_eps = layer_norm_eps
+         self.encoder_hidden_size = encoder_hidden_size
+ 
+         self.pos_emb = pos_emb
+         self.feature_layer_index = feature_layer_index
+         self.num_eos_tokens = num_eos_tokens
+         self.use_cls = use_cls
+         self.prenorm = prenorm
+ 
+     @classmethod
+     def from_pretrained(
+         cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs
+     ) -> "PretrainedConfig":
+         config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
+ 
+         # get the visual_projector config dict if we are loading from HoneybeeConfig
+         if config_dict.get("model_type") == "mllm":
+             config_dict = config_dict["visual_projector_config"]
+ 
+         if (
+             "model_type" in config_dict
+             and hasattr(cls, "model_type")
+             and config_dict["model_type"] != cls.model_type
+         ):
+             logger.warning(
+                 f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
+                 f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
+             )
+ 
+         return cls.from_dict(config_dict, **kwargs)
+ 
+ 
+ def build_pos_embeds(
+     config: HoneybeeVisualProjectorConfig, num_input_tokens: int, vision_hidden_size: int
+ ):
+     # pos emb (true for this repo's proj_config)
+     if config.pos_emb:
+         pos_emb = torch.nn.Parameter(torch.zeros(1, num_input_tokens, vision_hidden_size))
+         nn.init.trunc_normal_(pos_emb, mean=0.0, std=0.02)
+     else:
+         pos_emb = None
+ 
+     return pos_emb
+ 
+ 
+ def build_eos_tokens(config: HoneybeeVisualProjectorConfig, output_hidden_size: int):
+     # think tokens (0 for this repo's proj_config)
+     num_eos_tokens = config.num_eos_tokens
+     if num_eos_tokens:
+         eos_tokens = torch.nn.Parameter(torch.randn(1, num_eos_tokens, output_hidden_size))
+         nn.init.trunc_normal_(eos_tokens, mean=0.0, std=config.initializer_range)
+     else:
+         eos_tokens = None
+ 
+     return eos_tokens
+ 
+ 
+ def build_prenorm(config: HoneybeeVisualProjectorConfig):
+     # false for this repo's proj_config
+     if config.prenorm:
+         prenorm = LayerNorm(config.encoder_hidden_size)
+     else:
+         prenorm = None
+     return prenorm
+ 
+ 
+ def build_mlp(depth, hidden_size, output_hidden_size):
+     layers = [nn.Linear(hidden_size, output_hidden_size)]
+     for _ in range(1, depth):
+         layers.append(nn.SiLU())
+         layers.append(nn.Linear(output_hidden_size, output_hidden_size))
+     return nn.Sequential(*layers)
+ 
+ 
+ def get_abs_pos(abs_pos, tgt_size):
+     # abs_pos: L, C
+     # tgt_size: M
+     # return: M, C
+     src_size = int(math.sqrt(abs_pos.size(1)))  # e.g. 16, 24
+     tgt_size = int(math.sqrt(tgt_size))  # e.g. 32, 48
+     dtype = abs_pos.dtype
+ 
+     if src_size != tgt_size:
+         return F.interpolate(
+             abs_pos.float().reshape(1, src_size, src_size, -1).permute(0, 3, 1, 2),
+             size=(tgt_size, tgt_size),
+             mode="bicubic",
+             align_corners=False,
+         ).permute(0, 2, 3, 1).flatten(0, 2).to(dtype=dtype)
+     else:
+         return abs_pos
+ 
+ 
+ class Projector(nn.Module):
+     """Base projector class"""
+ 
+     def __init__(
+         self,
+         config: HoneybeeVisualProjectorConfig,
+         num_input_tokens: int,
+         output_hidden_size: int,
+     ):
+         super().__init__()
+         self.config = config
+         self.num_input_tokens = num_input_tokens
+         self.output_hidden_size = output_hidden_size
+ 
+         # think tokens
+         self.eos_tokens = build_eos_tokens(config, output_hidden_size)
+ 
+         # pos emb
+         self.pos_emb = build_pos_embeds(config, num_input_tokens, config.encoder_hidden_size)
+ 
+         self.prenorm = build_prenorm(config)
+ 
+         self.build_net()
+ 
+     def build_net(self):
+         raise NotImplementedError()
+ 
+     def _forward(self, x):
+         raise NotImplementedError()
+ 
+     def forward(self, x: torch.Tensor) -> torch.Tensor:
+         """
+         Args:
+             x: (B, L, encoder_hidden_size) tensor from the visual backbone (CLIP visual encoder), including cls token.
+         """
+         if self.prenorm is not None:
+             x = self.prenorm(x)
+ 
+         if self.pos_emb is not None:
+             # self.pos_emb = self.pos_emb[:, 1:]
+             pos_emb = get_abs_pos(self.pos_emb[:, 1:], x.size(1))
+             pos_emb = pos_emb.to(device=x.device)
+             x += pos_emb
+ 
+         x = self._forward(x)  # (B, L, output_hidden_size)
+ 
+         B = x.size(0)
+         if self.eos_tokens is not None:
+             x = torch.cat([x, self.eos_tokens.expand(B, -1, -1)], dim=1)
+         return x
+ 
+ 
+ class ConvProjector(Projector):
+     def _forward(self, x):
+         # x: [B, L, dim]
+         # x = x[:, 1:]  # drop cls token and 2d forward
+ 
+         hw = int(x.size(1) ** 0.5)
+         x = rearrange(x, "b (h w) d -> b d h w", h=hw, w=hw)
+         x = self.net(x)
+         x = rearrange(x, "b d h w -> b (h w) d")
+         x = self.readout(x)
+ 
+         return x
+ 
+ 
+ class CAbstractor(ConvProjector):
+     """C-Abstractor"""
+     def build_net(self):
+         encoder_hidden_size = self.config.encoder_hidden_size
+         hidden_size = self.config.hidden_size
+         output_hidden_size = self.output_hidden_size
+         depth = self.config.depth
+         mlp_depth = self.config.mlp_depth
+ 
+         n_queries = self.config.num_queries
+         assert (n_queries ** 0.5).is_integer(), "n_queries must be square number"
+         hw = int(n_queries ** 0.5)
+ 
+         # RegBlock = ResBlock + SE
+         RegBlock = partial(
+             RegStage,
+             stride=1,
+             dilation=1,
+             act_layer=nn.SiLU,
+             norm_layer=LayerNorm2d,
+         )
+ 
+         s1 = RegBlock(
+             depth,
+             encoder_hidden_size,
+             hidden_size,
+         )
+         sampler = nn.AdaptiveAvgPool2d((hw, hw))
+         s2 = RegBlock(
+             depth,
+             hidden_size,
+             hidden_size,
+         )
+ 
+         self.net = nn.Sequential(s1, sampler, s2)
+         self.readout = build_mlp(mlp_depth, hidden_size, output_hidden_size)
+ 
+ 
+ class IdentityMap(nn.Module):
+     def __init__(self):
+         super().__init__()
+ 
+     def forward(self, x, *args, **kwargs):
+         return x
+ 
+     @property
+     def config(self):
+         return {"mm_projector_type": 'identity'}
+ 
+ 
+ class SimpleResBlock(nn.Module):
+     def __init__(self, channels):
+         super().__init__()
+         self.pre_norm = nn.LayerNorm(channels)
+ 
+         self.proj = nn.Sequential(
+             nn.Linear(channels, channels),
+             nn.GELU(),
+             nn.Linear(channels, channels)
+         )
+ 
+     def forward(self, x):
+         x = self.pre_norm(x)
+         return x + self.proj(x)
+ 
+ 
+ def build_honeybee_projector(config, projector_type, num_tokens, lm_hidden_size):
+     """Build projector (abstractor) and query_tokens (optionally for resampler)"""
+     proj_config = config
+     proj_type = projector_type
+     output_hidden_size = lm_hidden_size  # LM hidden size
+ 
+     abstractor = {
+         "c-abs": CAbstractor,
+     }[proj_type](proj_config, num_tokens, output_hidden_size)
+     return abstractor
+ 
+ 
+ def build_vision_projector(config, delay_load=False, **kwargs):
+     projector_type = getattr(config, 'mm_projector_type', 'linear')
+ 
+     if projector_type == 'linear':
+         return nn.Linear(config.mm_hidden_size, config.hidden_size)
+ 
+     if projector_type == 'c-abs':
+         local_config_path = config.mm_projector_config
+         honeybee_config = HoneybeeVisualProjectorConfig.from_pretrained(local_config_path)
+ 
+         num_tokens = config.mm_num_tokens
+         lm_hidden_size = config.hidden_size
+ 
+         abstractor = build_honeybee_projector(honeybee_config, projector_type, num_tokens, lm_hidden_size)
+         return abstractor
+ 
+     mlp_gelu_match = re.match(r'^mlp(\d+)x_gelu$', projector_type)
+     if mlp_gelu_match:
+         mlp_depth = int(mlp_gelu_match.group(1))
+         modules = [nn.Linear(config.mm_hidden_size, config.hidden_size)]
+         for _ in range(1, mlp_depth):
+             modules.append(nn.GELU())
+             modules.append(nn.Linear(config.hidden_size, config.hidden_size))
+         return nn.Sequential(*modules)
+ 
+     if projector_type == 'identity':
+         return IdentityMap()
+ 
+     raise ValueError(f'Unknown projector type: {projector_type}')
+ 
+ 
+ class QH360_VL_MetaModel:
+ 
+     def __init__(self, config):
+         super(QH360_VL_MetaModel, self).__init__(config)
+         if hasattr(config, "mm_vision_tower"):
+             self.vision_tower = build_vision_tower(config, delay_load=True)
+             self.mm_projector_ctt = build_vision_projector(config)
+             self.mm_projector_ori = build_vision_projector(config)
+ 
+     def get_vision_tower(self):
+         vision_tower = getattr(self, 'vision_tower', None)
+         if type(vision_tower) is list:
+             vision_tower = vision_tower[0]
+         return vision_tower
+ 
+ 
+ class QH360_VL_MetaForCausalLM(ABC):
+ 
+     @abstractmethod
+     def get_model(self):
+         pass
+ 
+     def get_vision_tower(self):
+         return self.get_model().get_vision_tower()
+ 
+     def encode_images(self, images):
+         image_features = self.get_model().get_vision_tower()(images)
+         image_features = self.get_model().mm_projector(image_features)
+         return image_features
+ 
+     def encode_images_noprojector(self, images):
+         image_features = self.get_model().get_vision_tower()(images)
+         image_features = image_features.detach()
+         return image_features
+ 
+     def prepare_inputs_labels_for_multimodal(
+         self, input_ids, attention_mask, past_key_values, labels, images
+     ):
+         vision_tower = self.get_vision_tower()
+         if vision_tower is None or images is None or input_ids.shape[1] == 1:
+             if past_key_values is not None and vision_tower is not None and images is not None and input_ids.shape[1] == 1:
+                 attention_mask = torch.ones((attention_mask.shape[0], past_key_values[-1][-1].shape[-2] + 1), dtype=attention_mask.dtype, device=attention_mask.device)
+             return input_ids, attention_mask, past_key_values, None, labels
+ 
+         if type(images) is list or images.ndim == 5:
+             image_features = []
+             for image in images:
+                 if image.ndim == 3:
+                     image_features.append(self.encode_images(image.unsqueeze(0)).squeeze(0))
+                 elif image.ndim == 4:
+                     # NOTE cc-plan
+                     temp_feats = self.encode_images_noprojector(image)
+                     src_size = int(math.sqrt(temp_feats.shape[1]))
+                     temp_feats = temp_feats.reshape(temp_feats.shape[0] // 5, 5, -1, temp_feats.shape[-1])
+                     x1 = temp_feats[:, 4, :, :]
+                     x = temp_feats[:, :4, :, :]
+                     x = x.reshape(x.shape[0], -1, src_size, src_size, x.shape[-1])
+                     x = x.transpose(1, 2).reshape(x.shape[0], src_size, 2, 2, src_size, x.shape[-1])
+                     x = x.transpose(1, 2).reshape(x.shape[0], -1, x.shape[-1])
+                     x1 = self.get_model().mm_projector_ori(x1).squeeze(0)
+                     x = self.get_model().mm_projector_ctt(x).squeeze(0)
+                     temp_feats_all = torch.cat([x, x1], dim=0)
+                     image_features.append(temp_feats_all)
+         else:
+             image_features = self.encode_images(images)
+ 
+         new_input_embeds = []
+         new_labels = [] if labels is not None else None
+         cur_image_idx = 0
+         for batch_idx, cur_input_ids in enumerate(input_ids):
+             if (cur_input_ids == IMAGE_TOKEN_INDEX).sum() == 0:
+                 # multimodal LLM, but the current sample is not multimodal
+                 # FIXME: this is a hacky fix, for deepspeed zero3 to work
+                 half_len = cur_input_ids.shape[0] // 2
+                 cur_image_features = image_features[cur_image_idx]
+                 cur_input_embeds_1 = self.get_model().embed_tokens(cur_input_ids[:half_len])
+                 cur_input_embeds_2 = self.get_model().embed_tokens(cur_input_ids[half_len:])
+                 cur_input_embeds = torch.cat([cur_input_embeds_1, cur_image_features[0:0], cur_input_embeds_2], dim=0)
+                 new_input_embeds.append(cur_input_embeds)
+                 if labels is not None:
+                     new_labels.append(labels[batch_idx])
+                 cur_image_idx += 1
+                 continue
+             image_token_indices = torch.where(cur_input_ids == IMAGE_TOKEN_INDEX)[0]
+             cur_new_input_embeds = []
+             if labels is not None:
+                 cur_labels = labels[batch_idx]
+                 cur_new_labels = []
+                 assert cur_labels.shape == cur_input_ids.shape
+             while image_token_indices.numel() > 0:
+                 cur_image_features = image_features[cur_image_idx]
+                 image_token_start = image_token_indices[0]
+                 if getattr(self.config, 'tune_mm_mlp_adapter', False) and getattr(self.config, 'mm_use_im_start_end', False):
+                     cur_new_input_embeds.append(self.get_model().embed_tokens(cur_input_ids[:image_token_start - 1]).detach())
+                     cur_new_input_embeds.append(self.get_model().embed_tokens(cur_input_ids[image_token_start - 1:image_token_start]))
+                     cur_new_input_embeds.append(cur_image_features)
+                     cur_new_input_embeds.append(self.get_model().embed_tokens(cur_input_ids[image_token_start + 1:image_token_start + 2]))
+                     if labels is not None:
+                         cur_new_labels.append(cur_labels[:image_token_start])
+                         cur_new_labels.append(torch.full((cur_image_features.shape[0],), IGNORE_INDEX, device=labels.device, dtype=labels.dtype))
+                         cur_new_labels.append(cur_labels[image_token_start:image_token_start + 1])
+                         cur_labels = cur_labels[image_token_start + 2:]
+                 else:
+                     cur_new_input_embeds.append(self.get_model().embed_tokens(cur_input_ids[:image_token_start]))
+                     cur_new_input_embeds.append(cur_image_features)
+                     if labels is not None:
+                         cur_new_labels.append(cur_labels[:image_token_start])
+                         cur_new_labels.append(torch.full((cur_image_features.shape[0],), IGNORE_INDEX, device=labels.device, dtype=labels.dtype))
+                         cur_labels = cur_labels[image_token_start + 1:]
+                 cur_image_idx += 1
+                 if getattr(self.config, 'tune_mm_mlp_adapter', False) and getattr(self.config, 'mm_use_im_start_end', False):
+                     cur_input_ids = cur_input_ids[image_token_start + 2:]
+                 else:
+                     cur_input_ids = cur_input_ids[image_token_start + 1:]
+                 image_token_indices = torch.where(cur_input_ids == IMAGE_TOKEN_INDEX)[0]
+             if cur_input_ids.numel() > 0:
+                 if getattr(self.config, 'tune_mm_mlp_adapter', False) and getattr(self.config, 'mm_use_im_start_end', False):
+                     cur_new_input_embeds.append(self.get_model().embed_tokens(cur_input_ids).detach())
+                 else:
+                     cur_new_input_embeds.append(self.get_model().embed_tokens(cur_input_ids))
+                 if labels is not None:
+                     cur_new_labels.append(cur_labels)
+             cur_new_input_embeds = [x.to(device=self.device) for x in cur_new_input_embeds]
+             cur_new_input_embeds = torch.cat(cur_new_input_embeds, dim=0)
+             new_input_embeds.append(cur_new_input_embeds)
+             if labels is not None:
+                 cur_new_labels = torch.cat(cur_new_labels, dim=0)
+                 new_labels.append(cur_new_labels)
+ 
+         if any(x.shape != new_input_embeds[0].shape for x in new_input_embeds):
+             max_len = max(x.shape[0] for x in new_input_embeds)
+ 
+             new_input_embeds_align = []
+             for cur_new_embed in new_input_embeds:
+                 cur_new_embed = torch.cat((cur_new_embed, torch.zeros((max_len - cur_new_embed.shape[0], cur_new_embed.shape[1]), dtype=cur_new_embed.dtype, device=cur_new_embed.device)), dim=0)
+                 new_input_embeds_align.append(cur_new_embed)
+             new_input_embeds = torch.stack(new_input_embeds_align, dim=0)
+ 
+             if labels is not None:
+                 new_labels_align = []
+                 _new_labels = new_labels
+                 for cur_new_label in new_labels:
+                     cur_new_label = torch.cat((cur_new_label, torch.full((max_len - cur_new_label.shape[0],), IGNORE_INDEX, dtype=cur_new_label.dtype, device=cur_new_label.device)), dim=0)
+                     new_labels_align.append(cur_new_label)
+                 new_labels = torch.stack(new_labels_align, dim=0)
+ 
+             if attention_mask is not None:
+                 new_attention_mask = []
+                 for cur_attention_mask, cur_new_labels, cur_new_labels_align in zip(attention_mask, _new_labels, new_labels):
+                     new_attn_mask_pad_left = torch.full((cur_new_labels.shape[0] - labels.shape[1],), True, dtype=attention_mask.dtype, device=attention_mask.device)
+                     new_attn_mask_pad_right = torch.full((cur_new_labels_align.shape[0] - cur_new_labels.shape[0],), False, dtype=attention_mask.dtype, device=attention_mask.device)
+                     cur_new_attention_mask = torch.cat((new_attn_mask_pad_left, cur_attention_mask, new_attn_mask_pad_right), dim=0)
+                     new_attention_mask.append(cur_new_attention_mask)
+                 attention_mask = torch.stack(new_attention_mask, dim=0)
+                 assert attention_mask.shape == new_labels.shape
+         else:
+             new_input_embeds = torch.stack(new_input_embeds, dim=0)
+             if labels is not None:
+                 new_labels = torch.stack(new_labels, dim=0)
+ 
+             if attention_mask is not None:
+                 new_attn_mask_pad_left = torch.full((attention_mask.shape[0], new_input_embeds.shape[1] - input_ids.shape[1]), True, dtype=attention_mask.dtype, device=attention_mask.device)
+                 attention_mask = torch.cat((new_attn_mask_pad_left, attention_mask), dim=1)
+                 assert attention_mask.shape == new_input_embeds.shape[:2]
+ 
+         return None, attention_mask, past_key_values, new_input_embeds, new_labels
+ 
+ 
+ class QH360_VLConfig(LlamaConfig):
+     model_type = "QH_360VL"
+ 
+ 
+ class QH360_VL_LlamaModel(QH360_VL_MetaModel, LlamaModel):
+     config_class = QH360_VLConfig
+ 
+     def __init__(self, config: LlamaConfig):
+         super(QH360_VL_LlamaModel, self).__init__(config)
+ 
+ 
+ class QH360_VL_LlamaForCausalLM(LlamaForCausalLM, QH360_VL_MetaForCausalLM):
+     config_class = QH360_VLConfig
+ 
+     def __init__(self, config):
+         super(LlamaForCausalLM, self).__init__(config)
+         # request FlashAttention-2 for the underlying Llama layers
+         config._attn_implementation = "flash_attention_2"
+         self.model = QH360_VL_LlamaModel(config)
+ 
+         self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
+ 
+         # Initialize weights and apply final processing
+         self.post_init()
+ 
+     def get_model(self):
+         return self.model
+ 
+     def forward(
+         self,
+         input_ids: torch.LongTensor = None,
+         attention_mask: Optional[torch.Tensor] = None,
+         past_key_values: Optional[List[torch.FloatTensor]] = None,
+         inputs_embeds: Optional[torch.FloatTensor] = None,
+         labels: Optional[torch.LongTensor] = None,
+         use_cache: Optional[bool] = None,
+         output_attentions: Optional[bool] = None,
+         output_hidden_states: Optional[bool] = None,
+         images: Optional[torch.FloatTensor] = None,
+         return_dict: Optional[bool] = None,
+     ) -> Union[Tuple, CausalLMOutputWithPast]:
+         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
+         output_hidden_states = (
+             output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
+         )
+         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+ 
+         input_ids, attention_mask, past_key_values, inputs_embeds, labels = self.prepare_inputs_labels_for_multimodal(input_ids, attention_mask, past_key_values, labels, images)
+ 
+         # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn)
+         outputs = self.model(
+             input_ids=input_ids,
+             attention_mask=attention_mask,
+             past_key_values=past_key_values,
+             inputs_embeds=inputs_embeds,
+             use_cache=use_cache,
+             output_attentions=output_attentions,
+             output_hidden_states=output_hidden_states,
+             return_dict=return_dict
+         )
+ 
+         hidden_states = outputs[0]
+         logits = self.lm_head(hidden_states)
+ 
+         loss = None
+         if labels is not None:
+             # Shift so that tokens < n predict n
+             shift_logits = logits[..., :-1, :].contiguous()
+             shift_labels = labels[..., 1:].contiguous()
+             # Flatten the tokens
+             loss_fct = CrossEntropyLoss()
+             shift_logits = shift_logits.view(-1, self.config.vocab_size)
+             shift_labels = shift_labels.view(-1)
+             # Enable model/pipeline parallelism
+             shift_labels = shift_labels.to(shift_logits.device)
+             loss = loss_fct(shift_logits, shift_labels)
+ 
+         if not return_dict:
+             output = (logits,) + outputs[1:]
+             return (loss,) + output if loss is not None else output
+ 
+         return CausalLMOutputWithPast(
+             loss=loss,
+             logits=logits,
+             past_key_values=outputs.past_key_values,
+             hidden_states=outputs.hidden_states,
+             attentions=outputs.attentions,
+         )
+ 
+     def prepare_inputs_for_generation(
+         self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, **kwargs
+     ):
+         if past_key_values:
+             input_ids = input_ids[:, -1:]
+ 
+         # if `inputs_embeds` are passed, we only want to use them in the 1st generation step
+         if inputs_embeds is not None and past_key_values is None:
+             model_inputs = {"inputs_embeds": inputs_embeds}
+         else:
+             model_inputs = {"input_ids": input_ids}
+ 
+         model_inputs.update(
+             {
+                 "past_key_values": past_key_values,
+                 "use_cache": kwargs.get("use_cache"),
+                 "attention_mask": attention_mask,
+                 "images": kwargs.get("images", None),
+             }
+         )
+         return model_inputs
+ 
+     def build_conversation_input_ids(
+         self,
+         tokenizer: "PreTrainedTokenizer",
+         query: str,
+         image=None,
+         image_processor=None,
+     ):
+         input_msg = [
+             {
+                 "role": "system",
+                 "content": "You are a multilingual, helpful, respectful and honest assistant who can respond in the same language, depending on the language of the question. Try to be as helpful as possible while still being safe. Your answer should not contain anything that is false, unhealthy, harmful, immoral, racist, sexist, toxic, dangerous, or illegal, and if the question relates to such content, please decline to answer. Make sure your answer is socially fair and positive. If a question doesn't make any sense, or is inconsistent with the facts, explain why instead of answering the wrong answer. If you don't know the answer to a question, don't share false information."
+             },
+             {
+                 "role": "user",
+                 "content": "<|reserved_special_token_44|>" + '\n' + query
+             }
+         ]
+ 
+         input_ids = tokenizer.apply_chat_template(
+             input_msg,
+             add_generation_prompt=True,
+             padding="longest",
+             return_tensors="pt",
+         )
+         input_id_list = input_ids[0].tolist()
+         # swap the image placeholder <|reserved_special_token_44|> (id 128049) for IMAGE_TOKEN_INDEX (-200)
+         input_id_list[input_id_list.index(128049)] = -200
+         input_ids = torch.tensor(input_id_list, dtype=input_ids.dtype, device=input_ids.device)
+         input_ids = input_ids.unsqueeze(0)
+         image_tensor = self.process_images_slid_window(image, image_processor).unsqueeze(0)
+ 
+         return {
+             'input_ids': input_ids,
+             'image': image_tensor,
+         }
+ 
+     def process_images_slid_window(self, image, image_processor, vit_is=336):
+         # vit_is: ViT input size (side length of a square crop)
+ 
+         def get_proper_imgsize(pil_img, vit_is):
+             max_w_h = vit_is * 2
+             new_pil_img = pil_img.resize((max_w_h, max_w_h))
+             return new_pil_img
+ 
+         def tensor_crop(tensor_array, left, upper, right, lower):
+             # tensor_array: C * H * W
+             return tensor_array[:, upper:lower, left:right]
+ 
+         def image_slid_window(image, num_slid_window):
+             # image: tensor, 3 * 336 * 336 or 3 * 672 * 672
+             # image: tensor, 3 * 224 * 224 or 3 * 448 * 448
+             if num_slid_window == 5:
+                 image_x2, image_x1 = image[0], image[1]
+                 vit_is = image_x1.shape[1]
+                 h, w = image_x2.shape[1], image_x2.shape[2]
+                 image0 = tensor_crop(image_x2, 0, 0, vit_is, vit_is)
+                 image1 = tensor_crop(image_x2, w - vit_is, 0, w, vit_is)
+                 image2 = tensor_crop(image_x2, 0, h - vit_is, vit_is, h)
+                 image3 = tensor_crop(image_x2, w - vit_is, h - vit_is, w, h)
+                 return torch.stack([image0, image1, image2, image3, image_x1])
+             else:
+                 return image
+ 
+         def expand2square(pil_img, background_color):
+             width, height = pil_img.size
+             if width == height:
+                 return pil_img
+             elif width > height:
+                 result = Image.new(pil_img.mode, (width, width), background_color)
+                 result.paste(pil_img, (0, (width - height) // 2))
+                 return result
+             else:
+                 result = Image.new(pil_img.mode, (height, height), background_color)
+                 result.paste(pil_img, ((height - width) // 2, 0))
+                 return result
+ 
+         num_slid_window = 5
+ 
+         image = expand2square(image, tuple(int(x * 255) for x in image_processor.image_mean))
+         image = get_proper_imgsize(image, vit_is)
+         image_x2 = image_processor.preprocess(image, return_tensors='pt', do_resize=False, do_center_crop=False)['pixel_values'][0]
+         image_x1 = image_processor.preprocess(image, return_tensors='pt')['pixel_values'][0]
+         image = [image_x2, image_x1]
+         image = image_slid_window(image, num_slid_window)
+ 
+         return image
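End-to-end use of the class above goes through build_conversation_input_ids, which applies the chat template, swaps in the image placeholder token, and runs the five-crop sliding window. A minimal inference sketch, assuming model and tokenizer were loaded as in the config.json example earlier (the image path and generation settings are illustrative):

from PIL import Image
from transformers import CLIPImageProcessor

image_processor = CLIPImageProcessor.from_pretrained("openai/clip-vit-large-patch14-336")
image = Image.open("example.jpg").convert("RGB")  # hypothetical input image

inputs = model.build_conversation_input_ids(
    tokenizer,
    query="Describe this image.",
    image=image,
    image_processor=image_processor,
)
output_ids = model.generate(
    input_ids=inputs["input_ids"].to(model.device),
    images=inputs["image"].to(dtype=model.dtype, device=model.device),
    max_new_tokens=256,
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
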
proj_config.json ADDED
@@ -0,0 +1,337 @@
+ {
+   "_commit_hash": null,
+   "_name_or_path": "",
+   "architectures": [
+     "HoneybeeForConditionalGeneration"
+   ],
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_factor": 1.0,
+   "initializer_range": 0.02,
+   "intermediate_size": 13824,
+   "keys_to_ignore_at_inference": [
+     "past_key_values"
+   ],
+   "lm_config": {
+     "_name_or_path": "",
+     "add_cross_attention": false,
+     "architectures": null,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "delta_model_name_or_path": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "mllm_lm",
+     "no_repeat_ngram_size": 0,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "prefix": null,
+     "pretrained_lm_name_or_path": "/hbox2dir/vicuna-13b-v1.5",
+     "pretrained_tokenizer_name_or_path": "/hbox2dir/vicuna-13b-v1.5",
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "transformers_version": "4.30.2",
+     "typical_p": 1.0,
+     "use_bfloat16": false
+   },
+   "max_position_embeddings": 4096,
+   "model_type": "mllm",
+   "num_attention_heads": 40,
+   "num_hidden_layers": 40,
+   "num_key_value_heads": 40,
+   "num_query_tokens": 576,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "text_config": {
+     "_name_or_path": "",
+     "add_cross_attention": false,
+     "architectures": [
+       "LlamaForCausalLM"
+     ],
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": 1,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": 2,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_act": "silu",
+     "hidden_size": 5120,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "intermediate_size": 13824,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "length_penalty": 1.0,
+     "max_length": 4096,
+     "max_position_embeddings": 4096,
+     "min_length": 0,
+     "model_type": "llama",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 40,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_hidden_layers": 40,
+     "num_key_value_heads": 40,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": 0,
+     "prefix": null,
+     "pretraining_tp": 1,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": null,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": false,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": "float16",
+     "torchscript": false,
+     "transformers_version": "4.30.2",
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cache": true,
+     "vocab_size": 32000
+   },
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": null,
+   "use_cache": true,
+   "use_decoder_only_language_model": true,
+   "vision_config": {
+     "_name_or_path": "",
+     "add_cross_attention": false,
+     "architectures": null,
+     "attention_dropout": 0.0,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "dropout": 0.0,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "encoder_type": "openai.clip",
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_act": "quick_gelu",
+     "hidden_size": 1024,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "image_size": 336,
+     "initializer_factor": 1.0,
+     "initializer_range": 0.02,
+     "intermediate_size": 4096,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-05,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "clip_vision_model",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 16,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_channels": 3,
+     "num_hidden_layers": 24,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "patch_size": 14,
+     "prefix": null,
+     "pretrained_vision_name_or_path": "/hbox2dir/clip-vit-large-patch14-336",
+     "problem_type": null,
+     "projection_dim": 768,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "transformers_version": "4.30.2",
+     "typical_p": 1.0,
+     "use_bfloat16": false
+   },
+   "visual_projector_config": {
+     "_name_or_path": "",
+     "add_cross_attention": false,
+     "architectures": null,
+     "attention_probs_dropout_prob": 0.1,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "depth": 3,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_hidden_size": 1024,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "feature_layer_index": -2,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_size": 1024,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "intermediate_size": 4096,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-06,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "min_length": 0,
+     "mlp_depth": 2,
+     "model_type": "mllm_visual_projector",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 16,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_eos_tokens": 0,
+     "num_hidden_layers": 6,
+     "num_queries": 576,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "pos_emb": true,
+     "prefix": null,
+     "prenorm": false,
+     "problem_type": null,
+     "projector_type": "c-abs",
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "transformers_version": "4.30.2",
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cls": true
+   },
+   "vocab_size": 32000
+ }
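Since the top-level model_type here is "mllm", HoneybeeVisualProjectorConfig.from_pretrained in modeling_360vl.py keeps only the nested visual_projector_config block (projector_type "c-abs", num_queries 576, depth 3, mlp_depth 2); the surrounding Honeybee LM and vision sections are carried over from the original Honeybee config and are otherwise unused here. A short sketch of that load path (assuming, as recent transformers versions allow, that from_pretrained accepts a direct path to this JSON file):

from modeling_360vl import HoneybeeVisualProjectorConfig

proj_cfg = HoneybeeVisualProjectorConfig.from_pretrained("qh360_vl-70B/proj_config.json")
print(proj_cfg.projector_type, proj_cfg.num_queries, proj_cfg.depth, proj_cfg.mlp_depth)
# expected: c-abs 576 3 2
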
special_tokens_map.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "bos_token": {
+     "content": "<|begin_of_text|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|end_of_text|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|end_of_text|>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render.
 
tokenizer_config.json ADDED
@@ -0,0 +1,2064 @@
+ {
+   "added_tokens_decoder": {
+     "128000": {
+       "content": "<|begin_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128001": {
+       "content": "<|end_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128002": {
+       "content": "<|reserved_special_token_0|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128003": {
+       "content": "<|reserved_special_token_1|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128004": {
+       "content": "<|reserved_special_token_2|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128005": {
+       "content": "<|reserved_special_token_3|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128006": {
+       "content": "<|start_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128007": {
+       "content": "<|end_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128008": {
+       "content": "<|reserved_special_token_4|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128009": {
+       "content": "<|eot_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128010": {
+       "content": "<|reserved_special_token_5|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128011": {
+       "content": "<|reserved_special_token_6|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128012": {
+       "content": "<|reserved_special_token_7|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128013": {
+       "content": "<|reserved_special_token_8|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128014": {
+       "content": "<|reserved_special_token_9|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128015": {
+       "content": "<|reserved_special_token_10|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128016": {
+       "content": "<|reserved_special_token_11|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128017": {
+       "content": "<|reserved_special_token_12|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128018": {
+       "content": "<|reserved_special_token_13|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128019": {
+       "content": "<|reserved_special_token_14|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128020": {
+       "content": "<|reserved_special_token_15|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128021": {
+       "content": "<|reserved_special_token_16|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128022": {
+       "content": "<|reserved_special_token_17|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128023": {
+       "content": "<|reserved_special_token_18|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128024": {
+       "content": "<|reserved_special_token_19|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128025": {
+       "content": "<|reserved_special_token_20|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128026": {
+       "content": "<|reserved_special_token_21|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128027": {
+       "content": "<|reserved_special_token_22|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_23|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_24|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_25|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_26|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_27|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_28|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_29|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_30|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_31|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_32|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_33|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_34|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_35|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_36|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_37|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_38|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_39|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_40|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_41|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_42|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_43|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_44|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_45|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_46|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_47|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_48|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_49|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_50|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_51|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_52|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_53|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_54|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_55|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_56|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_57|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_58|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_59|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_60|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_61|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_62|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_63|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_64|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_65|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_66|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_67|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_68|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_69|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_70|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_71|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_72|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_73|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_74|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_75|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_76|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_77|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_78|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_79|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_80|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_81|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_82|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_83|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_84|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_85|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_86|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_87|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_88|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_89|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_90|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_91|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_92|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_93|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_94|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_95|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_96|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_97|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_98|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_99|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_100|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_101|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_102|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_103|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_104|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_105|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_106|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_107|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_108|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_109|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_110|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_111|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_112|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_113|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_114|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_115|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_116|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_117|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_118|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_119|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_120|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_121|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_122|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_123|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_124|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_125|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_126|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_127|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_128|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_129|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_130|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_131|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_132|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_133|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_134|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_135|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_136|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_137|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_138|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_139|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_140|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_141|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_142|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_143|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_144|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_145|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_146|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_147|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_148|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_149|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_150|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_151|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_152|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_153|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_154|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_155|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_156|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_157|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_158|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_159|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_160|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_161|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_162|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_163|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_164|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_165|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_166|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_167|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_168|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_169|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_170|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_171|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_172|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_173|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_174|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_175|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_176|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_177|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_178|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_179|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_180|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_181|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_182|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_183|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_184|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_185|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_186|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_187|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_188|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_189|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_190|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_191|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_192|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_193|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_194|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_195|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_196|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_197|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_198|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_199|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_200|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_201|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_202|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_203|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_204|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_205|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_206|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_207|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_208|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_209|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_210|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_211|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_212|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_213|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_214|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_215|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_216|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_217|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_218|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_219|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_220|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_221|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_222|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_223|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_224|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_225|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_226|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_227|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_228|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_229|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_230|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_231|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_232|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_233|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_234|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_235|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_236|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_237|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_238|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_239|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_240|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_241|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_242|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_243|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_244|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_245|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_246|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_247|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_248|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_249|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_250|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "bos_token": "<|begin_of_text|>",
2053
+ "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
2054
+ "clean_up_tokenization_spaces": true,
2055
+ "eos_token": "<|end_of_text|>",
2056
+ "model_input_names": [
2057
+ "input_ids",
2058
+ "attention_mask"
2059
+ ],
2060
+ "model_max_length": 2048,
2061
+ "pad_token": "<|end_of_text|>",
2062
+ "padding_side": "right",
2063
+ "tokenizer_class": "PreTrainedTokenizerFast"
2064
+ }
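
The tokenizer_config.json added above registers the 256 Llama-3-style special tokens (IDs 128000 through 128255, mostly reserved placeholders), sets <|begin_of_text|> and <|end_of_text|> as the BOS/EOS pair, reuses <|end_of_text|> as the pad token, and ships a Jinja chat template that wraps each message in header/footer tokens. A minimal sketch of how this file is consumed, assuming a local checkout of the repo (the path below is a placeholder, not part of the commit):

from transformers import AutoTokenizer

# Load the tokenizer described by the tokenizer_config.json above.
# "qh360_vl-70B" is a stand-in path: point it at a local clone of this
# repository or at the model's Hugging Face Hub id.
tokenizer = AutoTokenizer.from_pretrained("qh360_vl-70B")

messages = [{"role": "user", "content": "Hello!"}]

# apply_chat_template renders the Jinja chat_template string from the config:
# each message becomes
#   <|start_header_id|>{role}<|end_header_id|>\n\n{content}<|eot_id|>
# the first message is prefixed with <|begin_of_text|>, and the template ends
# by opening an assistant header so generation continues as the assistant.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)

Note that this template appends the assistant header unconditionally, so no add_generation_prompt flag is needed to obtain a generation-ready prompt.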