cem13/complaint_to_sythoms_mix_8x7b
- README.md +216 -0
- adapter_config.json +35 -0
- adapter_model.safetensors +3 -0
- runs/Sep04_02-24-55_556a45b2b635/events.out.tfevents.1725416700.556a45b2b635.1580.0 +3 -0
- runs/Sep04_05-08-32_556a45b2b635/events.out.tfevents.1725426518.556a45b2b635.1580.1 +3 -0
- special_tokens_map.json +24 -0
- tokenizer.json +0 -0
- tokenizer.model +3 -0
- tokenizer_config.json +43 -0
- training_args.bin +3 -0
README.md
ADDED
@@ -0,0 +1,216 @@
---
base_model: mistralai/Mixtral-8x7B-v0.1
datasets:
- generator
library_name: peft
license: apache-2.0
tags:
- trl
- sft
- generated_from_trainer
model-index:
- name: Mixtral_Alpace_v2
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# Mixtral_Alpace_v2

This model is a fine-tuned version of [mistralai/Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) on the generator dataset.
It achieves the following results on the evaluation set:
- Loss: 0.3154

## Model description

More information needed

## Intended uses & limitations

More information needed

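No usage notes were provided. As a minimal sketch of how this LoRA adapter could be loaded with PEFT on top of the base model (the repository id and the prompt format are assumptions, not documented by the author):

```python
# Minimal usage sketch: load the LoRA adapter in this repo onto the Mixtral base.
# The adapter repo id and the prompt below are assumptions for illustration.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base_id = "mistralai/Mixtral-8x7B-v0.1"
adapter_id = "cem13/complaint_to_sythoms_mix_8x7b"  # this repository

tokenizer = AutoTokenizer.from_pretrained(adapter_id)
base = AutoModelForCausalLM.from_pretrained(
    base_id, torch_dtype=torch.bfloat16, device_map="auto"
)
model = PeftModel.from_pretrained(base, adapter_id)

prompt = "Complaint: ...\nSymptoms:"  # placeholder; the expected format is not documented
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```
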
## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training (a sketch reconstructing them in code follows the list):
- learning_rate: 2.5e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 15
- num_epochs: 5

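Expressed as `transformers.TrainingArguments`, these settings would look roughly like the sketch below; `output_dir` and the optimizer spelling are assumptions (the listed Adam betas and epsilon are the library defaults):

```python
from transformers import TrainingArguments

# Reconstruction of the hyperparameters listed above; everything not in that
# list (output_dir, the optim name) is an assumption for illustration.
training_args = TrainingArguments(
    output_dir="Mixtral_Alpace_v2",   # assumed
    learning_rate=2.5e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    optim="adamw_torch",              # betas=(0.9, 0.999), eps=1e-8 are the defaults
    lr_scheduler_type="linear",
    warmup_steps=15,
    num_train_epochs=5,
)
```
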
### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:------:|:----:|:---------------:|
| 0.3573 | 0.0327 | 10 | 0.3448 |
| 0.3569 | 0.0654 | 20 | 0.3446 |
| 0.365 | 0.0980 | 30 | 0.3439 |
| 0.341 | 0.1307 | 40 | 0.3437 |
| 0.3101 | 0.1634 | 50 | 0.3428 |
| 0.3538 | 0.1961 | 60 | 0.3419 |
| 0.32 | 0.2288 | 70 | 0.3414 |
| 0.3361 | 0.2614 | 80 | 0.3403 |
| 0.3211 | 0.2941 | 90 | 0.3395 |
| 0.3583 | 0.3268 | 100 | 0.3386 |
| 0.3174 | 0.3595 | 110 | 0.3382 |
| 0.3097 | 0.3922 | 120 | 0.3378 |
| 0.33 | 0.4248 | 130 | 0.3374 |
| 0.3159 | 0.4575 | 140 | 0.3368 |
| 0.3636 | 0.4902 | 150 | 0.3366 |
| 0.334 | 0.5229 | 160 | 0.3356 |
| 0.348 | 0.5556 | 170 | 0.3353 |
| 0.3296 | 0.5882 | 180 | 0.3350 |
| 0.3498 | 0.6209 | 190 | 0.3338 |
| 0.3461 | 0.6536 | 200 | 0.3337 |
| 0.3378 | 0.6863 | 210 | 0.3335 |
| 0.3114 | 0.7190 | 220 | 0.3327 |
| 0.3291 | 0.7516 | 230 | 0.3324 |
| 0.3189 | 0.7843 | 240 | 0.3320 |
| 0.3214 | 0.8170 | 250 | 0.3311 |
| 0.3117 | 0.8497 | 260 | 0.3309 |
| 0.3025 | 0.8824 | 270 | 0.3310 |
| 0.2679 | 0.9150 | 280 | 0.3306 |
| 0.3592 | 0.9477 | 290 | 0.3304 |
| 0.3097 | 0.9804 | 300 | 0.3296 |
| 0.3662 | 1.0131 | 310 | 0.3295 |
| 0.2969 | 1.0458 | 320 | 0.3292 |
| 0.3109 | 1.0784 | 330 | 0.3290 |
| 0.3369 | 1.1111 | 340 | 0.3287 |
| 0.3101 | 1.1438 | 350 | 0.3287 |
| 0.3264 | 1.1765 | 360 | 0.3283 |
| 0.3328 | 1.2092 | 370 | 0.3278 |
| 0.3234 | 1.2418 | 380 | 0.3276 |
| 0.301 | 1.2745 | 390 | 0.3278 |
| 0.3357 | 1.3072 | 400 | 0.3273 |
| 0.3058 | 1.3399 | 410 | 0.3271 |
| 0.3204 | 1.3725 | 420 | 0.3266 |
| 0.3393 | 1.4052 | 430 | 0.3265 |
| 0.288 | 1.4379 | 440 | 0.3265 |
| 0.3121 | 1.4706 | 450 | 0.3259 |
| 0.301 | 1.5033 | 460 | 0.3255 |
| 0.2912 | 1.5359 | 470 | 0.3254 |
| 0.3426 | 1.5686 | 480 | 0.3253 |
| 0.3256 | 1.6013 | 490 | 0.3254 |
| 0.291 | 1.6340 | 500 | 0.3253 |
| 0.3234 | 1.6667 | 510 | 0.3249 |
| 0.3024 | 1.6993 | 520 | 0.3242 |
| 0.3628 | 1.7320 | 530 | 0.3240 |
| 0.331 | 1.7647 | 540 | 0.3234 |
| 0.321 | 1.7974 | 550 | 0.3235 |
| 0.2981 | 1.8301 | 560 | 0.3230 |
| 0.3369 | 1.8627 | 570 | 0.3233 |
| 0.3033 | 1.8954 | 580 | 0.3227 |
| 0.3578 | 1.9281 | 590 | 0.3224 |
| 0.2838 | 1.9608 | 600 | 0.3224 |
| 0.3026 | 1.9935 | 610 | 0.3221 |
| 0.2858 | 2.0261 | 620 | 0.3228 |
| 0.3001 | 2.0588 | 630 | 0.3225 |
| 0.2974 | 2.0915 | 640 | 0.3219 |
| 0.3071 | 2.1242 | 650 | 0.3217 |
| 0.3216 | 2.1569 | 660 | 0.3217 |
| 0.3056 | 2.1895 | 670 | 0.3216 |
| 0.3392 | 2.2222 | 680 | 0.3215 |
| 0.314 | 2.2549 | 690 | 0.3214 |
| 0.3243 | 2.2876 | 700 | 0.3210 |
| 0.3232 | 2.3203 | 710 | 0.3213 |
| 0.3365 | 2.3529 | 720 | 0.3211 |
| 0.3163 | 2.3856 | 730 | 0.3212 |
| 0.3086 | 2.4183 | 740 | 0.3211 |
| 0.3048 | 2.4510 | 750 | 0.3207 |
| 0.299 | 2.4837 | 760 | 0.3203 |
| 0.3203 | 2.5163 | 770 | 0.3203 |
| 0.278 | 2.5490 | 780 | 0.3200 |
| 0.3353 | 2.5817 | 790 | 0.3197 |
| 0.3314 | 2.6144 | 800 | 0.3198 |
| 0.2688 | 2.6471 | 810 | 0.3197 |
| 0.302 | 2.6797 | 820 | 0.3194 |
| 0.2843 | 2.7124 | 830 | 0.3195 |
| 0.3105 | 2.7451 | 840 | 0.3190 |
| 0.276 | 2.7778 | 850 | 0.3193 |
| 0.3206 | 2.8105 | 860 | 0.3192 |
| 0.3011 | 2.8431 | 870 | 0.3191 |
| 0.3367 | 2.8758 | 880 | 0.3189 |
| 0.2918 | 2.9085 | 890 | 0.3184 |
| 0.3343 | 2.9412 | 900 | 0.3187 |
| 0.2801 | 2.9739 | 910 | 0.3185 |
| 0.2959 | 3.0065 | 920 | 0.3185 |
| 0.3392 | 3.0392 | 930 | 0.3186 |
| 0.3197 | 3.0719 | 940 | 0.3182 |
| 0.2919 | 3.1046 | 950 | 0.3181 |
| 0.3544 | 3.1373 | 960 | 0.3182 |
| 0.2779 | 3.1699 | 970 | 0.3180 |
| 0.3001 | 3.2026 | 980 | 0.3180 |
| 0.3102 | 3.2353 | 990 | 0.3181 |
| 0.3152 | 3.2680 | 1000 | 0.3182 |
| 0.2962 | 3.3007 | 1010 | 0.3179 |
| 0.2831 | 3.3333 | 1020 | 0.3177 |
| 0.3103 | 3.3660 | 1030 | 0.3179 |
| 0.2766 | 3.3987 | 1040 | 0.3175 |
| 0.295 | 3.4314 | 1050 | 0.3175 |
| 0.3139 | 3.4641 | 1060 | 0.3176 |
| 0.299 | 3.4967 | 1070 | 0.3173 |
| 0.3034 | 3.5294 | 1080 | 0.3170 |
| 0.3052 | 3.5621 | 1090 | 0.3170 |
| 0.2937 | 3.5948 | 1100 | 0.3170 |
| 0.3046 | 3.6275 | 1110 | 0.3170 |
| 0.3094 | 3.6601 | 1120 | 0.3171 |
| 0.2875 | 3.6928 | 1130 | 0.3169 |
| 0.2847 | 3.7255 | 1140 | 0.3169 |
| 0.2947 | 3.7582 | 1150 | 0.3171 |
| 0.2925 | 3.7908 | 1160 | 0.3168 |
| 0.2938 | 3.8235 | 1170 | 0.3167 |
| 0.2955 | 3.8562 | 1180 | 0.3167 |
| 0.333 | 3.8889 | 1190 | 0.3167 |
| 0.3391 | 3.9216 | 1200 | 0.3165 |
| 0.2887 | 3.9542 | 1210 | 0.3166 |
| 0.3067 | 3.9869 | 1220 | 0.3163 |
| 0.3349 | 4.0196 | 1230 | 0.3164 |
| 0.308 | 4.0523 | 1240 | 0.3162 |
| 0.3252 | 4.0850 | 1250 | 0.3163 |
| 0.3077 | 4.1176 | 1260 | 0.3162 |
| 0.3198 | 4.1503 | 1270 | 0.3162 |
| 0.2891 | 4.1830 | 1280 | 0.3162 |
| 0.2712 | 4.2157 | 1290 | 0.3162 |
| 0.3083 | 4.2484 | 1300 | 0.3162 |
| 0.3032 | 4.2810 | 1310 | 0.3161 |
| 0.3024 | 4.3137 | 1320 | 0.3159 |
| 0.2966 | 4.3464 | 1330 | 0.3160 |
| 0.3046 | 4.3791 | 1340 | 0.3159 |
| 0.284 | 4.4118 | 1350 | 0.3158 |
| 0.2885 | 4.4444 | 1360 | 0.3157 |
| 0.2951 | 4.4771 | 1370 | 0.3158 |
| 0.2772 | 4.5098 | 1380 | 0.3157 |
| 0.305 | 4.5425 | 1390 | 0.3156 |
| 0.2834 | 4.5752 | 1400 | 0.3156 |
| 0.3365 | 4.6078 | 1410 | 0.3157 |
| 0.3128 | 4.6405 | 1420 | 0.3158 |
| 0.3004 | 4.6732 | 1430 | 0.3157 |
| 0.2844 | 4.7059 | 1440 | 0.3156 |
| 0.3193 | 4.7386 | 1450 | 0.3155 |
| 0.3053 | 4.7712 | 1460 | 0.3156 |
| 0.2961 | 4.8039 | 1470 | 0.3156 |
| 0.2999 | 4.8366 | 1480 | 0.3155 |
| 0.2644 | 4.8693 | 1490 | 0.3155 |
| 0.311 | 4.9020 | 1500 | 0.3155 |
| 0.3044 | 4.9346 | 1510 | 0.3155 |
| 0.3 | 4.9673 | 1520 | 0.3156 |
| 0.3378 | 5.0 | 1530 | 0.3154 |

### Framework versions

- PEFT 0.12.0
- Transformers 4.44.2
- Pytorch 2.4.0+cu121
- Datasets 2.21.0
- Tokenizers 0.19.1
adapter_config.json
ADDED
@@ -0,0 +1,35 @@
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": null,
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 16,
  "lora_dropout": 0.1,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 64,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "lm_head",
    "k_proj",
    "v_proj",
    "q_proj",
    "gate_proj",
    "up_proj",
    "down_proj",
    "o_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": false
}
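For reference, this file corresponds roughly to the `peft.LoraConfig` below (a reconstruction from the JSON, not the original training script; the comments grouping the modules are interpretation):

```python
from peft import LoraConfig

# Sketch of a LoraConfig that would serialize to (approximately) the JSON above.
lora_config = LoraConfig(
    r=64,
    lora_alpha=16,
    lora_dropout=0.1,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[
        "lm_head",
        "k_proj", "v_proj", "q_proj",         # attention projections
        "gate_proj", "up_proj", "down_proj",  # MLP/expert projections
        "o_proj",
    ],
)
```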
adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:be0d5a2710ed3319aefc2d3bb8ebb3164fdf69c21b32ad62f165dcd61bc0e40b
size 751672160
runs/Sep04_02-24-55_556a45b2b635/events.out.tfevents.1725416700.556a45b2b635.1580.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8003c03d4092e7c1450ef827cb0bc060cf8fd5afe03dae5781c1258b948b3bd5
size 34388
runs/Sep04_05-08-32_556a45b2b635/events.out.tfevents.1725426518.556a45b2b635.1580.1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fb2b71600b560169ef2fec3a6dfc0d7df1044fd5657e89cc3dd4487f4eac8262
size 79838
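These two files are TensorBoard event logs for the training runs. To inspect the logged scalars without launching TensorBoard, the event reader from the `tensorboard` package can be used (a sketch; the available tag names depend on what the trainer logged):

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at one run directory and list the scalar tags it holds.
ea = EventAccumulator("runs/Sep04_05-08-32_556a45b2b635")
ea.Reload()
print(ea.Tags()["scalars"])  # e.g. train/eval loss tags, depending on the trainer
```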
special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "</s>",
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer.model
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
size 493443
tokenizer_config.json
ADDED
@@ -0,0 +1,43 @@
{
  "add_bos_token": true,
  "add_eos_token": false,
  "add_prefix_space": null,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [],
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "legacy": true,
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "</s>",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
  "use_default_system_prompt": false
}
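Note that `pad_token` is mapped to the EOS token `</s>`, a common workaround when the base tokenizer ships without a dedicated padding token. A quick check (the repository id is assumed):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("cem13/complaint_to_sythoms_mix_8x7b")
assert tokenizer.pad_token == tokenizer.eos_token == "</s>"

# Padding a batch now reuses </s>; padded positions are masked out.
batch = tokenizer(["short", "a somewhat longer example"], padding=True, return_tensors="pt")
print(batch["attention_mask"])
```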
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:48f6b423a3ca410de093af33e6a3cdefb12380087bc73359e616468dbc1c9746
size 5432