Duplicate from FreedomIntelligence/AceGPT-7B-chat
Browse filesCo-authored-by: Xuening Sun <[email protected]>
- .gitattributes +35 -0
- LICENSE.txt +50 -0
- README.md +85 -0
- config.json +29 -0
- pytorch_model.bin +3 -0
- special_tokens_map.json +24 -0
- tokenizer.model +3 -0
- tokenizer_config.json +36 -0
.gitattributes
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
LICENSE.txt
ADDED
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
This project utilizes materials from Llama 2, provided by Meta Platforms, Inc. The Llama 2 materials are licensed under the LLAMA 2 Community License, Copyright (c) Meta Platforms, Inc. All Rights Reserved.
|
2 |
+
|
3 |
+
A copy of the license agreement can be found at [Link to the License, e.g. https://github.com/facebookresearch/llama/blob/main/LICENSE].
|
4 |
+
|
5 |
+
All applicable terms and conditions outlined in the LLAMA 2 Community License Agreement have been adhered to, including but not limited to the retention of the attribution notice in all redistributed copies of the Llama Materials as follows:
|
6 |
+
|
7 |
+
"Llama 2 is licensed under the LLAMA 2 Community License, Copyright (c) Meta Platforms, Inc. All Rights Reserved."
|
8 |
+
|
9 |
+
This project complies with all applicable laws and regulations and adheres to the Acceptable Use Policy for the Llama Materials.
|
10 |
+
|
11 |
+
|
12 |
+
AceGPT COMMUNITY LICENSE AGREEMENT
|
13 |
+
AceGPT Version Release Date: Sep 23, 2023
|
14 |
+
|
15 |
+
|
16 |
+
"Agreement" means the terms and conditions for use, reproduction, distribution and modification of the AceGPT Materials set forth herein.
|
17 |
+
|
18 |
+
"Licensee" or "you" means you, or your employer or any other person or entity (if you are entering into this Agreement on such person or entity's behalf), of the age required under applicable laws, rules or regulations to provide legal consent and that has legal authority to bind your employer or such other person or entity if you are entering in this Agreement on their behalf.
|
19 |
+
|
20 |
+
"AceGPT" means the foundational large language models and software and algorithms, including machine-learning model code, trained model weights, inference-enabling code, training-enabling code, fine-tuning enabling code and other elements of the foregoing distributed by SRIBD, CUHK(shenzhen) and KAUST at https://github.com/FreedomIntelligence/AceGPT and https://huggingface.co/FreedomIntelligence/.
|
21 |
+
|
22 |
+
"AceGPT Materials" means, collectively, our proprietary AceGPT and
|
23 |
+
Documentation (and any portion thereof) made available under this Agreement.
|
24 |
+
|
25 |
+
By clicking "I Accept" below or by using or distributing any portion or element of our Materials, you agree to be bound by this Agreement.
|
26 |
+
|
27 |
+
1. License Rights and Redistribution.
|
28 |
+
|
29 |
+
a. Grant of Rights. You are granted a non-exclusive, worldwide, non-transferable and royalty-free limited license under our intellectual property or other rights owned by us embodied in the AceGPT Materials to use, reproduce, distribute, copy, create derivative works of, and make modifications to the AceGPT Materials.
|
30 |
+
|
31 |
+
b. Redistribution and Use.
|
32 |
+
|
33 |
+
i. If you distribute or make the AceGPT Materials, or any derivative works thereof, available to a third party, you shall provide a copy of this Agreement to such third party.
|
34 |
+
ii. If you receive AceGPT Materials, or any derivative works thereof, from a Licensee as part of an integrated end user product, then Section 2 of this Agreement will not apply to you.
|
35 |
+
|
36 |
+
iii. You must retain in all copies of the AceGPT Materials that you distribute the following attribution notice within a "Notice" text file distributed as a part of such copies: "AceGPT is licensed under the AceGPT Community License"
|
37 |
+
|
38 |
+
iv. Your use of the AceGPT Materials must comply with applicable laws and regulations (including trade compliance laws and regulations) and adhere to the Acceptable Use Policy for the AceGPT Materials, which is hereby incorporated by reference into this Agreement.
|
39 |
+
|
40 |
+
v. You will not use the AceGPT Materials or any output or results of the AceGPT Materials to improve any other large language model (excluding AceGPT or derivative works thereof).
|
41 |
+
|
42 |
+
2. Additional Commercial Terms. If, on the AceGPT version release date, the monthly active users of the products or services made available by or for Licensee, or Licensee's affiliates, is greater than 700 million monthly active users in the preceding calendar month, you must request a license from SRIBD, which SRIBD may grant to you in its sole discretion, and you are not authorized to exercise any of the rights under this Agreement unless or until SRIBD otherwise expressly grants you such rights.
|
43 |
+
|
44 |
+
3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE LAW, THE ACEGPT MATERIALS AND ANY OUTPUT AND RESULTS THEREFROM ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING THE ACEGPT MATERIALS AND ASSUME ANY RISKS ASSOCIATED WITH YOUR USE OF THE ACEGPT MATERIALS AND ANY OUTPUT AND RESULTS.
|
45 |
+
|
46 |
+
4. Limitation of Liability. IN NO EVENT WILL SRIBD OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF SRIBD OR ITS AFFILIATES HAVE BEEN ADVISED OF THE POSSIBILITY OF ANY OF THE FOREGOING.
|
47 |
+
|
48 |
+
5. Intellectual Property.
|
49 |
+
|
50 |
+
a. No trademark licenses are granted under this Agreement, and in connection with the AceGPT Materials, neither SRIBD nor Licensee may use any name or m
|
README.md
ADDED
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
license: apache-2.0
|
3 |
+
language:
|
4 |
+
- ar
|
5 |
+
---
|
6 |
+
# <b>AceGPT</b>
|
7 |
+
AceGPT is a fully fine-tuned generative text model collection based on LLaMA2, particularly in the
|
8 |
+
Arabic language domain. This is the repository for the 7B-chat fine-tuned model.
|
9 |
+
|
10 |
+
---
|
11 |
+
## Model Details
|
12 |
+
We have released the AceGPT family of large language models, which is a collection of fully fine-tuned generative text models based on LLaMA2, ranging from 7B to 13B parameters. Our models include two main categories: AceGPT and AceGPT-chat. AceGPT-chat is an optimized version specifically designed for dialogue applications. It is worth mentioning that our models have demonstrated superior performance compared to all currently available open-source Arabic dialogue models in multiple benchmark tests. Furthermore, in our human evaluations, our models have shown comparable satisfaction levels to some closed-source models, such as ChatGPT, in the Arabic language.
|
13 |
+
## Model Developers
|
14 |
+
We are from the School of Data Science, the Chinese University of Hong Kong, Shenzhen (CUHKSZ), the Shenzhen Research Institute of Big Data (SRIBD), and the King Abdullah University of Science and Technology (KAUST).
|
15 |
+
## Variations
|
16 |
+
The AceGPT family comes in a range of parameter sizes — 7B and 13B; each model size has a base category and a -chat category.
|
17 |
+
## Input
|
18 |
+
Models input text only.
|
19 |
+
## Output
|
20 |
+
Models output text only.
|
21 |
+
## Model Evaluation Results
|
22 |
+
|
23 |
+
Experiments on Arabic Vicuna-80 and Arabic AlpacaEval. Numbers are the average performance ratio relative to ChatGPT over three runs. We do not report the results of raw Llama-2 models since they cannot properly generate Arabic texts.
|
24 |
+
| | Arabic Vicuna-80 | Arabic AlpacaEval |
|
25 |
+
|------------------------------|--------------------|---------------------|
|
26 |
+
| Phoenix Chen et al. (2023a) | 71.92% ± 0.2% | 65.62% ± 0.3% |
|
27 |
+
| Phoenix–multiple-langs Chen et al. (2023b) | 71.67% ± 0.7% | 65.36% ± 0.1% |
|
28 |
+
| Jais-13B-chat Sengupta et al. (2023) | 75.40% ± 1.6% | 74.95% ± 0.2% |
|
29 |
+
| AceGPT-7B-chat | 94.82% ± 0.2% | 93.81% ± 0.1% |
|
30 |
+
| AceGPT-13B-chat | 100.88% ± 0.4% | 97.95% ± 0.1% |
|
31 |
+
|
32 |
+
## Samples
|
33 |
+
#### Sample1(alpaca)
|
34 |
+
* question:
|
35 |
+
|
36 |
+
ما هي أسماء بعض الممثلين المشهورين الذين بدأوا مسيراتهم المهنية على برودواي؟
|
37 |
+
* answer:
|
38 |
+
1. ليوناردو دي كابريو: بدأ مسيرته الفنية في برودواي على الرغم من أنه اشتهر بشكل أكبر من خلال أدواره في الأفلام مثل \"The Wolf of Wall Street\" و \"Titanic\".
|
39 |
+
2. فرانسيس مكدورماند: بدأت مسيرتها الفنية في برودواي وأصبحت ممثلة مشهورة في أوائل القرن العشرين بفضل أدوارها في العديد من المسرحيات الكلاسيكية.
|
40 |
+
3. جودي دنش: بدأت مسيرتها الفنية في برودواي وأصبحت ممثلة مشهورة بفضل أدائها المتميز في العديد من المسرحيات الكلاسيكية.
|
41 |
+
4. بيتر أوستروم: بدأ مسيرته الفنية في برودواي وأصبح ممثلًا مشهورًا بفضل أدائه المتميز في العديد من المسرحيات الكلاسيكية.
|
42 |
+
5. مارك رايلانس: بدأ مسيرته الفنية في برودواي وأصبح ممثلًا مشهورًا بفضل أدائه المتميز في العديد من المسرحيات الكلاسيكية.
|
43 |
+
6. جيمس كان: بدأ مسيرته الفنية في برودواي وأصبح ممثلًا مشهورًا بفضل أدائه المتميز في العديد من المسرحيات الكلاسيكية.
|
44 |
+
7. إليزابيث تايلور: بدأت مسيرتها الفنية في برودواي وأصبحت ممثلة مشهورة في أوائل القرن العشرين بفضل أدوارها في العديد من المسرحيات الكلاسيكية.
|
45 |
+
8. روبرت ويلسون: بدأ مسيرته الفنية في برودواي وأصبح ممثلًا مشهورًا بفضل أدائه المتميز في العديد من المسرحيات الكلاسيكية.
|
46 |
+
9. جودي غارلاند: بدأت مسيرتها الفنية في برودواي وأصبحت ممثلة مشهورة في أوائل القرن العشرين بفضل أدوارها في العديد من المسرحيات الكلاسيكية.
|
47 |
+
10. ماثيو برودريك: بدأ مسيرته الفنية في برودواي وأصبح ممثلًا مشهورًا بفضل أدائه المتميز في العديد من المسرحيات الكلاسيكية."
|
48 |
+
|
49 |
+
---
|
50 |
+
#### Sample2(Vicuna)
|
51 |
+
* question:
|
52 |
+
|
53 |
+
كيف يمكنني تحسين مهارات إدارة الوقت الخاصة بي؟
|
54 |
+
* answer:
|
55 |
+
1. تحديد الأولويات: قم بتحديد الأعمال الأكثر أهمية والتي تتطلب اهتمامك الأكبر.
|
56 |
+
2. تنظيم الجدول الزمني: استخدم جدول زمني أو تقويم لتنظيم وقتك وتحديد الأوقات المحددة للقيام بمهام معينة.
|
57 |
+
3. تجنب التسويف: تجنب تأجيل الأعمال التي يمكنك القيام بها الآن.
|
58 |
+
4. استخدام تقنيات الإدارة الزمنية: استخدم تقنيات مثل تقنية بومودورو، حيث تعمل لمدة 25 دقيقة ثم تأخذ استراحة لمدة 5 دقائق.
|
59 |
+
5. تعلم قول \"لا\": تعلم كيفية رفض الطلبات التي لا تتناسب مع أهدافك وجدولك الزمني.
|
60 |
+
6. استخدم التكنولوجيا: استخدم التطبيقات والأدوات التكنولوجية لتتبع الوقت وتنظيم المهام.
|
61 |
+
7. تحديد الوقت للراحة: تأكد من تحديد الوقت للراحة والاسترخاء لتجنب الإرهاق والشعور بالحرق.
|
62 |
+
8. تحسين المهارات التنظيمية: تعلم كيفية تنظيم المهام والأولويات بشكل فعال.
|
63 |
+
9. استخدم التقنيات البصرية: استخدم الرسوم البيانية والرسوم البيانية لتتبع الوقت والتقدم.
|
64 |
+
10. تحسين الصحة العامة: تأكد من الحصول على قسط كاف من النوم والتغذية الجيدة والتمارين الرياضية لتحسين قدرتك على التركيز والإنتاجية.
|
65 |
+
11. استخدم تقنية الطماطم: هذه التقنية تتطلب منك العمل لمدة 25 دقيقة دون انقطاع، ثم تأخذ استراحة لمدة 5 دقائق.
|
66 |
+
12. تعلم كيفية قول \"لا\": قدرتك على قول \"لا\" للأشياء التي لا تخدم أهدافك يمكن أن تحسن بشكل كبير إدارة الوقت الخاصة بك.
|
67 |
+
13. استخدم تقنية التركيز العميق: تقنية تستخدم التقنيات مثل التأمل والتركيز العقلي لزيادة القدرة على التركيز وتحسين الإنتاجية.
|
68 |
+
14. تعلم كيفية تقسيم المهام: تعلم كيفية تقسيم المهام الكبيرة إلى مهام أصغر وأكثر إدارة.
|
69 |
+
15. استخدم تقنية الطماطم: تقنية تتطلب منك العمل لمدة 25 دقيقة دون انقطاع، ثم تأخذ استراحة لمدة 5 دقائق.
|
70 |
+
16. تعلم كيفية تحديد الأولويات: تعلم كيفية تحديد الأولويات والتركيز على المهام الأكثر أهمية أولاً.
|
71 |
+
17. استخدم تقنية الترتيب الثلاثي: تقنية تتطلب منك ترتيب المهام حسب الأهمية والعاجلة، ثم تعمل على المهمة الأعلى أولاً.
|
72 |
+
18. تعلم كيفية تحسين التركيز: تعلم"
|
73 |
+
# You can get more details at https://github.com/FreedomIntelligence/AceGPT/tree/main
|
74 |
+
|
75 |
+
|
76 |
+
|
77 |
+
# Reference
|
78 |
+
```
|
79 |
+
@article{huang2023acegpt,
|
80 |
+
title={AceGPT, Localizing Large Language Models in Arabic},
|
81 |
+
author={Huang, Huang and Yu, Fei and Zhu, Jianqing and Sun, Xuening and Cheng, Hao and Song, Dingjie and Chen, Zhihong and Alharthi, Abdulmohsen and An, Bang and Liu, Ziche and others},
|
82 |
+
journal={arXiv preprint arXiv:2309.12053},
|
83 |
+
year={2023}
|
84 |
+
}
|
85 |
+
```
|
config.json
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"_name_or_path": "/mntcephfs/lab_data/chenjunying/zhujianqing/llm_train/yufei/final_sft/llama-pretrained-v5.2/checkpoint-0-26120",
|
3 |
+
"architectures": [
|
4 |
+
"LlamaForCausalLM"
|
5 |
+
],
|
6 |
+
"bos_token_id": 1,
|
7 |
+
"dropout": 0.0,
|
8 |
+
"end_token_id": 2,
|
9 |
+
"eos_token_id": 2,
|
10 |
+
"hidden_act": "silu",
|
11 |
+
"hidden_size": 4096,
|
12 |
+
"initializer_range": 0.02,
|
13 |
+
"intermediate_size": 11008,
|
14 |
+
"max_length": 4096,
|
15 |
+
"max_position_embeddings": 2048,
|
16 |
+
"model_type": "llama",
|
17 |
+
"num_attention_heads": 32,
|
18 |
+
"num_hidden_layers": 32,
|
19 |
+
"num_key_value_heads": 32,
|
20 |
+
"pad_token_id": 2,
|
21 |
+
"pretraining_tp": 1,
|
22 |
+
"rms_norm_eps": 1e-05,
|
23 |
+
"rope_scaling": null,
|
24 |
+
"tie_word_embeddings": false,
|
25 |
+
"torch_dtype": "float16",
|
26 |
+
"transformers_version": "4.29.0",
|
27 |
+
"use_cache": true,
|
28 |
+
"vocab_size": 32000
|
29 |
+
}
|
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b8e8b7243c17c198cbe7beee9b4e75d364295822120927a32a38b3e2006192d9
|
3 |
+
size 22469511612
|
special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"bos_token": {
|
3 |
+
"content": "<s>",
|
4 |
+
"lstrip": false,
|
5 |
+
"normalized": false,
|
6 |
+
"rstrip": false,
|
7 |
+
"single_word": false
|
8 |
+
},
|
9 |
+
"eos_token": {
|
10 |
+
"content": "</s>",
|
11 |
+
"lstrip": false,
|
12 |
+
"normalized": false,
|
13 |
+
"rstrip": false,
|
14 |
+
"single_word": false
|
15 |
+
},
|
16 |
+
"pad_token": "<unk>",
|
17 |
+
"unk_token": {
|
18 |
+
"content": "<unk>",
|
19 |
+
"lstrip": false,
|
20 |
+
"normalized": false,
|
21 |
+
"rstrip": false,
|
22 |
+
"single_word": false
|
23 |
+
}
|
24 |
+
}
|
tokenizer.model
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
|
3 |
+
size 499723
|
tokenizer_config.json
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"add_bos_token": true,
|
3 |
+
"add_eos_token": false,
|
4 |
+
"bos_token": {
|
5 |
+
"__type": "AddedToken",
|
6 |
+
"content": "<s>",
|
7 |
+
"lstrip": false,
|
8 |
+
"normalized": false,
|
9 |
+
"rstrip": false,
|
10 |
+
"single_word": false
|
11 |
+
},
|
12 |
+
"clean_up_tokenization_spaces": false,
|
13 |
+
"eos_token": {
|
14 |
+
"__type": "AddedToken",
|
15 |
+
"content": "</s>",
|
16 |
+
"lstrip": false,
|
17 |
+
"normalized": false,
|
18 |
+
"rstrip": false,
|
19 |
+
"single_word": false
|
20 |
+
},
|
21 |
+
"legacy": false,
|
22 |
+
"model_max_length": 1000000000000000019884624838656,
|
23 |
+
"pad_token": null,
|
24 |
+
"sp_model_kwargs": {},
|
25 |
+
"spaces_between_special_tokens": false,
|
26 |
+
"tokenizer_class": "LlamaTokenizer",
|
27 |
+
"unk_token": {
|
28 |
+
"__type": "AddedToken",
|
29 |
+
"content": "<unk>",
|
30 |
+
"lstrip": false,
|
31 |
+
"normalized": false,
|
32 |
+
"rstrip": false,
|
33 |
+
"single_word": false
|
34 |
+
},
|
35 |
+
"use_default_system_prompt": true
|
36 |
+
}
|