Update README.md
The previous README contained only the `license: llama3` front matter; the updated file follows.
---
license: llama3
language:
- ja
- en
base_model: rinna/llama-3-youko-70b
pipeline_tag: text-generation
---

**[2.2bpw](https://huggingface.co/rioshiina/llama-3-youko-70b-exl2/tree/2.2bpw)** (noticeable quality loss; intended only for testing on 24GB VRAM)
**[4.0bpw](https://huggingface.co/rioshiina/llama-3-youko-70b-exl2/tree/4.0bpw)**
**[6.0bpw](https://huggingface.co/rioshiina/llama-3-youko-70b-exl2/tree/6.0bpw)**
**[8.0bpw](https://huggingface.co/rioshiina/llama-3-youko-70b-exl2/tree/8.0bpw)**

# llama-3-youko-70b-exl2
- Model creator: [rinna](https://huggingface.co/rinna)
- Original model: [llama-3-youko-70b](https://huggingface.co/rinna/llama-3-youko-70b)

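Each quantization listed above lives on its own branch of this repository. As a minimal sketch (the `4.0bpw` revision and the use of `huggingface_hub` are illustrative choices, not part of the original card), one branch can be fetched like this:

```python
# Sketch: download a single quantization branch with huggingface_hub.
# The revision "4.0bpw" is only an example; pick the branch that fits your VRAM.
from huggingface_hub import snapshot_download

model_dir = snapshot_download(
    repo_id="rioshiina/llama-3-youko-70b-exl2",
    revision="4.0bpw",
)
print(model_dir)  # local path containing the EXL2 weights
```
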
## Prompt template

```
西田幾多郎は、

```

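As a rough usage sketch, assuming the `exllamav2` Python package and a locally downloaded branch (the local path, sampling settings, and token count below are illustrative, not from the original card):

```python
# Illustrative exllamav2 sketch; not an official example from this card.
from exllamav2 import ExLlamaV2, ExLlamaV2Cache, ExLlamaV2Config, ExLlamaV2Tokenizer
from exllamav2.generator import ExLlamaV2BaseGenerator, ExLlamaV2Sampler

config = ExLlamaV2Config("llama-3-youko-70b-exl2-4.0bpw")  # example local path
model = ExLlamaV2(config)
cache = ExLlamaV2Cache(model, lazy=True)   # lazy cache so the model can auto-split across GPUs
model.load_autosplit(cache)
tokenizer = ExLlamaV2Tokenizer(config)

generator = ExLlamaV2BaseGenerator(model, cache, tokenizer)
settings = ExLlamaV2Sampler.Settings()
settings.temperature = 0.8
settings.top_p = 0.9

# Base (non-instruct) model: the prompt is plain text to be continued.
print(generator.generate_simple("西田幾多郎は、", settings, 128))
```
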

# Cite
```bibtex
@misc{rinna-llama-3-youko-70b,
    title = {rinna/llama-3-youko-70b},
    author = {Mitsuda, Koh and Chen, Xinqi and Wakatsuki, Toshiaki and Sawada, Kei},
    url = {https://huggingface.co/rinna/llama-3-youko-70b}
}

@inproceedings{sawada2024release,
    title = {Release of Pre-Trained Models for the {J}apanese Language},
    author = {Sawada, Kei and Zhao, Tianyu and Shing, Makoto and Mitsui, Kentaro and Kaga, Akio and Hono, Yukiya and Wakatsuki, Toshiaki and Mitsuda, Koh},
    booktitle = {Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024)},
    month = {5},
    year = {2024},
    pages = {13898--13905},
    url = {https://aclanthology.org/2024.lrec-main.1213},
    note = {\url{https://arxiv.org/abs/2404.01657}}
}
```

---

# References
```bibtex
@article{llama3modelcard,
    title = {Llama 3 Model Card},
    author = {AI@Meta},
    year = {2024},
    url = {https://github.com/meta-llama/llama3/blob/main/MODEL_CARD.md}
}

@software{gpt-neox-library,
    title = {{GPT}-{N}eo{X}: Large Scale Autoregressive Language Modeling in {P}y{T}orch},
    author = {Andonian, Alex and Anthony, Quentin and Biderman, Stella and Black, Sid and Gali, Preetham and Gao, Leo and Hallahan, Eric and Levy-Kramer, Josh and Leahy, Connor and Nestler, Lucas and Parker, Kip and Pieler, Michael and Purohit, Shivanshu and Songz, Tri and Phil, Wang and Weinbach, Samuel},
    doi = {10.5281/zenodo.5879544},
    month = {8},
    year = {2021},
    version = {0.0.1},
    url = {https://www.github.com/eleutherai/gpt-neox}
}
```

---

# License
[Meta Llama 3 Community License](https://llama.meta.com/llama3/license/)