Correct version #9
by alexfromapex · opened

README.md CHANGED
@@ -26,9 +26,9 @@ Results will be updated soon.
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
-tokenizer = AutoTokenizer.from_pretrained("saltlux/luxia-21.4b-alignment-
+tokenizer = AutoTokenizer.from_pretrained("saltlux/luxia-21.4b-alignment-v1.0")
 model = AutoModelForCausalLM.from_pretrained(
-    "saltlux/luxia-21.4b-alignment-
+    "saltlux/luxia-21.4b-alignment-v1.0",
     device_map="auto",
     torch_dtype=torch.float16,
 )
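For context, a minimal sketch of how the corrected snippet might be used end to end with the full model id. The prompt and generation settings below are illustrative assumptions, not part of the README:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load tokenizer and model using the corrected, fully qualified model id
tokenizer = AutoTokenizer.from_pretrained("saltlux/luxia-21.4b-alignment-v1.0")
model = AutoModelForCausalLM.from_pretrained(
    "saltlux/luxia-21.4b-alignment-v1.0",
    device_map="auto",
    torch_dtype=torch.float16,
)

# Illustrative prompt and generation parameters (assumed for this example)
prompt = "Explain what model alignment means in one sentence."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```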