Update README.md
README.md

```diff
@@ -37,8 +37,8 @@ The models can be used as auto-regressive samplers as follows:
 import torch
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-tokenizer = AutoTokenizer.from_pretrained("Salesforce/xgen-7b-8k-
-model = AutoModelForCausalLM.from_pretrained("Salesforce/xgen-7b-8k-
+tokenizer = AutoTokenizer.from_pretrained("Salesforce/xgen-7b-8k-inst", trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained("Salesforce/xgen-7b-8k-inst", torch_dtype=torch.bfloat16)
 inputs = tokenizer("The world is", return_tensors="pt")
 sample = model.generate(**inputs, max_length=128)
 print(tokenizer.decode(sample[0]))
```
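For reference, here is the updated snippet from this commit assembled into a single runnable block. Everything except the device-placement lines comes from the diff above; moving the model and inputs to a GPU when one is available is an assumption added for convenience, not part of the README change.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Instruction-tuned checkpoint referenced by this change; the custom tokenizer
# requires trust_remote_code=True.
tokenizer = AutoTokenizer.from_pretrained("Salesforce/xgen-7b-8k-inst", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("Salesforce/xgen-7b-8k-inst", torch_dtype=torch.bfloat16)

# Assumption (not in the diff): run on GPU if one is available.
device = "cuda" if torch.cuda.is_available() else "cpu"
model = model.to(device)

# Auto-regressive sampling from a short prompt, as in the README example.
inputs = tokenizer("The world is", return_tensors="pt").to(device)
sample = model.generate(**inputs, max_length=128)
print(tokenizer.decode(sample[0]))
```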