PR #18 by hikapa (opened) — README.md changed:
@@ -20,7 +20,7 @@ batch_size=80,
 gradient_accumulation_steps=16

 ------ EXAMPLE USAGE ---
-
+```
 from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

 model = AutoModelForCausalLM.from_pretrained('roneneldan/TinyStories-33M')
@@ -38,4 +38,5 @@ output = model.generate(input_ids, max_length = 1000, num_beams=1)
 output_text = tokenizer.decode(output[0], skip_special_tokens=True)

 # Print the generated text
-print(output_text)
+print(output_text)
+```