catherinearnett committed
Commit d49310c · Parent(s): 6a20c57
Upload README.md with huggingface_hub
README.md CHANGED
@@ -38,11 +38,13 @@ Checkpoints are taken at training steps: 0, 10000, 20000, 30000, 40000, 50000, 6
 
 Load the model:
 
+Note: if you do not specify a revision, it will load the final checkpoint of the model. See above for the list of checkpoints. The checkpoint step is the name of the revision.
+
 ```
 from transformers import AutoTokenizer, AutoModel
 
 tokenizer = AutoTokenizer.from_pretrained("B-GPT_pl_en_sequential")
-model = AutoModel.from_pretrained("B-GPT_pl_en_sequential")
+model = AutoModel.from_pretrained("B-GPT_pl_en_sequential", revision = "128000")
 
 
 ````
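For reference, here is a self-contained sketch of the loading pattern this commit documents. It assumes the full Hub repo id is `catherinearnett/B-GPT_pl_en_sequential` (the README snippet uses the bare model name) and that, as the added note states, intermediate checkpoints are revisions named after the training step.

```python
# Sketch of the loading pattern described in the updated README.
# Assumption: the full Hub repo id is "catherinearnett/B-GPT_pl_en_sequential";
# the README itself uses the bare name "B-GPT_pl_en_sequential".
from transformers import AutoTokenizer, AutoModel

repo_id = "catherinearnett/B-GPT_pl_en_sequential"

tokenizer = AutoTokenizer.from_pretrained(repo_id)

# Without `revision`, the final checkpoint (the default branch) is loaded.
model_final = AutoModel.from_pretrained(repo_id)

# Intermediate checkpoints are revisions named after the training step,
# e.g. step 128000 as in the README example.
model_128k = AutoModel.from_pretrained(repo_id, revision="128000")
```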