Files changed (1)
  1. README.md +8 -8
README.md CHANGED
@@ -71,10 +71,10 @@ Find below some example scripts on how to use the model in `transformers`:
 
  ```python
 
- from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
+ from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration
 
  tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
- model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256")
+ model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-base-256")
 
  input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids
@@ -93,10 +93,10 @@ print(tokenizer.decode(outputs[0]))
 
  ```python
  # pip install accelerate
- from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
+ from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration
 
  tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
- model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto")
+ model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto")
 
  input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
@@ -117,10 +117,10 @@ print(tokenizer.decode(outputs[0]))
 
  ```python
  # pip install accelerate
- from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
+ from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration
 
  tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
- model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto", torch_dtype=torch.float16)
+ model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto", torch_dtype=torch.float16)
 
  input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
@@ -139,10 +139,10 @@ print(tokenizer.decode(outputs[0]))
 
  ```python
  # pip install bitsandbytes accelerate
- from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
+ from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration
 
  tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
- model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto")
+ model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto")
 
  input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
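
The rename matters because `transformers` exposes this class as `SwitchTransformersForConditionalGeneration`; importing the old `SwitchTransformersConditionalGeneration` name raises an `ImportError`. For reference, a minimal sketch of the corrected basic example, completed end to end, is shown below. The hunks above are truncated after the `input_ids` line, so the generation and decoding steps are inferred from the `print(tokenizer.decode(outputs[0]))` context visible in the later hunk headers rather than copied verbatim from the diff.

```python
# Sketch of the corrected basic (CPU) example. The generate/decode lines are
# inferred from the hunk-header context, not shown verbatim in the diff.
from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration

tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-base-256")

input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
input_ids = tokenizer(input_text, return_tensors="pt").input_ids

# Fill in the <extra_id_n> sentinel spans and decode the prediction.
outputs = model.generate(input_ids)
print(tokenizer.decode(outputs[0]))
```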
 
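The half-precision variant additionally passes `torch_dtype=torch.float16`, which requires `torch` to be imported; the excerpt does not show that import, so the sketch below adds it explicitly (the full README may already include it above the excerpted lines). As before, the generation lines are inferred from the hunk-header context.

```python
# pip install accelerate
# Sketch of the corrected fp16 example. `import torch` is added here because
# `torch_dtype=torch.float16` needs it; the diff excerpt does not show whether
# the README already imports torch.
import torch
from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration

tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
model = SwitchTransformersForConditionalGeneration.from_pretrained(
    "google/switch-base-256", device_map="auto", torch_dtype=torch.float16
)

input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
# With device_map="auto", Accelerate places the weights on the available
# GPU(s), so the inputs are moved to device 0 to match.
input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)

outputs = model.generate(input_ids)
print(tokenizer.decode(outputs[0]))
```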