Iker committed on
Commit
e635164
1 Parent(s): 574a228

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -3
README.md CHANGED
@@ -153,7 +153,7 @@ prompt = prompt(headline=headline, body=body)
153
 
154
  tokenizer = AutoTokenizer.from_pretrained("Iker/ClickbaitFighter-10B")
155
  model = AutoModelForCausalLM.from_pretrained(
156
- "Iker/ClickbaitFighter-2B", torch_dtype=torch.bfloat16, device_map="auto"
157
  )
158
 
159
  formatted_prompt = tokenizer.apply_chat_template(
@@ -182,7 +182,6 @@ print(summary.strip().split("\n")[-1]) # Get only the summary, without the promp
182
  ## Run inference in the NoticIA dataset
183
  ```python
184
  import torch # pip install torch
185
- from newspaper import Article #pip3 install newspaper3k
186
  from datasets import load_dataset # pip install datasets
187
  from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig # pip install transformers
188
 
@@ -227,7 +226,7 @@ prompt = prompt(headline=headline, body=body)
227
 
228
  tokenizer = AutoTokenizer.from_pretrained("Iker/ClickbaitFighter-10B")
229
  model = AutoModelForCausalLM.from_pretrained(
230
- "Iker/ClickbaitFighter-2B", torch_dtype=torch.bfloat16, device_map="auto"
231
  )
232
 
233
  formatted_prompt = tokenizer.apply_chat_template(
 
153
 
154
  tokenizer = AutoTokenizer.from_pretrained("Iker/ClickbaitFighter-10B")
155
  model = AutoModelForCausalLM.from_pretrained(
156
+ "Iker/ClickbaitFighter-10B", torch_dtype=torch.bfloat16, device_map="auto"
157
  )
158
 
159
  formatted_prompt = tokenizer.apply_chat_template(
 
182
  ## Run inference in the NoticIA dataset
183
  ```python
184
  import torch # pip install torch
 
185
  from datasets import load_dataset # pip install datasets
186
  from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig # pip install transformers
187
 
 
226
 
227
  tokenizer = AutoTokenizer.from_pretrained("Iker/ClickbaitFighter-10B")
228
  model = AutoModelForCausalLM.from_pretrained(
229
+ "Iker/ClickbaitFighter-10B", torch_dtype=torch.bfloat16, device_map="auto"
230
  )
231
 
232
  formatted_prompt = tokenizer.apply_chat_template(