zelalt committed on
Commit 0541876
1 Parent(s): 96e0fa0

Update README.md

Files changed (1)
  1. README.md +2 -2
README.md CHANGED
@@ -60,7 +60,7 @@ model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, tru
 tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path,trust_remote_code=True)
 model = PeftModel.from_pretrained(model, peft_model_id)
 
- #Put from dataset
+ #from dataset
 inputs = tokenizer(f'''{formatted_dataset['text'][120]}''', return_tensors="pt", return_attention_mask=False)
 outputs = model.generate(**inputs,max_new_tokens=50, pad_token_id = tokenizer.eos_token_id, eos_token_id = tokenizer.eos_token_id)
 text = tokenizer.batch_decode(outputs)[0]
@@ -68,7 +68,7 @@ print(text)
 ```
 
 ```python
- #Put as string
+ #as string
 inputs = tokenizer(f'''What is the title of this paper? ...[your pdf as text]..\n\nAnswer: ''', return_tensors="pt", return_attention_mask=False)
 outputs = model.generate(**inputs,max_new_tokens=50, pad_token_id = tokenizer.eos_token_id, eos_token_id = tokenizer.eos_token_id)
 text = tokenizer.batch_decode(outputs)[0]
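For context, the snippet touched by this diff assumes that `config`, `peft_model_id`, and `formatted_dataset` are defined earlier in the README. Below is a minimal, self-contained sketch of the same inference flow using the string-prompt variant; the adapter repo id shown is a placeholder assumption, not taken from this diff, and the base model is resolved from the adapter's own config.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftConfig, PeftModel

# Hypothetical adapter repo id -- replace with the adapter this README refers to.
peft_model_id = "your-username/your-peft-adapter"

# Read the adapter config to discover which base model it was trained on.
config = PeftConfig.from_pretrained(peft_model_id)

# Load the base model and tokenizer, then attach the PEFT adapter weights.
model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path, trust_remote_code=True)
model = PeftModel.from_pretrained(model, peft_model_id)

# Prompt supplied as a plain string, mirroring the "#as string" example above.
prompt = "What is the title of this paper? ...[your pdf as text]..\n\nAnswer: "
inputs = tokenizer(prompt, return_tensors="pt", return_attention_mask=False)
outputs = model.generate(
    **inputs,
    max_new_tokens=50,
    pad_token_id=tokenizer.eos_token_id,
    eos_token_id=tokenizer.eos_token_id,
)
print(tokenizer.batch_decode(outputs)[0])
```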