hugo1234 committed on
Commit
24b87ce
1 Parent(s): fc5fac6

Update utils.py

Browse files
Files changed (1) hide show
  1. utils.py +4 -2
utils.py CHANGED
@@ -72,8 +72,10 @@ def generate_prompt_with_history(text, history, tokenizer, max_length=2048):
72
  #model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")
73
  #tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")
74
  #model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-2.7B")
75
- tokenizer = AutoTokenizer.from_pretrained("dbmdz/electra-base-italian-xxl-cased-discriminator")
76
- model = AutoModelForCausalLM.from_pretrained("dbmdz/electra-base-italian-xxl-cased-discriminator")
 
 
77
 
78
  def load_tokenizer_and_model(base_model,load_8bit=False):
79
  if torch.cuda.is_available():
 
72
  #model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")
73
  #tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")
74
  #model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-2.7B")
75
+ #tokenizer = AutoTokenizer.from_pretrained("dbmdz/electra-base-italian-xxl-cased-discriminator")
76
+ #model = AutoModelForCausalLM.from_pretrained("dbmdz/electra-base-italian-xxl-cased-discriminator")
77
+ tokenizer = AutoTokenizer.from_pretrained("it5/it5-large-headline-generation")
78
+ model = AutoModelForCausalLM.from_pretrained("it5/it5-large-headline-generation")
79
 
80
  def load_tokenizer_and_model(base_model,load_8bit=False):
81
  if torch.cuda.is_available():