robzchhangte committed
Commit b059834
1 Parent(s): bd0ff01

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -5,14 +5,14 @@ from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
 token = os.getenv("hf_token")
 
 # Load the translation model and tokenizer from Hugging Face
-model_name = "robzchhangte/enmz-helsinki-case"
+model_name = "robzchhangte/enmz75-helcase"
 model = AutoModelForSeq2SeqLM.from_pretrained(model_name, use_auth_token=token)
 tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=token)
 
 # Translation function with max_length=512
 def translate(text):
     inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
-    outputs = model.generate(inputs["input_ids"], max_length=512)  # Set max_length to 512
+    outputs = model.generate(inputs["input_ids"], max_length=512)
     translated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
     return translated_text
 
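
For reference, below is a minimal, self-contained sketch of the code touched by this commit, as it stands after the change. It assumes the hf_token environment variable holds a valid Hugging Face access token and that the robzchhangte/enmz75-helcase checkpoint is reachable with it; the diff only covers lines 5-18 of app.py, so any surrounding code (e.g. UI wiring) is not shown, and the __main__ block with its example sentence is an illustrative addition rather than part of the commit.

import os
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Read the access token from the environment, mirroring app.py.
token = os.getenv("hf_token")

# Load the translation model and tokenizer introduced by this commit.
model_name = "robzchhangte/enmz75-helcase"
model = AutoModelForSeq2SeqLM.from_pretrained(model_name, use_auth_token=token)
tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=token)

def translate(text):
    # Tokenize the input, generate with a 512-token cap, and decode the result.
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
    outputs = model.generate(inputs["input_ids"], max_length=512)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

if __name__ == "__main__":
    # Hypothetical input sentence; the "enmz" in the repository name suggests
    # an English-to-Mizo translation model.
    print(translate("Hello, how are you?"))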