codelion committed on
Commit
35d9bb0
1 Parent(s): 657a06e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -49,7 +49,7 @@ def generate(
49
  conversation.extend([{"role": "user", "content": user}, {"role": "assistant", "content": assistant}])
50
  conversation.append({"role": "user", "content": message})
51
 
52
- input_ids = tokenizer.apply_chat_template(conversation, return_tensors="pt", add_generation_prompt=True)
53
  if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
54
  input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
55
  gr.Warning(f"Trimmed input from conversation as it was longer than {MAX_INPUT_TOKEN_LENGTH} tokens.")
 
49
  conversation.extend([{"role": "user", "content": user}, {"role": "assistant", "content": assistant}])
50
  conversation.append({"role": "user", "content": message})
51
 
52
+ input_ids = tokenizer.apply_chat_template(conversation, return_tensors="pt")
53
  if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
54
  input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
55
  gr.Warning(f"Trimmed input from conversation as it was longer than {MAX_INPUT_TOKEN_LENGTH} tokens.")