0.46 input_ids typo fixed
app.py
CHANGED
@@ -110,7 +110,6 @@ def generate_both(system_prompt, input_text, chatbot_a, chatbot_b, max_new_token
     if "Pharia" in model_info[0]['id']:
         formatted_conversation = apply_pharia_template(messages=new_messages_a, add_generation_prompt=True)
         input_ids_a = tokenizer_a(formatted_conversation, return_tensors="pt").to(device)
-
     else:
         input_ids_a = tokenizer_a.apply_chat_template(
             new_messages_a,
@@ -121,8 +120,7 @@ def generate_both(system_prompt, input_text, chatbot_a, chatbot_b, max_new_token
 
     if "Pharia" in model_info[1]['id']:
         formatted_conversation = apply_pharia_template(messages=new_messages_a, add_generation_prompt=True)
-
-
+        input_ids_b = tokenizer_a(formatted_conversation, return_tensors="pt").to(device)
     else:
         input_ids_b = tokenizer_b.apply_chat_template(
             new_messages_b,
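For context, a minimal sketch of how the second model's tokenization branch reads after this commit. The names (model_info, new_messages_a, new_messages_b, tokenizer_a, tokenizer_b, apply_pharia_template, device) are taken from the hunks above; the arguments of apply_chat_template past new_messages_b are assumptions, since the hunk cuts off there:

if "Pharia" in model_info[1]['id']:
    # Pharia models use a custom prompt template rather than the tokenizer's chat template.
    formatted_conversation = apply_pharia_template(messages=new_messages_a, add_generation_prompt=True)
    # Line added by this commit: previously input_ids_b was never assigned on the Pharia path.
    input_ids_b = tokenizer_a(formatted_conversation, return_tensors="pt").to(device)
else:
    input_ids_b = tokenizer_b.apply_chat_template(
        new_messages_b,
        add_generation_prompt=True,  # assumed, not shown in the hunk
        return_tensors="pt",         # assumed, not shown in the hunk
    ).to(device)

In short, the Pharia path tokenizes the already-formatted prompt string directly, while the default path lets the tokenizer's built-in chat template do the formatting; before this fix the Pharia branch for the second model never assigned input_ids_b.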