rwitz committed on
Commit
65f8838
1 Parent(s): 4064f71

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -7
app.py CHANGED
@@ -76,18 +76,22 @@ import os
76
# Function to get bot response
def format_chatml_prompt(state):
    """Render stored conversation history into ChatML prompt string(s).

    Parameters:
        state: dict with a "history" key holding a list of conversations;
            each conversation is a list of {'role': ..., 'content': ...}
            message dicts. Any role other than 'user' is rendered as
            'assistant'.

    Returns:
        A list of one or two ChatML prompt strings (one per conversation,
        at most two are rendered), each terminated with an open
        "<|im_start|>assistant" turn so the model continues from there.
    """
    chatml_prompt = "<|im_start|>system You are a helpful assistant, who can think outside the box.<|im_end|>"
    for message in state["history"][0]:
        if message['role'] == 'user':
            chatml_prompt += "\n<|im_start|>user " + message['content'] + "<|im_end|>"
        else:
            chatml_prompt += "\n<|im_start|>assistant " + message['content'] + "<|im_end|>"
    # BUG FIX: the original unconditionally indexed state["history"][1],
    # which raised IndexError whenever only one conversation was tracked.
    # Build the second prompt only when a second conversation exists.
    if len(state["history"]) > 1:
        chatml_prompt2 = "<|im_start|>system You are a helpful assistant, who can think outside the box.<|im_end|>"
        for message in state["history"][1]:
            if message['role'] == 'user':
                chatml_prompt2 += "\n<|im_start|>user " + message['content'] + "<|im_end|>"
            else:
                chatml_prompt2 += "\n<|im_start|>assistant " + message['content'] + "<|im_end|>"
        return [chatml_prompt + "\n<|im_start|>assistant", chatml_prompt2 + "\n<|im_start|>assistant"]
    return [chatml_prompt + "\n<|im_start|>assistant"]
 
 
 
 
 
91
  import aiohttp
92
  import asyncio
93
  from tenacity import retry, stop_after_attempt, wait_exponential
 
76
# Function to get bot response
def format_chatml_prompt(state):
    """Render stored conversation history into ChatML prompt string(s).

    Parameters:
        state: dict with a "history" key holding a list of conversations;
            each conversation is a list of {'role': ..., 'content': ...}
            message dicts. Any role other than 'user' is rendered as
            'assistant'.

    Returns:
        A list with one ChatML prompt string per conversation (at most two
        are rendered, matching the original behavior), each ending with an
        open "<|im_start|>assistant" turn for the model to continue.
    """
    def _render(history):
        # Build one ChatML prompt from a single conversation's messages.
        prompt = "<|im_start|>system You are a helpful assistant, who can think outside the box.<|im_end|>"
        for message in history:
            if message['role'] == 'user':
                prompt += "\n<|im_start|>user " + message['content'] + "<|im_end|>"
            else:
                prompt += "\n<|im_start|>assistant " + message['content'] + "<|im_end|>"
        return prompt + "\n<|im_start|>assistant"

    # The slice deduplicates the two previously copy-pasted loops and also
    # tolerates an empty history (returns []) instead of raising IndexError
    # on state["history"][0].
    return [_render(history) for history in state["history"][:2]]
95
  import aiohttp
96
  import asyncio
97
  from tenacity import retry, stop_after_attempt, wait_exponential