Shanshan Wang committed on
Commit da76dba
1 Parent(s): 7826ae6

set default models

Files changed (1):
  1. app.py (+20 -8)
app.py CHANGED

@@ -59,6 +59,12 @@ def inference(image_input,
     if model_state is None or tokenizer_state is None:
         chatbot.append(("System", "Please select a model to start the conversation."))
         return chatbot, state, ""
+
+    # Check for empty or invalid user message
+    if not user_message or user_message.strip() == '' or user_message.lower() == 'system':
+        chatbot.append(("System", "Please enter a valid message to continue the conversation."))
+        return chatbot, state, ""
+
 
     model = model_state
     tokenizer = tokenizer_state
@@ -126,22 +132,27 @@ def regenerate_response(chatbot,
 
     model = model_state
     tokenizer = tokenizer_state
-
-
+
     # Check if there is a previous user message
     if chatbot is None or len(chatbot) == 0:
         chatbot = []
         chatbot.append(("System", "Nothing to regenerate. Please start a conversation first."))
         return chatbot, state,
 
-    # Check if there is a previous user message
-    if state is None or len(state) == 0:
-        chatbot.append(("System", "Nothing to regenerate. Please start a conversation first."))
-        return chatbot, state
+    # # Check if there is a previous user message
+    # if state is None or len(state) == 0:
+    #     chatbot.append(("System", "Nothing to regenerate. Please start a conversation first."))
+    #     return chatbot, state
 
     # Get the last user message
     last_user_message, _ = chatbot[-1]
 
+    # Check for empty or invalid last user message
+    if not last_user_message or last_user_message.strip() == '' or last_user_message.lower() == 'system':
+        chatbot.append(("System", "Cannot regenerate response for an empty or invalid message."))
+        return chatbot, state
+
+
     state = state[:-1]  # Remove last assistant's response from history
 
     if len(state) == 0 or not state:
@@ -191,7 +202,8 @@ with gr.Blocks() as demo:
     with gr.Row():
         model_dropdown = gr.Dropdown(
             choices=list(model_paths.keys()),
-            label="Select Model"
+            label="Select Model",
+            value="H2OVL-Mississippi-2B"
         )
 
     # When the model selection changes, load the new model
@@ -299,4 +311,4 @@ with gr.Blocks() as demo:
         label = "examples",
     )
 
-demo.launch()
+demo.launch(share=True)
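
For context, the two user-facing changes are the preselected model in the dropdown and the public share link at launch. Below is a minimal, self-contained sketch of that Gradio pattern; it is not the Space's actual app.py. The model_paths entries, repo ids, load_model helper, and status Textbox are illustrative stand-ins, and only the Dropdown keyword arguments and share=True are taken from the diff.

# Minimal sketch, assuming a model_paths dict like the one referenced in app.py.
# load_model and the repo ids below are hypothetical stand-ins for the Space's loader.
import gradio as gr

model_paths = {
    "H2OVL-Mississippi-2B": "h2oai/h2ovl-mississippi-2b",      # assumed repo id
    "H2OVL-Mississippi-800M": "h2oai/h2ovl-mississippi-800m",  # assumed repo id
}

def load_model(model_name):
    # Stand-in for the app's real loader, which would return a model/tokenizer pair.
    return f"Loaded {model_paths[model_name]}"

with gr.Blocks() as demo:
    model_dropdown = gr.Dropdown(
        choices=list(model_paths.keys()),
        label="Select Model",
        value="H2OVL-Mississippi-2B",  # preselected default, as in this commit
    )
    status = gr.Textbox(label="Status")
    # Fires when the user changes the selection; the preselected default does not
    # trigger this on page load, so the app still has to load the default model
    # (or guard against model_state being None, as inference() does).
    model_dropdown.change(load_model, inputs=model_dropdown, outputs=status)

demo.launch(share=True)  # share=True exposes a temporary public URL

Note that value= only preselects the option in the UI; if the app relies on the dropdown's change event to populate model_state, the None-check kept in inference() still matters for the very first message.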