pseudotensor committed
Commit 196f3c7 • Parent(s): d170bd3
Update with h2oGPT hash 562e7570d7798e9caf1425f2f7c4cf69c7bc617c

Files changed: gradio_runner.py (+12 -10)
gradio_runner.py CHANGED

@@ -366,8 +366,8 @@ def go_gradio(**kwargs):
 
         # Get flagged data
         zip_data1 = functools.partial(zip_data, root_dirs=['flagged_data_points', kwargs['save_dir']])
-        zip_btn.click(zip_data1, inputs=None, outputs=[file_output, zip_text])
-        s3up_btn.click(s3up, inputs=zip_text, outputs=s3up_text)
+        zip_btn.click(zip_data1, inputs=None, outputs=[file_output, zip_text], queue=False)
+        s3up_btn.click(s3up, inputs=zip_text, outputs=s3up_text, queue=False)
 
         def check_admin_pass(x):
             return gr.update(visible=x == admin_pass)
@@ -809,7 +809,8 @@ def go_gradio(**kwargs):
 
         add_model_event = add_model_button.click(fn=dropdown_model_list,
                                                  inputs=[model_options_state, new_model],
-                                                 outputs=[model_choice, model_choice2, new_model, model_options_state])
+                                                 outputs=[model_choice, model_choice2, new_model, model_options_state],
+                                                 queue=False)
 
         def dropdown_lora_list(list0, x, model_used1, lora_used1, model_used2, lora_used2):
             new_state = [list0[0] + [x]]
@@ -824,11 +825,12 @@ def go_gradio(**kwargs):
         add_lora_event = add_lora_button.click(fn=dropdown_lora_list,
                                                inputs=[lora_options_state, new_lora, model_used, lora_used, model_used2,
                                                        lora_used2],
-                                               outputs=[lora_choice, lora_choice2, new_lora, lora_options_state])
+                                               outputs=[lora_choice, lora_choice2, new_lora, lora_options_state],
+                                               queue=False)
 
-        go_btn.click(lambda: gr.update(visible=False), None, go_btn, api_name="go" if allow_api else None) \
-            .then(lambda: gr.update(visible=True), None, normal_block) \
-            .then(**load_model_args).then(**prompt_update_args)
+        go_btn.click(lambda: gr.update(visible=False), None, go_btn, api_name="go" if allow_api else None, queue=False) \
+            .then(lambda: gr.update(visible=True), None, normal_block, queue=False) \
+            .then(**load_model_args, queue=False).then(**prompt_update_args, queue=False)
 
         def compare_textbox_fun(x):
             return gr.Textbox.update(visible=x)
@@ -850,16 +852,16 @@ def go_gradio(**kwargs):
         callback.setup(inputs_list + [text_output, text_output2], "flagged_data_points")
         flag_btn.click(lambda *args: callback.flag(args), inputs_list + [text_output, text_output2], None,
                        preprocess=False,
-                       api_name='flag' if allow_api else None)
+                       api_name='flag' if allow_api else None, queue=False)
         flag_btn_nochat.click(lambda *args: callback.flag(args), inputs_list + [text_output_nochat], None,
                               preprocess=False,
-                              api_name='flag_nochat' if allow_api else None)
+                              api_name='flag_nochat' if allow_api else None, queue=False)
 
         def get_system_info():
            return gr.Textbox.update(value=system_info_print())
 
         system_event = system_btn.click(get_system_info, outputs=system_text,
-                                        api_name='system_info' if allow_api else None)
+                                        api_name='system_info' if allow_api else None, queue=False)
 
         # don't pass text_output, don't want to clear output, just stop it
         # FIXME: have to click once to stop output and second time to stop GPUs going