Update app.py
app.py
CHANGED
@@ -110,8 +110,8 @@ with gr.Blocks(theme=gr.themes.Soft()) as app:
     gr.HTML("""<center><h1 style='font-size:xx-large;'>Gemma Gemini Multimodal Chatbot</h1><br><h3>Gemini Sprint submission by Rishiraj Acharya. Uses Google's Gemini 1.0 Pro Vision multimodal model from Vertex AI with Google's Gemma 7B Instruct model from Hugging Face. Google Cloud credits are provided for this project.</h3>""")
     chat_b = gr.Chatbot(show_label=True, show_share_button=True, show_copy_button=True, likeable=True, layout="bubble", bubble_full_width=False)
     with gr.Group():
-        inp = gr.Textbox(label="Prompt")
-        sys_inp = gr.Textbox(label="System Prompt
+        inp = gr.Textbox(label="User Prompt")
+        sys_inp = gr.Textbox(label="System Prompt")
         with gr.Accordion("Settings",open=False):
             custom_prompt=gr.Textbox(label="Modify Prompt Format", info="For testing purposes. 'USER_INPUT' is where 'SYSTEM_PROMPT, PROMPT' will be placed", lines=3,value="<start_of_turn>userUSER_INPUT<end_of_turn><start_of_turn>model")
             rand = gr.Checkbox(label="Random Seed", value=True)
@@ -123,9 +123,9 @@ with gr.Blocks(theme=gr.themes.Soft()) as app:
             chat_mem=gr.Number(label="Chat Memory", info="Number of previous chats to retain",value=4)
     with gr.Group():
         with gr.Row():
-            btn = gr.Button("Chat")
-            stop_btn = gr.Button("Stop")
-            clear_btn = gr.Button("Clear")
+            btn = gr.Button("Chat", variant="primary")
+            stop_btn = gr.Button("Stop", variant="stop")
+            clear_btn = gr.Button("Clear", variant="secondary")
 
     chat_sub=inp.submit(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
     go=btn.click(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
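For context, the custom_prompt textbox holds Gemma's turn template, and its info text says USER_INPUT is where "SYSTEM_PROMPT, PROMPT" will be placed. The helper below is not part of this commit; it is only a hypothetical sketch of that substitution (the function name and the exact joining rule are assumptions, since chat_inf itself is not shown here):

def format_prompt(custom_prompt: str, system_prompt: str, prompt: str) -> str:
    # Assumed rule, mirroring the textbox's info text: the literal token
    # USER_INPUT is replaced by "SYSTEM_PROMPT, PROMPT".
    user_input = f"{system_prompt}, {prompt}" if system_prompt else prompt
    return custom_prompt.replace("USER_INPUT", user_input)

template = "<start_of_turn>userUSER_INPUT<end_of_turn><start_of_turn>model"
print(format_prompt(template, "You are a concise assistant", "What is Gemma?"))
# <start_of_turn>userYou are a concise assistant, What is Gemma?<end_of_turn><start_of_turn>model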
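check_rand and the Stop button's handler fall outside the hunks above. As a rough, self-contained sketch of the same Gradio chaining pattern (the function body, the gr.Number seed field, and the cancels hookup are illustrative assumptions, not code taken from this commit):

import random
import gradio as gr

def check_rand(rand, seed):
    # Assumed behaviour: if "Random Seed" is ticked, draw a fresh seed,
    # otherwise keep whatever value the user supplied.
    return random.randint(1, 99999999) if rand else int(seed)

with gr.Blocks() as demo:
    rand = gr.Checkbox(label="Random Seed", value=True)
    seed = gr.Number(label="Seed", value=42)  # hypothetical stand-in for the app's seed control
    inp = gr.Textbox(label="User Prompt")
    out = gr.Textbox(label="Reply")

    # Same chaining as the diff: resolve the seed first, then run inference via .then().
    chat_sub = inp.submit(check_rand, [rand, seed], seed).then(
        lambda s, p: f"(seed {int(s)}) you said: {p}", [seed, inp], out
    )

    # A Stop button with variant="stop" typically cancels the in-flight event chain.
    stop_btn = gr.Button("Stop", variant="stop")
    stop_btn.click(None, None, None, cancels=[chat_sub])

demo.launch()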