Spaces: Build error
Commit • f863672 • 1 Parent(s): 5aa41fe
Adding wakeup button
app.py CHANGED
@@ -91,7 +91,7 @@ You can see the prompt clearly displayed below the chatbot to understand what is
 
 # Read this if you get an error
 I'm using Inference Endpoint's Scale to Zero to save money on GPUs. If the staus shows its not "Running" send a
-
+click **Wakeup** to wake it up. You will get a `500 error` and it will take ~7 minutes to wake up.
 """
 
 
@@ -104,7 +104,11 @@ def process_example(text, history=[]):
 
 with gr.Blocks() as demo:
     gr.Markdown(intro_md)
-
+    with gr.Column(scale=3):
+        endpoint_status = RichTextbox(check_endpoint_status, label="Inference Endpoint Status", every=1)
+    with gr.Column(scale=1):
+        wakeup_endpoint = gr.Button('Click to Wake Up Endpoint')
+        wakeup_endpoint.click(partial(generate('Wakeup')))
     with gr.Tab("Arabic-RAG"):
         chatbot = gr.Chatbot(
             [],
@@ -134,6 +138,7 @@ with gr.Blocks() as demo:
         fn=process_example,
         cache_examples=True, )
     # prompt_html.render()
+
     # Turn off interactivity while generating if you click
     txt_msg = txt_btn.click(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
         bot, chatbot, [chatbot, prompt_html])
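The wiring added in the second hunk is easier to follow outside diff form. Below is a minimal, self-contained sketch of the same idea: a status box whose callable value is re-run every second, next to a button that fires a throwaway request so a scaled-to-zero endpoint starts spinning up. `check_endpoint_status`, `generate`, and the `RichTextbox` custom component are not defined in this diff, so the sketch stubs the first two and uses a plain `gr.Textbox` instead; it is an illustration under those assumptions, not the app's actual code. One detail worth noting: `functools.partial` takes the callable and its arguments separately, as `partial(generate, 'Wakeup')`; writing `partial(generate('Wakeup'))` calls `generate` once at definition time rather than on each click.

# Minimal sketch of the wake-up wiring (illustrative only; names stubbed).
from functools import partial

import gradio as gr


def check_endpoint_status() -> str:
    # Stub: the real app presumably queries the Inference Endpoint's status here.
    return "Status: unknown (stub)"


def generate(prompt: str) -> str:
    # Stub: the real app sends `prompt` to the Inference Endpoint. For a
    # scaled-to-zero endpoint, the first request errors out but still
    # triggers the endpoint to start, which is all the Wakeup click needs.
    return f"sent: {prompt}"


with gr.Blocks() as demo:
    with gr.Column(scale=3):
        # A callable value plus `every=1` re-runs the check about once per
        # second while a client is connected (requires the queue).
        endpoint_status = gr.Textbox(
            check_endpoint_status, label="Inference Endpoint Status", every=1
        )
    with gr.Column(scale=1):
        wakeup_endpoint = gr.Button("Click to Wake Up Endpoint")
        # partial(generate, "Wakeup") defers the call until the click fires.
        wakeup_endpoint.click(partial(generate, "Wakeup"), inputs=None, outputs=None)

demo.queue()

if __name__ == "__main__":
    demo.launch()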
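`check_endpoint_status` itself is not part of this diff. As a guess at one reasonable implementation, the sketch below polls the Inference Endpoints management API via huggingface_hub (0.20 or newer, where `get_inference_endpoint` is available); the endpoint name, namespace, and `HF_TOKEN` environment variable are placeholders, not values taken from this Space.

# Hypothetical status check; endpoint name and namespace are placeholders.
import os

from huggingface_hub import get_inference_endpoint


def check_endpoint_status() -> str:
    endpoint = get_inference_endpoint(
        "my-endpoint",               # placeholder endpoint name
        namespace="my-namespace",    # placeholder user or org
        token=os.environ.get("HF_TOKEN"),
    )
    # `status` is a string such as "running", "scaledToZero", or "pending".
    return f"Inference Endpoint status: {endpoint.status}"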