"""
Try out gradio.Chatinterface.
colab gradio-chatinterface.
%%writefile reuirements.txt
gradio
transformers
sentencepiece
torch
import gradio as gr
def greet(name):
return "Hello " + name + "!"
with gr.Blocks() as demo:
name = gr.Textbox(label="Name")
output = gr.Textbox(label="Output Box")
greet_btn = gr.Button("Greet")
greet_btn.click(fn=greet, inputs=name, outputs=output, api_name="greet")
demo.launch()
"""
# pylint: disable=line-too-long, missing-module-docstring, missing-function-docstring
# import torch
import random
import time
import gradio as gr
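
# The module docstring mentions gr.ChatInterface; a minimal sketch of how a streaming
# chat function could be wired to it is parked here as a dead-code string, in the same
# style as the other parked experiments in this file (untested here; assumes a gradio
# version that ships gr.ChatInterface, and chat_fn is just an illustrative name).
_ = """
def chat_fn(message, history):
    # yield progressively longer strings; ChatInterface streams each one to the UI
    resp = ""
    for chunk in ("Hello", ", ", "world", "!"):
        resp += chunk
        yield resp

gr.ChatInterface(chat_fn, title="gradio-chatinterface-tryout").queue().launch()
"""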
def stream_chat():
    """samples:
    Sure [('test me', 'Sure')]
    Sure, [('test me', 'Sure,')]
    Sure, I [('test me', 'Sure, I')]
    Sure, I' [('test me', "Sure, I'")]
    Sure, I'd [('test me', "Sure, I'd")]
    """
    resp = ""
    for elm in range(10):
        resp += str(elm)
        time.sleep(0.1)
        yield resp
def chat(message="", history=[]):
# prompt = f"{system_prompt}### User: {message}\n\n### Assistant:\n"
# inputs = tokenizer(prompt, return_tensors="pt").to(device=device)
# output = model.generate(**inputs, do_sample=True, top_p=0.95, top_k=0, max_new_tokens=256)
# return tokenizer.decode(output[0], skip_special_tokens=True)
_ = """
for response, _ in chat_model.stream_chat(
tokenizer, message, history, max_length=2048, top_p=0.7, temperature=0.95
):
yield response
"""
g = update_chatbot()
g.send(None)
for response in stream_chat():
# yield response
g.send(response)
yield response
yield 'done ' + response
def update_chatbot():
    while True:
        message = yield
        print(f"{message=}")
def greet(name):
    return "Hello " + name + "!"
with gr.Blocks() as block:
    name = gr.Textbox(label="Name")
    output = gr.Textbox(label="Output Box")
    greet_btn = gr.Button("Greet")
    # greet_btn.click(fn=greet, inputs=name, outputs=output, api_name="greet")
    greet_btn.click(fn=chat, inputs=name, outputs=output, api_name="greet")
_ = """
with gr.Blocks(theme=gr.themes.Glass(text_size="sm", spacing_size="sm"),) as block:
chatbot = gr.Chatbot()
msg = gr.Textbox()
# gr.ChatInterface(
block(
chat,
[msg, chatbot],
[chatbot],
# title="gradio-chatinterface-tryout",
# examples=examples_list,
).queue(max_size=2).launch()
# """
# block.queue(max_size=2).launch()
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

    def respond(message, chat_history):
        bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
        chat_history.append((message, bot_message))
        time.sleep(2)
        return "", chat_history

    def respond1(message, chat_history):
        if chat_history is None:
            chat_history = []
        bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
        temp = ""
        chat_history.append((message, temp))
        for elm in range(len(bot_message)):
            temp = bot_message[: elm + 1]
            time.sleep(0.2)
            chat_history[-1] = message, temp
            yield message, chat_history
        chat_history[-1] = (message, "done " + bot_message)
        time.sleep(2)
        yield "", chat_history
    def respond2(message, chat_history):
        if chat_history is None:
            chat_history = []
        bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
        temp = ""
        chat_history.append((message, temp))
        for elm in range(len(bot_message)):
            temp = bot_message[: elm + 1]
            time.sleep(0.2)
            chat_history[-1] = message, temp
            # yield message, chat_history
            # chatbot.value = chat_history
        chat_history[-1] = (message, "done " + bot_message)
        time.sleep(2)
        yield "", chat_history

    msg.submit(respond2, [msg, chatbot], [msg, chatbot])

demo.queue(max_size=2).launch()