import gradio as gr
from transformers import pipeline

# Initialize the conversational model pipeline
chatbot_pipeline = pipeline("text-generation", model="Aditya0619/Medbot")
# Chatbot response function
def respond(message, history, system_message, max_tokens, temperature, top_p):
    if history is None:
        history = []

    # Build conversation context from the optional system message and prior turns
    chat_input = f"{system_message}\n" if system_message else ""
    for user_input, bot_response in history:
        chat_input += f"User: {user_input}\nBot: {bot_response}\n"
    chat_input += f"User: {message}\nBot:"

    # Generate a response; max_new_tokens bounds the reply length no matter how long
    # the accumulated prompt gets, and do_sample enables temperature/top_p sampling
    response = chatbot_pipeline(
        chat_input,
        max_new_tokens=max_tokens,
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
        pad_token_id=50256  # Avoid padding issues with GPT-2-style models
    )[0]["generated_text"].split("Bot:")[-1].strip()

    # Update history
    history.append((message, response))
    return history, history
# API function to expose chatbot responses programmatically
def api_chat(message, history=None):
    if history is None:
        history = []
    updated_history, _ = respond(
        message, history, "", max_tokens=250, temperature=0.7, top_p=0.9
    )
    return {"response": updated_history[-1][1], "history": updated_history}
# Gradio UI layout
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 AI Chatbot with API Access\nChat with AI or use the API!")

    # Configurable parameters in an accordion menu
    with gr.Row():
        with gr.Accordion("⚙️ Configure Chatbot Settings", open=False):
            system_message = gr.Textbox(
                label="System Message (Optional)",
                placeholder="e.g., You are a helpful assistant."
            )
            max_tokens = gr.Slider(label="Max Tokens", minimum=50, maximum=500, value=250, step=10)
            temperature = gr.Slider(label="Temperature", minimum=0.0, maximum=1.0, value=0.7, step=0.1)
            top_p = gr.Slider(label="Top P", minimum=0.0, maximum=1.0, value=0.9, step=0.1)

    # Chatbot interface and user input field
    chatbot = gr.Chatbot(label="Chat with AI")
    user_input = gr.Textbox(label="Your Message", placeholder="Type a message...", lines=2)
    state = gr.State([])  # Store conversation history
    submit = gr.Button("Send")

    # Link input to chatbot response
    submit.click(
        respond,
        inputs=[user_input, state, system_message, max_tokens, temperature, top_p],
        outputs=[chatbot, state]
    )

    # Initial greeting message from the bot (None leaves the user side of the pair empty)
    demo.load(lambda: [(None, "Hi! How can I assist you today?")], outputs=chatbot)
# Launch the Gradio UI without blocking so the API app below can also start
print("Launching the Gradio app...")
_, ui_local_url, ui_share_url = demo.launch(
    share=True, server_name="0.0.0.0", server_port=7860, prevent_thread_lock=True
)
print(f"App hosted at: {ui_share_url or ui_local_url}")

# API endpoint setup with Gradio
api = gr.Interface(fn=api_chat, inputs=[gr.Textbox(), gr.State([])], outputs="json")
_, api_local_url, api_share_url = api.launch(
    share=True, server_name="0.0.0.0", server_port=7861, prevent_thread_lock=True
)
print(f"API hosted at: {api_share_url or api_local_url}")

# Keep the main thread alive so both servers stay up
demo.block_thread()