import random
import gradio as gr
import requests

API_URL = "https://api-inference.huggingface.co/models/facebook/blenderbot-3B"
headers = {"Authorization": "Bearer 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"}
def query(payload):
    # Send the conversation payload to the Hugging Face Inference API and
    # return the decoded JSON response.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()
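# A rough illustration (an assumption, based only on how the result is used
# below) of the payload shape sent to the conversational endpoint and the key
# we read back:
#
#   query({"inputs": {"past_user_inputs": ["hi"],
#                     "generated_responses": ["hello"],
#                     "text": "how are you?"}})
#   # -> a dict containing a "generated_text" string, e.g. {"generated_text": "..."}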
def chat(message):
    # Seed context for the model. Note that these lists are re-created on
    # every call, so the conversation history does not persist between turns.
    past_user = ["what is your name?"]
    generated = ["I am Sade, Funbi's AI chatbot"]
    message = message.lower()
    if "your name" in message or "who are you" in message:
        # Canned introductions for questions about the bot's identity.
        response = random.choice([
            "I am Sade, an AI chatbot made by Funbi, how are you?",
            "Sade, an AI chatbot made by Funbi, feel free to ask me anything",
        ])
    else:
        # Everything else is forwarded to the BlenderBot model.
        response = query({
            "inputs": {
                "past_user_inputs": past_user,
                "generated_responses": generated,
                "text": message,
            },
        })
        response = response["generated_text"]
    past_user.append(message)
    generated.append(response)
    # history.append((message, response))
    return response
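# Hypothetical direct calls to the handler (outside Gradio), for illustration:
#   chat("What is your name?")    # returns one of the canned introductions
#   chat("Where are you from?")   # forwarded to the BlenderBot model via query()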
demo = gr.Interface(
    chat,
    inputs="text",
    outputs="text",
    title="Chatbot",
    description=(
        "This is a chatbot made using a pre-trained model by Facebook called "
        "BlenderBot, which I then primed with a little extra information."
    ),
)
demo.launch() | |
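# The commented-out history line in chat() hints at persisting the conversation.
# One possible sketch (an assumption, not part of the original Space) is to thread
# the history through Gradio's "state" input/output so past turns survive between
# calls, e.g.:
#
#   def chat_with_state(message, history):
#       history = history or {"past_user": ["what is your name?"],
#                             "generated": ["I am Sade, Funbi's AI chatbot"]}
#       result = query({"inputs": {"past_user_inputs": history["past_user"],
#                                  "generated_responses": history["generated"],
#                                  "text": message}})
#       response = result["generated_text"]
#       history["past_user"].append(message)
#       history["generated"].append(response)
#       return response, history
#
#   gr.Interface(chat_with_state,
#                inputs=["text", "state"],
#                outputs=["text", "state"]).launch()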