import os

import gradio as gr
import requests

# Hugging Face Inference API endpoint for the AllenNLP BiDAF-ELMo question answering model
API_URL = "https://api-inference.huggingface.co/models/allenai/bidaf-elmo"

# The Inference API expects the token as a Bearer credential
HF_TOKEN = os.environ.get("HF_TOKEN")
headers = {"Authorization": f"Bearer {HF_TOKEN}"}

def query(question, context):
    """Send a question/context pair to the Inference API and return the predicted answer."""
    data = {
        "inputs": {
            "question": question,
            "context": context,
        },
    }
    response = requests.post(API_URL, headers=headers, json=data)
    return response.json()["outputs"]["answer"]
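
# Minimal sanity check, assuming the deployed model returns the
# "outputs" -> "answer" structure parsed above; uncomment to try it outside the UI:
#
#   print(query(
#       "Who jumped over the lazy dog?",
#       "The quick brown fox jumped over the lazy dog.",
#   ))
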
# Example question and context
passage = "The quick brown fox jumped over the lazy dog."
question = "Who jumps over the lazy dog?"

# Build the Gradio UI: the first input is the question, the second is the context passage
iface = gr.Interface(
    query,
    title="Allen NLP Question Answering",
    inputs=[gr.Textbox(label="Question"), gr.Textbox(lines=15, label="Context")],
    outputs="text",
    examples=[[question, passage]],
)
iface.launch()