# test01 / app.py — "Create app.py" by Navanjana (commit b293841)
import functools
import json
import os

import requests
import wikipedia
from bs4 import BeautifulSoup
import gradio as gr
from transformers import pipeline
# Google SERP (serper.dev) API credentials.
# Prefer the SERP_API_KEY environment variable; fall back to the historical
# hardcoded key so existing deployments keep working.
# NOTE(review): shipping a live key in source is a security risk — rotate this
# key and supply it exclusively via the environment.
serp_api_key = os.environ.get('SERP_API_KEY', '5924c6cfe5fec240e39838ff06439c8d36d294a0')
# Function to send a message and receive a response from the chatbot
def chat(message):
    """Return a chatbot reply for *message*.

    Placeholder implementation: always returns a fixed dummy string
    regardless of the input. Replace with a real chatbot backend.

    Args:
        message: The user's message (currently ignored).

    Returns:
        A canned response string.
    """
    # The original wrapped this constant return in try/except, but a bare
    # string return can never raise — the dead handler has been removed.
    return "This is a dummy chat response."
# Function to get the latest answers from Google SERP API
def get_latest_answers(query):
    """Query the serper.dev search API and flatten the result into text.

    Collects the knowledge-graph website/description, organic-result
    snippets, and people-also-ask snippets into one newline-joined string.

    Args:
        query: Free-text search query.

    Returns:
        The concatenated result text, or "" on any network/parse failure
        (this function never raises — callers rely on the empty-string
        fallback).
    """
    url = "https://google.serper.dev/search"
    payload = json.dumps({"q": query})
    headers = {
        'X-API-KEY': serp_api_key,
        'Content-Type': 'application/json',
    }
    try:
        # The request itself now lives inside the try so connection errors
        # and timeouts honor the return-"" contract instead of propagating.
        # A timeout prevents the UI from hanging forever on a dead endpoint.
        response = requests.post(url, headers=headers, data=payload, timeout=10)
        data = json.loads(response.text)

        output = ""
        if 'knowledgeGraph' in data:
            knowledge_graph = data['knowledgeGraph']
            output += "Website: {}\n".format(knowledge_graph.get('website'))
            output += "Description: {}\n".format(knowledge_graph.get('description'))
        if 'organic' in data:
            for result in data['organic']:
                output += "Snippet: {}\n".format(result.get('snippet'))
        if 'peopleAlsoAsk' in data:
            for question in data['peopleAlsoAsk']:
                output += "Snippet: {}\n".format(question.get('snippet'))
        return output
    except json.JSONDecodeError:
        print("SERP API returned a non-JSON response.")
        return ""
    except requests.RequestException as e:
        print("SERP API request failed:", e)
        return ""
    except Exception as e:
        # Last-resort guard so the Gradio handler never crashes on search.
        print("Unexpected error while querying SERP API:", e)
        return ""
# Function to search Wikipedia for an answer and summarize it
def search_wikipedia(query):
    """Look up *query* on Wikipedia and return the top result's summary.

    Args:
        query: Free-text search query.

    Returns:
        The summary text of the first matching page, or None when there is
        no match or the lookup fails (callers rely on the None fallback —
        this function never raises).
    """
    try:
        search_results = wikipedia.search(query)
        # Guard clause: nothing matched.
        if not search_results:
            print("No Wikipedia results for query.")
            return None
        return wikipedia.summary(search_results[0])
    except wikipedia.exceptions.DisambiguationError:
        # Query matched a disambiguation page rather than a single article.
        print("Wikipedia query was ambiguous.")
        return None
    except wikipedia.exceptions.PageError:
        # The top search hit does not resolve to a real page.
        print("Wikipedia page not found.")
        return None
    except Exception as e:
        # Network or library failure — degrade gracefully, keep the app up.
        print("Wikipedia lookup failed:", e)
        return None
# Function to generate summarized paragraph using transformer-based summarization
@functools.lru_cache(maxsize=1)
def _get_summarizer():
    """Build the HF summarization pipeline once and reuse it across calls.

    The original code constructed pipeline("summarization") on every request,
    reloading the model each time — by far the dominant cost per query.
    """
    return pipeline("summarization")


def generate_summary(user_input):
    """Gather SERP and Wikipedia text for *user_input* and summarize it.

    Args:
        user_input: The user's free-text query.

    Returns:
        The summarizer's output paragraph (non-deterministic because
        do_sample=True, preserved from the original).
    """
    serp_text = get_latest_answers(user_input)
    wiki_summary = search_wikipedia(user_input)
    # NOTE(review): the original also called chat(user_input) but never used
    # the result; the dead call has been removed.
    input_text = f"\n{serp_text}\n{wiki_summary}\n"
    result = _get_summarizer()(input_text, max_length=200, do_sample=True)
    return result[0]['summary_text']
# Define the Gradio interface
def summarizer_interface(user_input):
    """Gradio adapter: forward the query straight to generate_summary."""
    return generate_summary(user_input)
# Build the Gradio UI: one text box in, summarized text out.
# NOTE(review): `theme="black"` and `layout="horizontal"` are legacy
# Gradio 3.x-era options; newer Gradio releases ignore or reject these
# string values — confirm against the pinned gradio version.
iface = gr.Interface(
fn=summarizer_interface,
inputs="text",
outputs="text",
title="Osana Web-GPT",
description="Enter your query and get the latest and better answer.",
theme="black",
layout="horizontal",
)
# Launch the interface (blocks and serves the app).
iface.launch()