# Osana-WEB-GPT / app_update02.py
import json

import requests
import wikipedia
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Set up Google SERP API credentials (served via Serper.dev)
serp_api_key = '5924c6cfe5fec240e39838ff06439c8d36d294a0'  # Replace with your actual Serper.dev API key
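# A safer alternative (a sketch, not part of the original script) is to read the
# key from an environment variable instead of hard-coding it; the variable name
# SERPER_API_KEY here is a hypothetical choice:
#   import os
#   serp_api_key = os.environ.get("SERPER_API_KEY", "")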
# Load the BART summarization model (facebook/bart-large-cnn)
tokenizer = AutoTokenizer.from_pretrained("facebook/bart-large-cnn")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/bart-large-cnn")
# Function to send a message and receive a response from the chatbot
def chat(message):
    try:
        # You can add your chatbot implementation here
        return "."
    except Exception as e:
        print("An error occurred:", e)
        return ""
# Function to get the latest answers from the Google SERP (Serper.dev) API
def get_latest_answers(query):
    url = "https://google.serper.dev/search"
    payload = json.dumps({"q": query})
    headers = {
        'X-API-KEY': serp_api_key,
        'Content-Type': 'application/json'
    }
    response = requests.post(url, headers=headers, data=payload)

    try:
        # Parse the response JSON
        data = json.loads(response.text)

        # Extract details from the response
        output = ""
        if 'knowledgeGraph' in data:
            knowledge_graph = data['knowledgeGraph']
            output += "Website: {}\n".format(knowledge_graph.get('website'))
            output += "Description: {}\n".format(knowledge_graph.get('description'))
        if 'organic' in data:
            organic_results = data['organic']
            for result in organic_results:
                output += "Snippet: {}\n".format(result.get('snippet'))
        if 'peopleAlsoAsk' in data:
            people_also_ask = data['peopleAlsoAsk']
            for question in people_also_ask:
                output += "Snippet: {}\n".format(question.get('snippet'))
        return output

    except json.JSONDecodeError:
        print("Could not parse the SERP API response as JSON.")
        return ""
    except Exception as e:
        print("An error occurred while querying the SERP API:", e)
        return ""
# Function to search Wikipedia for an answer and summarize it
def search_wikipedia(query):
    try:
        search_results = wikipedia.search(query)

        # Get the page summary of the first search result
        if search_results:
            page_title = search_results[0]
            page_summary = wikipedia.summary(page_title)
            return page_summary
        else:
            print("No Wikipedia results found for the query.")
            return None
    except wikipedia.exceptions.DisambiguationError as e:
        # The query matched a disambiguation page with several candidates
        print("Wikipedia disambiguation error:", e)
        return None
    except wikipedia.exceptions.PageError:
        # No Wikipedia page matches the query
        print("Wikipedia page not found.")
        return None
    except Exception as e:
        # Handle any other exception
        print("An error occurred while searching Wikipedia:", e)
        return None
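# Example (hypothetical query): search_wikipedia("Alan Turing") returns the lead
# summary of the best-matching Wikipedia page, or None when nothing is found.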
# Function to generate a summarized paragraph with the BART summarization model
def generate_summary(user_input):
    output = get_latest_answers(user_input)
    page_summary = search_wikipedia(user_input)
    chat_answer = chat(user_input)  # Currently unused; kept for the future chatbot hook

    # Combine the input text from the various sources
    input_text = f"Google:\n{output}\nWikipedia:\n{page_summary}\n"

    # Tokenize and generate a summary
    input_ids = tokenizer.encode(input_text, return_tensors="pt", max_length=1024, truncation=True)
    summary_ids = model.generate(input_ids, max_length=200, min_length=50, length_penalty=2.0, num_beams=4, early_stopping=True)

    # Decode the summary
    summarized_paragraph = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summarized_paragraph
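# Notes on the generation settings above: num_beams=4 enables beam search,
# length_penalty=2.0 biases beam scoring toward longer outputs, and
# min_length/max_length bound the summary to roughly 50-200 tokens.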
# Define the Gradio interface
def summarizer_interface(user_input):
    summarized_text = generate_summary(user_input)
    return summarized_text

iface = gr.Interface(
    fn=summarizer_interface,
    inputs="text",
    outputs="text",
    title="Osana Web-GPT",
    description="Enter your query and get an up-to-date, summarized answer.",
    theme="black",
    layout="horizontal",
)
# Launch the interface
iface.launch()
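# To share the app beyond localhost, Gradio's launch() accepts share=True to
# create a temporary public link (an available option, not something this
# script enables):
#   iface.launch(share=True)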