|
import requests |
|
import json |
|
import wikipedia |
|
import requests |
|
from bs4 import BeautifulSoup |
|
import gradio as gr |
|
from transformers import pipeline |
|
|
|
|
|
# SECURITY: hard-coded API key committed to source. Move this to an
# environment variable or secrets store, and rotate the exposed key —
# anyone with read access to this file can use it.
serp_api_key = '5924c6cfe5fec240e39838ff06439c8d36d294a0'
|
|
|
|
|
def chat(message):
    """Return a canned chat response.

    Placeholder for a real chat-model backend; ``message`` is currently
    ignored.

    Args:
        message: The user's input text (unused for now).

    Returns:
        A fixed dummy response string.
    """
    # Nothing fallible happens here, so the original try/except around a
    # constant return was dead code; return the constant directly.
    return "This is a dummy chat response."
|
|
|
|
|
def get_latest_answers(query):
    """Query the Serper.dev Google-search API and flatten the results.

    Collects the knowledge-graph website/description, organic-result
    snippets and "people also ask" snippets into one newline-separated
    string.

    Args:
        query: Free-text search query.

    Returns:
        The flattened result text, or "" on any failure (network error,
        non-JSON response, unexpected payload shape).
    """
    url = "https://google.serper.dev/search"

    payload = json.dumps({"q": query})
    headers = {
        'X-API-KEY': serp_api_key,
        'Content-Type': 'application/json',
    }

    try:
        # The HTTP call itself can fail (DNS, refused connection,
        # timeout), so it must live inside the try — the original left
        # it unguarded. A timeout keeps the UI from hanging forever.
        response = requests.post(url, headers=headers, data=payload, timeout=30)
        data = response.json()

        parts = []

        if 'knowledgeGraph' in data:
            knowledge_graph = data['knowledgeGraph']
            parts.append("Website: {}\n".format(knowledge_graph.get('website')))
            parts.append("Description: {}\n".format(knowledge_graph.get('description')))

        for result in data.get('organic', []):
            parts.append("Snippet: {}\n".format(result.get('snippet')))

        for question in data.get('peopleAlsoAsk', []):
            parts.append("Snippet: {}\n".format(question.get('snippet')))

        # join() instead of repeated += string concatenation.
        return "".join(parts)

    except ValueError as e:
        # Response body was not valid JSON (json.JSONDecodeError is a
        # ValueError subclass). Log the real cause, keep the "" contract.
        print("Search response was not valid JSON:", e)
        return ""

    except Exception as e:
        print("Search request failed:", e)
        return ""
|
|
|
|
|
def search_wikipedia(query):
    """Return the summary of the top Wikipedia hit for *query*.

    Args:
        query: Free-text search term.

    Returns:
        The summary text of the best-ranked matching page, or None when
        no page is found or any lookup error occurs.
    """
    try:
        search_results = wikipedia.search(query)

        # Guard clause: nothing matched.
        if not search_results:
            print("No Wikipedia results for query:", query)
            return None

        # Take the best-ranked hit; summary() itself can still raise
        # for disambiguation or missing pages, handled below.
        page_title = search_results[0]
        return wikipedia.summary(page_title)

    except wikipedia.exceptions.DisambiguationError as e:
        # Query matched a disambiguation page rather than an article.
        print("Wikipedia query is ambiguous:", e)
        return None
    except wikipedia.exceptions.PageError:
        print("Wikipedia page not found for query:", query)
        return None
    except Exception as e:
        # Last-resort catch so one failed lookup never crashes the app;
        # log the actual error instead of the original silent ".".
        print("Wikipedia lookup failed:", e)
        return None
|
|
|
|
|
def generate_summary(user_input):
    """Summarize combined web-search and Wikipedia results for a query.

    Args:
        user_input: The user's free-text query.

    Returns:
        A summarized paragraph produced by the HuggingFace
        summarization pipeline.
    """
    output = get_latest_answers(user_input)
    # search_wikipedia may return None; coerce to "" so we don't feed
    # the literal string "None" into the summarizer.
    page_summary = search_wikipedia(user_input) or ""
    # NOTE: the original also called chat(user_input) here but never
    # used the result, so that dead call was removed.

    # Build the summarization pipeline once and reuse it across calls:
    # constructing it loads a large model, far too expensive per request.
    if not hasattr(generate_summary, "_summarizer"):
        generate_summary._summarizer = pipeline("summarization")
    summarizer = generate_summary._summarizer

    input_text = f"\n{output}\n{page_summary}\n"
    summarized_paragraph = summarizer(input_text, max_length=200, do_sample=True)[0]['summary_text']

    return summarized_paragraph
|
|
|
|
|
def summarizer_interface(user_input):
    """Gradio entry point: delegate the query straight to generate_summary."""
    return generate_summary(user_input)
|
|
|
# Wire the summarizer into a single-textbox Gradio UI.
iface = gr.Interface(

    fn=summarizer_interface,

    inputs="text",

    outputs="text",

    title="Osana Web-GPT",

    description="Enter your query and get the latest and better answer.",

    # NOTE(review): string-valued theme/layout arguments are deprecated
    # in newer Gradio releases — confirm against the pinned gradio version.
    theme="black",

    layout="horizontal",

)




# Start the local web server; blocks until the process is interrupted.
iface.launch()