import requests
import json
import wikipedia
import gradio as gr
from transformers import pipeline

# Set up Google SERP API credentials
serp_api_key = 'YOUR_SERP_API_KEY'  # Replace with your actual serper.dev API key

# Function to send a message and receive a response from the chatbot
def chat(message):
    try:
        # You can add your chatbot implementation here
        return "This is a dummy chat response."
    except Exception as e:
        print("An error occurred:", e)
        return ""

# Function to get the latest answers from Google SERP API
def get_latest_answers(query):
    url = "https://google.serper.dev/search"

    payload = json.dumps({
        "q": query
    })
    headers = {
        'X-API-KEY': serp_api_key,
        'Content-Type': 'application/json'
    }

    try:
        response = requests.post(url, headers=headers, data=payload, timeout=30)

        # Parse the response JSON
        data = json.loads(response.text)

        # Extract details from the response
        output = ""

        if 'knowledgeGraph' in data:
            knowledge_graph = data['knowledgeGraph']
            output += "Website: {}\n".format(knowledge_graph.get('website'))
            output += "Description: {}\n".format(knowledge_graph.get('description'))

        if 'organic' in data:
            organic_results = data['organic']
            for result in organic_results:
                output += "Snippet: {}\n".format(result.get('snippet'))

        if 'peopleAlsoAsk' in data:
            people_also_ask = data['peopleAlsoAsk']
            for question in people_also_ask:
                output += "Snippet: {}\n".format(question.get('snippet'))

        return output

    except json.JSONDecodeError:
        print("SERP API returned a response that is not valid JSON.")
        return ""

    except Exception as e:
        print("SERP API request failed:", e)
        return ""

# Function to search Wikipedia for an answer and summarize it
def search_wikipedia(query):
    try:
        search_results = wikipedia.search(query)

        # Get the page summary of the first search result
        if search_results:
            page_title = search_results[0]
            page_summary = wikipedia.summary(page_title)
            return page_summary
        else:
            print("No Wikipedia results found for:", query)
            return None
    except wikipedia.exceptions.DisambiguationError as e:
        # The query matched several pages; report a few of the options
        print("Wikipedia query is ambiguous; options include:", e.options[:5])
        return None
    except wikipedia.exceptions.PageError:
        # Handle page not found
        print("No Wikipedia page found for:", query)
        return None
    except Exception as e:
        # Handle any other lookup failure
        print("Wikipedia lookup failed:", e)
        return None

# Load the summarization pipeline once at import time so the model is not
# re-initialized on every request
summarizer = pipeline("summarization")

# Function to generate a summarized paragraph using transformer-based summarization
def generate_summary(user_input):
    output = get_latest_answers(user_input)
    page_summary = search_wikipedia(user_input) or ""
    chat_answer = chat(user_input)

    # Combine the three sources and summarize them into a single paragraph
    input_text = f"Data from Google SERP API:\n{output}\nWikipedia summary:\n{page_summary}\n\nChat response:\n{chat_answer}\n\n"
    summarized_paragraph = summarizer(input_text, max_length=200, do_sample=True)[0]['summary_text']

    return summarized_paragraph
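
# Quick manual check (a sketch; bypasses the Gradio UI):
#   print(generate_summary("What is quantum computing?"))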

# Define the Gradio interface
def summarizer_interface(user_input):
    summarized_text = generate_summary(user_input)
    return summarized_text

iface = gr.Interface(
    fn=summarizer_interface,
    inputs="text",
    outputs="text",
    title="Osana Web-GPT",
    description="Enter your query and get the latest and better answer.",
    theme="black",
    layout="horizontal",
)

# Launch the interface
iface.launch()
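
# Note: launch() serves the app locally (Gradio's default address is
# http://127.0.0.1:7860). Passing share=True, i.e. iface.launch(share=True),
# additionally creates a temporary public link for quick external testing.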