import openai
import requests
import json
import wikipedia
import gradio as gr

# Set up the OpenAI API client
openai.api_key = 'YOUR_OPENAI_API_KEY'  # Replace with your actual API key

# Set up Serper (Google SERP) API credentials
serp_api_key = 'YOUR_SERPER_API_KEY'  # Replace with your actual Serper API key


# Function to send a message and receive a response from the chatbot
def chat(message):
    try:
        response = openai.Completion.create(
            engine='text-davinci-003',  # Choose the language model/engine you want to use
            prompt=message,
            max_tokens=50,  # Adjust the response length as needed
            n=1,            # Number of responses to generate
            stop=None,      # Specify a stop token to end the response
        )
        return response.choices[0].text.strip()
    except Exception as e:
        print("An error occurred while querying OpenAI:", e)
        return ""


# Function to get the latest answers from the Serper search API
def get_latest_answers(query):
    url = "https://google.serper.dev/search"
    payload = json.dumps({"q": query})
    headers = {
        'X-API-KEY': serp_api_key,
        'Content-Type': 'application/json'
    }
    response = requests.post(url, headers=headers, data=payload)

    try:
        # Parse the response JSON
        data = json.loads(response.text)

        # Extract details from the response
        output = ""
        if 'knowledgeGraph' in data:
            knowledge_graph = data['knowledgeGraph']
            output += "Website: {}\n".format(knowledge_graph.get('website'))
            output += "Description: {}\n".format(knowledge_graph.get('description'))
        if 'organic' in data:
            for result in data['organic']:
                output += "Snippet: {}\n".format(result.get('snippet'))
        if 'peopleAlsoAsk' in data:
            for question in data['peopleAlsoAsk']:
                output += "Snippet: {}\n".format(question.get('snippet'))
        return output
    except json.JSONDecodeError:
        print("Failed to parse the Serper API response as JSON.")
        return ""
    except Exception as e:
        print("An error occurred while processing the Serper API response:", e)
        return ""


# Function to search Wikipedia for an answer and summarize it
def search_wikipedia(query):
    try:
        search_results = wikipedia.search(query)

        # Get the page summary of the first search result
        if search_results:
            page_title = search_results[0]
            page_summary = wikipedia.summary(page_title)
            return page_summary
        else:
            print("No Wikipedia results found for the query.")
            return None
    except wikipedia.exceptions.DisambiguationError:
        # The query matched several possible pages
        print("The Wikipedia query was ambiguous; skipping.")
        return None
    except wikipedia.exceptions.PageError:
        # No page matched the query
        print("No matching Wikipedia page was found.")
        return None
    except Exception as e:
        # Handle any other exception
        print("An error occurred while searching Wikipedia:", e)
        return None


# Function to generate a summarized paragraph using the OpenAI API
def generate_summary(user_input):
    output = get_latest_answers(user_input)
    page_summary = search_wikipedia(user_input)
    chat_answer = chat(user_input)

    # Combine the three sources and ask the model for a single summary paragraph
    response = openai.Completion.create(
        engine='text-davinci-003',
        prompt=(
            f"Data from Google SERP API:\n{output}\n"
            f"Wikipedia summary:\n{page_summary}\n\n"
            f"OpenAI chat response:\n{chat_answer}\n\n"
            "Summarize the above data into a paragraph."
        ),
        max_tokens=200
    )
    summarized_paragraph = response.choices[0].text.strip()
    return summarized_paragraph


# Define the Gradio interface
def summarizer_interface(user_input):
    summarized_text = generate_summary(user_input)
    return summarized_text


iface = gr.Interface(
    fn=summarizer_interface,
    inputs="text",
    outputs="text",
    title="Osana Web-GPT",
    description="Enter your query and get the latest and best answer.",
    theme="black",
    layout="horizontal",
    examples=[
        ["What is the capital of France?"],
        ["How does photosynthesis work?"],
        ["Who is the president of the United States?"],
        ["What is the capital of Japan?"],
        ["How do I bake a chocolate cake?"],
        ["What is the meaning of life?"],
        ["Who painted the Mona Lisa?"],
        ["What is the population of New York City?"],
        ["How does the internet work?"],
        ["What is the largest planet in our solar system?"],
        ["What are the benefits of regular exercise?"],
    ],
)

# Launch the interface
iface.launch()