import os

import gradio as gr
import openai
from llama_index.core.indices import VectorStoreIndex
from llama_index.readers.file import PagedCSVReader

# Read the API key from the environment (llama-index also picks up OPENAI_API_KEY).
openai.api_key = os.getenv("OPENAI_API_KEY")


def load_data():
    """Load the CSV, build a vector index over it, and return a query engine."""
    try:
        loader = PagedCSVReader()
        # PagedCSVReader turns each CSV row into its own Document.
        documents = loader.load_data("/content/aitalents.csv")
        index = VectorStoreIndex.from_documents(documents)
        return index.as_query_engine()
    except Exception as e:
        print(f"Error loading data or creating index: {e}")
        return None


# Build the query engine once at startup.
query_engine = load_data()


def chat(message, history):
    # gr.ChatInterface calls fn with (message, history), so both parameters
    # are required even though history is unused here.
    if query_engine is None:
        return "An error occurred while loading data. Please try again later."
    try:
        # query() returns a Response object; str() extracts the answer text.
        response = query_engine.query(message)
        return str(response)
    except Exception as e:
        print(f"Error generating response: {e}")
        return "I'm still learning how to answer that question. Please try asking something else."


# Create the chatbot interface
interface = gr.ChatInterface(fn=chat, title="Chatbot with Llama Hub and OpenAI")

# Launch the interface
interface.launch()
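
# ------------------------------------------------------------------
# Optional sanity check (illustrative sketch, run on its own rather than
# appended to the app above, since interface.launch() blocks).
# Assumptions: the real aitalents.csv schema is not shown above, so the
# columns used here ("name", "role") and the sample rows are made up for
# demonstration. PagedCSVReader emits roughly one Document per CSV row,
# rendering each cell as a "column: value" line, which is what the vector
# index embeds.
import csv
import tempfile

from llama_index.readers.file import PagedCSVReader

with tempfile.NamedTemporaryFile("w", suffix=".csv", delete=False, newline="") as tmp:
    writer = csv.writer(tmp)
    writer.writerow(["name", "role"])       # hypothetical header
    writer.writerow(["Ada", "Researcher"])  # hypothetical rows
    writer.writerow(["Alan", "Engineer"])
    sample_path = tmp.name

for doc in PagedCSVReader().load_data(sample_path):
    print(doc.text)
    print("---")
# Expected output, one block per row, e.g.:
#   name: Ada
#   role: Researcher
#   ---
#   name: Alan
#   role: Engineer
#   ---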