Create app.py
app.py
ADDED
@@ -0,0 +1,37 @@
+import gradio as gr
+from llama_index.readers.file import PagedCSVReader
+from llama_index.core.indices import VectorStoreIndex
+import openai
+import os
+
+openai.api_key = os.getenv('OPENAI_API_KEY')
+
+def load_data():
+    try:
+        loader = PagedCSVReader()
+        documents = loader.load_data('/content/aitalents.csv')  # one document per CSV row
+        index = VectorStoreIndex.from_documents(documents)
+        query_engine = index.as_query_engine()
+        return query_engine
+    except Exception as e:
+        print(f"Error loading data or creating index: {e}")
+        return None
+
+query_engine = load_data()  # Build the query engine once at startup
+
+def chat(message, history):
+    if query_engine is None:
+        return "An error occurred while loading data. Please try again later."
+
+    try:
+        response = query_engine.query(message)
+        return str(response)  # query() returns a Response object; return its text
+    except Exception as e:
+        print(f"Error generating response: {e}")
+        return "I'm still learning how to answer that question. Please try asking something else."
+
+# Create the chatbot interface
+interface = gr.ChatInterface(fn=chat, title="Chatbot with Llama Hub and OpenAI")
+
+# Launch the interface
+interface.launch()
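
For a quick sanity check outside the Gradio UI, the same index can be built and queried directly from a Python shell. This is a minimal sketch, assuming /content/aitalents.csv exists and OPENAI_API_KEY is exported; the question string is purely illustrative:

from llama_index.readers.file import PagedCSVReader
from llama_index.core.indices import VectorStoreIndex

# Hypothetical smoke test: rebuild the index the same way app.py does and ask one question
documents = PagedCSVReader().load_data('/content/aitalents.csv')
engine = VectorStoreIndex.from_documents(documents).as_query_engine()
print(str(engine.query("Which entries mention Python experience?")))  # example question, not taken from the dataset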