abhishek kumar committed on
Commit
13fab0d
1 Parent(s): 13fcc41

Add application file

Browse files
Files changed (3) hide show
  1. app.py +22 -4
  2. conversation.py +67 -0
  3. indexes.py +33 -0
app.py CHANGED
@@ -1,7 +1,25 @@
1
  import gradio as gr
 
 
2
 
3
def greet(name):
    """Return an enthusiastic greeting for *name*."""
    return f"Hello {name}!!"
 
 
 
5
 
6
# Legacy wiring (deleted by this commit): a one-function Gradio Interface
# exposing greet() with a text input and a text output, launched directly.
iface = gr.Interface(fn=greet, inputs="text", outputs="text")
iface.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

import gradio as gr

from indexes import clear_indexes
from conversation import create_conversation

# SECURITY: the original commit hard-coded live Pinecone and OpenAI API keys
# in this file. Any key that was ever committed here must be considered
# compromised and revoked immediately. Credentials are now read from the
# environment instead; the constant names are kept for backward
# compatibility with any external references.
HARDCODED_PINECONE_API_KEY = os.getenv('PINECONE_API_KEY', '')
HARDCODED_PINECONE_ENVIRONMENT = os.getenv('PINECONE_ENVIRONMENT', 'us-west1-gcp-free')
HARDCODED_PINECONE_INDEX_NAME = os.getenv('PINECONE_INDEX_NAME', 'chatbot')
HARDCODED_OPENAI_API_KEY = os.getenv('OPENAI_API_KEY', '')
11
def update_chat_history(query, chatbot_state):
    """Run one conversation turn and hand the refreshed history to Gradio.

    Returns a pair: an empty string (clears the input textbox) and the chat
    history updated with the new (query, answer) exchange.
    """
    # A falsy state (None on first use) means we start a fresh history.
    history = chatbot_state or []
    _, history = create_conversation(query, history)
    return "", history
15
+
16
# Build the chat UI: a history pane, a free-text input, and a button that
# resets both.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label='Talk to the Document')
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

    # Link the Gradio interface with the update_chat_history function:
    # submitting the textbox runs one turn, clears the box (first output)
    # and refreshes the history pane (second output).
    msg.submit(update_chat_history, inputs=[msg, chatbot], outputs=[msg, chatbot])

# Launch the web app only when run as a script, not when imported.
if __name__ == '__main__':
    demo.launch()
conversation.py ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.vectorstores import Pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
import pinecone
import os
import logging

# Load environment variables from a local .env file, if one exists.
from dotenv import load_dotenv
load_dotenv()

# SECURITY: the original commit hard-coded live Pinecone and OpenAI API keys
# here, even though the comment claimed they came from the .env file. Any
# key that ever shipped in this file must be revoked. The values are now
# actually read from the environment, which load_dotenv() above populates.
pinecone_api_key = os.getenv('PINECONE_API_KEY', '')
pinecone_environment = os.getenv('PINECONE_ENVIRONMENT', 'us-west1-gcp-free')
pinecone_index_name = os.getenv('PINECONE_INDEX_NAME', 'chatbot')
openai_api_key = os.getenv('OPENAI_API_KEY', '')

# Module-level logger; DEBUG level is intentional for this demo app.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
23
+
24
def create_conversation(query: str, chat_history: list) -> tuple:
    """Answer *query* against the Pinecone-backed document index.

    Appends the (query, answer) pair to *chat_history* (mutated in place)
    and returns ('', chat_history). On any failure the error text is
    appended as the answer instead of raising, so the UI always receives a
    usable history.
    """
    logger.debug("Starting create_conversation function")

    try:
        logger.debug("Initializing Pinecone")
        pinecone.init(api_key=pinecone_api_key, environment=pinecone_environment)
        logger.debug("Pinecone initialized")

        logger.debug("Creating embeddings")
        embedder = OpenAIEmbeddings(openai_api_key=openai_api_key)
        logger.debug("Embeddings created")

        logger.debug("Accessing existing Pinecone index")
        vectorstore = Pinecone.from_existing_index(
            index_name=pinecone_index_name,
            embedding=embedder,
        )
        logger.debug("Index accessed")

        logger.debug("Setting up memory for conversation")
        buffer = ConversationBufferMemory(memory_key='chat_history', return_messages=False)
        logger.debug("Memory set up")

        logger.debug("Creating ConversationalRetrievalChain")
        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(temperature=0.0, openai_api_key=openai_api_key),
            retriever=vectorstore.as_retriever(),
            memory=buffer,
            # Hand the history through unchanged instead of re-serializing it.
            get_chat_history=lambda h: h,
        )
        logger.debug("ConversationalRetrievalChain created")

        logger.debug(f"Processing query: {query}")
        result = chain({'question': query, 'chat_history': chat_history})
        logger.debug(f"Received result: {result}")

        logger.debug("Appending result to chat history")
        chat_history.append((query, result['answer']))
        logger.debug(f"Updated chat history: {chat_history}")

        return '', chat_history

    except Exception as e:
        # Best-effort by design: surface the failure in the chat itself.
        logger.exception("Exception occurred in create_conversation")
        chat_history.append((query, "Error: " + str(e)))
        return '', chat_history
67
+
indexes.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
from PyPDF2 import PdfReader
import pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Pinecone

# NOTE(review): PdfReader, OpenAIEmbeddings, langchain's Pinecone, and the
# three names below are never used in this module; they are kept only in
# case other modules re-import them from here — confirm and remove.
# (Three commented-out, nonexistent langchain imports were deleted.)
from pinecone import init, create_index, Index

# Feature flags read from the environment; anything other than the exact
# string 'True' leaves the flag disabled.
debug_mode = os.getenv('DEBUG', 'False') == 'True'
verbose_mode = os.getenv('VERBOSE', 'False') == 'True'
14
+
15
def clear_indexes(pinecone_api_key: str, pinecone_environment: str, pinecone_index_name: str) -> str:
    """Delete the named Pinecone index if it exists.

    Returns 'Indexes cleared.' on success — even when no index with that
    name was found — and an 'Error occurred: ...' string on failure instead
    of raising.
    """
    try:
        print("Initializing Pinecone for clearing indexes...")
        pinecone.init(api_key=pinecone_api_key, environment=pinecone_environment)

        print("Checking existing Pinecone indexes for clearing...")
        existing = pinecone.list_indexes()
        if pinecone_index_name not in existing:
            print(f"No index named {pinecone_index_name} found to clear.")
        else:
            print(f"Deleting Pinecone index: {pinecone_index_name}")
            pinecone.delete_index(name=pinecone_index_name)
            print("Index cleared.")

        return 'Indexes cleared.'

    except Exception as e:
        print(f"Error occurred in clear_indexes: {str(e)}")
        return f"Error occurred: {str(e)}"