Spaces:
Sleeping
Sleeping
SnehaAkula
committed on
Commit
•
9322867
1
Parent(s):
600695a
Upload app.py
Browse files
app.py
CHANGED
@@ -5,14 +5,31 @@ from PIL import Image
|
|
5 |
import tempfile
|
6 |
from langchain_community.document_loaders import PyPDFLoader, TextLoader, Docx2txtLoader
|
7 |
from langchain.chains.question_answering import load_qa_chain
|
8 |
-
from langchain_openai import OpenAI
|
9 |
from docx import Document
|
10 |
import io
|
|
|
11 |
|
12 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
13 |
|
14 |
# Set OpenAI API key
|
15 |
-
os.environ["OPENAI_API_KEY"] = "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"
|
16 |
|
17 |
# Initialize conversation history list
|
18 |
if "conversation_history" not in st.session_state:
|
@@ -43,7 +60,8 @@ def process_document(uploaded_file, query):
|
|
43 |
documents = loader.load()
|
44 |
|
45 |
# Load QA chain
|
46 |
-
chain = load_qa_chain(llm=OpenAI(), verbose=True)
|
|
|
47 |
|
48 |
# Perform question answering
|
49 |
response = chain.invoke({"input_documents": documents, "question": query})
|
@@ -51,7 +69,7 @@ def process_document(uploaded_file, query):
|
|
51 |
# Remove temporary file
|
52 |
os.unlink(tmp_file.name)
|
53 |
|
54 |
-
return response["output_text"]
|
55 |
|
56 |
|
57 |
|
@@ -112,15 +130,16 @@ def main():
|
|
112 |
response, _ = process_document(uploaded_file, query)
|
113 |
if response: # Check if response is not empty
|
114 |
# Update conversation history
|
|
|
115 |
update_conversation(query, response)
|
116 |
else:
|
117 |
st.sidebar.write("Please upload a document first.")
|
118 |
|
119 |
-
# Display conversation history
|
120 |
-
st.title("Conversation History")
|
121 |
-
for item in st.session_state.conversation_history:
|
122 |
-
|
123 |
-
|
124 |
|
125 |
# Run the application
|
126 |
if __name__ == "__main__":
|
|
|
5 |
import tempfile
|
6 |
from langchain_community.document_loaders import PyPDFLoader, TextLoader, Docx2txtLoader
|
7 |
from langchain.chains.question_answering import load_qa_chain
|
8 |
+
# from langchain_openai import OpenAI
|
9 |
from docx import Document
|
10 |
import io
|
11 |
+
from langchain_community.llms import HuggingFaceHub
|
12 |
|
13 |
|
14 |
+
import getpass
|
15 |
+
|
16 |
+
# os.environ["GOOGLE_API_KEY"] = "AIzaSyC6o10htIT1d2DCPe8fJ09UR14qcX9EVPc"
|
17 |
+
|
18 |
+
# from langchain_google_genai import ChatGoogleGenerativeAI
|
19 |
+
|
20 |
+
# llm = ChatGoogleGenerativeAI(
|
21 |
+
# model="gemini-pro",
|
22 |
+
# temperature=0,
|
23 |
+
# max_tokens=None,
|
24 |
+
# timeout=None,
|
25 |
+
# max_retries=2,
|
26 |
+
# )
|
27 |
+
|
28 |
+
huggingface_token = os.getenv('HUGGINGFACEHUB_API_TOKEN')
|
29 |
+
llm=HuggingFaceHub(repo_id="microsoft/Phi-3-mini-4k-instruct", model_kwargs={"temperature":0.5, "max_length":128})
|
30 |
|
31 |
# Set OpenAI API key
|
32 |
+
# os.environ["OPENAI_API_KEY"] = "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"
|
33 |
|
34 |
# Initialize conversation history list
|
35 |
if "conversation_history" not in st.session_state:
|
|
|
60 |
documents = loader.load()
|
61 |
|
62 |
# Load QA chain
|
63 |
+
# chain = load_qa_chain(llm=OpenAI(), verbose=True)
|
64 |
+
chain = load_qa_chain(llm=llm, verbose=True)
|
65 |
|
66 |
# Perform question answering
|
67 |
response = chain.invoke({"input_documents": documents, "question": query})
|
|
|
69 |
# Remove temporary file
|
70 |
os.unlink(tmp_file.name)
|
71 |
|
72 |
+
return response["output_text"]
|
73 |
|
74 |
|
75 |
|
|
|
130 |
response, _ = process_document(uploaded_file, query)
|
131 |
if response: # Check if response is not empty
|
132 |
# Update conversation history
|
133 |
+
st.write(response)
|
134 |
update_conversation(query, response)
|
135 |
else:
|
136 |
st.sidebar.write("Please upload a document first.")
|
137 |
|
138 |
+
# # Display conversation history
|
139 |
+
# st.title("Conversation History")
|
140 |
+
# for item in st.session_state.conversation_history:
|
141 |
+
# st.write("You:", item["question"])
|
142 |
+
# st.write("AI:", item["answer"])
|
143 |
|
144 |
# Run the application
|
145 |
if __name__ == "__main__":
|