Shreyas094 committed
Commit 204d06f
1 Parent(s): 149b538
Update app.py
app.py
CHANGED
@@ -243,19 +243,26 @@ def retry_last_response(history, use_web_search, model, temperature, num_calls):
 
     return chatbot_interface(last_user_msg, history, use_web_search, model, temperature, num_calls)
 
-def respond(message, history, model, temperature, num_calls, use_web_search, selected_docs):
+def respond(message, history, model, temperature, num_calls, use_web_search, selected_docs, instruction_key):
     logging.info(f"User Query: {message}")
     logging.info(f"Model Used: {model}")
     logging.info(f"Search Type: {'Web Search' if use_web_search else 'PDF Search'}")
-
     logging.info(f"Selected Documents: {selected_docs}")
+    logging.info(f"Instruction Key: {instruction_key}")
 
     try:
+        if instruction_key and instruction_key != "None":
+            # This is a summary generation request
+            instruction = INSTRUCTION_PROMPTS[instruction_key]
+            context_str = get_context_for_summary(selected_docs)
+            message = f"{instruction}\n\nUsing the following context from the PDF documents:\n{context_str}\nGenerate a detailed summary."
+            use_web_search = False  # Ensure we use PDF search for summaries
+
         if use_web_search:
             for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature):
                 response = f"{main_content}\n\n{sources}"
                 first_line = response.split('\n')[0] if response else ''
-
+                logging.info(f"Generated Response (first line): {first_line}")
                 yield response
         else:
             embed = get_embeddings()
@@ -289,17 +296,25 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
                     first_line = partial_response.split('\n')[0] if partial_response else ''
                     logging.info(f"Generated Response (first line): {first_line}")
                     yield partial_response
+
     except Exception as e:
         logging.error(f"Error with {model}: {str(e)}")
         if "microsoft/Phi-3-mini-4k-instruct" in model:
             logging.info("Falling back to Mistral model due to Phi-3 error")
             fallback_model = "mistralai/Mistral-7B-Instruct-v0.3"
-            yield from respond(message, history, fallback_model, temperature, num_calls, use_web_search, selected_docs)
+            yield from respond(message, history, fallback_model, temperature, num_calls, use_web_search, selected_docs, instruction_key)
         else:
             yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
 
 logging.basicConfig(level=logging.DEBUG)
 
+INSTRUCTION_PROMPTS = {
+    "Asset Managers": "Summarize the key financial metrics, assets under management, and performance highlights for this asset management company.",
+    "Consumer Finance Companies": "Provide a summary of the company's loan portfolio, interest income, credit quality, and key operational metrics.",
+    "Mortgage REITs": "Summarize the REIT's mortgage-backed securities portfolio, net interest income, book value per share, and dividend yield.",
+    # Add more instruction prompts as needed
+}
+
 def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2, search_type="pdf"):
     headers = {
         "Authorization": f"Bearer {API_TOKEN}",
@@ -467,6 +482,43 @@ css = """
 }
 """
 
+def get_context_for_summary(selected_docs):
+    embed = get_embeddings()
+    if os.path.exists("faiss_database"):
+        database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
+        retriever = database.as_retriever(search_kwargs={"k": 5})  # Retrieve top 5 most relevant chunks
+
+        # Create a generic query that covers common financial summary topics
+        generic_query = "financial performance revenue profit assets liabilities cash flow key metrics highlights"
+
+        relevant_docs = retriever.get_relevant_documents(generic_query)
+        filtered_docs = [doc for doc in relevant_docs if doc.metadata["source"] in selected_docs]
+
+        if not filtered_docs:
+            return "No relevant information found in the selected documents for summary generation."
+
+        context_str = "\n".join([doc.page_content for doc in filtered_docs])
+        return context_str
+    else:
+        return "No documents available for summary generation."
+
+def get_context_for_query(query, selected_docs):
+    embed = get_embeddings()
+    if os.path.exists("faiss_database"):
+        database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
+        retriever = database.as_retriever(search_kwargs={"k": 3})  # Retrieve top 3 most relevant chunks
+
+        relevant_docs = retriever.get_relevant_documents(query)
+        filtered_docs = [doc for doc in relevant_docs if doc.metadata["source"] in selected_docs]
+
+        if not filtered_docs:
+            return "No relevant information found in the selected documents for the given query."
+
+        context_str = "\n".join([doc.page_content for doc in filtered_docs])
+        return context_str
+    else:
+        return "No documents available to answer the query."
+
 uploaded_documents = []
 
 def display_documents():
@@ -483,6 +535,8 @@ use_web_search = gr.Checkbox(label="Use Web Search", value=True)
 
 custom_placeholder = "Ask a question (Note: You can toggle between Web Search and PDF Chat in Additional Inputs below)"
 
+instruction_choices = ["None"] + list(INSTRUCTION_PROMPTS.keys())
+
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
@@ -490,10 +544,11 @@ demo = gr.ChatInterface(
         gr.Slider(minimum=0.1, maximum=1.0, value=0.2, step=0.1, label="Temperature"),
         gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
         use_web_search,
-        document_selector
+        document_selector,
+        gr.Dropdown(choices=instruction_choices, label="Select Entity Type for Summary", value="None")
     ],
     title="AI-powered Web Search and PDF Chat Assistant",
-    description="Chat with your PDFs
+    description="Chat with your PDFs, use web search to answer questions, or generate summaries. Select an Entity Type for Summary to generate a specific summary.",
     theme=gr.themes.Soft(
         primary_hue="orange",
         secondary_hue="amber",
@@ -522,11 +577,11 @@ demo = gr.ChatInterface(
     analytics_enabled=False,
     textbox=gr.Textbox(placeholder=custom_placeholder, container=False, scale=7),
     chatbot = gr.Chatbot(
-
-
-
-
-    )
+        show_copy_button=True,
+        likeable=True,
+        layout="bubble",
+        height=400,
+    )
 )
 
 # Add file upload functionality
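
For reference, a minimal sketch (not part of the commit) of how the new summary path could be exercised once this change is applied. It assumes app.py is importable as a module with its API token configured, that a FAISS index already exists under "faiss_database", and that "example_report.pdf" is a hypothetical previously uploaded document whose name matches doc.metadata["source"]; respond is a generator, so the summary is streamed.

from app import respond

# Selecting an entity type other than "None" makes respond() ignore the message,
# build the prompt from INSTRUCTION_PROMPTS and get_context_for_summary(), and
# force the PDF (non-web-search) path.
for chunk in respond(
    message="",
    history=[],
    model="mistralai/Mistral-7B-Instruct-v0.3",
    temperature=0.2,
    num_calls=1,
    use_web_search=False,
    selected_docs=["example_report.pdf"],  # hypothetical uploaded PDF name
    instruction_key="Asset Managers",      # one of the new INSTRUCTION_PROMPTS keys
):
    print(chunk)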