Abrar20 committed
Commit 08f4a04
1 Parent(s): 4da307f

Update app.py

Files changed (1)
  1. app.py +77 -37
app.py CHANGED
@@ -5,8 +5,36 @@ from langchain.llms import HuggingFaceHub
 from langchain.embeddings import HuggingFaceHubEmbeddings
 from langchain.vectorstores import Chroma
 from langchain.chains import RetrievalQA
+from langchain.prompts import PromptTemplate

-def loading_pdf(): return 'Loading...'
+def get_chain(llm, retriever):
+    prompt_template = """
+    Instructions:
+    You are a knowledgeable assistant focused on providing safety guidelines for coastal areas during cyclones. Your goal is to generate personalized, clear, and actionable advice based on the specific details provided about the user's infrastructure, proximity to the cyclone pathway, cyclone speed and proximity to the nearest shelter, person's location.
+
+    Please:
+    - Carefully analyze the provided context from the PDF.
+    - Offer tailored guidance that addresses the user's unique situation.
+    - calculating nearest shelter by location of the person(lat,lon) and shelter coordinates(lat,lon)
+    - calculating Proximity to cyclone by location of the person(lat,lon) and Predicted Cyclone Coordinates(lat,lon).
+    - Ensure that your advice is practical and directly applicable.
+    - If information is missing or unclear, use logical assumptions based on the context to provide the best possible recommendations.
+    - Be concise but thorough, offering detailed steps when necessary to enhance safety and preparedness.
+
+    Context:\n{context}\n
+    Question: \n{question}\n
+    Personalized Guideline:
+    """
+    PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
+    chain_type_kwargs = {"prompt": PROMPT}
+    qa_chain = RetrievalQA.from_chain_type(
+        llm=llm,
+        chain_type="stuff",
+        retriever=retriever,
+        chain_type_kwargs=chain_type_kwargs,
+        return_source_documents=True
+    )
+    return qa_chain

 def pdf_changes(pdf_doc, repo_id):
     loader = OnlinePDFLoader(pdf_doc.name)
@@ -16,59 +44,71 @@ def pdf_changes(pdf_doc, repo_id):
     embeddings = HuggingFaceHubEmbeddings()
     db = Chroma.from_documents(texts, embeddings)
     retriever = db.as_retriever()
-    llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={'temperature': 0.5, 'max_new_tokens': 2096})
+    llm = HuggingFaceHub(
+        repo_id=repo_id,
+        model_kwargs={'temperature': 0.5, 'max_new_tokens': 2096}
+    )
     global qa
-    qa = RetrievalQA.from_chain_type(llm=llm, chain_type='stuff', retriever=retriever, return_source_documents=True)
+    qa = get_chain(llm, retriever)
     return "Ready"

-def add_text(history, text):
-    history = history + [(text, None)]
-    return history, ''
+def generate_guideline(infrastructure, location, cyclone_predicted_coordinates, cyclone_speed):
+    if infrastructure and location and cyclone_predicted_coordinates and cyclone_speed:
+        user_question = f"""{infrastructure} Infrastructure, location of the person(lat,lon) is {location}, Cyclone Speed in knot is {cyclone_speed}, Predicted Cyclone Coordinates(lat,lon) is {cyclone_predicted_coordinates}. Please give guideline what will be best in this context. Give precise instruction by calculating proximity to cyclone by location of the person(lat,lon) and Predicted Cyclone Coordinates(lat,lon). Also give the location of the nearest shelter by calculating location of the person(lat,lon) and shelter coordinates(lat,lon) (from the text chunk given). Don't give Proximity to cyclone and Proximity to shelter though (only use this to generate the guideline). Also give the helpline number in last: 333."""
+        result = qa({'query': user_question})
+        return result['result']
+    else:
+        return "Please provide all inputs."

-def bot(history):
-    response = infer(history[-1][0])
-    history[-1][1] = response['result']
-    return history
-
-def infer(question):
-    query = question
-    result = qa({'query': query})
-    return result
-
-css="""
+css = """
 #col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
 """

 title = """
-<h1>Chat with PDF</h1>
+<h1>🌪️ Generate Tailored Cyclone Safety Guidelines</h1>
 """

 with gr.Blocks(css=css, theme='Taithrah/Minimal') as demo:
     with gr.Column(elem_id='col-container'):
         gr.HTML(title)

-    with gr.Column():
         pdf_doc = gr.File(label='Upload a PDF', file_types=['.pdf'])
-        repo_id = gr.Dropdown(label='LLM',
-            choices=[
-                'mistralai/Mistral-7B-Instruct-v0.1',
-                'HuggingFaceH4/zephyr-7b-beta',
-                'meta-llama/Llama-2-7b-chat-hf',
-                '01-ai/Yi-6B-200K'
-                'cognitivecomputations/dolphin-2.5-mixtral-8x7b'
-            ],
-            value='mistralai/Mistral-7B-Instruct-v0.1')
+        repo_id = gr.Dropdown(
+            label='LLM',
+            choices=[
+                'mistralai/Mistral-7B-Instruct-v0.1',
+                'HuggingFaceH4/zephyr-7b-beta',
+                'meta-llama/Llama-2-7b-chat-hf',
+                '01-ai/Yi-6B-200K',
+                'cognitivecomputations/dolphin-2.5-mixtral-8x7b'
+            ],
+            value='mistralai/Mistral-7B-Instruct-v0.1'
+        )
         with gr.Row():
-            langchain_status = gr.Textbox(label='Status', placeholder='', interactive=False)
+            langchain_status = gr.Textbox(
+                label='Status', placeholder='', interactive=False
+            )
             load_pdf = gr.Button('Load PDF to LangChain')

-        chatbot = gr.Chatbot([], elem_id='chatbot')#.style(height=350)
-        question = gr.Textbox(label='Question', placeholder='Type your query')
-        submit_btn = gr.Button('Send')
+        # Input fields for user information
+        infrastructure = gr.Textbox(label='Infrastructure')
+        location = gr.Textbox(label='Location Coordinate (lat,lon)')
+        cyclone_predicted_coordinates = gr.Textbox(label='Predicted Cyclone Coordinates (lat,lon)')
+        cyclone_speed = gr.Textbox(label='Cyclone Speed in Knots')
+
+        submit_btn = gr.Button('Generate Guideline')
+        output = gr.Textbox(label='Personalized Guideline', lines=10)

-    repo_id.change(pdf_changes, inputs=[pdf_doc, repo_id], outputs=[langchain_status], queue=False)
-    load_pdf.click(pdf_changes, inputs=[pdf_doc, repo_id], outputs=[langchain_status], queue=False)
-    question.submit(add_text, [chatbot, question], [chatbot, question]).then(bot, chatbot, chatbot)
-    submit_btn.click(add_text, [chatbot, question], [chatbot, question]).then(bot, chatbot, chatbot)
+    repo_id.change(
+        pdf_changes, inputs=[pdf_doc, repo_id], outputs=[langchain_status], queue=False
+    )
+    load_pdf.click(
+        pdf_changes, inputs=[pdf_doc, repo_id], outputs=[langchain_status], queue=False
+    )
+    submit_btn.click(
+        generate_guideline,
+        inputs=[infrastructure, location, cyclone_predicted_coordinates, cyclone_speed],
+        outputs=output
+    )

-demo.launch()
+demo.launch()
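
Note (not part of the commit): below is a minimal, self-contained sketch of the pattern the new get_chain() relies on, a RetrievalQA "stuff" chain whose prompt is overridden via chain_type_kwargs, queried the same way generate_guideline() queries it. The placeholder Document, the shortened prompt string, and the hard-coded model choice are illustrative assumptions; only the chain wiring mirrors app.py, and a HUGGINGFACEHUB_API_TOKEN must be set in the environment for the Hub calls to work.

# Standalone sketch of the custom-prompt RetrievalQA pattern used in app.py.
# The document, prompt, and query below are illustrative placeholders.
from langchain.schema import Document
from langchain.embeddings import HuggingFaceHubEmbeddings
from langchain.vectorstores import Chroma
from langchain.llms import HuggingFaceHub
from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate

# Stand-in for the text chunks that app.py splits out of the uploaded PDF.
docs = [Document(page_content="Shelter Alpha is located at (22.35, 91.78). Helpline: 333.")]

# Embed the chunks and build the retriever, as pdf_changes() does.
db = Chroma.from_documents(docs, HuggingFaceHubEmbeddings())

llm = HuggingFaceHub(
    repo_id='mistralai/Mistral-7B-Instruct-v0.1',
    model_kwargs={'temperature': 0.5, 'max_new_tokens': 2096},
)

# Shortened version of the prompt template defined in get_chain().
prompt = PromptTemplate(
    template="Context:\n{context}\nQuestion:\n{question}\nPersonalized Guideline:",
    input_variables=["context", "question"],
)

# The key change in this commit: the prompt is injected through chain_type_kwargs.
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=db.as_retriever(),
    chain_type_kwargs={"prompt": prompt},
    return_source_documents=True,
)

# Query the chain the way generate_guideline() does and print the answer text.
result = qa({"query": "Person at (22.36, 91.80), cyclone speed 95 knots, tin-roof house."})
print(result["result"])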