import gradio as gr
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.llms import HuggingFaceHub
from langchain.embeddings import HuggingFaceHubEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate
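# Note: these are the legacy (pre-0.1) LangChain import paths; on current releases the same
# classes are exposed from `langchain_community` instead (adjust to your installed version).
# Both HuggingFaceHub and HuggingFaceHubEmbeddings expect a Hugging Face API token, typically
# supplied via the HUGGINGFACEHUB_API_TOKEN environment variable.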

def get_chain(llm, retriever):
    # Build a RetrievalQA chain that "stuffs" the retrieved PDF chunks into the prompt below.
    prompt_template = """
    Instructions:
    You are a knowledgeable assistant focused on providing safety guidelines for coastal areas during cyclones. Your goal is to generate personalized, clear, and actionable advice based on the specific details provided about the user's infrastructure, proximity to the cyclone pathway, cyclone speed, proximity to the nearest shelter, and the person's location.
    
    Please:
    - Carefully analyze the provided context from the PDF.
    - Offer tailored guidance that addresses the user's unique situation.
    - Calculate the nearest shelter by location of the person (lat, lon) and shelter coordinates (lat, lon).
    - Calculate proximity to cyclone by location of the person (lat, lon) and predicted cyclone coordinates (lat, lon).
    - Ensure that your advice is practical and directly applicable.
    - If information is missing or unclear, use logical assumptions based on the context to provide the best possible recommendations.
    - Be concise but thorough, offering detailed steps when necessary to enhance safety and preparedness.
    
    Context:\n{context}\n
    Question: \n{question}\n
    Personalized Guideline:
    """
    PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
    chain_type_kwargs = {"prompt": PROMPT}
    qa_chain = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=retriever,
        chain_type_kwargs=chain_type_kwargs,
        return_source_documents=True
    )
    return qa_chain
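
# Illustrative helper (a sketch, not wired into the app): the prompt above asks the LLM to
# reason about distances between (lat, lon) pairs. A deterministic alternative is to precompute
# those distances with the haversine formula and include them in the question text. Assumes
# coordinates arrive as "lat, lon" strings in decimal degrees; the helper name is hypothetical.
def haversine_km(coord_a, coord_b):
    """Great-circle distance in kilometres between two 'lat, lon' strings."""
    from math import radians, sin, cos, asin, sqrt
    lat1, lon1 = (float(x) for x in coord_a.split(','))
    lat2, lon2 = (float(x) for x in coord_b.split(','))
    lat1, lon1, lat2, lon2 = map(radians, (lat1, lon1, lat2, lon2))
    a = sin((lat2 - lat1) / 2) ** 2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2) ** 2
    return 2 * 6371.0 * asin(sqrt(a))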

def load_pdf_to_langchain(pdf_path, repo_id):
    # Load the PDF using PyPDFLoader
    loader = PyPDFLoader(pdf_path)
    documents = loader.load()
    # Split the pages into chunks small enough to fit in the LLM context window
    text_splitter = CharacterTextSplitter(chunk_size=2096, chunk_overlap=0)
    texts = text_splitter.split_documents(documents)
    # Embed the chunks via the Hugging Face Hub and index them in an in-memory Chroma store
    embeddings = HuggingFaceHubEmbeddings()
    db = Chroma.from_documents(texts, embeddings)
    retriever = db.as_retriever()
    # Low temperature keeps the generated safety guidance focused and consistent
    llm = HuggingFaceHub(
        repo_id=repo_id,
        model_kwargs={'temperature': 0.3}
    )
    qa_chain = get_chain(llm, retriever)
    return qa_chain
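
# Example usage outside Gradio (a sketch; assumes HUGGINGFACEHUB_API_TOKEN is set and
# guideline1.pdf sits next to this script):
#   chain = load_pdf_to_langchain('guideline1.pdf', 'mistralai/Mistral-7B-Instruct-v0.1')
#   print(chain({'query': 'What should I do if I live 5 km from the coast?'})['result'])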

def generate_guideline(infrastructure, location, cyclone_predicted_coordinates, cyclone_speed):
    # 'qa' is the module-level RetrievalQA chain initialized in the Gradio block below
    if infrastructure and location and cyclone_predicted_coordinates and cyclone_speed:
        user_question = f"""{infrastructure} Infrastructure, location of the person (lat, lon) is {location}, Cyclone Speed in knots is {cyclone_speed}, Predicted Cyclone Coordinates (lat, lon) is {cyclone_predicted_coordinates}. Please give guidelines on what will be best in this context. Give precise instructions by calculating proximity to cyclone by location of the person (lat, lon) and predicted cyclone coordinates (lat, lon). Also, give the location of the nearest shelter by calculating location of the person (lat, lon) and shelter coordinates (lat, lon) (from the text chunk given). Don't give proximity to cyclone and proximity to shelter though (only use this to generate the guideline). Also, give the helpline number at the end: 333."""
        result = qa({'query': user_question})
        return result['result']
    else:
        return "Please provide all inputs."

css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""

title = """
    <h1>🌪️ Generate Tailored Cyclone Safety Guidelines</h1>
"""

with gr.Blocks(css=css, theme='Taithrah/Minimal') as demo:
    with gr.Column(elem_id='col-container'):
        gr.HTML(title)

        # LLM selection
        repo_id = gr.Dropdown(
            label='Select Language Model (LLM)', 
            choices=[
                'mistralai/Mistral-7B-Instruct-v0.1', 
                'HuggingFaceH4/zephyr-7b-beta', 
                'meta-llama/Llama-2-7b-chat-hf', 
                '01-ai/Yi-6B-200K',
                'cognitivecomputations/dolphin-2.5-mixtral-8x7b',
                'tiiuae/falcon-7b-instruct',
                'tiiuae/falcon-7b',
                'google/flan-t5-base',
                'google/flan-t5-large',
                'google/flan-t5-xl',
                'bigscience/bloom-7b1',
                'EleutherAI/gpt-neo-2.7B',
                'EleutherAI/gpt-j-6B',
                'facebook/opt-6.7b',
                'facebook/galactica-6.7b',
                'cerebras/Cerebras-GPT-6.7B',
                'RWKV/rwkv-4-7b-pile',
                # Add more models here as desired
            ],
            value='mistralai/Mistral-7B-Instruct-v0.1'
        )

        # Status display
        langchain_status = gr.Textbox(
            label='Status', placeholder='', interactive=False, value="Loading guideline1.pdf..."
        )

        # Input fields for user information
        infrastructure = gr.Textbox(label='Infrastructure')
        location = gr.Textbox(label='Location Coordinates (lat, lon)')
        cyclone_predicted_coordinates = gr.Textbox(label='Predicted Cyclone Coordinates (lat, lon)')
        cyclone_speed = gr.Textbox(label='Cyclone Speed in Knots')

        submit_btn = gr.Button('Generate Guideline')
        output = gr.Textbox(label='Personalized Guideline', lines=10)

    # Global variable to store the QA chain
    qa = None

    # Function to initialize the QA chain
    def initialize_qa(repo_id_value):
        global qa
        pdf_path = 'guideline1.pdf'  # Ensure this PDF is in the same directory
        qa = load_pdf_to_langchain(pdf_path, repo_id_value)
        return f"Loaded guideline1.pdf with LLM: {repo_id_value}"

    # Initialize QA chain with default LLM
    initial_status = initialize_qa(repo_id.value)
    langchain_status.value = initial_status

    # Update QA chain when LLM selection changes
    def on_repo_id_change(repo_id_value):
        status = initialize_qa(repo_id_value)
        return status

    repo_id.change(
        on_repo_id_change,
        inputs=repo_id,
        outputs=langchain_status
    )

    submit_btn.click(
        generate_guideline, 
        inputs=[infrastructure, location, cyclone_predicted_coordinates, cyclone_speed], 
        outputs=output
    )

demo.launch()