import gradio as gr

from langchain.document_loaders import PyPDFLoader  # for loading the pdf
from langchain.embeddings import OpenAIEmbeddings  # for creating embeddings
from langchain.vectorstores import Chroma  # for the vectorization part
from langchain.chains import ChatVectorDBChain  # for chatting with the pdf
from langchain.llms import OpenAI  # the LLM we'll use (ChatGPT)


class Chat:
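    """Question-answering session over a single PDF, backed by a Chroma vector store."""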
    def __init__(self, pdf, api_input):
        self.api = api_input
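        # Load the PDF and split it into page-level chunks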
        loader = PyPDFLoader(pdf)
        pages = loader.load_and_split()

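        # Embed the chunks and persist them in a local Chroma vector store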
        embeddings = OpenAIEmbeddings(openai_api_key=self.api)
        vectordb = Chroma.from_documents(pages, embedding=embeddings, persist_directory=".")
        vectordb.persist()

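        # Build a retrieval chain that answers questions against the indexed PDF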
        self.pdf_qa = ChatVectorDBChain.from_llm(OpenAI(temperature=0.9, model_name="gpt-3.5-turbo",
                                                        openai_api_key=self.api),
                                                 vectordb, return_source_documents=True)

    def question(self, query):
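        # The "请使用中文回答" prefix asks the model to answer in Chinese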
        result = self.pdf_qa({"question": "请使用中文回答" + query, "chat_history": []})
        print("Answer:")
        print(result["answer"])

        return result["answer"]


def analyse(pdf_file, api_input):
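    # Build a Chat session from the uploaded PDF and the provided OpenAI API key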
    print(pdf_file.name)
    session = Chat(pdf_file.name, api_input)
    return session, "文章分析完成"  # "Document analysis complete"


def ask_question(data, question):
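    # data is the Chat session held in gr.State(); it is None until a PDF has been analysed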
    if data is None:
        return "Please upload a PDF file first!"
    return data.question(question)


with gr.Blocks() as demo:
    gr.Markdown(
        """
        # ChatPDF based on LangChain
        """)
    data = gr.State()
    with gr.Tab("Upload PDF File"):
        pdf_input = gr.File(label="PDF File")
        api_input = gr.Textbox(label="OpenAI API Key")
        result = gr.Textbox()
        upload_button = gr.Button("Start Analysis")
        question_input = gr.Textbox(label="Your Question", placeholder="Authors of this paper?")
        answer = gr.Textbox(label="Answer")
        ask_button = gr.Button("Ask")

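    # Wire the buttons to the analysis and question-answering handlers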
    upload_button.click(fn=analyse, inputs=[pdf_input, api_input], outputs=[data, result])
    ask_button.click(ask_question, inputs=[data, question_input], outputs=answer)

if __name__ == "__main__":
    demo.title = "ChatPDF Based on LangChain"
    demo.launch()