import os

from dotenv import load_dotenv
from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain.vectorstores import FAISS

# Load environment variables (e.g. OPENAI_API_KEY) before creating any OpenAI clients.
load_dotenv()

# Load the source PDF and split it into page-level documents.
loader = PyPDFLoader("./assets/pdf/CADWReg.pdf")
pages = loader.load_and_split()

embeddings = OpenAIEmbeddings()

prompt_template = """Answer the question using the given context to the best of your ability.
If you don't know, answer "I don't know."
Context: {context}
Topic: {topic}"""

PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "topic"])


class LangOpen:
    """Answers questions about the loaded PDF using a FAISS index and an OpenAI chat model."""

    def __init__(self, model_name: str) -> None:
        self.index = self.initialize_index("langOpen")
        self.llm = ChatOpenAI(temperature=0.3, model=model_name)
        self.chain = LLMChain(llm=self.llm, prompt=PROMPT)

    def initialize_index(self, index_name):
        """Load a persisted FAISS index if one exists; otherwise build it from the PDF pages and save it."""
        path = f"./vectorStores/{index_name}"
        if os.path.exists(path):
            return FAISS.load_local(folder_path=path, embeddings=embeddings)
        else:
            faiss = FAISS.from_documents(pages, embeddings)
            faiss.save_local(path)
            return faiss

    def get_response(self, query_str):
        """Retrieve the most similar pages and run the prompt chain over them."""
        print("query_str: ", query_str)
        print("model_name: ", self.llm.model_name)
        docs = self.index.similarity_search(query_str, k=4)
        inputs = [{"context": doc.page_content, "topic": query_str} for doc in docs]
        # chain.apply runs the prompt once per retrieved document; only the answer
        # produced for the top-ranked document is returned.
        result = self.chain.apply(inputs)[0]["text"]
        return result
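

# Example usage: a minimal sketch, not part of the original module. The model
# name and the query string below are illustrative assumptions.
if __name__ == "__main__":
    lang_open = LangOpen(model_name="gpt-3.5-turbo")
    answer = lang_open.get_response("What does the document say about water quality requirements?")
    print(answer)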