import pickle
from pathlib import Path

import faiss
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import FAISS
from langchain.embeddings import OpenAIEmbeddings

def create_vector_store(suffix, paper_text):
    """Split paper_text into sections, embed the chunks, and persist a
    FAISS vector store under the `suffix` directory."""
    # Normalize subsection markers ("§.§") to the plain section marker
    # ("§"), then split the paper into one string per (sub)section.
    paper_text = paper_text.replace("§.§", "§")
    data = paper_text.split("§")

    # Each chunk's "source" metadata is its section heading, i.e. the rest
    # of the text on the same line as the section symbol.
    sources = [d.split("\n")[0].strip() for d in data]

    # Text before the first section symbol has no heading of its own.
    sources[0] = "Beginning of paper"

    # Split each section into smaller chunks so that every chunk fits
    # within the LLM's context limit.
    text_splitter = CharacterTextSplitter(chunk_size=1500, separator="\n")
    docs = []
    metadatas = []
    for source, section in zip(sources, data):
        splits = text_splitter.split_text(section)
        docs.extend(splits)
        metadatas.extend([{"source": source}] * len(splits))

    # Embed the chunks, build the vector store, and save it to disk. The
    # raw faiss index is written separately with faiss.write_index (the
    # SWIG index object cannot be pickled directly) and detached from the
    # wrapper before pickling the rest of the store.
    store = FAISS.from_texts(docs, OpenAIEmbeddings(), metadatas=metadatas)
    Path(suffix).mkdir(parents=True, exist_ok=True)
    faiss.write_index(store.index, f"{suffix}/docs.index")
    store.index = None
    with open(f"{suffix}/faiss_store.pkl", "wb") as f:
        pickle.dump(store, f)
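

# A minimal sketch of the matching load step (not part of the original
# save logic): unpickle the store, then reattach the separately saved
# index. The function name is an assumption; the paths mirror the ones
# written above.
def load_vector_store(suffix):
    with open(f"{suffix}/faiss_store.pkl", "rb") as f:
        store = pickle.load(f)
    # Restore the raw faiss index that was detached before pickling.
    store.index = faiss.read_index(f"{suffix}/docs.index")
    return store


# Example usage (hypothetical paths), assuming the paper text uses
# "§"/"§.§" section markers:
#     paper_text = Path("paper-dir/main.txt").read_text()
#     create_vector_store("paper-dir", paper_text)
#     store = load_vector_store("paper-dir")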