import os
import random

import gradio as gr
from pypdf import PdfReader

from langchain_chroma import Chroma
from langchain_core.callbacks import StreamingStdOutCallbackHandler
from langchain_huggingface import HuggingFaceEmbeddings, HuggingFaceEndpoint
from langchain_text_splitters import CharacterTextSplitter

cwd = os.getcwd()
print(cwd)

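# Hugging Face API token, read from the environment (the standard HF_TOKEN
# variable is assumed) so it is not hardcoded in source.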
token = os.environ.get("HF_TOKEN", "")

repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
emb = "sentence-transformers/all-mpnet-base-v2"
hf = HuggingFaceEmbeddings(model_name=emb)

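# Module-level vector store handle: embed_fn builds it from the uploaded
# documents and run_llm queries it for retrieval context.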
db = None


def embed_fn(inp):
    global db
    text_splitter = CharacterTextSplitter(chunk_size=200, chunk_overlap=10)
    documents = text_splitter.split_text(inp)
    # Chroma.from_texts embeds the chunks itself using the shared hf embedder
    # and persists the collection to disk.
    db = Chroma.from_texts(
        documents,
        embedding=hf,
        persist_directory=f"{cwd}/chroma_langchain_db",
    )

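# Generator wired to the file-upload event: streams status messages into the
# msg HTML component while each document is read and embedded.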
def proc_doc(doc_in):
    # Yield plain strings: the output component (msg) is a gr.HTML element.
    for doc in doc_in:
        if doc.endswith(".txt"):
            yield f"Loading Document: {doc}"
            outp = read_txt(doc)
            embed_fn(outp)
            yield "Loaded"
        elif doc.endswith(".pdf"):
            yield f"Loading Document: {doc}"
            outp = read_pdf(doc)
            embed_fn(outp)
            yield "Loaded"

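# Read a plain-text file into a single string.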
def read_txt(txt_path):
    with open(txt_path, "r") as f:
        text = f.read()
    return text

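# Concatenate the extracted text of every page in a PDF.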
def read_pdf(pdf_path):
    text = ""
    reader = PdfReader(pdf_path)
    for page in reader.pages:
        text = f"{text}\n{page.extract_text()}"
    return text

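# Chat handler: embed the query, pull the three nearest chunks from Chroma,
# then stream a Mixtral completion grounded in that context.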
def run_llm(input_text, history):
    # Fall back to an empty context if nothing has been embedded yet.
    docs = []
    try:
        qur = hf.embed_query(input_text)
        docs = db.similarity_search_by_vector(qur, k=3)
        print(docs)
    except Exception as e:
        print(e)
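    # Configure the inference endpoint; with streaming enabled, the stdout
    # callback echoes tokens to the console as they arrive.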
    callbacks = [StreamingStdOutCallbackHandler()]
    llm = HuggingFaceEndpoint(
        repo_id=repo_id,
        max_new_tokens=2056,
        seed=random.randint(1, 99999999999),
        top_k=10,
        top_p=0.95,
        typical_p=0.95,
        temperature=0.01,
        repetition_penalty=1.03,
        streaming=True,
        callbacks=callbacks,
        huggingfacehub_api_token=token,
    )

    # Flat Mixtral instruct-format prompt with the retrieved chunks inlined
    # as context ahead of the user's question.
    prompt = (
        f"[INST] Use this data to help answer the user's questions: {str(docs)} [/INST]\n"
        f"[INST] {input_text} [/INST]"
    )

    # Stream the completion so the chat window updates as tokens arrive.
    out = ""
    for chunk in llm.stream(prompt):
        out += chunk
        yield out

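# Constrain the height of the chat area; #component-0 is the id Gradio
# auto-assigns to the first rendered component.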
css = """
#component-0 {
  height: 400px;
}
"""

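# Layout: chat interface on top, status line and multi-file upload below.
# Uploading files triggers proc_doc, which embeds them for retrieval.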
with gr.Blocks(css=css) as app:
    data = gr.State()
    with gr.Column():
        chat = gr.ChatInterface(
            fn=run_llm,
            type="tuples",
            concurrency_limit=20,
        )
    with gr.Row():
        msg = gr.HTML()
        file_in = gr.Files(file_count="multiple")
    file_in.change(proc_doc, file_in, msg)

app.queue().launch()