Ritesh-hf committed on
Commit
e5180db
1 Parent(s): b0f9425

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -9
app.py CHANGED
@@ -90,17 +90,17 @@ llm = ChatPerplexity(temperature=0, pplx_api_key=GROQ_API_KEY, model="llama-3.1-
90
 
91
  # compressor = FlashrankRerank(model="ms-marco-MultiBERT-L-12", score_threshold=0.6)
92
 
93
- from langchain_community.document_compressors.rankllm_rerank import RankLLMRerank
94
 
95
- compressor = RankLLMRerank(top_n=6, model="zephyr")
96
 
97
- compression_retriever = ContextualCompressionRetriever(
98
- base_compressor=compressor, base_retriever=retriever
99
- )
100
 
101
- compression_retriever = ContextualCompressionRetriever(
102
- base_compressor=compressor, base_retriever=retriever
103
- )
104
 
105
  # Contextualization prompt and retriever
106
  contextualize_q_system_prompt = """ Given a chat history and the latest user question \
@@ -115,7 +115,7 @@ contextualize_q_prompt = ChatPromptTemplate.from_messages(
115
  ("human", "{input}")
116
  ]
117
  )
118
- history_aware_retriever = create_history_aware_retriever(llm, compression_retriever, contextualize_q_prompt)
119
 
120
  # QA system prompt and chain
121
  qa_system_prompt = """ You are a highly skilled information retrieval assistant. Use the following context to answer questions effectively.
 
90
 
91
  # compressor = FlashrankRerank(model="ms-marco-MultiBERT-L-12", score_threshold=0.6)
92
 
93
+ # from langchain_community.document_compressors.rankllm_rerank import RankLLMRerank
94
 
95
+ # compressor = RankLLMRerank(top_n=6, model="zephyr")
96
 
97
+ # compression_retriever = ContextualCompressionRetriever(
98
+ # base_compressor=compressor, base_retriever=retriever
99
+ # )
100
 
101
+ # compression_retriever = ContextualCompressionRetriever(
102
+ # base_compressor=compressor, base_retriever=retriever
103
+ # )
104
 
105
  # Contextualization prompt and retriever
106
  contextualize_q_system_prompt = """ Given a chat history and the latest user question \
 
115
  ("human", "{input}")
116
  ]
117
  )
118
+ history_aware_retriever = create_history_aware_retriever(llm, retriever, contextualize_q_prompt)
119
 
120
  # QA system prompt and chain
121
  qa_system_prompt = """ You are a highly skilled information retrieval assistant. Use the following context to answer questions effectively.