KushwanthK committed on
Commit
cae545d
1 Parent(s): 51f198f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -136,7 +136,7 @@ def prompt_engineer(text, longtext, query):
136
 
137
  with st.sidebar:
138
  st.divider()
139
- st.markdown("*:red[Text Summary Generation]* from above Top 5 **:green[similarity search results]**.")
140
 
141
 
142
  GENERATION_PROMPT_TEMPLATE = """
@@ -164,9 +164,9 @@ def prompt_engineer(text, longtext, query):
164
  repo_id="meta-llama/Meta-Llama-3-8B-Instruct", model_kwargs={"temperature": 0.1, "max_new_tokens": 256, "task":"text-generation"}
165
  )
166
  st.write("GEN llm connection started..")
167
- summary = llm.invoke(summary_prompt)
168
- st.write(summary)
169
- st.divider()
170
  response_text = llm.invoke(prompt)
171
  escaped_query = re.escape(query)
172
  result = re.split(f'Answer the question based on the above context: {escaped_query}\n',response_text)[-1]
 
136
 
137
  with st.sidebar:
138
  st.divider()
139
+ # st.markdown("*:red[Text Summary Generation]* from above Top 5 **:green[similarity search results]**.")
140
 
141
 
142
  GENERATION_PROMPT_TEMPLATE = """
 
164
  repo_id="meta-llama/Meta-Llama-3-8B-Instruct", model_kwargs={"temperature": 0.1, "max_new_tokens": 256, "task":"text-generation"}
165
  )
166
  st.write("GEN llm connection started..")
167
+ # summary = llm.invoke(summary_prompt)
168
+ # st.write(summary)
169
+ # st.divider()
170
  response_text = llm.invoke(prompt)
171
  escaped_query = re.escape(query)
172
  result = re.split(f'Answer the question based on the above context: {escaped_query}\n',response_text)[-1]