lfoppiano committed
Commit a70fbd3
1 Parent(s): 77334fb
Files changed (1)
  1. streamlit_app.py +43 -24
streamlit_app.py CHANGED

@@ -71,15 +71,35 @@ st.set_page_config(
     }
 )
 
-# css = '''
-# <style>
-# [data-testid="ScrollToBottomContainer"] {
-#     overflow: hidden;
-# }
-# </style>
-# '''
-#
-# st.markdown(css, unsafe_allow_html=True)
+css_modify_left_column = '''
+<style>
+    [data-testid="stHorizontalBlock"] > div:nth-child(1) {
+        overflow: hidden;
+        background-color: red;
+        height: 70vh;
+    }
+</style>
+'''
+css_modify_right_column = '''
+<style>
+    [data-testid="stHorizontalBlock"]> div:first-child {
+        background-color: red;
+        position: fixed
+        height: 70vh;
+    }
+</style>
+'''
+css_disable_scrolling_container = '''
+<style>
+    [data-testid="ScrollToBottomContainer"] {
+        overflow: hidden;
+    }
+</style>
+'''
+
+
+# st.markdown(css_lock_column_fixed, unsafe_allow_html=True)
+# st.markdown(css2, unsafe_allow_html=True)
 
 
 def new_file():
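The three css_* strings added here target Streamlit's internal data-testid attributes, but they only take effect once passed to st.markdown with unsafe_allow_html=True, and in this commit the corresponding calls are still commented out. Note also that position: fixed in css_modify_right_column is missing a trailing semicolon, so a browser would drop that declaration together with the height rule that follows it. Below is a minimal, self-contained sketch of the injection pattern, assuming a bare Streamlit script; the selector and the 70vh height come from the diff, the page content is placeholder:

import streamlit as st

# Selector and height taken from the diff: constrain the first column of a
# horizontal block to 70% of the viewport height and hide its overflow.
css_modify_left_column = '''
<style>
    [data-testid="stHorizontalBlock"] > div:nth-child(1) {
        overflow: hidden;
        height: 70vh;
    }
</style>
'''

# Raw <style> tags are only applied when unsafe_allow_html=True;
# otherwise Streamlit escapes the markup and renders it as text.
st.markdown(css_modify_left_column, unsafe_allow_html=True)

left_column, right_column = st.columns([1, 1])
left_column.write("PDF viewer placeholder")
right_column.write("Chat placeholder")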
@@ -231,9 +251,9 @@ with st.sidebar:
     # is_api_key_provided = st.session_state['api_key']
 
     st.button(
-        'Reset chat memory.',
-        on_click=clear_memory(),
-        help="Clear the conversational memory. Currently implemented to retrain the 4 most recent messages.")
+        'Reset chat memory.',
+        on_click=clear_memory(),
+        help="Clear the conversational memory. Currently implemented to retrain the 4 most recent messages.")
 
     left_column, right_column = st.columns([1, 1])
 
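One detail worth flagging in the button wiring above: on_click is given clear_memory(), so the function runs once while the sidebar is being built and its return value (None) is what gets registered as the callback. Streamlit's on_click parameter expects the callable itself. A short sketch of the callback form, with a hypothetical clear_memory that resets a memory entry in session state:

import streamlit as st

def clear_memory():
    # Hypothetical reset; the real app clears its conversational memory object.
    st.session_state['memory'] = None

st.button(
    'Reset chat memory.',
    on_click=clear_memory,  # pass the function itself, not the result of calling it
    help="Clear the conversational memory.")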
 
@@ -322,15 +342,15 @@ with left_column:
         left_column.markdown(get_pdf_display(st.session_state['binary']), unsafe_allow_html=True)
 
 with right_column:
-    css = '''
-    <style>
-    [data-testid="column"] {
-        overflow: auto;
-        height: 70vh;
-    }
-    </style>
-    '''
-    st.markdown(css, unsafe_allow_html=True)
+    # css = '''
+    # <style>
+    # [data-testid="column"] {
+    #     overflow: auto;
+    #     height: 70vh;
+    # }
+    # </style>
+    # '''
+    # st.markdown(css, unsafe_allow_html=True)
 
     # st.markdown(
     #     """
@@ -341,7 +361,6 @@ with right_column:
     #     unsafe_allow_html=True,
     # )
 
-
     if st.session_state.loaded_embeddings and question and len(question) > 0 and st.session_state.doc_id:
         for message in st.session_state.messages:
            with st.chat_message(message["role"]):
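The context lines closing this hunk show the chat-history replay: every stored message is re-rendered inside an st.chat_message container keyed by its role. A compact sketch of that pattern, assuming the messages are dicts in st.session_state.messages and that the text sits under a "content" key (only the "role" key is visible in the diff):

import streamlit as st

if "messages" not in st.session_state:
    st.session_state.messages = []

# New input is appended to the history; the whole conversation is
# then replayed from session state on every rerun.
question = st.chat_input("Ask something about the current document")
if question:
    st.session_state.messages.append({"role": "user", "content": question})

for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])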
@@ -365,8 +384,8 @@ with right_column:
             elif mode == "LLM":
                 with st.spinner("Generating response..."):
                     _, text_response = st.session_state['rqa'][model].query_document(question, st.session_state.doc_id,
-                                                                                      context_size=context_size,
-                                                                                      memory=st.session_state.memory)
+                                                                                      context_size=context_size,
+                                                                                      memory=st.session_state.memory)
 
                 if not text_response:
                     st.error("Something went wrong. Contact Luca Foppiano ([email protected]) to report the issue.")
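The removed and added lines in this last hunk carry the same visible content, so the change is in the formatting of the query_document arguments; the call itself passes the conversational memory explicitly with each question, alongside a context_size for retrieval. How st.session_state.memory is built is not shown in this diff. Going by the reset button's help text ("the 4 most recent messages"), one plausible shape, purely as an assumption, is a LangChain windowed buffer:

import streamlit as st
from langchain.memory import ConversationBufferWindowMemory

# Assumption: a windowed memory that keeps only the last few exchanges,
# matching the "4 most recent messages" wording in the sidebar help text.
if 'memory' not in st.session_state:
    st.session_state.memory = ConversationBufferWindowMemory(k=4)

def clear_memory():
    # Emptying the window clears the conversational context.
    st.session_state.memory.clear()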
 