Spaces:
Running
Running
pragneshbarik
committed on
Commit
•
5d2a014
1
Parent(s):
e51667a
moved chat loop into chat_loop() following major refactoring
Browse files- app.py +7 -21
- components/chat_box.py +1 -1
- components/chat_loop.py +23 -0
app.py
CHANGED
@@ -1,13 +1,14 @@
|
|
1 |
import streamlit as st
|
2 |
import yaml
|
3 |
-
from chat_client import chat
|
4 |
from components.sidebar import sidebar
|
5 |
from components.prompt_engineering_dashboard import prompt_engineering_dashboard
|
6 |
-
from components.stream_handler import stream_handler
|
7 |
-
from components.show_source import show_source
|
8 |
from components.chat_box import chat_box
|
9 |
-
from components.generate_chat_stream import generate_chat_stream
|
10 |
from components.init_state import init_state
|
|
|
|
|
|
|
|
|
|
|
11 |
|
12 |
|
13 |
with open("config.yaml", "r") as file:
|
@@ -27,21 +28,6 @@ prompt_engineering_dashboard(st.session_state, config)
|
|
27 |
|
28 |
sidebar(st.session_state, config)
|
29 |
|
30 |
-
chat_box(st.session_state)
|
31 |
-
|
32 |
-
if prompt := st.chat_input("Generate Ebook"):
|
33 |
-
st.chat_message("user").markdown(prompt)
|
34 |
-
st.session_state.messages.append({"role": "user", "content": prompt})
|
35 |
-
|
36 |
-
chat_stream, links = generate_chat_stream(st.session_state, prompt, config)
|
37 |
-
|
38 |
-
with st.chat_message("assistant"):
|
39 |
-
placeholder = st.empty()
|
40 |
-
full_response = stream_handler(
|
41 |
-
st.session_state, chat_stream, prompt, placeholder
|
42 |
-
)
|
43 |
-
if st.session_state.rag_enabled:
|
44 |
-
show_source(links)
|
45 |
|
46 |
-
|
47 |
-
st.session_state.messages.append({"role": "assistant", "content": full_response})
|
|
|
1 |
import streamlit as st
|
2 |
import yaml
|
|
|
3 |
from components.sidebar import sidebar
|
4 |
from components.prompt_engineering_dashboard import prompt_engineering_dashboard
|
|
|
|
|
5 |
from components.chat_box import chat_box
|
|
|
6 |
from components.init_state import init_state
|
7 |
+
from components.chat_loop import chat_loop
|
8 |
+
|
9 |
+
# from components.show_source import show_source
|
10 |
+
# from components.stream_handler import stream_handler
|
11 |
+
# from components.generate_chat_stream import generate_chat_stream
|
12 |
|
13 |
|
14 |
with open("config.yaml", "r") as file:
|
|
|
28 |
|
29 |
sidebar(st.session_state, config)
|
30 |
|
31 |
+
chat_box(st.session_state, config)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
32 |
|
33 |
+
chat_loop(st.session_state, config)
|
|
components/chat_box.py
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
import streamlit as st
|
2 |
|
3 |
|
4 |
-
def chat_box(session_state):
|
5 |
for message in session_state.messages:
|
6 |
with st.chat_message(message["role"]):
|
7 |
st.markdown(message["content"])
|
|
|
1 |
import streamlit as st


def chat_box(session_state, config):
    """Render the full chat transcript stored in session state.

    Replays every message in ``session_state.messages`` (dicts with
    ``"role"`` and ``"content"`` keys) into the Streamlit chat UI so the
    conversation persists across reruns.

    Parameters
    ----------
    session_state : streamlit session state holding a ``messages`` list.
    config : app configuration dict; currently unused here, accepted to
        keep the call signature uniform with the other components
        (``sidebar(session_state, config)`` etc.). NOTE(review): confirm
        whether a config-driven feature is planned; otherwise consider
        documenting it as reserved.
    """
    for message in session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])
|
components/chat_loop.py
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
from components.generate_chat_stream import generate_chat_stream
from components.stream_handler import stream_handler
from components.show_source import show_source


def chat_loop(session_state, config):
    """Run one turn of the chat: read user input, stream the reply, record it.

    On each Streamlit rerun, if the user submitted a prompt via the chat
    input box, this echoes the prompt, streams the assistant's response
    into a placeholder, optionally shows RAG sources, and appends the
    exchange to both ``session_state.history`` and
    ``session_state.messages``.

    Parameters
    ----------
    session_state : streamlit session state; must already carry
        ``messages``, ``history``, and ``rag_enabled`` (presumably set up
        by ``init_state`` — confirm against the caller).
    config : app configuration dict, forwarded to
        ``generate_chat_stream``.
    """
    # Walrus: st.chat_input returns None until the user submits, so the
    # body runs at most once per submitted prompt.
    if prompt := st.chat_input("Generate Ebook"):
        # Echo the user's message and persist it for transcript replay.
        st.chat_message("user").markdown(prompt)
        session_state.messages.append({"role": "user", "content": prompt})

        # links are the retrieval sources; produced even when RAG is off,
        # but only displayed below when rag_enabled is set.
        chat_stream, links = generate_chat_stream(session_state, prompt, config)

        with st.chat_message("assistant"):
            # stream_handler incrementally fills the placeholder and
            # returns the full assembled response text.
            placeholder = st.empty()
            full_response = stream_handler(
                session_state, chat_stream, prompt, placeholder
            )
            if session_state.rag_enabled:
                show_source(links)

        # Record the turn in both stores: history as [prompt, response]
        # pairs, messages in the role/content shape chat_box replays.
        session_state.history.append([prompt, full_response])
        session_state.messages.append({"role": "assistant", "content": full_response})
|