Divyansh12 committed on
Commit 238d377
1 Parent(s): 23da66a

Update app.py

app.py CHANGED
@@ -1,62 +1,71 @@
 import streamlit as st
-from langchain.chains import ConversationChain
-from langchain.memory import ConversationBufferMemory
-from langchain.schema import HumanMessage, AIMessage
-from langchain.chat_models.base import BaseChatModel
 from llama_cpp import Llama
 
+# Load the model once per session
+if 'llm' not in st.session_state:
+    st.session_state.llm = Llama.from_pretrained(
+        repo_id="Divyansh12/check",
+        filename="unsloth.F16.gguf",
+        verbose=True,
+        n_ctx=32768,
+        n_threads=2,
+        chat_format="chatml"
+    )
+
+# Define the function to get responses from the model
+def respond(message, history):
+    messages = []
+
+    for user_message, assistant_message in history:
+        if user_message:
+            messages.append({"role": "user", "content": user_message})
+        if assistant_message:
+            messages.append({"role": "assistant", "content": assistant_message})
+
+    messages.append({"role": "user", "content": message})
+
+    response = ""
+    # Stream the response from the model
+    response_stream = st.session_state.llm.create_chat_completion(
+        messages=messages,
+        stream=True,
+        max_tokens=512,  # Use a default value for simplicity
+        temperature=0.7,  # Use a default value for simplicity
+        top_p=0.95  # Use a default value for simplicity
+    )
+
+    # Collect the response chunks
+    for chunk in response_stream:
+        if len(chunk['choices'][0]["delta"]) != 0 and "content" in chunk['choices'][0]["delta"]:
+            response += chunk['choices'][0]["delta"]["content"]
+
+    return response  # Return the full response
 
 # Streamlit UI
-st.title("Chatbot
+st.title("Simple Chatbot")
 st.write("### Interact with the chatbot!")
 
 # User input field
 user_message = st.text_area("Your Message:", "")
 
+# Chat history
+if 'history' not in st.session_state:
+    st.session_state.history = []
+
 # Button to send the message
-if st.button("Send")
+if st.button("Send"):
+    if user_message:  # Check if user has entered a message
+        # Get the response from the model
+        response = respond(user_message, st.session_state.history)
+
+        # Add user message and model response to history
+        st.session_state.history.append((user_message, response))
+
+        # Clear the input field after sending
+        user_message = ""  # Reset user_message to clear input
 
 # Display the chat history
 st.write("### Chat History")
-for
-    elif isinstance(msg, AIMessage):
-        st.write(f"**Assistant:** {msg.content}")
+for user_msg, assistant_msg in st.session_state.history:
+    st.write(f"**User:** {user_msg}")
+    st.write(f"**Assistant:** {assistant_msg}")
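
For a quick sanity check of the new model path outside Streamlit, here is a minimal sketch that exercises the same llama-cpp-python calls the updated app.py relies on. The repo_id, filename, context size, thread count, and chat format are copied from the diff; the prompt, max_tokens value, and verbose=False are illustrative only, and the sketch assumes llama-cpp-python and huggingface_hub are installed (this commit does not touch requirements).

# Standalone sketch: mirrors the streaming chat call from app.py.
# Assumes llama-cpp-python and huggingface_hub are installed.
from llama_cpp import Llama

llm = Llama.from_pretrained(
    repo_id="Divyansh12/check",    # same repo as in app.py
    filename="unsloth.F16.gguf",   # same GGUF file as in app.py
    n_ctx=32768,
    n_threads=2,
    chat_format="chatml",
    verbose=False,                 # illustrative; app.py uses verbose=True
)

stream = llm.create_chat_completion(
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    stream=True,
    max_tokens=64,                 # illustrative; app.py uses 512
)

# Accumulate streamed deltas the same way respond() does.
reply = ""
for chunk in stream:
    delta = chunk["choices"][0]["delta"]
    if "content" in delta:
        reply += delta["content"]
print(reply)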