Divyansh12 commited on
Commit
238d377
1 Parent(s): 23da66a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -47
app.py CHANGED
@@ -1,62 +1,71 @@
1
  import streamlit as st
2
- from langchain.chains import ConversationChain
3
- from langchain.memory import ConversationBufferMemory
4
- from langchain.schema import HumanMessage, AIMessage
5
- from langchain.chat_models.base import BaseChatModel
6
  from llama_cpp import Llama
7
 
8
- # Initialize the Llama model
9
- llm = Llama.from_pretrained(
10
- repo_id="Divyansh12/check",
11
- filename="unsloth.F16.gguf",
12
- verbose=True,
13
- n_ctx=32768,
14
- n_threads=2,
15
- chat_format="chatml"
16
- )
17
-
18
- # Define the LangChain model for Llama
19
- class LlamaChatModel(BaseChatModel):
20
- def _llm_type(self) -> str:
21
- return "llama"
22
-
23
- def _generate(self, messages, stop=None):
24
- # Prepare prompt from conversation history
25
- prompt = "\n".join(
26
- f"user: {msg.content}" if isinstance(msg, HumanMessage) else f"assistant: {msg.content}"
27
- for msg in messages
28
- )
29
-
30
- # Generate response from Llama
31
- response = llm.chat(prompt)
32
- return [AIMessage(content=response)]
33
-
34
- # Initialize memory and chat model
35
- memory = ConversationBufferMemory(return_messages=True)
36
- llama_chat_model = LlamaChatModel()
37
-
38
- # Create the conversation chain
39
- conversation = ConversationChain(memory=memory, llm=llama_chat_model)
 
 
 
 
 
 
 
40
 
41
  # Streamlit UI
42
- st.title("Chatbot with LangChain and Llama")
43
  st.write("### Interact with the chatbot!")
44
 
45
  # User input field
46
  user_message = st.text_area("Your Message:", "")
47
 
 
 
 
 
48
  # Button to send the message
49
- if st.button("Send") and user_message:
50
- # Get the response from the LangChain conversation
51
- response = conversation.predict(input=user_message)
 
 
 
 
52
 
53
- # Update chat history in session state
54
- st.session_state.history = memory.chat_memory.messages
55
 
56
  # Display the chat history
57
  st.write("### Chat History")
58
- for msg in st.session_state.get('history', []):
59
- if isinstance(msg, HumanMessage):
60
- st.write(f"**User:** {msg.content}")
61
- elif isinstance(msg, AIMessage):
62
- st.write(f"**Assistant:** {msg.content}")
 
1
  import streamlit as st
 
 
 
 
2
  from llama_cpp import Llama
3
 
4
# Load the GGUF chat model exactly once per Streamlit session:
# Llama.from_pretrained downloads/opens the model, which is far too
# expensive to repeat on every script rerun.
if "llm" not in st.session_state:
    st.session_state["llm"] = Llama.from_pretrained(
        repo_id="Divyansh12/check",
        filename="unsloth.F16.gguf",
        verbose=True,
        n_ctx=32768,
        n_threads=2,
        chat_format="chatml",
    )
14
+
15
# Define the function to get responses from the model
def respond(message, history, llm=None):
    """Generate the assistant's reply to *message* given prior *history*.

    Parameters
    ----------
    message : str
        The new user message.
    history : list[tuple[str, str]]
        Prior (user_message, assistant_message) pairs; falsy entries
        (empty string / None) on either side are skipped.
    llm : optional
        A llama-cpp-python style model exposing ``create_chat_completion``.
        Defaults to the session-cached model in ``st.session_state.llm``,
        so existing two-argument callers are unaffected.

    Returns
    -------
    str
        The full assistant response, concatenated from the stream.
    """
    if llm is None:
        llm = st.session_state.llm

    # Rebuild the transcript in role/content dict form (chatml format).
    messages = []
    for user_message, assistant_message in history:
        if user_message:
            messages.append({"role": "user", "content": user_message})
        if assistant_message:
            messages.append({"role": "assistant", "content": assistant_message})
    messages.append({"role": "user", "content": message})

    # Stream the completion so partial deltas arrive chunk by chunk.
    response_stream = llm.create_chat_completion(
        messages=messages,
        stream=True,
        max_tokens=512,   # default generation budget
        temperature=0.7,  # default sampling temperature
        top_p=0.95,       # default nucleus-sampling cutoff
    )

    response = ""
    for chunk in response_stream:
        # Only some deltas carry new text under "content" (others carry
        # e.g. the role); .get() replaces the original's redundant
        # `len(delta) != 0 and "content" in delta` double check.
        content = chunk["choices"][0]["delta"].get("content")
        if content:
            response += content
    return response  # Return the full response
43
 
44
# Streamlit UI
st.title("Simple Chatbot")
st.write("### Interact with the chatbot!")

# Chat history: one (user, assistant) tuple per exchange, kept per session.
if 'history' not in st.session_state:
    st.session_state.history = []

def _send_message():
    """Send-button callback: query the model, record the exchange, clear input.

    Runs before the script reruns, which is the only point at which a
    widget's session_state value may legally be overwritten.
    """
    msg = st.session_state.get("user_message", "")
    if msg:  # ignore clicks while the input box is empty
        reply = respond(msg, st.session_state.history)
        st.session_state.history.append((msg, reply))
        # BUG FIX: the original did `user_message = ""` after st.button(),
        # which only rebinds a Python local and never clears the drawn
        # widget. Resetting the widget's keyed session_state entry inside
        # an on_click callback actually clears the input field.
        st.session_state.user_message = ""

# User input field (keyed so the callback above can read and reset it).
st.text_area("Your Message:", "", key="user_message")

# Button to send the message
st.button("Send", on_click=_send_message)

# Display the chat history
st.write("### Chat History")
for user_msg, assistant_msg in st.session_state.history:
    st.write(f"**User:** {user_msg}")
    st.write(f"**Assistant:** {assistant_msg}")