"""Streamlit chat bot backed by Groq's llama3-8b-8192 model via LangChain.

Keeps a sliding window of recent conversation turns in Streamlit session
state so the model retains context across script reruns.
"""

import streamlit as st
from groq import Groq  # noqa: F401 -- kept: imported by the original file
from langchain.chains import LLMChain
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain_core.messages import SystemMessage
from langchain_core.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
)
from langchain_groq import ChatGroq

# Number of past turns the window memory retains.
# (Was an unused variable while the memory hard-coded k=5; now actually used.)
CONVERSATIONAL_MEMORY_LENGTH = 5

# Persistent system prompt, always included at the start of the chat.
SYSTEM_PROMPT = (
    "You are Verbat chat assistant created by Abhishek. "
    "Please provide the accurate answers."
)


def modelConversation(user_question):
    """Send ``user_question`` to the Groq chat model with windowed history.

    Parameters
    ----------
    user_question : str
        The user's current (non-empty) message; the caller guards against
        empty input, so the old internal ``if user_question`` check — which
        could leave ``response`` unbound — has been removed.

    Returns
    -------
    dict
        ``{"Question": user_question, "Answer": response}`` for rendering.
    """
    groq_chat = ChatGroq(
        groq_api_key=st.secrets["GROQ_API_KEY"],
        model_name="llama3-8b-8192",
    )

    prompt = ChatPromptTemplate.from_messages(
        [
            # Persistent system prompt, first in every request.
            SystemMessage(content=SYSTEM_PROMPT),
            # Replaced with the stored chat history to maintain context.
            MessagesPlaceholder(variable_name="chat_history"),
            # The user's current input is injected here.
            HumanMessagePromptTemplate.from_template("{human_input}"),
        ]
    )

    conversation = LLMChain(
        llm=groq_chat,                    # Groq LangChain chat object
        prompt=prompt,                    # constructed prompt template
        verbose=False,                    # set True for debugging output
        memory=st.session_state.memory,   # windowed conversation history
    )

    response = conversation.predict(human_input=user_question)
    return {"Question": user_question, "Answer": response}


# Initialise per-session state exactly once; Streamlit re-executes the whole
# script on every interaction, so these guards prevent wiping the history.
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferWindowMemory(
        k=CONVERSATIONAL_MEMORY_LENGTH,
        memory_key="chat_history",
        return_messages=True,
    )
if "body" not in st.session_state:
    st.session_state.body = []  # list of {"Question": ..., "Answer": ...} dicts


def main():
    """Render the chat UI, replay the transcript, and process new input."""
    st.header('Verbat Chat Bot | Groq Cloud', divider='red')
    st.markdown('''llama3-8b-8192 Model , ConversationBufferWindowMemory :balloon:''')

    # Replay the stored transcript so the conversation survives reruns.
    for chat in st.session_state.body:
        with st.chat_message("user"):
            st.write(chat["Question"])
        with st.chat_message("Verbat"):  # fixed typo: was "Vervat"
            st.write(chat["Answer"])

    user_question = st.chat_input('Hi i am Verbat bot. Say something...')
    if user_question:
        st.chat_message("user").write(user_question)
        with st.spinner("Processing..."):
            result = modelConversation(user_question)
        st.session_state.body.append(result)
        # st.experimental_rerun() was removed in Streamlit 1.37+;
        # st.rerun() is the supported replacement.
        st.rerun()


if __name__ == "__main__":
    main()