ghuman7 committed
Commit a12a4d7
1 Parent(s): 57642d9

Update app.py

Files changed (1)
  1. app.py +18 -23
app.py CHANGED
@@ -1,32 +1,27 @@
  import streamlit as st
- from transformers import RagTokenizer, RagRetriever, RagSequenceForGeneration
+ from transformers import RagRetriever, RagTokenizer, RagTokenForGeneration

- # Load the RAG model components
+ # Load the RAG model
  @st.cache_resource
  def load_rag_model():
-     tokenizer = RagTokenizer.from_pretrained("facebook/rag-sequence-nq")
-     retriever = RagRetriever.from_pretrained("facebook/rag-sequence-nq", use_dummy_dataset=True)
-     rag_model = RagSequenceForGeneration.from_pretrained("facebook/rag-sequence-nq", retriever=retriever)
+     retriever = RagRetriever.from_pretrained("facebook/rag-token-nq")
+     tokenizer = RagTokenizer.from_pretrained("facebook/rag-token-nq")
+     rag_model = RagTokenForGeneration.from_pretrained("facebook/rag-token-nq", retriever=retriever)
      return tokenizer, retriever, rag_model

- tokenizer, retriever, rag_model = load_rag_model()
-
- # Streamlit UI for Mental Health Chatbot
- st.title("Mental Health Chatbot")
- st.write("""
- This chatbot uses a pre-trained RAG model to provide responses to mental health-related queries.
- Please note that this is an AI-based tool and is not a substitute for professional mental health support.
- """)
-
- # User input
- query = st.text_input("How can I help you today?")
-
- if st.button("Get Response"):
-     if query:
-         # Generate a response using the RAG model
-         inputs = tokenizer(query, return_tensors="pt")
-         outputs = rag_model.generate(**inputs)
-         response = tokenizer.batch_decode(outputs, skip_special_tokens=True)
-         st.write(f"**Response:** {response[0]}")
-     else:
-         st.write("Please enter a query to get a response.")
+ # Set up the Streamlit interface
+ def main():
+     st.title("Mental Health Chatbot")
+
+     tokenizer, retriever, rag_model = load_rag_model()
+
+     user_input = st.text_input("Ask me something about mental health:")
+
+     if user_input:
+         input_ids = tokenizer(user_input, return_tensors="pt").input_ids
+         output = rag_model.generate(input_ids)
+         response = tokenizer.decode(output[0], skip_special_tokens=True)
+         st.write(f"Response: {response}")
+
+ if __name__ == "__main__":
+     main()
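
For reference, below is a minimal standalone sketch of the rag-token-nq pipeline that the updated app.py relies on, runnable outside Streamlit. The index_name="exact" and use_dummy_dataset=True arguments are assumptions added here so the retriever does not download the full wiki_dpr index; they are not part of the committed code, which loads the retriever with its defaults.

# Standalone check of the RAG components used by the new app.py.
# Requires the datasets and faiss packages for the retriever.
# NOTE: index_name="exact" and use_dummy_dataset=True are assumptions for a
# quick local test; the committed app loads the retriever with defaults.
from transformers import RagRetriever, RagTokenizer, RagTokenForGeneration

tokenizer = RagTokenizer.from_pretrained("facebook/rag-token-nq")
retriever = RagRetriever.from_pretrained(
    "facebook/rag-token-nq", index_name="exact", use_dummy_dataset=True
)
model = RagTokenForGeneration.from_pretrained("facebook/rag-token-nq", retriever=retriever)

# Tokenize a question, retrieve supporting passages, and generate an answer.
inputs = tokenizer("What are common symptoms of anxiety?", return_tensors="pt")
generated = model.generate(input_ids=inputs["input_ids"])
print(tokenizer.batch_decode(generated, skip_special_tokens=True)[0])

The app itself is launched with: streamlit run app.py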