import sys
import subprocess
# Install the required packages with pip from within Python
subprocess.check_call([sys.executable, "-m", "pip", "install", "langchain_community", "langchain"])
# Import necessary libraries
import streamlit as st
from langchain.chains import LLMChain
from langchain.chains.conversation.memory import ConversationEntityMemory
from langchain.chains.conversation.prompt import ENTITY_MEMORY_CONVERSATION_TEMPLATE
import os
from langchain_community.llms import HuggingFaceEndpoint
from langchain_core.output_parsers import StrOutputParser
from langchain.prompts import PromptTemplate
# Set Streamlit page configuration
st.set_page_config(page_title='🧠MemoryBot🤖', layout='wide')
# Initialize session state (st.session_state works like a dictionary)
if "generated" not in st.session_state:
st.session_state["generated"] = []
if "past" not in st.session_state:
st.session_state["past"] = []
if "input" not in st.session_state:
st.session_state["input"] = ""
if "stored_session" not in st.session_state:
st.session_state["stored_session"] = []
# Define function to get user input
def get_text():
"""
Get the user input text.
Returns:
(str): The text entered by the user
"""
input_text = st.text_input("You: ", st.session_state["input"], key="input",
placeholder="Your AI assistant here! Ask me anything ...",
label_visibility='hidden')
return input_text
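# Because the widget above uses key="input", Streamlit mirrors its value in
# st.session_state["input"], which is what lets new_chat() clear the box by resetting that key.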
# Function to start a new chat
def new_chat():
"""
Clears session state and starts a new chat.
"""
save = []
for i in range(len(st.session_state['generated'])-1, -1, -1):
save.append("User:" + st.session_state["past"][i])
save.append("Bot:" + st.session_state["generated"][i])
st.session_state["stored_session"].append(save)
st.session_state["generated"] = []
st.session_state["past"] = []
st.session_state["input"] = ""
st.session_state.entity_memory.entity_store = {}
st.session_state.entity_memory.buffer.clear()
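# new_chat() runs as the button's on_click callback, before the next script rerun: it archives
# the finished chat into "stored_session", then resets the per-chat state and the entity memory.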
# Add a button to start a new chat
st.sidebar.button("New Chat", on_click = new_chat, type='primary')
# Sidebar control for the size of the memory window (K)
K = st.sidebar.number_input('(#) Number of prompts to consider', min_value=3, max_value=1000)
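# K is passed below as the `k` argument of ConversationEntityMemory, so it bounds how many
# recent exchanges the memory keeps in its buffer when building the prompt.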
# Set up the Streamlit app layout
st.title("Personalized chatbot")
# Create the prompt (choose among template_mario, template_poet, template_helper, template_chef, template_gym, ...)
prompt = PromptTemplate.from_template(os.environ["template_chef"])
# Reference this variable in llm_chain ONLY if you are going to use memory
prompt_memory = ENTITY_MEMORY_CONVERSATION_TEMPLATE
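# The template_* environment variables hold plain prompt strings configured in the Space's
# settings; a hypothetical example of what one might look like (the real values are not in this file):
#   template_chef = "You are an experienced chef. Answer the user's question: {input}"
# ENTITY_MEMORY_CONVERSATION_TEMPLATE, by contrast, already expects the "entities", "history"
# and "input" variables, which ConversationEntityMemory fills in automatically.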
# Create the Hugging Face Inference Endpoint LLM
llm = HuggingFaceEndpoint(repo_id='mistralai/Mistral-7B-Instruct-v0.2',
temperature=0.3,
model_kwargs = {"max_length":128},
huggingfacehub_api_token = os.environ["HUGGINGFACEHUB_API_TOKEN"])
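# The endpoint reads HUGGINGFACEHUB_API_TOKEN from the environment (a Space secret);
# if the variable is missing, the os.environ lookup above raises a KeyError.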
# Create a ConversationEntityMemory object if not already created
if 'entity_memory' not in st.session_state:
st.session_state.entity_memory = ConversationEntityMemory(llm=llm, k=K )
# Create the LLMChain object with the specified configuration
llm_chain = LLMChain(
    llm=llm,
    prompt=prompt_memory,  # change this to `prompt` if you are not going to use memory
    memory=st.session_state.entity_memory,
    output_parser=StrOutputParser(),
)
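# With ConversationEntityMemory attached, each call loads "history" and "entities" from memory
# into ENTITY_MEMORY_CONVERSATION_TEMPLATE and saves the new exchange back afterwards.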
# Get the user input
user_input = get_text()
# Generate the output using the LLMChain and the user input, and add the input/output to the session state
if user_input:
    output = llm_chain.run(input=user_input)
    st.session_state.past.append(user_input)
    st.session_state.generated.append(output)
# Display the conversation history using an expander, and allow the user to download it
with st.expander("Conversation", expanded=True):
for i in range(len(st.session_state['generated'])-1, -1, -1):
st.info(st.session_state["past"][i],icon="🧐")
st.success(st.session_state["generated"][i], icon="🤖")
# Display stored conversation sessions in the sidebar
for i, sublist in enumerate(st.session_state.stored_session):
with st.sidebar.expander(label= f"Conversation-Session:{i}"):
st.write(sublist)
# Allow the user to clear all stored conversation sessions
if st.session_state.stored_session:
if st.sidebar.checkbox("Clear-all"):
        del st.session_state.stored_session