# Streamlit + LangChain chatbot demo (Hugging Face Space)
import subprocess
import sys

# Install required packages at startup (no requirements step in this Space).
# Fix: invoke pip through the *running* interpreter (`sys.executable -m pip`)
# instead of a bare "pip", which may belong to a different environment.
subprocess.check_call(
    [sys.executable, "-m", "pip", "install", "langchain_community", "langchain"]
)
import os

import streamlit as st
from langchain.chains import ConversationChain, LLMChain
from langchain.chains.conversation.memory import ConversationEntityMemory
from langchain.chains.conversation.prompt import ENTITY_MEMORY_CONVERSATION_TEMPLATE
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate
from langchain_community.llms import HuggingFaceEndpoint
# --- Page / UI setup ---
st.set_page_config(page_title="bot", layout="wide")
st.title("Maqueta")

# --- LLM ---
# HuggingFaceEndpoint queries the HF Inference API for the given repo.
# Fix: the original passed a misspelled kwarg `higgingfacehub_api_token`,
# so the API token was never forwarded to the endpoint; the correct
# parameter name is `huggingfacehub_api_token`.
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-v0.1",
    max_length=128,
    temperature=0.5,
    huggingfacehub_api_token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
)

# --- Conversation chain backed by a plain buffer memory (verbose for debug) ---
conversation_buf = ConversationChain(
    llm=llm,
    memory=ConversationBufferMemory(),
    verbose=True,
)
# Seed per-session state on first run. Streamlit reruns the whole script on
# every interaction, so only keys that are still missing get their defaults.
_session_defaults = {
    "generated": [],       # model responses, in order
    "past": [],            # user inputs, in order
    "input": "",           # current content of the text box
    "stored_session": [],  # archived conversations
}
for _key, _default in _session_defaults.items():
    st.session_state.setdefault(_key, _default)
def get_text():
    """Render the chat input box and return whatever the user typed.

    Returns:
        str: The text currently entered by the user.
    """
    return st.text_input(
        "You: ",
        st.session_state["input"],
        key="input",
        placeholder="Your AI assistant here! Ask me anything ...",
        label_visibility="hidden",
    )
user_input = get_text()

# Create the entity memory once per session.
# Fix: the original tested for the key 'entity memory' (with a space) but
# stored the memory under the attribute `entity_memory`, so the check never
# matched and a fresh memory was created on every rerun, wiping the
# conversation's entity history.
if "entity_memory" not in st.session_state:
    st.session_state.entity_memory = ConversationEntityMemory(llm=llm, k=10)

Conversation = ConversationChain(
    llm=llm,
    prompt=ENTITY_MEMORY_CONVERSATION_TEMPLATE,
    memory=st.session_state.entity_memory,
)

submit = st.button("Generate")

# Fix: the original used `while submit:`, which loops forever once the button
# is pressed (`submit` never changes inside the loop body). One generation per
# click is the intended behavior.
if submit:
    output = Conversation.run(input=user_input)
    st.session_state.past.append(user_input)
    st.session_state.generated.append(output)