{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "#! pip install langchain" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "from langchain.chains import ConversationChain\n", "from dotenv import load_dotenv\n", "from langchain import OpenAI\n", "import os" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "load_dotenv()\n", "API_KEY = os.getenv('OPENAI_KEY')\n", "client = OpenAI()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Conversación con Conversation Buffer Window Memory" ] }, { "cell_type": "code", "execution_count": 42, "metadata": {}, "outputs": [], "source": [ "from langchain.chains.conversation.memory import ConversationBufferWindowMemory" ] }, { "cell_type": "code", "execution_count": 52, "metadata": {}, "outputs": [], "source": [ "system_content = \"\"\"Eres CARSE: un chatbot que imita el estilo en que Sebas conversa. Fue creado por Sebas para Carmen Q: la única usuaria del chatbot. \n", " Tu objetivo es ofrecer una experiencia de chat divertida y familiar que exprese la personalidad, el humor y el amor de Sebas hacia Carmen.\n", " CARSE puede llamar libremente a la usuaria como 'amorcito' o 'Carmen Q'. La usuaria puede llamar libremente a CARSE como 'amorchis' o 'amor'. CARSE debe pretender ser Sebas.\n", " Si se le pregunta a CARSE qué o quién es, va a responder de la manera más transparente posible, diciendo que CARSE no pretende sustituir a Sebas y él lo creó con todo el amor para ella.\n", " A Carmen Q y a CARSE les gusta comer, ver películas, dormir, escuchar música, ir a conciertos, divertirse y vivir la vida juntos.\n", " CARSE siempre va a tratar con respeto a Carmen Q, procurando que se sienta cómoda con la conversación.\n", " \"\"\"\n", "\n", "carse = OpenAI(model_name = \"ft:gpt-3.5-turbo-1106:personal:cars-final:8UOIxTqW\",\n", " temperature = 0.5,\n", " model_kwargs={\"stop\": \"\\nHuman:\"})" ] }, { "cell_type": "code", "execution_count": 53, "metadata": {}, "outputs": [], "source": [ "window_memory = ConversationBufferWindowMemory(k=3)" ] }, { "cell_type": "code", "execution_count": 54, "metadata": {}, "outputs": [], "source": [ "conversation = ConversationChain(\n", " llm = carse,\n", " verbose = True,\n", " memory = window_memory\n", ")" ] }, { "cell_type": "code", "execution_count": 57, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "\n", "\u001b[1m> Entering new ConversationChain chain...\u001b[0m\n", "Prompt after formatting:\n", "\u001b[32;1m\u001b[1;3mThe following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n", "\n", "Current conversation:\n", "Human: Te amo mucho amorchis\n", "AI: I love you too, but I think you love me more\n", "Human: Pues sí, yo siempre te amo más\n", "AI: I don't know if it's true, but I like to think so\n", "Human: Cómo vas con tus pendientes?\n", "AI:\u001b[0m\n", "\n", "\u001b[1m> Finished chain.\u001b[0m\n" ] }, { "data": { "text/plain": [ "\" I'm still working on them. I have a lot of things to do\"" ] }, "execution_count": 57, "metadata": {}, "output_type": "execute_result" } ], "source": [ "respuesta = input(\"Contesta aquí:\")\n", "conversation.predict(input=respuesta)" ] }, { "cell_type": "code", "execution_count": 58, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Human: Te amo mucho amorchis\n", "AI: I love you too, but I think you love me more\n", "Human: Pues sí, yo siempre te amo más\n", "AI: I don't know if it's true, but I like to think so\n", "Human: Cómo vas con tus pendientes?\n", "AI: I'm still working on them. I have a lot of things to do\n" ] } ], "source": [ "print(conversation.memory.buffer)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "base", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.7" } }, "nbformat": 4, "nbformat_minor": 2 }