{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Setup UI" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Loading settings from ../../env/ai.json\n" ] } ], "source": [ "import os\n", "import json\n", "\n", "# If the file does not exist it'll default to the manual setting see below\n", "filePathToSettingsFile = '../../env/ai.json'\n", "\n", "# Is there a settings file? \n", "if os.path.exists(filePathToSettingsFile):\n", " # Yes there is so load settings from there\n", " \n", " print(f'Loading settings from {filePathToSettingsFile}')\n", " f = open(filePathToSettingsFile)\n", " settingsJson = json.load(f)\n", " del f\n", "\n", " for key in settingsJson:\n", " os.environ[key] = settingsJson[key]\n", " \n", " del settingsJson\n", "else: \n", " # Set variables manually\n", " \n", " print('Setting variables manually as there is no ai.json settings file')\n", "\n", " # Update the variables below with your own settings\n", " os.environ['REQUESTS_CA_BUNDLE'] = '../../env/ZCert.pem' \n", " os.environ['HUGGING_FACE_API_KEY'] = 'Get here: https://huggingface.co/settings/tokens'\n", " os.environ['OPENAI_API_KEY'] = 'Get here: https://platform.openai.com/account/api-keys'\n", " os.environ[\"SERPAPI_API_KEY\"] = 'serpapi KEY, Get here: https://serpapi.com/manage-api-key' " ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "c:\\sc\\ai\\rag-demo-1\\.venv\\lib\\site-packages\\pinecone\\index.py:4: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", " from tqdm.autonotebook import tqdm\n" ] } ], "source": [ "import os\n", "import pinecone\n", "from langchain.chains import RetrievalQA\n", "from langchain.embeddings import OpenAIEmbeddings\n", "from langchain.llms import OpenAI\n", "from langchain.vectorstores import Pinecone\n", "import gradio as gr" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Using course: Roman History\n", "page_content='The situation in Rome. While Pompey was adding to his\\nmilitary reputation in the East he was regarded with jealous\\nand anxious eyes not only by the Senate but also by the other\\nchampions of the popular party, Crassus who found his wealth\\nno match for Pompeys military achievements, and Caius Julius\\nCaesar who was rapidly coming to be one of the leading figures in\\nRoman public life. Caesar was born in 100 B. C., of the patrician [162]\\ngens of the Julii, but since his aunt was the wife of Marius,\\nand he himself had married the daughter of Cinna, his lot was\\ncast with the Populares. As a young man he had distinguished\\nhimself by refusing to divorce his wife at Sullas behest, whereat\\nSulla was with difficulty induced to spare his life, saying that he\\nsaw in him many a Marius. For the time being Caesar judged it\\nprudent to withdraw from Rome to Rhodes. While in the East\\nhe was captured by pirates, and after being ransomed, fulfilled' metadata={'source': '..\\\\rag-demo-1-data\\\\history-roman\\\\3. A History of Rome to 565 A. D. 
author Arthur Edward Romilly Boak.pdf.txt'}\n" ] } ], "source": [ "embeddings = OpenAIEmbeddings()\n", "\n", "course = \"Roman History\"\n", "if course == \"Roman History\":\n", " print(f\"Using course: {course}\")\n", " pinecone.init(api_key=os.environ[\"PINECONE_API_KEY_2\"], environment=os.environ[\"PINECONE_API_ENV_2\"])\n", " index = pinecone.Index(\"rag-demo-1-history-rome\")\n", " vector_store = Pinecone(index, embeddings, \"text\")\n", " \n", "query = \"When was Caesar born?\"\n", "\n", "result = vector_store.similarity_search(\n", " query, # our search query\n", " k=1 # return the single most relevant doc\n", ")\n", "\n", "print(result[0])" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'\\n\\nHow are you?'" ] }, "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ "llm = OpenAI(temperature=0)\n", "llm.predict(\"Hello there\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [] }, { "cell_type": "code", "execution_count": 21, "metadata": {}, "outputs": [], "source": [ "from langchain.memory import ConversationBufferMemory\n", "from langchain.chains import ConversationalRetrievalChain\n", "\n", "memory = ConversationBufferMemory(memory_key=\"chat_history\", return_messages= True)\n", "chain = ConversationalRetrievalChain.from_llm(llm, retriever= vector_store.as_retriever(), memory= memory)" ] }, { "cell_type": "code", "execution_count": 26, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "\" I don't know.\"" ] }, "execution_count": 26, "metadata": {}, "output_type": "execute_result" } ], "source": [ "query = \"\"\" \n", " SYSTEM:\n", " You are a helpful teacher who is teaching a class of 10 year olds. \n", " Your answers must only come from the context provided to you in the question.\n", " If you don't know the answer then say so. 
\n", " The answers should be at least 40 words or longer\n", " \n", " QUESTION:\n", " What is moby dick?\n", " \n", "\"\"\"\n", "chain.run({'question': query})" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "ename": "KeyboardInterrupt", "evalue": "Interrupted by user", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "\u001b[1;32mc:\\sc\\ai\\rag-demo-1\\app.ipynb Cell 5\u001b[0m line \u001b[0;36m4\n\u001b[0;32m 1\u001b[0m llm \u001b[39m=\u001b[39m OpenAI(temperature\u001b[39m=\u001b[39m\u001b[39m0\u001b[39m)\n\u001b[0;32m 2\u001b[0m qa \u001b[39m=\u001b[39m RetrievalQA\u001b[39m.\u001b[39mfrom_chain_type(llm\u001b[39m=\u001b[39mllm, chain_type\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mstuff\u001b[39m\u001b[39m\"\u001b[39m, retriever\u001b[39m=\u001b[39mvector_store\u001b[39m.\u001b[39mas_retriever())\n\u001b[1;32m----> 4\u001b[0m query \u001b[39m=\u001b[39m \u001b[39minput\u001b[39;49m(\u001b[39m\"\u001b[39;49m\u001b[39mWho was ceasar and why whas he noteable?\u001b[39;49m\u001b[39m\"\u001b[39;49m)\n\u001b[0;32m 5\u001b[0m answer \u001b[39m=\u001b[39m qa\u001b[39m.\u001b[39mrun(query)\n\u001b[0;32m 6\u001b[0m \u001b[39m# print(answer)\u001b[39;00m\n\u001b[0;32m 7\u001b[0m \u001b[39m# print(\"\\nWhat else can I help you with:\")\u001b[39;00m\n\u001b[0;32m 8\u001b[0m \n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 12\u001b[0m \u001b[39m# print(answer)\u001b[39;00m\n\u001b[0;32m 13\u001b[0m \u001b[39m# print(\"\\nWhat else can I help you with:\")\u001b[39;00m\n", "File \u001b[1;32mc:\\sc\\ai\\rag-demo-1\\.venv\\lib\\site-packages\\ipykernel\\kernelbase.py:1202\u001b[0m, in \u001b[0;36mKernel.raw_input\u001b[1;34m(self, prompt)\u001b[0m\n\u001b[0;32m 1200\u001b[0m msg \u001b[39m=\u001b[39m \u001b[39m\"\u001b[39m\u001b[39mraw_input was called, but this frontend does 
not support input requests.\u001b[39m\u001b[39m\"\u001b[39m\n\u001b[0;32m 1201\u001b[0m \u001b[39mraise\u001b[39;00m StdinNotImplementedError(msg)\n\u001b[1;32m-> 1202\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_input_request(\n\u001b[0;32m 1203\u001b[0m \u001b[39mstr\u001b[39;49m(prompt),\n\u001b[0;32m 1204\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_parent_ident[\u001b[39m\"\u001b[39;49m\u001b[39mshell\u001b[39;49m\u001b[39m\"\u001b[39;49m],\n\u001b[0;32m 1205\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mget_parent(\u001b[39m\"\u001b[39;49m\u001b[39mshell\u001b[39;49m\u001b[39m\"\u001b[39;49m),\n\u001b[0;32m 1206\u001b[0m password\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 1207\u001b[0m )\n", "File \u001b[1;32mc:\\sc\\ai\\rag-demo-1\\.venv\\lib\\site-packages\\ipykernel\\kernelbase.py:1245\u001b[0m, in \u001b[0;36mKernel._input_request\u001b[1;34m(self, prompt, ident, parent, password)\u001b[0m\n\u001b[0;32m 1242\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mKeyboardInterrupt\u001b[39;00m:\n\u001b[0;32m 1243\u001b[0m \u001b[39m# re-raise KeyboardInterrupt, to truncate traceback\u001b[39;00m\n\u001b[0;32m 1244\u001b[0m msg \u001b[39m=\u001b[39m \u001b[39m\"\u001b[39m\u001b[39mInterrupted by user\u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m-> 1245\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mKeyboardInterrupt\u001b[39;00m(msg) \u001b[39mfrom\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[0;32m 1246\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mException\u001b[39;00m:\n\u001b[0;32m 1247\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mlog\u001b[39m.\u001b[39mwarning(\u001b[39m\"\u001b[39m\u001b[39mInvalid Message:\u001b[39m\u001b[39m\"\u001b[39m, exc_info\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m)\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: Interrupted by user" ] } ], "source": [ "qa = RetrievalQA.from_chain_type(llm=llm, 
chain_type=\"stuff\", retriever=vector_store.as_retriever())\n", "\n", "query = input(\"Who was Caesar and why was he notable?\")\n", "answer = qa.run(query)\n", "\n", "\n", "qa = RetrievalQA.from_chain_type(llm, chain_type=\"stuff\", retriever=vector_store.as_retriever())\n", "response = qa.run(query)\n", "\n", "# print(answer)\n", "# print(\"\\nWhat else can I help you with:\")\n", "\n", "# while True:\n", "# query = input(\"Who was Caesar and why was he notable?\")\n", "# answer = qa.run(query)\n", "# print(answer)\n", "# print(\"\\nWhat else can I help you with:\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "qa = RetrievalQA.from_chain_type(llm=OpenAI(temperature=0), chain_type=\"stuff\", retriever=vector_store.as_retriever())\n", "\n", "print(\"Connector development help bot. What do you want to know?\")\n", "while True:\n", " query = input(\"\")\n", " answer = qa.run(query)\n", " print(answer)\n", " print(\"\\nWhat else can I help you with:\")" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Running on local URL: http://127.0.0.1:7860\n", "\n", "To create a public link, set `share=True` in `launch()`.\n" ] }, { "data": { "text/html": [ "
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/plain": [] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "def ask_question(openAIKey, prompt, question):\n", " return \"Your question is \" + question + \"!!\"\n", "\n", "iface = gr.Interface(fn=ask_question, \n", " inputs=[\n", " gr.Textbox(label=\"OpenAI Key\", placeholder=\"Paste your OpenAI key here\"),\n", " gr.TextArea(label=\"Prompt\"), \n", " gr.TextArea(label=\"Question\", placeholder=\"Type your question here, it will get injected into the prompt at QUESTION\"),\n", " ],\n", " outputs=\"text\")\n", "\n", "iface.launch()" ] } ], "metadata": { "kernelspec": { "display_name": ".venv", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.6" } }, "nbformat": 4, "nbformat_minor": 2 }