File size: 2,501 Bytes
13fab0d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.vectorstores import Pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
import pinecone
import os
import logging

# Load environment variables
from dotenv import load_dotenv
load_dotenv()

# SECURITY FIX: credentials now come from the environment (populated by the
# .env file above) instead of being hard-coded. A key committed to source
# control is compromised the moment it is pushed — rotate any previously
# hard-coded keys. Non-secret settings keep their previous values as defaults.
pinecone_api_key = os.getenv('PINECONE_API_KEY')
pinecone_environment = os.getenv('PINECONE_ENVIRONMENT', 'us-west1-gcp-free')
pinecone_index_name = os.getenv('PINECONE_INDEX_NAME', 'chatbot')
openai_api_key = os.getenv('OPENAI_API_KEY')

# Set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

def create_conversation(query: str, chat_history: list) -> tuple:
    """Answer *query* against the Pinecone-backed index, threading chat history.

    Args:
        query: The user's question for this turn.
        chat_history: List of (question, answer) tuples from prior turns.
            Mutated in place: this turn's result (or error) is appended.

    Returns:
        tuple: ``('', chat_history)`` — the empty string clears the UI input
        box; the updated history follows. Errors are never raised to the
        caller; they are logged and surfaced as an "Error: ..." answer so the
        chat UI keeps working.
    """
    logger.debug("Starting create_conversation function")

    try:
        logger.debug("Initializing Pinecone")
        pinecone.init(api_key=pinecone_api_key, environment=pinecone_environment)
        logger.debug("Pinecone initialized")

        logger.debug("Creating embeddings")
        embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
        logger.debug("Embeddings created")

        logger.debug("Accessing existing Pinecone index")
        db = Pinecone.from_existing_index(index_name=pinecone_index_name, embedding=embeddings)
        logger.debug("Index accessed")

        # BUG FIX: the original attached a *fresh* ConversationBufferMemory on
        # every call. LangChain's Chain.prep_inputs merges memory-loaded
        # variables over the call inputs, so the empty memory silently
        # clobbered the caller-supplied chat_history and every turn ran with
        # no context. History is managed by the caller here, so no Memory
        # object is attached; the explicit chat_history input is used as-is.
        logger.debug("Creating ConversationalRetrievalChain")
        cqa = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(temperature=0.0, openai_api_key=openai_api_key),
            retriever=db.as_retriever(),
            get_chat_history=lambda h: h,  # history is already usable as-is
        )
        logger.debug("ConversationalRetrievalChain created")

        # Lazy %-style args avoid formatting cost when DEBUG is disabled.
        logger.debug("Processing query: %s", query)
        result = cqa({'question': query, 'chat_history': chat_history})
        logger.debug("Received result: %s", result)

        logger.debug("Appending result to chat history")
        chat_history.append((query, result['answer']))
        logger.debug("Updated chat history: %s", chat_history)

        return '', chat_history

    except Exception as e:
        # Top-level boundary: log the full traceback, then surface the error
        # in-chat instead of crashing the UI.
        logger.exception("Exception occurred in create_conversation")
        chat_history.append((query, "Error: " + str(e)))
        return '', chat_history