prototipo-1-rag / agent.py
Pecximenes's picture
Removing directory and organizing files
94e320d
raw
history blame
2.58 kB
import os
from dotenv import load_dotenv
from openai import OpenAI
from qdrant_client import QdrantClient
from pipelines.message import send_message
import redis
conversation_chat = []
def run():
    """Interactive RAG chat loop.

    Per turn: embed the user's question with OpenAI, retrieve the top-3
    child chunks from Qdrant, resolve their parent documents from Redis,
    then stream the agent's answer via ``send_message``.

    Loops until the user answers "s" at the exit prompt. Any error is
    reported at this top-level boundary and ends the session.
    """
    load_dotenv()
    try:
        oa_client = OpenAI(
            api_key=os.environ.get("OPENAI_API_KEY")
        )
        print("✅ Conectado a OpenAI.")

        # Env vars arrive as strings; both clients expect integer ports.
        qdrant_client = QdrantClient(
            host=os.environ.get("QDRANT_HOST"),
            port=int(os.environ.get("QDRANT_PORT", "6333")),
        )
        print("✅ Conectado ao Qdrant.")

        redis_client = redis.Redis(
            host=os.environ.get("REDIS_HOST"),
            port=int(os.environ.get("REDIS_PORT", "6379")),
            decode_responses=True,  # hash fields come back as str, not bytes
        )
        print("✅ Conectado ao Redis.")

        while True:
            prompt = input("Digite sua pergunta: ")

            embedding = oa_client.embeddings.create(
                input=[prompt],
                model=os.environ.get("OPENAI_MODEL_EMBEDDING"),
            ).data[0].embedding

            child_texts = qdrant_client.search(
                collection_name=os.environ.get("COLLECTION_NAME"),
                query_vector=embedding,
                limit=3,
            )
            print("--------- Child text ---------")
            print(child_texts)

            contexts = []
            for child_text in child_texts:
                # search() yields ScoredPoint objects; read .payload directly.
                # (The previous child_text[0].payload indexed a ScoredPoint,
                # which is not subscriptable.)
                parent_text = redis_client.hgetall(
                    child_text.payload["parent_id"]
                )
                contexts.append({
                    "content": parent_text["content"],
                    "url": parent_text["url"],
                })
            print("--------- Contexts ---------")
            print(contexts)

            # BUG FIX: previously passed `context` — the loop variable holding
            # only the LAST retrieved document (and a NameError when nothing
            # was retrieved) — instead of the full `contexts` list built above.
            stream_response = send_message(
                oa_client,
                contexts,
                prompt,
                conversation_chat,
            )

            print("--------- Response Agent ---------")
            response = ""
            for chunk in stream_response:
                delta = chunk.choices[0].delta.content
                if delta is not None:
                    response += delta
                    print(delta, end="")

            # NOTE(review): only the assistant turn is recorded here —
            # presumably send_message appends the user turn; verify there.
            conversation_chat.append({
                "role": "assistant",
                "content": response,
            })

            # Accept "s", "S", " s " etc. as confirmation to exit.
            is_exit = input("\nDeseja sair? (s/n): ")
            if is_exit.strip().lower() == "s":
                break
    except Exception as error:
        # Top-level boundary: report the failure and end the session.
        print(f"❌ Erro: {error}")
# Script entry point: start the interactive chat loop when run directly.
if __name__ == "__main__":
    run()