# shreya-bot / main.py
from utils import create_index, get_agent_chain, get_prompt_and_tools, get_search_index
from utils import get_custom_agent, get_prompt_and_tools_for_custom_agent
question_starters = ['who', 'why', 'what', 'how', 'where', 'when', 'which', 'whom', 'whose']
def run(question):
    # Load/build the search index (return value not used directly here).
    index = get_search_index()

    # Default prompt/tools path, kept for reference:
    # prompt, tools = get_prompt_and_tools()
    # agent_chain = get_agent_chain(prompt, tools)

    prompt, tools = get_prompt_and_tools_for_custom_agent()
    agent_chain = get_custom_agent(prompt, tools)

    result = None
    try:
        result = agent_chain.run(question)
        print(result)
    except ValueError as ve:
        # The agent can fail to parse the LLM output when a question-style
        # query lacks a trailing '?'. Retry once with the question mark added.
        if ("Could not parse LLM output:" in ve.args[0]
                and question.lower().startswith(tuple(question_starters))
                and not question.lower().endswith('?')):
            question = question + '?'
            result = agent_chain.run(question)
    return result
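

# Minimal usage sketch, assuming this module is run directly; in the original
# repo the run() function may instead be called from a separate app entry
# point. The example question below is hypothetical.
if __name__ == "__main__":
    answer = run("What can the makerlab bot help with")
    print(answer if answer is not None else "No answer could be generated.")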