from langchain_experimental.llms.ollama_functions import OllamaFunctions
from langchain_core.messages import HumanMessage, ToolMessage
from langchain_core.tools import tool  # gives each function a .invoke() method

# Local Ollama model configured to emit structured (JSON) tool calls.
model = OllamaFunctions(
    model="llama3",
    format="json",
)

@tool
def add(a: int, b: int) -> int:
    """Adds a and b. Args: a: first int, b: second int."""
    return a + b

@tool
def multiply(a: int, b: int) -> int:
    """Multiplies a and b. Args: a: first int, b: second int."""
    return a * b

# Tools the model is allowed to call.
tools = [add, multiply]
llm_with_tools = model.bind_tools(tools)

# Example question that the model should answer by calling the tools above.
query = "What is 3 * 12? Also, what is 11 + 49?"
messages = [HumanMessage(query)]

ai_msg = llm_with_tools.invoke(messages)
messages.append(ai_msg)

# Execute each requested tool call and feed the result back as a ToolMessage.
for tool_call in ai_msg.tool_calls:
    selected_tool = {"add": add, "multiply": multiply}[tool_call["name"].lower()]
    tool_output = selected_tool.invoke(tool_call["args"])
    messages.append(ToolMessage(str(tool_output), tool_call_id=tool_call["id"]))

messages
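
With the tool results appended, the conversation can be passed back to the model so it can phrase a final answer. This is a minimal sketch only; how reliably the experimental OllamaFunctions wrapper handles ToolMessage round-trips can vary by langchain version, so treat it as an illustration rather than guaranteed behavior.

# Hedged sketch: ask the model to compose a final answer from the tool results.
final_response = llm_with_tools.invoke(messages)
print(final_response.content)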