You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Description:
I am encountering an issue when using the LangGraph React Agent with the MistralAI mistral-large model, particularly in scenarios involving consecutive tool calls together with astream_events for handling asynchronous events. The tool name and ID are correctly populated for the first call, but they fail to populate during the second consecutive call, causing incomplete or incorrect execution.
from langchain_core.tools import tool
from typing import Literal
@tool("search")
def search(query: str):
"""Call to surf the web for capital of countries"""
# This is a placeholder, but don't tell the LLM that...
return ["capital of america is washington D.C."]
@tool("get_weather")
def get_weather(city: Literal["nyc", "sf"]):
"""Use this to get weather information."""
if city == "nyc":
return "It might be cloudy in nyc"
elif city == "sf":
return "It's always sunny in sf"
else:
raise AssertionError("Unknown city")
# Register both tools and bind them to the chat model so it can emit tool calls.
tools = [search, get_weather]
model = model.bind_tools(tools)  # NOTE(review): `model` is initialized in the full snippet below (ChatWatsonx)
Define the graph
# Build a prebuilt ReAct-style agent graph around the tool-bound model.
from langgraph.prebuilt import create_react_agent
graph = create_react_agent(model, tools=tools)
from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
buffer = ""
async for event in graph.astream_events(
{"messages": [HumanMessage(content=' what is the weather in the NY')]},
{"recursion_limit": 10},
version="v2"
):
event_type = event["event"]
if event_type == "on_chat_model_stream":
# Handle the main content streaming from the language model
chunk = event["data"]["chunk"]
content = chunk.content if hasattr(chunk, "content") else str(chunk)
buffer += content
if content:
# Stream the content chunk immediately
print (buffer)
Using a query that will call multiple tools
# Second run: a query expected to trigger two consecutive tool calls —
# this is where the reported bug appears (tool name/ID missing on the 2nd call).
async for event in graph.astream_events(
    {"messages": [HumanMessage(content='what is the weather in the NY and capital of america')]},
    {"recursion_limit": 10},
    version="v2"
):
    event_type = event["event"]
    print(event)  # dump every raw event to inspect the malformed tool-call chunks
    if event_type == "on_chat_model_stream":
        # Handle the main content streaming from the language model
        chunk = event["data"]["chunk"]
        content = chunk.content if hasattr(chunk, "content") else str(chunk)
        buffer += content
        if content:
            # Stream the content chunk immediately
            print (buffer)
Error screenshot
The text was updated successfully, but these errors were encountered:
Description:
I am encountering an issue when using the LangGraph React Agent with the MistralAI mistral-large model, particularly in scenarios involving consecutive tool calls together with astream_events for handling asynchronous events. The tool name and ID are correctly populated for the first call, but they fail to populate during the second consecutive call, causing incomplete or incorrect execution.
Minimum viable code:
# watsonx.ai connection settings (credentials redacted in the report).
WATSONX_URL = "https://us-south.ml.cloud.ibm.com"
WATSONX_PROJECT_ID = "XXXXXXX"
WATSONX_APIKEY = "XXXXXXX"
from langchain_ibm import ChatWatsonx
Initialize LLM
# Initialize the watsonx.ai hosted chat model (MistralAI mistral-large).
# Other models tried per the report: meta-llama/llama-3-405b-instruct,
# ibm/granite-13b-instruct-v2, ibm/granite-3-8b-instruct,
# mistralai/mixtral-8x7b-instruct-v01.
model = ChatWatsonx(model_id = 'mistralai/mistral-large',
    params = {
        'decoding_method': 'greedy',  # deterministic decoding (temperature 0)
        'max_new_tokens': 4000,
        'min_new_tokens': 0,
        'temperature': 0,
    },
    project_id = WATSONX_PROJECT_ID,
    url=WATSONX_URL,
    apikey=WATSONX_APIKEY )
from langchain_core.tools import tool
from typing import Literal
@tool("search")
def search(query: str):
"""Call to surf the web for capital of countries"""
# This is a placeholder, but don't tell the LLM that...
return ["capital of america is washington D.C."]
@tool("get_weather")
def get_weather(city: Literal["nyc", "sf"]):
"""Use this to get weather information."""
if city == "nyc":
return "It might be cloudy in nyc"
elif city == "sf":
return "It's always sunny in sf"
else:
raise AssertionError("Unknown city")
# Register both tools and bind them to the chat model so it can emit tool calls.
tools = [search, get_weather]
model = model.bind_tools(tools)
Define the graph
# Build a prebuilt ReAct-style agent graph around the tool-bound model.
from langgraph.prebuilt import create_react_agent
graph = create_react_agent(model, tools=tools)
from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
buffer = ""
async for event in graph.astream_events(
{"messages": [HumanMessage(content=' what is the weather in the NY')]},
{"recursion_limit": 10},
version="v2"
):
event_type = event["event"]
Using a query that will call multiple tools
# Second run: a query expected to trigger two consecutive tool calls —
# this is where the reported bug appears (tool name/ID missing on the 2nd call).
async for event in graph.astream_events(
    {"messages": [HumanMessage(content='what is the weather in the NY and capital of america')]},
    {"recursion_limit": 10},
    version="v2"
):
    event_type = event["event"]
    print(event)  # dump every raw event to inspect the malformed tool-call chunks
    if event_type == "on_chat_model_stream":
        # Handle the main content streaming from the language model
        chunk = event["data"]["chunk"]
        content = chunk.content if hasattr(chunk, "content") else str(chunk)
        buffer += content
        if content:
            # Stream the content chunk immediately
            print (buffer)
Error screenshot
The text was updated successfully, but these errors were encountered: