From e90ea8a9c0b26b8044f406a96cf2f476a3504f44 Mon Sep 17 00:00:00 2001 From: Azraf <69325302+turboslapper@users.noreply.github.com> Date: Wed, 22 Jan 2025 02:55:53 -0500 Subject: [PATCH] Replace `initialize_agent` with `create_react_agent` for LangGraph compatibility - Updated code to use `create_react_agent` instead of the deprecated `initialize_agent`. - Removed explicit `AgentType` parameters as they are implicitly supported by `create_react_agent`. - Replaced `verbose=True` with `debug=True` for detailed logging. Partially addresses #29277 --- docs/docs/integrations/callbacks/llmonitor.md | 13 +++++- .../callbacks/sagemaker_tracking.ipynb | 9 ++-- .../providers/comet_tracking.ipynb | 13 +++--- .../integrations/providers/google_serper.mdx | 10 ++-- .../docs/integrations/providers/searchapi.mdx | 10 ++-- docs/docs/integrations/tools/awslambda.ipynb | 9 ++-- docs/docs/integrations/tools/bash.ipynb | 8 ++-- docs/docs/integrations/tools/bearly.ipynb | 12 ++--- .../integrations/tools/chatgpt_plugins.ipynb | 43 +++++++++++++---- docs/docs/integrations/tools/connery.ipynb | 16 ++++--- docs/docs/integrations/tools/gitlab.ipynb | 8 ++-- .../integrations/tools/google_finance.ipynb | 11 +++-- docs/docs/integrations/tools/memorize.ipynb | 16 ++++--- docs/docs/integrations/tools/nasa.ipynb | 8 ++-- .../integrations/tools/openweathermap.ipynb | 9 ++-- docs/docs/integrations/tools/playwright.ipynb | 13 +++--- docs/docs/integrations/tools/searchapi.ipynb | 9 ++-- docs/docs/integrations/tools/zapier.ipynb | 46 ++++++++++--------- 18 files changed, 165 insertions(+), 98 deletions(-) diff --git a/docs/docs/integrations/callbacks/llmonitor.md b/docs/docs/integrations/callbacks/llmonitor.md index ac455c63c0582..1402f0e264a2c 100644 --- a/docs/docs/integrations/callbacks/llmonitor.md +++ b/docs/docs/integrations/callbacks/llmonitor.md @@ -83,16 +83,25 @@ agent_executor.run("how many letters in the word educa?", callbacks=[handler]) Another example: ```python -from langchain.agents 
import load_tools, initialize_agent, AgentType +from langchain.agents import load_tools from langchain_openai import OpenAI from langchain_community.callbacks.llmonitor_callback import LLMonitorCallbackHandler +from langgraph.prebuilt import create_react_agent handler = LLMonitorCallbackHandler() llm = OpenAI(temperature=0) tools = load_tools(["serpapi", "llm-math"], llm=llm) -agent = initialize_agent(tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, metadata={ "agent_name": "GirlfriendAgeFinder" }) # <- recommended, assign a custom name +system_prompt = "You are a helpful assistant named GirlfriendAgeFinder." + +agent = create_react_agent( + model=llm, + tools=tools, + # Add a system prompt (or other advanced instructions). + state_modifier=system_prompt, + debug=False, +) agent.run( "Who is Leo DiCaprio's girlfriend? What is her current age raised to the 0.43 power?", diff --git a/docs/docs/integrations/callbacks/sagemaker_tracking.ipynb b/docs/docs/integrations/callbacks/sagemaker_tracking.ipynb index adaa7d1571750..04b13faf257b0 100644 --- a/docs/docs/integrations/callbacks/sagemaker_tracking.ipynb +++ b/docs/docs/integrations/callbacks/sagemaker_tracking.ipynb @@ -89,7 +89,8 @@ }, "outputs": [], "source": [ - "from langchain.agents import initialize_agent, load_tools\n", + "from langgraph.prebuilt import create_react_agent\n", + "from langchain.agents import load_tools\n", "from langchain.chains import LLMChain, SimpleSequentialChain\n", "from langchain_core.prompts import PromptTemplate\n", "from langchain_openai import OpenAI\n", @@ -294,8 +295,10 @@ " tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm, callbacks=[sagemaker_callback])\n", "\n", " # Initialize agent with all the tools\n", - " agent = initialize_agent(\n", - " tools, llm, agent=\"zero-shot-react-description\", callbacks=[sagemaker_callback]\n", + " agent = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True,\n", " )\n", "\n", " # Run agent\n", diff --git 
a/docs/docs/integrations/providers/comet_tracking.ipynb b/docs/docs/integrations/providers/comet_tracking.ipynb index e8752aed96c1f..2c794a93a940d 100644 --- a/docs/docs/integrations/providers/comet_tracking.ipynb +++ b/docs/docs/integrations/providers/comet_tracking.ipynb @@ -192,7 +192,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.agents import initialize_agent, load_tools\n", + "from langgraph.prebuilt import create_react_agent\n", + "from langchain.agents import load_tools\n", "from langchain_community.callbacks import CometCallbackHandler\n", "from langchain_core.callbacks import StdOutCallbackHandler\n", "from langchain_openai import OpenAI\n", @@ -207,12 +208,10 @@ "llm = OpenAI(temperature=0.9, callbacks=callbacks)\n", "\n", "tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm, callbacks=callbacks)\n", - "agent = initialize_agent(\n", - " tools,\n", - " llm,\n", - " agent=\"zero-shot-react-description\",\n", - " callbacks=callbacks,\n", - " verbose=True,\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True,\n", ")\n", "agent.run(\n", " \"Who is Leo DiCaprio's girlfriend? 
What is her current age raised to the 0.43 power?\"\n", diff --git a/docs/docs/integrations/providers/google_serper.mdx b/docs/docs/integrations/providers/google_serper.mdx index 0401e66b53581..db154a0f2e8aa 100644 --- a/docs/docs/integrations/providers/google_serper.mdx +++ b/docs/docs/integrations/providers/google_serper.mdx @@ -23,8 +23,8 @@ You can use it as part of a Self Ask chain: ```python from langchain_community.utilities import GoogleSerperAPIWrapper from langchain_openai import OpenAI -from langchain.agents import initialize_agent, Tool -from langchain.agents import AgentType +from langchain.agents import Tool +from langgraph.prebuilt import create_react_agent import os @@ -41,7 +41,11 @@ tools = [ ) ] -self_ask_with_search = initialize_agent(tools, llm, agent=AgentType.SELF_ASK_WITH_SEARCH, verbose=True) +self_ask_with_search = create_react_agent( + model=llm, + tools=tools, + debug=True, +) self_ask_with_search.run("What is the hometown of the reigning men's U.S. Open champion?") ``` diff --git a/docs/docs/integrations/providers/searchapi.mdx b/docs/docs/integrations/providers/searchapi.mdx index 1dfaded161009..18628370f9800 100644 --- a/docs/docs/integrations/providers/searchapi.mdx +++ b/docs/docs/integrations/providers/searchapi.mdx @@ -22,8 +22,8 @@ You can use it as part of a Self Ask chain: ```python from langchain_community.utilities import SearchApiAPIWrapper from langchain_openai import OpenAI -from langchain.agents import initialize_agent, Tool -from langchain.agents import AgentType +from langchain.agents import Tool +from langgraph.prebuilt import create_react_agent import os @@ -40,7 +40,11 @@ tools = [ ) ] -self_ask_with_search = initialize_agent(tools, llm, agent=AgentType.SELF_ASK_WITH_SEARCH, verbose=True) +self_ask_with_search = create_react_agent( + model=llm, + tools=tools, + debug=True, +) self_ask_with_search.run("Who lived longer: Plato, Socrates, or Aristotle?") ``` diff --git a/docs/docs/integrations/tools/awslambda.ipynb 
b/docs/docs/integrations/tools/awslambda.ipynb index 08a7ed4e6928f..ccbf5962efbff 100644 --- a/docs/docs/integrations/tools/awslambda.ipynb +++ b/docs/docs/integrations/tools/awslambda.ipynb @@ -62,7 +62,8 @@ }, "outputs": [], "source": [ - "from langchain.agents import AgentType, initialize_agent, load_tools\n", + "from langgraph.prebuilt import create_react_agent\n", + "from langchain.agents import load_tools\n", "from langchain_openai import OpenAI\n", "\n", "llm = OpenAI(temperature=0)\n", @@ -74,8 +75,10 @@ " function_name=\"testFunction1\",\n", ")\n", "\n", - "agent = initialize_agent(\n", - " tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True\n", ")\n", "\n", "agent.run(\"Send an email to test@testing123.com saying hello world.\")" diff --git a/docs/docs/integrations/tools/bash.ipynb b/docs/docs/integrations/tools/bash.ipynb index b01f070926f4e..efe06a3f33818 100644 --- a/docs/docs/integrations/tools/bash.ipynb +++ b/docs/docs/integrations/tools/bash.ipynb @@ -154,7 +154,7 @@ } ], "source": [ - "from langchain.agents import AgentType, initialize_agent\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_openai import ChatOpenAI\n", "\n", "llm = ChatOpenAI(temperature=0)\n", @@ -162,8 +162,10 @@ "shell_tool.description = shell_tool.description + f\"args {shell_tool.args}\".replace(\n", " \"{\", \"{{\"\n", ").replace(\"}\", \"}}\")\n", - "self_ask_with_search = initialize_agent(\n", - " [shell_tool], llm, agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "self_ask_with_search = create_react_agent(\n", + " model=llm,\n", + " tools=[shell_tool],\n", + " debug=True\n", ")\n", "self_ask_with_search.run(\n", " \"Download the langchain.com webpage and grep for all urls. Return only a sorted list of them. 
Be sure to use double quotes.\"\n", diff --git a/docs/docs/integrations/tools/bearly.ipynb b/docs/docs/integrations/tools/bearly.ipynb index 72b6b15e5187c..91e236adff289 100644 --- a/docs/docs/integrations/tools/bearly.ipynb +++ b/docs/docs/integrations/tools/bearly.ipynb @@ -47,7 +47,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.agents import AgentType, initialize_agent\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_openai import ChatOpenAI" ] }, @@ -175,12 +175,10 @@ "outputs": [], "source": [ "llm = ChatOpenAI(model=\"gpt-4\", temperature=0)\n", - "agent = initialize_agent(\n", - " tools,\n", - " llm,\n", - " agent=AgentType.OPENAI_FUNCTIONS,\n", - " verbose=True,\n", - " handle_parsing_errors=True,\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True,\n", ")" ] }, diff --git a/docs/docs/integrations/tools/chatgpt_plugins.ipynb b/docs/docs/integrations/tools/chatgpt_plugins.ipynb index 809a13869e261..1e0446b0e9e9d 100644 --- a/docs/docs/integrations/tools/chatgpt_plugins.ipynb +++ b/docs/docs/integrations/tools/chatgpt_plugins.ipynb @@ -32,17 +32,25 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "70d493c8", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], "source": [ "%pip install --upgrade --quiet langchain-community" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "66cc9494-c060-4bc2-92bf-6d88a45690da", "metadata": {}, "outputs": [], @@ -52,12 +60,25 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "id": "d41405b5", "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "ModuleNotFoundError", + "evalue": "No module named 'langgraph'", + "output_type": "error", + "traceback": [ + 
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[2], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21;01mlanggraph\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mprebuilt\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m create_react_agent\n\u001b[1;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21;01mlangchain\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01magents\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m load_tools\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21;01mlangchain_openai\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m ChatOpenAI\n", + "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'langgraph'" + ] + } + ], "source": [ - "from langchain.agents import AgentType, initialize_agent, load_tools\n", + "from langgraph.prebuilt import create_react_agent\n", + "from langchain.agents import load_tools\n", "from langchain_openai import ChatOpenAI" ] }, @@ -116,8 +137,10 @@ "tools = load_tools([\"requests_all\"])\n", "tools += [tool]\n", "\n", - "agent_chain = initialize_agent(\n", - " tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "agent_chain = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True\n", ")\n", "agent_chain.run(\"what t shirts are available in klarna?\")" ] @@ -133,7 +156,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -147,7 +170,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + 
"version": "3.13.1" } }, "nbformat": 4, diff --git a/docs/docs/integrations/tools/connery.ipynb b/docs/docs/integrations/tools/connery.ipynb index 5070996bba999..7109f0045c6a7 100644 --- a/docs/docs/integrations/tools/connery.ipynb +++ b/docs/docs/integrations/tools/connery.ipynb @@ -109,7 +109,7 @@ "source": [ "import os\n", "\n", - "from langchain.agents import AgentType, initialize_agent\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_community.agent_toolkits.connery import ConneryToolkit\n", "from langchain_community.tools.connery import ConneryService\n", "from langchain_openai import ChatOpenAI\n", @@ -130,8 +130,10 @@ "\n", "# Use OpenAI Functions agent to execute the prompt using actions from the Connery Toolkit.\n", "llm = ChatOpenAI(temperature=0)\n", - "agent = initialize_agent(\n", - " connery_toolkit.get_tools(), llm, AgentType.OPENAI_FUNCTIONS, verbose=True\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=connery_toolkit.get_tools(),\n", + " debug=True\n", ")\n", "result = agent.run(\n", f\"\"\"Make a short summary of the webpage http://www.paulgraham.com/vb.html in three sentences\n", @@ -157,7 +159,7 @@ "source": [ "import os\n", "\n", - "from langchain.agents import AgentType, initialize_agent\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_community.tools.connery import ConneryService\n", "from langchain_openai import ChatOpenAI\n", "\n", @@ -233,8 +235,10 @@ ], "source": [ "llm = ChatOpenAI(temperature=0)\n", - "agent = initialize_agent(\n", - " [send_email_action], llm, AgentType.OPENAI_FUNCTIONS, verbose=True\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=[send_email_action],\n", + " debug=True\n", ")\n", "agent_run_result = agent.run(\n", f\"Send an email to the {recepient_email} and say that I will be late for the meeting.\"\n", diff --git a/docs/docs/integrations/tools/gitlab.ipynb 
b/docs/docs/integrations/tools/gitlab.ipynb index 622622d9add22..d51c2aed3cf6b 100644 --- a/docs/docs/integrations/tools/gitlab.ipynb +++ b/docs/docs/integrations/tools/gitlab.ipynb @@ -100,7 +100,7 @@ "source": [ "import os\n", "\n", - "from langchain.agents import AgentType, initialize_agent\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_community.agent_toolkits.gitlab.toolkit import GitLabToolkit\n", "from langchain_community.utilities.gitlab import GitLabAPIWrapper\n", "from langchain_openai import OpenAI" @@ -132,8 +132,10 @@ "llm = OpenAI(temperature=0)\n", "gitlab = GitLabAPIWrapper()\n", "toolkit = GitLabToolkit.from_gitlab_api_wrapper(gitlab)\n", - "agent = initialize_agent(\n", - " toolkit.get_tools(), llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=toolkit.get_tools(),\n", + " debug=True\n", ")" ] }, diff --git a/docs/docs/integrations/tools/google_finance.ipynb b/docs/docs/integrations/tools/google_finance.ipynb index 315b4dae78f3b..d9a384511e96f 100644 --- a/docs/docs/integrations/tools/google_finance.ipynb +++ b/docs/docs/integrations/tools/google_finance.ipynb @@ -74,15 +74,18 @@ "source": [ "import os\n", "\n", - "from langchain.agents import AgentType, initialize_agent, load_tools\n", + "from langgraph.prebuilt import create_react_agent\n", + "from langchain.agents import load_tools\n", "from langchain_openai import OpenAI\n", "\n", "os.environ[\"OPENAI_API_KEY\"] = \"\"\n", "os.environ[\"SERP_API_KEY\"] = \"\"\n", "llm = OpenAI()\n", "tools = load_tools([\"google-scholar\", \"google-finance\"], llm=llm)\n", - "agent = initialize_agent(\n", - " tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True\n", ")\n", "agent.run(\"what is google's stock\")" ] @@ -104,7 +107,7 @@ "name": "python", "nbconvert_exporter": "python", 
"pygments_lexer": "ipython3", - "version": "3.9.5" + "version": "3.13.1" } }, "nbformat": 4, diff --git a/docs/docs/integrations/tools/memorize.ipynb b/docs/docs/integrations/tools/memorize.ipynb index db7ec349854d6..e7abdedde2e29 100644 --- a/docs/docs/integrations/tools/memorize.ipynb +++ b/docs/docs/integrations/tools/memorize.ipynb @@ -26,7 +26,8 @@ "source": [ "import os\n", "\n", - "from langchain.agents import AgentExecutor, AgentType, initialize_agent, load_tools\n", + "from langgraph.prebuilt import create_react_agent\n", + "from langchain.agents import load_tools\n", "from langchain.chains import LLMChain\n", "from langchain.memory import ConversationBufferMemory\n", "from langchain_community.llms import GradientLLM" @@ -118,12 +119,13 @@ "metadata": {}, "outputs": [], "source": [ - "agent = initialize_agent(\n", - " tools,\n", - " llm,\n", - " agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,\n", - " verbose=True,\n", - " # memory=ConversationBufferMemory(memory_key=\"chat_history\", return_messages=True),\n", + "memory = ConversationBufferMemory(memory_key=\"chat_history\", return_messages=True)\n", + "\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True,\n", + " # NOTE: memory objects are not valid as state_modifier; persist chat history with a LangGraph checkpointer instead\n", ")" ] }, diff --git a/docs/docs/integrations/tools/nasa.ipynb b/docs/docs/integrations/tools/nasa.ipynb index 9aa420d8d17c0..d20a9213c3d2c 100644 --- a/docs/docs/integrations/tools/nasa.ipynb +++ b/docs/docs/integrations/tools/nasa.ipynb @@ -39,16 +39,18 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.agents import AgentType, initialize_agent\n", "from langchain_community.agent_toolkits.nasa.toolkit import NasaToolkit\n", "from langchain_community.utilities.nasa import NasaAPIWrapper\n", "from langchain_openai import OpenAI\n", + "from langgraph.prebuilt import create_react_agent\n", "\n", "llm = OpenAI(temperature=0, openai_api_key=\"\")\n", "nasa = NasaAPIWrapper()\n", "toolkit = 
NasaToolkit.from_nasa_api_wrapper(nasa)\n", - "agent = initialize_agent(\n", - " toolkit.get_tools(), llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=toolkit.get_tools(),\n", + " debug=True,\n", ")" ] }, diff --git a/docs/docs/integrations/tools/openweathermap.ipynb b/docs/docs/integrations/tools/openweathermap.ipynb index 0535fe6e0c719..0f7f6d8f83bec 100644 --- a/docs/docs/integrations/tools/openweathermap.ipynb +++ b/docs/docs/integrations/tools/openweathermap.ipynb @@ -83,7 +83,8 @@ "source": [ "import os\n", "\n", - "from langchain.agents import AgentType, initialize_agent, load_tools\n", + "from langchain.agents import load_tools\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_openai import OpenAI\n", "\n", "os.environ[\"OPENAI_API_KEY\"] = \"\"\n", @@ -93,8 +94,10 @@ "\n", "tools = load_tools([\"openweathermap-api\"], llm)\n", "\n", - "agent_chain = initialize_agent(\n", - " tools=tools, llm=llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "agent_chain = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True\n", ")" ] }, diff --git a/docs/docs/integrations/tools/playwright.ipynb b/docs/docs/integrations/tools/playwright.ipynb index e09f4e79cd24d..f019754a17f91 100644 --- a/docs/docs/integrations/tools/playwright.ipynb +++ b/docs/docs/integrations/tools/playwright.ipynb @@ -246,18 +246,17 @@ } ], "source": [ - "from langchain.agents import AgentType, initialize_agent\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_anthropic import ChatAnthropic\n", "\n", "llm = ChatAnthropic(\n", " model_name=\"claude-3-haiku-20240307\", temperature=0\n", ") # or any other LLM, e.g., ChatOpenAI(), OpenAI()\n", "\n", - "agent_chain = initialize_agent(\n", - " tools,\n", - " llm,\n", - " agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,\n", - " verbose=True,\n", + "agent_chain = 
create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True\n", ")" ] }, @@ -298,7 +297,7 @@ "```\n", "\n", "\u001b[0m\n", - "Observation: \u001b[31;1m\u001b[1;3mLangChain We value your privacy We use cookies to analyze our traffic. By clicking \"Accept All\", you consent to our use of cookies. Privacy Policy Customize Reject All Accept All Customize Consent Preferences We may use cookies to help you navigate efficiently and perform certain functions. You will find detailed information about all cookies under each consent category below. The cookies that are categorized as \"Necessary\" are stored on your browser as they are essential for enabling the basic functionalities of the site.... Show more Necessary Always Active Necessary cookies are required to enable the basic features of this site, such as providing secure log-in or adjusting your consent preferences. These cookies do not store any personally identifiable data. Functional Functional cookies help perform certain functionalities like sharing the content of the website on social media platforms, collecting feedback, and other third-party features. Analytics Analytical cookies are used to understand how visitors interact with the website. These cookies help provide information on metrics such as the number of visitors, bounce rate, traffic source, etc. Performance Performance cookies are used to understand and analyze the key performance indexes of the website which helps in delivering a better user experience for the visitors. Advertisement Advertisement cookies are used to provide visitors with customized advertisements based on the pages you visited previously and to analyze the effectiveness of the ad campaigns. Uncategorized Other uncategorized cookies are those that are being analyzed and have not been classified into a category as yet. 
Reject All Save My Preferences Accept All Products LangChain LangSmith LangGraph Methods Retrieval Agents Evaluation Resources Blog Case Studies Use Case Inspiration Experts Changelog Docs LangChain Docs LangSmith Docs Company About Careers Pricing Get a demo Sign up LangChain’s suite of products supports developers along each step of the LLM application lifecycle. Applications that can reason. Powered by LangChain. Get a demo Sign up for free From startups to global enterprises, ambitious builders choose LangChain products. Build LangChain is a framework to build with LLMs by chaining interoperable components. LangGraph is the framework for building controllable agentic workflows. Run Deploy your LLM applications at scale with LangGraph Cloud, our infrastructure purpose-built for agents. Manage Debug, collaborate, test, and monitor your LLM app in LangSmith - whether it's built with a LangChain framework or not. Build your app with LangChain Build context-aware, reasoning applications with LangChain’s flexible framework that leverages your company’s data and APIs. Future-proof your application by making vendor optionality part of your LLM infrastructure design. Learn more about LangChain Run at scale with LangGraph Cloud Deploy your LangGraph app with LangGraph Cloud for fault-tolerant scalability - including support for async background jobs, built-in persistence, and distributed task queues. Learn more about LangGraph Manage LLM performance with LangSmith Ship faster with LangSmith’s debug, test, deploy, and monitoring workflows. Don’t rely on “vibes” – add engineering rigor to your LLM-development workflow, whether you’re building with LangChain or not. Learn more about LangSmith Hear from our happy customers LangChain, LangGraph, and LangSmith help teams of all sizes, across all industries - from ambitious startups to established enterprises. “LangSmith helped us improve the accuracy and performance of Retool’s fine-tuned models. 
Not only did we deliver a better product by iterating with LangSmith, but we’re shipping new AI features to our users in a fraction of the time it would have taken without it.” Jamie Cuffe Head of Self-Serve and New Products “By combining the benefits of LangSmith and standing on the shoulders of a gigantic open-source community, we’re able to identify the right approaches of using LLMs in an enterprise-setting faster.” Yusuke Kaji General Manager of AI “Working with LangChain and LangSmith on the Elastic AI Assistant had a significant positive impact on the overall pace and quality of the development and shipping experience. We couldn’t have achieved  the product experience delivered to our customers without LangChain, and we couldn’t have done it at the same pace without LangSmith.” James Spiteri Director of Security Products “As soon as we heard about LangSmith, we moved our entire development stack onto it. We could have built evaluation, testing and monitoring tools in house, but with LangSmith it took us 10x less time to get a 1000x better tool.” Jose Peña Senior Manager The reference architecture enterprises adopt for success. LangChain’s suite of products can be used independently or stacked together for multiplicative impact – guiding you through building, running, and managing your LLM apps. 15M+ Monthly Downloads 100K+ Apps Powered 75K+ GitHub Stars 3K+ Contributors The biggest developer community in GenAI Learn alongside the 1M+ developers who are pushing the industry forward. Explore LangChain Get started with the LangSmith platform today Get a demo Sign up for free Teams building with LangChain are driving operational efficiency, increasing discovery & personalization, and delivering premium products that generate revenue. Discover Use Cases Get inspired by companies who have done it. Financial Services FinTech Technology LangSmith is the enterprise DevOps platform built for LLMs. 
Explore LangSmith Gain visibility to make trade offs between cost, latency, and quality. Increase developer productivity. Eliminate manual, error-prone testing. Reduce hallucinations and improve reliability. Enterprise deployment options to keep data secure. Ready to start shipping 
", + "Observation: \u001b[31;1m\u001b[1;3mLangChain We value your privacy We use cookies to analyze our traffic. By clicking \"Accept All\", you consent to our use of cookies. Privacy Policy Customize Reject All Accept All Customize Consent Preferences We may use cookies to help you navigate efficiently and perform certain functions. You will find detailed information about all cookies under each consent category below. The cookies that are categorized as \"Necessary\" are stored on your browser as they are essential for enabling the basic functionalities of the site.... Show more Necessary Always Active Necessary cookies are required to enable the basic features of this site, such as providing secure log-in or adjusting your consent preferences. These cookies do not store any personally identifiable data. Functional Functional cookies help perform certain functionalities like sharing the content of the website on social media platforms, collecting feedback, and other third-party features. Analytics Analytical cookies are used to understand how visitors interact with the website. These cookies help provide information on metrics such as the number of visitors, bounce rate, traffic source, etc. Performance Performance cookies are used to understand and analyze the key performance indexes of the website which helps in delivering a better user experience for the visitors. Advertisement Advertisement cookies are used to provide visitors with customized advertisements based on the pages you visited previously and to analyze the effectiveness of the ad campaigns. Uncategorized Other uncategorized cookies are those that are being analyzed and have not been classified into a category as yet. 
Reject All Save My Preferences Accept All Products LangChain LangSmith LangGraph Methods Retrieval Agents Evaluation Resources Blog Case Studies Use Case Inspiration Experts Changelog Docs LangChain Docs LangSmith Docs Company About Careers Pricing Get a demo Sign up LangChain’s suite of products supports developers along each step of the LLM application lifecycle. Applications that can reason. Powered by LangChain. Get a demo Sign up for free From startups to global enterprises, ambitious builders choose LangChain products. Build LangChain is a framework to build with LLMs by chaining interoperable components. LangGraph is the framework for building controllable agentic workflows. Run Deploy your LLM applications at scale with LangGraph Cloud, our infrastructure purpose-built for agents. Manage Debug, collaborate, test, and monitor your LLM app in LangSmith - whether it's built with a LangChain framework or not. Build your app with LangChain Build context-aware, reasoning applications with LangChain’s flexible framework that leverages your company’s data and APIs. Future-proof your application by making vendor optionality part of your LLM infrastructure design. Learn more about LangChain Run at scale with LangGraph Cloud Deploy your LangGraph app with LangGraph Cloud for fault-tolerant scalability - including support for async background jobs, built-in persistence, and distributed task queues. Learn more about LangGraph Manage LLM performance with LangSmith Ship faster with LangSmith’s debug, test, deploy, and monitoring workflows. Don’t rely on “vibes” – add engineering rigor to your LLM-development workflow, whether you’re building with LangChain or not. Learn more about LangSmith Hear from our happy customers LangChain, LangGraph, and LangSmith help teams of all sizes, across all industries - from ambitious startups to established enterprises. “LangSmith helped us improve the accuracy and performance of Retool’s fine-tuned models. 
Not only did we deliver a better product by iterating with LangSmith, but we’re shipping new AI features to our users in a fraction of the time it would have taken without it.” Jamie Cuffe Head of Self-Serve and New Products “By combining the benefits of LangSmith and standing on the shoulders of a gigantic open-source community, we’re able to identify the right approaches of using LLMs in an enterprise-setting faster.” Yusuke Kaji General Manager of AI “Working with LangChain and LangSmith on the Elastic AI Assistant had a significant positive impact on the overall pace and quality of the development and shipping experience. We couldn’t have achieved  the product experience delivered to our customers without LangChain, and we couldn’t have done it at the same pace without LangSmith.” James Spiteri Director of Security Products “As soon as we heard about LangSmith, we moved our entire development stack onto it. We could have built evaluation, testing and monitoring tools in house, but with LangSmith it took us 10x less time to get a 1000x better tool.” Jose Peña Senior Manager The reference architecture enterprises adopt for success. LangChain’s suite of products can be used independently or stacked together for multiplicative impact – guiding you through building, running, and managing your LLM apps. 15M+ Monthly Downloads 100K+ Apps Powered 75K+ GitHub Stars 3K+ Contributors The biggest developer community in GenAI Learn alongside the 1M+ developers who are pushing the industry forward. Explore LangChain Get started with the LangSmith platform today Get a demo Sign up for free Teams building with LangChain are driving operational efficiency, increasing discovery & personalization, and delivering premium products that generate revenue. Discover Use Cases Get inspired by companies who have done it. Financial Services FinTech Technology LangSmith is the enterprise DevOps platform built for LLMs. 
Explore LangSmith Gain visibility to make trade offs between cost, latency, and quality. Increase developer productivity. Eliminate manual, error-prone testing. Reduce hallucinations and improve reliability. Enterprise deployment options to keep data secure. Ready to start shipping 
\n", "reliable GenAI apps faster? Get started with LangChain, LangGraph, and LangSmith to enhance your LLM app development, from prototype to production. Get a demo Sign up for free Products LangChain LangSmith LangGraph Agents Evaluation Retrieval Resources Python Docs JS/TS Docs GitHub Integrations Templates Changelog LangSmith Trust Portal Company About Blog Twitter LinkedIn YouTube Community Marketing Assets Sign up for our newsletter to stay up to date Thank you! Your submission has been received! Oops! Something went wrong while submitting the form. All systems operational Privacy Policy Terms of Service\u001b[0m\n", "Thought:\u001b[32;1m\u001b[1;3mBased on the text extracted from the langchain.com website, the main headers I can see are:\n", "\n", diff --git a/docs/docs/integrations/tools/searchapi.ipynb b/docs/docs/integrations/tools/searchapi.ipynb index c086b4d18d1c0..037ecf68edc25 100644 --- a/docs/docs/integrations/tools/searchapi.ipynb +++ b/docs/docs/integrations/tools/searchapi.ipynb @@ -124,7 +124,7 @@ } ], "source": [ - "from langchain.agents import AgentType, initialize_agent\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_community.utilities import SearchApiAPIWrapper\n", "from langchain_core.tools import Tool\n", "from langchain_openai import OpenAI\n", @@ -139,9 +139,12 @@ " )\n", "]\n", "\n", - "self_ask_with_search = initialize_agent(\n", - " tools, llm, agent=AgentType.SELF_ASK_WITH_SEARCH, verbose=True\n", + "self_ask_with_search = create_react_agent(\n", + " model=llm,\n", + " tools=tools,\n", + " debug=True\n", ")\n", + "\n", "self_ask_with_search.run(\"Who lived longer: Plato, Socrates, or Aristotle?\")" ] }, diff --git a/docs/docs/integrations/tools/zapier.ipynb b/docs/docs/integrations/tools/zapier.ipynb index 3c73d1f15ac11..bc6b2665b740e 100644 --- a/docs/docs/integrations/tools/zapier.ipynb +++ b/docs/docs/integrations/tools/zapier.ipynb @@ -60,7 +60,7 @@ }, "outputs": [], "source": [ - "from 
langchain.agents import AgentType, initialize_agent\n", + "from langgraph.prebuilt import create_react_agent\n", "from langchain_community.agent_toolkits import ZapierToolkit\n", "from langchain_community.utilities.zapier import ZapierNLAWrapper\n", "from langchain_openai import OpenAI" @@ -91,8 +91,10 @@ "llm = OpenAI(temperature=0)\n", "zapier = ZapierNLAWrapper()\n", "toolkit = ZapierToolkit.from_zapier_nla_wrapper(zapier)\n", - "agent = initialize_agent(\n", - " toolkit.get_tools(), llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=toolkit.get_tools(),\n", + " debug=True\n", ")" ] }, @@ -110,19 +112,19 @@ "text": [ "\n", "\n", - "\u001B[1m> Entering new AgentExecutor chain...\u001B[0m\n", - "\u001B[32;1m\u001B[1;3m I need to find the email and summarize it.\n", + "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n", + "\u001b[32;1m\u001b[1;3m I need to find the email and summarize it.\n", "Action: Gmail: Find Email\n", - "Action Input: Find the latest email from Silicon Valley Bank\u001B[0m\n", - "Observation: \u001B[31;1m\u001B[1;3m{\"from__name\": \"Silicon Valley Bridge Bank, N.A.\", \"from__email\": \"sreply@svb.com\", \"body_plain\": \"Dear Clients, After chaotic, tumultuous & stressful days, we have clarity on path for SVB, FDIC is fully insuring all deposits & have an ask for clients & partners as we rebuild. Tim Mayopoulos Finished chain.\u001B[0m\n" + "\u001b[1m> Finished chain.\u001b[0m\n" ] }, { @@ -286,18 +288,18 @@ "text": [ "\n", "\n", - "\u001B[1m> Entering new SimpleSequentialChain chain...\u001B[0m\n", - "\u001B[36;1m\u001B[1;3m{\"from__name\": \"Silicon Valley Bridge Bank, N.A.\", \"from__email\": \"sreply@svb.com\", \"body_plain\": \"Dear Clients, After chaotic, tumultuous & stressful days, we have clarity on path for SVB, FDIC is fully insuring all deposits & have an ask for clients & partners as we rebuild. 
Tim Mayopoulos Entering new SimpleSequentialChain chain...\u001b[0m\n", + "\u001b[36;1m\u001b[1;3m{\"from__name\": \"Silicon Valley Bridge Bank, N.A.\", \"from__email\": \"sreply@svb.com\", \"body_plain\": \"Dear Clients, After chaotic, tumultuous & stressful days, we have clarity on path for SVB, FDIC is fully insuring all deposits & have an ask for clients & partners as we rebuild. Tim Mayopoulos Finished chain.\u001B[0m\n" + "\u001b[1m> Finished chain.\u001b[0m\n" ] }, { @@ -341,8 +343,10 @@ "llm = OpenAI(temperature=0)\n", "zapier = ZapierNLAWrapper(zapier_nla_oauth_access_token=\"\")\n", "toolkit = ZapierToolkit.from_zapier_nla_wrapper(zapier)\n", - "agent = initialize_agent(\n", - " toolkit.get_tools(), llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=toolkit.get_tools(),\n", + " debug=True\n", ")\n", "\n", "agent.run(\n",