From d6ffdc895d5de3f5ccb4275b74678e6b1648ea1d Mon Sep 17 00:00:00 2001
From: Tom Liu
Date: Sun, 10 Aug 2025 01:32:16 +0000
Subject: [PATCH] Upgrade the langchain, langgraph, and faiss packages; correct
 the embedding QA model id and use the correct class from langgraph.

Signed-off-by: Tom Liu
---
 ...raph_HandlingAgent_IntermediateSteps.ipynb | 54 +++++++++++++------
 1 file changed, 37 insertions(+), 17 deletions(-)

diff --git a/RAG/notebooks/langchain/LangGraph_HandlingAgent_IntermediateSteps.ipynb b/RAG/notebooks/langchain/LangGraph_HandlingAgent_IntermediateSteps.ipynb
index e1f18b27b..9d0a8dd74 100644
--- a/RAG/notebooks/langchain/LangGraph_HandlingAgent_IntermediateSteps.ipynb
+++ b/RAG/notebooks/langchain/LangGraph_HandlingAgent_IntermediateSteps.ipynb
@@ -44,13 +44,19 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!pip install --upgrade pip\n",
-    "!pip install langchain==0.2.5\n",
-    "!pip install langchain-nvidia-ai-endpoints==0.1.2\n",
-    "!pip install langchain-community==0.2.2\n",
-    "!pip install langgraph==0.0.62\n",
-    "!pip install faiss-gpu==1.7.2\n",
-    "!pip install wikipedia==1.4.0"
+    "# Install the packages below when 'pip' refers to the dedicated Python virtual environment.\n",
+    "# !pip install --upgrade pip\n",
+    "# !pip install langchain==0.3.27\n",
+    "# !pip install langchain-nvidia-ai-endpoints==0.3.13\n",
+    "# !pip install langchain-community==0.3.27\n",
+    "# !pip install langgraph==0.6.3\n",
+    "# !pip install wikipedia==1.4.0\n",
+    "\n",
+    "# faiss-gpu is only available up to Python 3.10; on later Python versions, switch to faiss-cpu.\n",
+    "# With Python 3.11+:\n",
+    "# !pip install faiss-cpu==1.11.0\n",
+    "# or, with a GPU and Python 3.10 or below:\n",
+    "# !pip install faiss-gpu==1.7.2\n"
    ]
   },
   {
@@ -113,8 +119,12 @@
     "from langchain_nvidia_ai_endpoints import ChatNVIDIA\n",
     "from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings\n",
     "\n",
-    "llm = ChatNVIDIA(model=\"mistralai/mixtral-8x7b-instruct-v0.1\", nvidia_api_key=nvapi_key, max_tokens=2048)\n",
-    "embedder = NVIDIAEmbeddings(model=\"NV-Embed-QA\", truncate=\"END\")\n"
+    "llm = ChatNVIDIA(model=\"mistralai/mixtral-8x7b-instruct-v0.1\",\n",
+    "                 nvidia_api_key=nvapi_key,\n",
+    "                 max_completion_tokens=2048)  # max_tokens is about to be deprecated.\n",
+    "\n",
+    "embedder = NVIDIAEmbeddings(model=\"nvidia/nv-embedqa-e5-v5\",  # a currently valid embedding QA model id\n",
+    "                            truncate=\"END\")\n"
    ]
   },
   {
@@ -197,12 +207,13 @@
    "outputs": [],
    "source": [
     "from langchain.tools import BaseTool\n",
+    "from pydantic import Field\n",
     "\n",
     "class SwedenRetriever(BaseTool):\n",
-    "    name = \"AboutSweden\"\n",
-    "    description = \"Useful for when you need to answer questions about Sweden's population, history, and so on.\"\n",
+    "    name: str = \"AboutSweden\"\n",
+    "    description: str = \"Useful for when you need to answer questions about Sweden's population, history, and so on.\"\n",
     "\n",
-    "    def _run(self, query):\n",
+    "    def _run(self, query) -> str:\n",
     "        out = retriever.invoke(query)\n",
     "        o = out[0]\n",
     "        item=o.page_content.split('|')\n",
@@ -257,6 +268,7 @@
     "    func=wikipedia.run,\n",
     "    name=\"Wiki\",\n",
     "    description=\"useful for when you need to search certain topic on Wikipedia, aka wiki\")\n",
+    "\n",
     "retriever_tool=Tool.from_function(\n",
     "    func=sv.invoke,\n",
     "    name=\"AboutSweden\",\n",
@@ -283,11 +295,11 @@
    "outputs": [],
    "source": [
     "from langchain_core.agents import AgentFinish\n",
-    "from langgraph.prebuilt.tool_executor import ToolExecutor\n",
+    "from langgraph.prebuilt import ToolNode\n",
     "\n",
     "# This a helper class we have that is useful for running tools\n",
     "# It takes in an agent action and calls that tool and returns the result\n",
-    "tool_executor = ToolExecutor(tools)\n"
+    "tool_node = ToolNode(tools)\n"
    ]
   },
   {
@@ -505,7 +517,7 @@
     "    agent_output = data[\"agent_outcome\"]\n",
     "    if len(agent_output['intermediate_steps'])>=1 :\n",
     "        agent_action = agent_output['intermediate_steps'][0][0]\n",
-    "        output = tool_executor.invoke(agent_action)\n",
+    "        output = tool_node.invoke(agent_action)\n",
     "        return {\"intermediate_steps\": [(agent_action, str(output))]}\n",
     "    else:\n",
     "        return {\"intermediate_steps\":[]}\n",
@@ -622,11 +634,19 @@
     "inputs = {\"input\": \"Find me Taylor Swift information on wiki?\"}\n",
     "outputs=app.invoke(inputs)"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9801bb4c",
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
 "metadata": {
  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
+   "display_name": "generativeaiexamples",
    "language": "python",
    "name": "python3"
  },
@@ -640,7 +660,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.6"
+   "version": "3.12.11"
  }
 },
 "nbformat": 4,
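
Note: the short Python sketch below is a minimal illustration, not part of the patch itself, of how the components this change touches are typically wired together with the upgraded packages. It assumes the pinned versions above are installed and that an NVIDIA_API_KEY is exported in the environment; the sample texts, variable names, and prompts are illustrative only and do not come from the notebook.

    import os

    from langchain_community.vectorstores import FAISS  # backed by either faiss-cpu or faiss-gpu
    from langchain_nvidia_ai_endpoints import ChatNVIDIA, NVIDIAEmbeddings

    nvapi_key = os.environ["NVIDIA_API_KEY"]  # assumed to be set beforehand

    # Chat model; the patch switches from max_tokens to max_completion_tokens.
    llm = ChatNVIDIA(model="mistralai/mixtral-8x7b-instruct-v0.1",
                     nvidia_api_key=nvapi_key,
                     max_completion_tokens=2048)

    # Embedding model id corrected to nvidia/nv-embedqa-e5-v5 by the patch.
    embedder = NVIDIAEmbeddings(model="nvidia/nv-embedqa-e5-v5", truncate="END")

    # Build a small FAISS index and expose it as a retriever, analogous to the
    # notebook's Sweden documents (these two strings are placeholders).
    texts = ["Sweden has a population of roughly 10.5 million people.",
             "Stockholm is the capital of Sweden."]
    vectorstore = FAISS.from_texts(texts, embedder)
    retriever = vectorstore.as_retriever()

    print(retriever.invoke("What is the population of Sweden?")[0].page_content)
    print(llm.invoke("Say hello in one short sentence.").content)

Because faiss-cpu and faiss-gpu both install the same faiss module, the vector-store code above is identical whichever of the two packages the environment uses.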