■ CompiledStateGraph 클래스의 update_state 메소드에서 as_node 인자를 사용하는 방법을 보여준다.
※ OPENAI_API_KEY 환경 변수 값은 .env 파일에 정의한다.
※ TAVILY_API_KEY 환경 변수 값은 .env 파일에 정의한다.
▶ main.py
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 |
# Demonstrates the `as_node` argument of CompiledStateGraph.update_state:
# the graph is compiled with interrupt_before=["tools"], so execution pauses
# before the tool node runs; we then inject a hand-written tool result and
# extra AI messages into the checkpoint as if "chatbot_node" had produced them.
#
# Requires OPENAI_API_KEY and TAVILY_API_KEY defined in a .env file.

from dotenv import load_dotenv
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_core.messages import AIMessage, ToolMessage
from langchain_openai import ChatOpenAI
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import START, StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from typing import Annotated
from typing_extensions import TypedDict

load_dotenv()

# LLM with the Tavily search tool bound so it can emit tool calls.
chatOpenAI = ChatOpenAI(model="gpt-4o-mini")
tavilySearchResults = TavilySearchResults(max_results=2)
toolList = [tavilySearchResults]
runnableBinding = chatOpenAI.bind_tools(toolList)


class State(TypedDict):
    # Conversation history; add_messages appends new messages instead of
    # replacing the list on each state update.
    messages: Annotated[list, add_messages]


stateGraph = StateGraph(State)


def chatbot(state: State):
    """Run the tool-bound LLM over the accumulated messages."""
    return {"messages": [runnableBinding.invoke(state["messages"])]}


stateGraph.add_node("chatbot_node", chatbot)
# Reuse toolList rather than rebuilding [tavilySearchResults] so the chatbot
# binding and the ToolNode can never drift apart.
stateGraph.add_node("tools", ToolNode(tools=toolList))
stateGraph.add_edge(START, "chatbot_node")
stateGraph.add_conditional_edges("chatbot_node", tools_condition)
stateGraph.add_edge("tools", "chatbot_node")

# interrupt_before pauses the run just before "tools" executes, leaving the
# pending tool call in the checkpoint for us to answer manually below.
memorySaver = MemorySaver()
compiledStateGraph = stateGraph.compile(
    checkpointer=memorySaver,
    interrupt_before=["tools"],
)

userInput = "I'm learning LangGraph. Could you do some research on it for me?"
configurableDictionary = {"configurable": {"thread_id": "1"}}

# First run: streams the human message and the AI tool-call message, then
# stops at the interrupt (the tool itself never runs).
generator1 = compiledStateGraph.stream(
    {"messages": [("user", userInput)]},
    configurableDictionary,
    stream_mode="values",
)

for addableUpdatesDict in generator1:
    print(addableUpdatesDict)
    print("-" * 50)

"""
{'messages': [HumanMessage(content="I'm learning LangGraph. Could you do some research on it for me?",
additional_kwargs={}, response_metadata={}, id='016a8f81-5f70-4228-a0f3-ebe0b4c6acc3')]}
{'messages': [HumanMessage(content="I'm learning LangGraph. Could you do some research on it for me?",
additional_kwargs={}, response_metadata={}, id='016a8f81-5f70-4228-a0f3-ebe0b4c6acc3'),
AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_PX71LDspLj7cwphZ2hdMzkGK',
'function': {'arguments': '{"query":"LangGraph"}', 'name': 'tavily_search_results_json'},
'type': 'function'}], 'refusal': None}, response_metadata={...}, id='run-efaf3ae7-...-0',
tool_calls=[{'name': 'tavily_search_results_json', 'args': {'query': 'LangGraph'},
'id': 'call_PX71LDspLj7cwphZ2hdMzkGK', 'type': 'tool_call'}], usage_metadata={...})]}
--------------------------------------------------
"""

# The last checkpointed message is the AIMessage carrying the pending tool
# call; its id is needed so our fabricated ToolMessage matches it.
stateSnapshot = compiledStateGraph.get_state(configurableDictionary)
lastMessage = stateSnapshot.values["messages"][-1]

answer = "LangGraph is a library for building stateful, multi-actor applications with LLMs."

# Inject a hand-written tool result plus a final answer, attributed to
# "chatbot_node" via as_node — the graph treats them as that node's output.
compiledStateGraph.update_state(
    configurableDictionary,
    {
        "messages": [
            ToolMessage(content=answer, tool_call_id=lastMessage.tool_calls[0]["id"]),
            AIMessage(content=answer),
        ]
    },
    as_node="chatbot_node",
)

compiledStateGraph.update_state(
    configurableDictionary,
    {"messages": [AIMessage(content="I'm an AI expert!")]},
    as_node="chatbot_node",
)

# Resume from the checkpoint (input None). The injected AIMessage has no tool
# calls, so tools_condition routes to END and the run finishes.
generator2 = compiledStateGraph.stream(None, configurableDictionary, stream_mode="values")

for addableValuesDict in generator2:
    print(addableValuesDict)
    print("-" * 50)

"""
{'messages': [HumanMessage(content="I'm learning LangGraph. Could you do some research on it for me?",
additional_kwargs={}, response_metadata={}, id='016a8f81-5f70-4228-a0f3-ebe0b4c6acc3'),
AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_PX71LDspLj7cwphZ2hdMzkGK',
'function': {'arguments': '{"query":"LangGraph"}', 'name': 'tavily_search_results_json'},
'type': 'function'}], 'refusal': None}, response_metadata={...}, id='run-efaf3ae7-...-0',
tool_calls=[{'name': 'tavily_search_results_json', 'args': {'query': 'LangGraph'},
'id': 'call_PX71LDspLj7cwphZ2hdMzkGK', 'type': 'tool_call'}], usage_metadata={...}),
ToolMessage(content='LangGraph is a library for building stateful, multi-actor applications with LLMs.',
id='bcde7877-a85a-4f54-ba88-be9fe3af0423', tool_call_id='call_PX71LDspLj7cwphZ2hdMzkGK'),
AIMessage(content='LangGraph is a library for building stateful, multi-actor applications with LLMs.',
additional_kwargs={}, response_metadata={}, id='e91b54bd-0924-4cea-b39b-873526a581d8'),
AIMessage(content="I'm an AI expert!", additional_kwargs={}, response_metadata={},
id='6ebc921e-1beb-4ad9-8eae-fb850f5e04d6')]}
--------------------------------------------------
"""
▶ requirements.txt
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 |
aiohappyeyeballs==2.4.4
aiohttp==3.11.11
aiosignal==1.3.2
annotated-types==0.7.0
anyio==4.7.0
attrs==24.3.0
certifi==2024.12.14
charset-normalizer==3.4.1
colorama==0.4.6
dataclasses-json==0.6.7
distro==1.9.0
frozenlist==1.5.0
greenlet==3.1.1
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
httpx-sse==0.4.0
idna==3.10
jiter==0.8.2
jsonpatch==1.33
jsonpointer==3.0.0
langchain==0.3.13
langchain-community==0.3.13
langchain-core==0.3.28
langchain-openai==0.2.14
langchain-text-splitters==0.3.4
langgraph==0.2.60
langgraph-checkpoint==2.0.9
langgraph-sdk==0.1.48
langsmith==0.2.7
marshmallow==3.23.2
msgpack==1.1.0
multidict==6.1.0
mypy-extensions==1.0.0
numpy==2.2.1
openai==1.58.1
orjson==3.10.13
packaging==24.2
propcache==0.2.1
pydantic==2.10.4
pydantic-settings==2.7.1
pydantic_core==2.27.2
python-dotenv==1.0.1
PyYAML==6.0.2
regex==2024.11.6
requests==2.32.3
requests-toolbelt==1.0.0
sniffio==1.3.1
SQLAlchemy==2.0.36
tenacity==9.0.0
tiktoken==0.8.0
tqdm==4.67.1
typing-inspect==0.9.0
typing_extensions==4.12.2
urllib3==2.3.0
yarl==1.18.3
※ pip install python-dotenv langchain-community langchain-openai langgraph 명령을 실행했다.