■ Demonstrates how to use a custom tool node that asks the user for confirmation before a tool call is actually executed.
※ The OPENAI_API_KEY environment variable is defined in the .env file.
※ The TAVILY_API_KEY environment variable is defined in the .env file.
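※ For reference, a minimal .env sketch is shown below; the two values are placeholders and must be replaced with your own API keys.
▶ .env (example)
OPENAI_API_KEY=<your OpenAI API key>
TAVILY_API_KEY=<your Tavily API key>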
▶ main.py
import json
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain_community.tools.tavily_search import TavilySearchResults
from typing_extensions import TypedDict
from typing import Annotated
from langgraph.graph.message import add_messages
from langgraph.graph import StateGraph
from langchain_core.messages import ToolMessage
from langchain_core.messages import AIMessage
from langgraph.graph import END
from langgraph.graph import START
from langgraph.checkpoint.memory import MemorySaver

# Load OPENAI_API_KEY and TAVILY_API_KEY from the .env file.
load_dotenv()

chatOpenAI = ChatOpenAI(model = "gpt-4o-mini")

tavilySearchResults = TavilySearchResults(max_results = 2)

toolList = [tavilySearchResults]

# Bind the Tavily search tool to the model so it can emit tool calls.
runnableBinding = chatOpenAI.bind_tools(toolList)

class State(TypedDict):
    messageList : Annotated[list, add_messages]

stateGraph = StateGraph(State)

def chat(state : State):
    messageList = state["messageList"]
    lastMessage = messageList[-1]
    # If the previous tool call was cancelled by the user, answer with a cancellation message instead of calling the model.
    if isinstance(lastMessage, ToolMessage) and lastMessage.content == "Operation cancelled by user":
        return {"messageList" : [AIMessage(content = "Operation cancelled by user")]}
    responseAIMessage = runnableBinding.invoke(messageList)
    return {"messageList" : [responseAIMessage]}

stateGraph.add_node("chatbot_node", chat)

class CustomToolNode:
    def __init__(self, toolList : list) -> None:
        self.toolNameDictionary = {tool.name : tool for tool in toolList}

    def __call__(self, inputDictionary : dict):
        if messageList := inputDictionary.get("messageList", []):
            lastMessage = messageList[-1]
        else:
            raise ValueError("No message found in input")
        if not hasattr(lastMessage, "tool_calls") or not lastMessage.tool_calls:
            return {"messageList" : messageList}
        # Ask the user whether the requested tool calls should actually be executed.
        confirmation = input("\nDo you want to proceed with this operation? (y/n) : ").lower()
        print()
        toolMessageList = []
        if confirmation != "y":
            # The user declined: return cancellation ToolMessages without executing any tool.
            for toolCall in lastMessage.tool_calls:
                toolMessageList.append(ToolMessage(content = "Operation cancelled by user", name = toolCall["name"], tool_call_id = toolCall["id"]))
        else:
            # The user approved: execute each requested tool and wrap its result in a ToolMessage.
            for toolCall in lastMessage.tool_calls:
                toolResultDictionaryList = self.toolNameDictionary[toolCall["name"]].invoke(toolCall["args"])
                toolMessageList.append(ToolMessage(content = json.dumps(toolResultDictionaryList), name = toolCall["name"], tool_call_id = toolCall["id"]))
        return {"messageList" : toolMessageList}

customToolNode = CustomToolNode(toolList = [tavilySearchResults])

stateGraph.add_node("tool_node", customToolNode)

def routeToolNode(state : State):
    if isinstance(state, list):
        message = state[-1]
    elif messages := state.get("messageList", []):
        message = messages[-1]
    else:
        raise ValueError(f"No messages found in input state to tool_edge: {state}")
    # Route to the tool node only when the model requested tool calls.
    if hasattr(message, "tool_calls") and len(message.tool_calls) > 0:
        return "tool_node"
    return END

stateGraph.add_conditional_edges(
    "chatbot_node",
    routeToolNode,
    {"tool_node" : "tool_node", END : END}
)

stateGraph.add_edge(START, "chatbot_node")
stateGraph.add_edge("tool_node", "chatbot_node")

memorySaver = MemorySaver()

compiledStateGraph = stateGraph.compile(checkpointer = memorySaver)

def query(userInput : str):
    for addableUpdatesDict in compiledStateGraph.stream({"messageList" : [("user", userInput)]}, {"configurable" : {"thread_id" : "1"}}):
        if "chatbot_node" in addableUpdatesDict:
            for valueDictionary in addableUpdatesDict.values():
                content = valueDictionary["messageList"][-1].content
                if content:
                    print("Assistant :", content)

while True:
    userInput = input("User : ")
    if userInput.lower() in ["quit", "exit", "q"]:
        print()
        print("Exiting the program.")
        break
    query(userInput)
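※ As a minimal sketch (not part of the listing above), CustomToolNode can also be exercised on its own. This assumes the definitions in main.py have been run up to, but not including, the final while loop, and the tool-call id and query below are made-up values used only for illustration.

from langchain_core.messages import AIMessage

# Fabricated tool call: the name matches the Tavily tool registered in toolNameDictionary,
# while the id and query are placeholder values for this demonstration only.
demoToolCall = {"name" : "tavily_search_results_json", "args" : {"query" : "LangGraph"}, "id" : "call_demo_1"}

demoAIMessage = AIMessage(content = "", tool_calls = [demoToolCall])

# Answering "n" at the prompt returns a ToolMessage whose content is "Operation cancelled by user"
# and the search is never executed; answering "y" runs the Tavily search and returns its JSON result.
demoResult = customToolNode({"messageList" : [demoAIMessage]})
print(demoResult["messageList"][0].content)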
▶ requirements.txt
aiohappyeyeballs==2.4.4
aiohttp==3.11.11
aiosignal==1.3.2
annotated-types==0.7.0
anyio==4.7.0
attrs==24.3.0
certifi==2024.12.14
charset-normalizer==3.4.1
colorama==0.4.6
dataclasses-json==0.6.7
distro==1.9.0
frozenlist==1.5.0
greenlet==3.1.1
h11==0.14.0
httpcore==1.0.7
httpx==0.28.1
httpx-sse==0.4.0
idna==3.10
jiter==0.8.2
jsonpatch==1.33
jsonpointer==3.0.0
langchain==0.3.13
langchain-community==0.3.13
langchain-core==0.3.28
langchain-openai==0.2.14
langchain-text-splitters==0.3.4
langgraph==0.2.60
langgraph-checkpoint==2.0.9
langgraph-sdk==0.1.48
langsmith==0.2.6
marshmallow==3.23.2
msgpack==1.1.0
multidict==6.1.0
mypy-extensions==1.0.0
numpy==2.2.1
openai==1.58.1
orjson==3.10.13
packaging==24.2
propcache==0.2.1
pydantic==2.10.4
pydantic-settings==2.7.0
pydantic_core==2.27.2
python-dotenv==1.0.1
PyYAML==6.0.2
regex==2024.11.6
requests==2.32.3
requests-toolbelt==1.0.0
sniffio==1.3.1
SQLAlchemy==2.0.36
tavily-python==0.5.0
tenacity==9.0.0
tiktoken==0.8.0
tqdm==4.67.1
typing-inspect==0.9.0
typing_extensions==4.12.2
urllib3==2.3.0
yarl==1.18.3
※ The packages were installed by running pip install python-dotenv langchain_community langchain_openai langgraph tavily-python.