■ Demonstrates how to set up functions using the model_kwargs attribute of the ChatOpenAI class.
※ The OPENAI_API_KEY environment variable is defined in the .env file.
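※ For reference, the .env file only needs the OPENAI_API_KEY entry; a minimal sketch follows (the key value below is a placeholder, not an actual key).

▶ .env (sketch)
OPENAI_API_KEY=sk-your-api-key-here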
▶ main.py
import json
from typing import Callable

from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain.schema import HumanMessage
from langchain.schema import FunctionMessage

load_dotenv()

def getWeather(location : str) -> str:
    # In practice this would call a weather API or query a database; for the example, a hard-coded value is returned.
    weatherDictionary = {
        "location" : location,
        "temperature" : "20도",
        "condition" : "맑음"
    }
    return json.dumps(weatherDictionary)

# Function definition in the OpenAI function-calling schema.
functionList = [
    {
        "name" : "getWeather",
        "description" : "특정 도시의 현재 날씨 정보를 조회합니다",  # "Looks up the current weather for a given city."
        "parameters" : {
            "type" : "object",
            "properties" : {
                "location" : {
                    "type" : "string",
                    "description" : "도시 이름 (예 : 서울, 부산)"  # "City name (e.g. Seoul, Busan)."
                }
            },
            "required" : ["location"]
        }
    }
]

chatOpenAI = ChatOpenAI(temperature = 0)

# Register the function definitions via model_kwargs so they are included in every request to the model.
chatOpenAI.model_kwargs = {"functions" : functionList}

def processConversation(humanMessageContent : str, availableFunctionDictionary : dict[str, Callable]):
    messageList = [HumanMessage(content = humanMessageContent)]
    # Keep calling the model; whenever it requests a function call, run the function and
    # feed the result back as a FunctionMessage until a final answer is returned.
    while True:
        response = chatOpenAI.generate([messageList])
        responseAIMessage = response.generations[0][0].message
        if responseAIMessage.additional_kwargs.get("function_call"):
            functionName = responseAIMessage.additional_kwargs["function_call"]["name"]
            functionArgumentDictionary = json.loads(responseAIMessage.additional_kwargs["function_call"]["arguments"])
            functionToCall = availableFunctionDictionary[functionName]
            functionResponse = functionToCall(**functionArgumentDictionary)
            messageList.append(responseAIMessage)
            messageList.append(FunctionMessage(name = functionName, content = functionResponse))
        else:
            messageList.append(responseAIMessage)
            break
    return messageList

availableFunctionDictionary = {"getWeather" : getWeather}

responseMessageList = processConversation("서울의 현재 날씨를 알려줘", availableFunctionDictionary)  # "Tell me the current weather in Seoul."

for responseMessage in responseMessageList:
    print(f"Type : {type(responseMessage).__name__}")
    print(f"Content : {responseMessage.content}")
    if isinstance(responseMessage, FunctionMessage):
        print(f"Function : {responseMessage.name}")
    print()

"""
Type : HumanMessage
Content : 서울의 현재 날씨를 알려줘

Type : AIMessage
Content :

Type : FunctionMessage
Content : {"location": "\uc11c\uc6b8", "temperature": "20\ub3c4", "condition": "\ub9d1\uc74c"}
Function : getWeather

Type : AIMessage
Content : 서울의 현재 날씨는 20도이며, 맑은 날씨입니다.
"""
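※ For reference only (not part of the original example): instead of mutating model_kwargs after construction, the same function definitions can be attached with the Runnable bind method. The sketch below assumes the functionList and imports from main.py.

# Sketch: attach the functions at call time instead of mutating model_kwargs (assumes functionList from main.py).
chatOpenAIWithFunctions = ChatOpenAI(temperature = 0).bind(functions = functionList)
responseAIMessage = chatOpenAIWithFunctions.invoke([HumanMessage(content = "서울의 현재 날씨를 알려줘")])
# When the model decides to call a function, the details appear in additional_kwargs["function_call"].
print(responseAIMessage.additional_kwargs.get("function_call"))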
▶ requirements.txt
aiohappyeyeballs==2.4.3
aiohttp==3.11.7
aiosignal==1.3.1
annotated-types==0.7.0
anyio==4.6.2.post1
attrs==24.2.0
certifi==2024.8.30
charset-normalizer==3.4.0
colorama==0.4.6
distro==1.9.0
frozenlist==1.5.0
greenlet==3.1.1
h11==0.14.0
httpcore==1.0.7
httpx==0.27.2
idna==3.10
jiter==0.7.1
jsonpatch==1.33
jsonpointer==3.0.0
langchain==0.3.7
langchain-core==0.3.19
langchain-openai==0.2.9
langchain-text-splitters==0.3.2
langsmith==0.1.144
multidict==6.1.0
numpy==1.26.4
openai==1.55.0
orjson==3.10.11
packaging==24.2
propcache==0.2.0
pydantic==2.10.1
pydantic_core==2.27.1
python-dotenv==1.0.1
PyYAML==6.0.2
regex==2024.11.6
requests==2.32.3
requests-toolbelt==1.0.0
sniffio==1.3.1
SQLAlchemy==2.0.36
tenacity==9.0.0
tiktoken==0.8.0
tqdm==4.67.0
typing_extensions==4.12.2
urllib3==2.2.3
yarl==1.18.0
※ The packages were installed with the pip install python-dotenv langchain langchain-openai command.