■ Shows how to use the dumps function to obtain a JSON string from a RunnableSequence object.
※ The OPENAI_API_KEY environment variable is defined in the .env file.
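For reference, a minimal .env file for this example only needs to hold the API key. The file below is not part of the original listing, and the value is a placeholder to be replaced with your own key.

▶ .env
OPENAI_API_KEY=<your OpenAI API key>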
▶ main.py
from dotenv import load_dotenv
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from langchain_core.load import dumps

# Load OPENAI_API_KEY from the .env file.
load_dotenv()

chatPromptTemplate = ChatPromptTemplate.from_messages(
    [
        ("system", "Translate the following into {language}:"),
        ("user", "{text}")
    ]
)

chatOpenAI = ChatOpenAI(model = "gpt-4o")

# Chain the prompt template and the chat model into a RunnableSequence.
runnableSequence = chatPromptTemplate | chatOpenAI

# Serialize the RunnableSequence to a pretty-printed JSON string.
json = dumps(runnableSequence, pretty = True)

print(json)

"""
{
  "lc": 1,
  "type": "constructor",
  "id": [
    "langchain",
    "schema",
    "runnable",
    "RunnableSequence"
  ],
  "kwargs": {
    "first": {
      "lc": 1,
      "type": "constructor",
      "id": [
        "langchain",
        "prompts",
        "chat",
        "ChatPromptTemplate"
      ],
      "kwargs": {
        "input_variables": [
          "language",
          "text"
        ],
        "messages": [
          {
            "lc": 1,
            "type": "constructor",
            "id": [
              "langchain",
              "prompts",
              "chat",
              "SystemMessagePromptTemplate"
            ],
            "kwargs": {
              "prompt": {
                "lc": 1,
                "type": "constructor",
                "id": [
                  "langchain",
                  "prompts",
                  "prompt",
                  "PromptTemplate"
                ],
                "kwargs": {
                  "input_variables": [
                    "language"
                  ],
                  "template": "Translate the following into {language}:",
                  "template_format": "f-string"
                },
                "name": "PromptTemplate"
              }
            }
          },
          {
            "lc": 1,
            "type": "constructor",
            "id": [
              "langchain",
              "prompts",
              "chat",
              "HumanMessagePromptTemplate"
            ],
            "kwargs": {
              "prompt": {
                "lc": 1,
                "type": "constructor",
                "id": [
                  "langchain",
                  "prompts",
                  "prompt",
                  "PromptTemplate"
                ],
                "kwargs": {
                  "input_variables": [
                    "text"
                  ],
                  "template": "{text}",
                  "template_format": "f-string"
                },
                "name": "PromptTemplate"
              }
            }
          }
        ]
      },
      "name": "ChatPromptTemplate"
    },
    "last": {
      "lc": 1,
      "type": "constructor",
      "id": [
        "langchain",
        "chat_models",
        "openai",
        "ChatOpenAI"
      ],
      "kwargs": {
        "model_name": "gpt-4o",
        "temperature": 0.7,
        "openai_api_key": {
          "lc": 1,
          "type": "secret",
          "id": [
            "OPENAI_API_KEY"
          ]
        },
        "max_retries": 2,
        "n": 1
      },
      "name": "ChatOpenAI"
    }
  },
  "name": "RunnableSequence"
}
"""
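As a complementary sketch (not part of the original listing), the JSON string produced by dumps can be turned back into an equivalent runnable with the loads function from the same langchain_core.load module. Note in the output above that the API key is serialized only as a "secret" reference, so it must be supplied again through secrets_map when loading; the variable names below are illustrative.

import os
from langchain_core.load import loads

# Rebuild the runnable from the serialized JSON string produced above.
# Secret values are not stored in the JSON, so pass them via secrets_map.
restoredRunnable = loads(
    json,
    secrets_map = {"OPENAI_API_KEY": os.environ["OPENAI_API_KEY"]}
)

# The restored runnable behaves like the original chain.
print(restoredRunnable.invoke({"language": "Korean", "text": "Hello"}))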
▶ requirements.txt
annotated-types==0.7.0
anyio==4.6.2.post1
certifi==2024.8.30
charset-normalizer==3.4.0
colorama==0.4.6
distro==1.9.0
h11==0.14.0
httpcore==1.0.7
httpx==0.27.2
idna==3.10
jiter==0.7.1
jsonpatch==1.33
jsonpointer==3.0.0
langchain-core==0.3.21
langchain-openai==0.2.9
langsmith==0.1.145
openai==1.55.0
orjson==3.10.12
packaging==24.2
pydantic==2.10.1
pydantic_core==2.27.1
python-dotenv==1.0.1
PyYAML==6.0.2
regex==2024.11.6
requests==2.32.3
requests-toolbelt==1.0.0
sniffio==1.3.1
tenacity==9.0.0
tiktoken==0.8.0
tqdm==4.67.1
typing_extensions==4.12.2
urllib3==2.2.3
※ The packages were installed with the command pip install python-dotenv langchain-openai.