■ load_index_from_storage 함수를 사용해 라마 인덱스 데이터를 로드하는 방법을 보여준다.
▶ main.py
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 |
"""Load a persisted LlamaIndex vector index from disk and run a few queries.

Demonstrates `load_index_from_storage`: the index must have been previously
built and persisted to ./storage (e.g. via `index.storage_context.persist()`).
Requires a valid OpenAI API key for embedding the queries and generating
the answers.
"""
import logging
import os
import sys

from llama_index.core import StorageContext, load_index_from_storage

# NOTE: replace the placeholder with a real key; prefer loading it from the
# environment instead of hard-coding it in source.
os.environ["OPENAI_API_KEY"] = "<OPENAI_API_KEY>"

# DEBUG-level logging to stdout so every storage load and HTTP request is
# visible in the run output; force=True replaces any pre-existing handlers.
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, force=True)

# Rebuild the storage context from the persisted LlamaIndex data directory.
storage_context = StorageContext.from_defaults(persist_dir="./storage")

# Deserialize the previously built index (docstore, index store, vector store).
vector_store_index = load_index_from_storage(storage_context)

# Query engine: embeds each question, retrieves the top-k similar nodes,
# and asks the LLM to answer from that retrieved context only.
retriever_query_engine = vector_store_index.as_query_engine()

answer1 = retriever_query_engine.query("미코의 소꿉친구 이름은?")
answer2 = retriever_query_engine.query("울프 코퍼레이션의 CEO 이름은?")
answer3 = retriever_query_engine.query("미코의 성격은?")

print(f"미코의 소꿉친구 이름 : {answer1}")
print(f"울프 코퍼레이션 CEO 이름 : {answer2}")
print(f"미코의 성격은 :\n{answer3}")
▶ 실행 결과
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 |
DEBUG:llama_index.core.storage.kvstore.simple_kvstore:Loading llama_index.core.storage.kvstore.simple_kvstore from ./storage/docstore.json. DEBUG:fsspec.local:open file: /home/king/testproject/storage/docstore.json DEBUG:llama_index.core.storage.kvstore.simple_kvstore:Loading llama_index.core.storage.kvstore.simple_kvstore from ./storage/index_store.json. DEBUG:fsspec.local:open file: /home/king/testproject/storage/index_store.json DEBUG:llama_index.core.graph_stores.simple:Loading llama_index.core.graph_stores.simple from ./storage/graph_store.json. DEBUG:fsspec.local:open file: /home/king/testproject/storage/graph_store.json DEBUG:fsspec.local:open file: /home/king/testproject/storage/property_graph_store.json DEBUG:llama_index.core.vector_stores.simple:Loading llama_index.core.vector_stores.simple from ./storage/default__vector_store.json. DEBUG:fsspec.local:open file: /home/king/testproject/storage/default__vector_store.json DEBUG:llama_index.core.vector_stores.simple:Loading llama_index.core.vector_stores.simple from ./storage/image__vector_store.json. DEBUG:fsspec.local:open file: /home/king/testproject/storage/image__vector_store.json INFO:llama_index.core.indices.loading:Loading all indices. 
DEBUG:httpx:load_ssl_context verify=True cert=None trust_env=True http2=False DEBUG:httpx:load_verify_locations cafile='/home/king/testproject/env/lib/python3.10/site-packages/certifi/cacert.pem' DEBUG:openai._base_client:Request options: {'method': 'post', 'url': '/embeddings', 'files': None, 'post_parser': <function Embeddings.create.<locals>.parser at 0x7f64dca32290>, 'json_data': {'input': ['미코의 소꿉친구 이름은?'], 'model': 'text-embedding-ada-002', 'encoding_format': 'base64'}} DEBUG:openai._base_client:Sending HTTP Request: POST https://api.openai.com/v1/embeddings DEBUG:httpcore.connection:connect_tcp.started host='api.openai.com' port=443 local_address=None timeout=60.0 socket_options=None DEBUG:httpcore.connection:connect_tcp.complete return_value=<httpcore._backends.sync.SyncStream object at 0x7f64dca12920> DEBUG:httpcore.connection:start_tls.started ssl_context=<ssl.SSLContext object at 0x7f64dca487c0> server_hostname='api.openai.com' timeout=60.0 DEBUG:httpcore.connection:start_tls.complete return_value=<httpcore._backends.sync.SyncStream object at 0x7f64dca12950> DEBUG:httpcore.http11:send_request_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_headers.complete DEBUG:httpcore.http11:send_request_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_body.complete DEBUG:httpcore.http11:receive_response_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Date', b'Fri, 07 Jun 2024 03:17:40 GMT'), (b'Content-Type', b'application/json'), (b'Transfer-Encoding', b'chunked'), (b'Connection', b'keep-alive'), (b'access-control-allow-origin', b'*'), (b'openai-model', b'text-embedding-ada-002'), (b'openai-organization', b'user-89spyw5fyioq1wqsb6pwacpu'), (b'openai-processing-ms', b'21'), (b'openai-version', b'2020-10-01'), (b'strict-transport-security', b'max-age=15724800; includeSubDomains'), (b'x-ratelimit-limit-requests', 
b'3000'), (b'x-ratelimit-limit-tokens', b'1000000'), (b'x-ratelimit-remaining-requests', b'2999'), (b'x-ratelimit-remaining-tokens', b'999992'), (b'x-ratelimit-reset-requests', b'20ms'), (b'x-ratelimit-reset-tokens', b'0s'), (b'x-request-id', b'req_c5a0bcd32220edb07c65d55653e4273e'), (b'CF-Cache-Status', b'DYNAMIC'), (b'Set-Cookie', b'__cf_bm=nkTy3ivlAbzUzytgZssBYptgFk626l2QUmcDwRqOBOQ-1717730260-1.0.1.1-aihbxzcYBco35zEj4LVmdM4DzPGM8Lua2fR2e4md2EE7a3xz2fc6xnpjuvt4tKh4kNEACsmt2c_N1VcucFWWhA; path=/; expires=Fri, 07-Jun-24 03:47:40 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None'), (b'Set-Cookie', b'_cfuvid=XsPmspdYh5sKEaMuaNifrM4F4325_RdDLq.NuSnWwAY-1717730260652-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None'), (b'Server', b'cloudflare'), (b'CF-RAY', b'88fd7d8fab5b305b-ICN'), (b'Content-Encoding', b'gzip'), (b'alt-svc', b'h3=":443"; ma=86400')]) INFO:httpx:HTTP Request: POST https://api.openai.com/v1/embeddings "HTTP/1.1 200 OK" DEBUG:httpcore.http11:receive_response_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_body.complete DEBUG:httpcore.http11:response_closed.started DEBUG:httpcore.http11:response_closed.complete DEBUG:openai._base_client:HTTP Response: POST https://api.openai.com/v1/embeddings "200 OK" Headers([('date', 'Fri, 07 Jun 2024 03:17:40 GMT'), ('content-type', 'application/json'), ('transfer-encoding', 'chunked'), ('connection', 'keep-alive'), ('access-control-allow-origin', '*'), ('openai-model', 'text-embedding-ada-002'), ('openai-organization', 'user-89spyw5fyioq1wqsb6pwacpu'), ('openai-processing-ms', '21'), ('openai-version', '2020-10-01'), ('strict-transport-security', 'max-age=15724800; includeSubDomains'), ('x-ratelimit-limit-requests', '3000'), ('x-ratelimit-limit-tokens', '1000000'), ('x-ratelimit-remaining-requests', '2999'), ('x-ratelimit-remaining-tokens', '999992'), ('x-ratelimit-reset-requests', '20ms'), ('x-ratelimit-reset-tokens', '0s'), 
('x-request-id', 'req_c5a0bcd32220edb07c65d55653e4273e'), ('cf-cache-status', 'DYNAMIC'), ('set-cookie', '__cf_bm=nkTy3ivlAbzUzytgZssBYptgFk626l2QUmcDwRqOBOQ-1717730260-1.0.1.1-aihbxzcYBco35zEj4LVmdM4DzPGM8Lua2fR2e4md2EE7a3xz2fc6xnpjuvt4tKh4kNEACsmt2c_N1VcucFWWhA; path=/; expires=Fri, 07-Jun-24 03:47:40 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None'), ('set-cookie', '_cfuvid=XsPmspdYh5sKEaMuaNifrM4F4325_RdDLq.NuSnWwAY-1717730260652-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None'), ('server', 'cloudflare'), ('cf-ray', '88fd7d8fab5b305b-ICN'), ('content-encoding', 'gzip'), ('alt-svc', 'h3=":443"; ma=86400')]) DEBUG:openai._base_client:request_id: req_c5a0bcd32220edb07c65d55653e4273e DEBUG:llama_index.core.indices.utils:> Top 2 nodes: > [Node a28f295b-4029-4d54-9d62-ae2b8413062a] [Similarity score: 0.826677] 제5장: 결전의 순간 미코와 료는 마침내 울프 코퍼레이션의 최상층에 도착해 CEO인 교활한 울프 박사와 대면한다. 울프 박사는 시민을 지배하려는 사악한 야망을 드러내며 자신... > [Node 5792c637-74de-45e3-81c1-62dc41c8a21d] [Similarity score: 0.825153] 제3장: 배신과 재회 술집 '할머니의 집'에서 미코는 데이터를 받을 사람인 료를 기다리고 있었다. 료는 그녀의 어릴 적 친구이자 그 역시 울프 코퍼레이션과 싸우는 해커 집단... DEBUG:httpx:load_ssl_context verify=True cert=None trust_env=True http2=False DEBUG:httpx:load_verify_locations cafile='/home/king/testproject/env/lib/python3.10/site-packages/certifi/cacert.pem' DEBUG:openai._base_client:Request options: {'method': 'post', 'url': '/chat/completions', 'files': None, 'json_data': {'messages': [{'role': 'system', 'content': "You are an expert Q&A system that is trusted around the world.\nAlways answer the query using the provided context information, and not prior knowledge.\nSome rules to follow:\n1. Never directly reference the given context in your answer.\n2. Avoid statements like 'Based on the context, ...' or 'The context information ...' 
or anything along those lines."}, {'role': 'user', 'content': "Context information is below.\n---------------------\nfile_path: /home/king/data/akazukin5.txt\n\n제5장: 결전의 순간\n\n미코와 료는 마침내 울프 코퍼레이션의 최상층에 도착해 CEO인 교활한 울프 박사와 대면한다. 울프 박사는 시민을 지배하려는 사악한 야망을 드러내며 자신의 압도적인 힘을 과시한다. 하지만 미코와 료는 서로를 도와가며 울프 박사와 싸우고 그의 약점을 찾아낸다.\n\nfile_path: /home/king/data/akazukin3.txt\n\n제3장: 배신과 재회\n\n술집 '할머니의 집'에서 미코는 데이터를 받을 사람인 료를 기다리고 있었다. 료는 그녀의 어릴 적 친구이자 그 역시 울프 코퍼레이션과 싸우는 해커 집단의 일원이었다. 하지만 료는 미코에게 배신감을 느꼈고, 그녀가 데이터 카우리아에 몸을 던진 것에 화가 났다.\n\n그럼에도 불구하고 미코는 료에게 데이터를 건네며 울프 코퍼레이션에 대한 반격을 믿기로 한다. 두 사람은 함께 울프 코퍼레이션의 음모를 밝혀내고 시민들을 구하기로 결심한다.\n---------------------\nGiven the context information and not prior knowledge, answer the query.\nQuery: 미코의 소꿉친구 이름은?\nAnswer: "}], 'model': 'gpt-3.5-turbo', 'stream': False, 'temperature': 0.1}} DEBUG:openai._base_client:Sending HTTP Request: POST https://api.openai.com/v1/chat/completions DEBUG:httpcore.connection:connect_tcp.started host='api.openai.com' port=443 local_address=None timeout=60.0 socket_options=None DEBUG:httpcore.connection:connect_tcp.complete return_value=<httpcore._backends.sync.SyncStream object at 0x7f64dc262e90> DEBUG:httpcore.connection:start_tls.started ssl_context=<ssl.SSLContext object at 0x7f64deaded40> server_hostname='api.openai.com' timeout=60.0 DEBUG:httpcore.connection:start_tls.complete return_value=<httpcore._backends.sync.SyncStream object at 0x7f64dc262e60> DEBUG:httpcore.http11:send_request_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_headers.complete DEBUG:httpcore.http11:send_request_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_body.complete DEBUG:httpcore.http11:receive_response_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Date', b'Fri, 07 Jun 2024 03:17:41 GMT'), (b'Content-Type', b'application/json'), (b'Transfer-Encoding', b'chunked'), 
(b'Connection', b'keep-alive'), (b'openai-organization', b'user-89spyw5fyioq1wqsb6pwacpu'), (b'openai-processing-ms', b'351'), (b'openai-version', b'2020-10-01'), (b'strict-transport-security', b'max-age=15724800; includeSubDomains'), (b'x-ratelimit-limit-requests', b'10000'), (b'x-ratelimit-limit-tokens', b'60000'), (b'x-ratelimit-remaining-requests', b'9999'), (b'x-ratelimit-remaining-tokens', b'59574'), (b'x-ratelimit-reset-requests', b'8.64s'), (b'x-ratelimit-reset-tokens', b'426ms'), (b'x-request-id', b'req_b7a589d97b4ac1b84eb33c0bd0ad6516'), (b'CF-Cache-Status', b'DYNAMIC'), (b'Set-Cookie', b'__cf_bm=7W1TFceQ4JA91P4W4IEwUndOyl6hdphUI1s9GQyNFWY-1717730261-1.0.1.1-rF0gCMlakrxaKJhqQRMFvBwavlxGsM3NssNFOjsHrtZXj1bGjBg4vAmHVfEWOn5iNPJQDLq_eToIR..PET92eg; path=/; expires=Fri, 07-Jun-24 03:47:41 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None'), (b'Set-Cookie', b'_cfuvid=hejal5DunMH9lrgAeXGl.YwKMuMyrbNt9oPfru30h8U-1717730261361-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None'), (b'Server', b'cloudflare'), (b'CF-RAY', b'88fd7d915ec3327d-ICN'), (b'Content-Encoding', b'gzip'), (b'alt-svc', b'h3=":443"; ma=86400')]) INFO:httpx:HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK" DEBUG:httpcore.http11:receive_response_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_body.complete DEBUG:httpcore.http11:response_closed.started DEBUG:httpcore.http11:response_closed.complete DEBUG:openai._base_client:HTTP Response: POST https://api.openai.com/v1/chat/completions "200 OK" Headers([('date', 'Fri, 07 Jun 2024 03:17:41 GMT'), ('content-type', 'application/json'), ('transfer-encoding', 'chunked'), ('connection', 'keep-alive'), ('openai-organization', 'user-89spyw5fyioq1wqsb6pwacpu'), ('openai-processing-ms', '351'), ('openai-version', '2020-10-01'), ('strict-transport-security', 'max-age=15724800; includeSubDomains'), ('x-ratelimit-limit-requests', '10000'), 
('x-ratelimit-limit-tokens', '60000'), ('x-ratelimit-remaining-requests', '9999'), ('x-ratelimit-remaining-tokens', '59574'), ('x-ratelimit-reset-requests', '8.64s'), ('x-ratelimit-reset-tokens', '426ms'), ('x-request-id', 'req_b7a589d97b4ac1b84eb33c0bd0ad6516'), ('cf-cache-status', 'DYNAMIC'), ('set-cookie', '__cf_bm=7W1TFceQ4JA91P4W4IEwUndOyl6hdphUI1s9GQyNFWY-1717730261-1.0.1.1-rF0gCMlakrxaKJhqQRMFvBwavlxGsM3NssNFOjsHrtZXj1bGjBg4vAmHVfEWOn5iNPJQDLq_eToIR..PET92eg; path=/; expires=Fri, 07-Jun-24 03:47:41 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None'), ('set-cookie', '_cfuvid=hejal5DunMH9lrgAeXGl.YwKMuMyrbNt9oPfru30h8U-1717730261361-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None'), ('server', 'cloudflare'), ('cf-ray', '88fd7d915ec3327d-ICN'), ('content-encoding', 'gzip'), ('alt-svc', 'h3=":443"; ma=86400')]) DEBUG:openai._base_client:request_id: req_b7a589d97b4ac1b84eb33c0bd0ad6516 DEBUG:openai._base_client:Request options: {'method': 'post', 'url': '/embeddings', 'files': None, 'post_parser': <function Embeddings.create.<locals>.parser at 0x7f64dbcd2a70>, 'json_data': {'input': ['울프 코퍼레이션의 CEO 이름은?'], 'model': 'text-embedding-ada-002', 'encoding_format': 'base64'}} DEBUG:openai._base_client:Sending HTTP Request: POST https://api.openai.com/v1/embeddings DEBUG:httpcore.http11:send_request_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_headers.complete DEBUG:httpcore.http11:send_request_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_body.complete DEBUG:httpcore.http11:receive_response_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Date', b'Fri, 07 Jun 2024 03:17:41 GMT'), (b'Content-Type', b'application/json'), (b'Transfer-Encoding', b'chunked'), (b'Connection', b'keep-alive'), (b'access-control-allow-origin', b'*'), (b'openai-model', 
b'text-embedding-ada-002'), (b'openai-organization', b'user-89spyw5fyioq1wqsb6pwacpu'), (b'openai-processing-ms', b'19'), (b'openai-version', b'2020-10-01'), (b'strict-transport-security', b'max-age=15724800; includeSubDomains'), (b'x-ratelimit-limit-requests', b'3000'), (b'x-ratelimit-limit-tokens', b'1000000'), (b'x-ratelimit-remaining-requests', b'2999'), (b'x-ratelimit-remaining-tokens', b'999989'), (b'x-ratelimit-reset-requests', b'20ms'), (b'x-ratelimit-reset-tokens', b'0s'), (b'x-request-id', b'req_c73e85835cca8078a00ad87f1f811785'), (b'CF-Cache-Status', b'DYNAMIC'), (b'Server', b'cloudflare'), (b'CF-RAY', b'88fd7d958b43305b-ICN'), (b'Content-Encoding', b'gzip'), (b'alt-svc', b'h3=":443"; ma=86400')]) INFO:httpx:HTTP Request: POST https://api.openai.com/v1/embeddings "HTTP/1.1 200 OK" DEBUG:httpcore.http11:receive_response_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_body.complete DEBUG:httpcore.http11:response_closed.started DEBUG:httpcore.http11:response_closed.complete DEBUG:openai._base_client:HTTP Response: POST https://api.openai.com/v1/embeddings "200 OK" Headers({'date': 'Fri, 07 Jun 2024 03:17:41 GMT', 'content-type': 'application/json', 'transfer-encoding': 'chunked', 'connection': 'keep-alive', 'access-control-allow-origin': '*', 'openai-model': 'text-embedding-ada-002', 'openai-organization': 'user-89spyw5fyioq1wqsb6pwacpu', 'openai-processing-ms': '19', 'openai-version': '2020-10-01', 'strict-transport-security': 'max-age=15724800; includeSubDomains', 'x-ratelimit-limit-requests': '3000', 'x-ratelimit-limit-tokens': '1000000', 'x-ratelimit-remaining-requests': '2999', 'x-ratelimit-remaining-tokens': '999989', 'x-ratelimit-reset-requests': '20ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_c73e85835cca8078a00ad87f1f811785', 'cf-cache-status': 'DYNAMIC', 'server': 'cloudflare', 'cf-ray': '88fd7d958b43305b-ICN', 'content-encoding': 'gzip', 'alt-svc': 'h3=":443"; ma=86400'}) 
DEBUG:openai._base_client:request_id: req_c73e85835cca8078a00ad87f1f811785 DEBUG:llama_index.core.indices.utils:> Top 2 nodes: > [Node a28f295b-4029-4d54-9d62-ae2b8413062a] [Similarity score: 0.83732] 제5장: 결전의 순간 미코와 료는 마침내 울프 코퍼레이션의 최상층에 도착해 CEO인 교활한 울프 박사와 대면한다. 울프 박사는 시민을 지배하려는 사악한 야망을 드러내며 자신... > [Node 4362bb3e-1179-44c9-9ccc-7d951cdf5a96] [Similarity score: 0.803833] 제7장: 새로운 시작 울프 코퍼레이션이 무너진 후, 미코와 료는 서로의 과거를 용서하고 다시 우정을 회복한다. 미코는 데이터카우리아를 그만두고 료와 함께 새로운 길을 걷기 ... DEBUG:openai._base_client:Request options: {'method': 'post', 'url': '/chat/completions', 'files': None, 'json_data': {'messages': [{'role': 'system', 'content': "You are an expert Q&A system that is trusted around the world.\nAlways answer the query using the provided context information, and not prior knowledge.\nSome rules to follow:\n1. Never directly reference the given context in your answer.\n2. Avoid statements like 'Based on the context, ...' or 'The context information ...' or anything along those lines."}, {'role': 'user', 'content': 'Context information is below.\n---------------------\nfile_path: /home/king/data/akazukin5.txt\n\n제5장: 결전의 순간\n\n미코와 료는 마침내 울프 코퍼레이션의 최상층에 도착해 CEO인 교활한 울프 박사와 대면한다. 울프 박사는 시민을 지배하려는 사악한 야망을 드러내며 자신의 압도적인 힘을 과시한다. 하지만 미코와 료는 서로를 도와가며 울프 박사와 싸우고 그의 약점을 찾아낸다.\n\nfile_path: /home/king/data/akazukin7.txt\n\n제7장: 새로운 시작\n\n울프 코퍼레이션이 무너진 후, 미코와 료는 서로의 과거를 용서하고 다시 우정을 회복한다. 미코는 데이터카우리아를 그만두고 료와 함께 새로운 길을 걷기 시작한다. 그들은 스스로의 힘으로 미래의 네오 도쿄를 더 나은 도시로 바꾸어 나갈 것을 다짐한다. 
이것은 미코와 료, 그리고 전뇌 빨간 망토의 새로운 모험의 시작이었다.\n---------------------\nGiven the context information and not prior knowledge, answer the query.\nQuery: 울프 코퍼레이션의 CEO 이름은?\nAnswer: '}], 'model': 'gpt-3.5-turbo', 'stream': False, 'temperature': 0.1}} DEBUG:openai._base_client:Sending HTTP Request: POST https://api.openai.com/v1/chat/completions DEBUG:httpcore.http11:send_request_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_headers.complete DEBUG:httpcore.http11:send_request_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_body.complete DEBUG:httpcore.http11:receive_response_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Date', b'Fri, 07 Jun 2024 03:17:42 GMT'), (b'Content-Type', b'application/json'), (b'Transfer-Encoding', b'chunked'), (b'Connection', b'keep-alive'), (b'openai-organization', b'user-89spyw5fyioq1wqsb6pwacpu'), (b'openai-processing-ms', b'515'), (b'openai-version', b'2020-10-01'), (b'strict-transport-security', b'max-age=15724800; includeSubDomains'), (b'x-ratelimit-limit-requests', b'10000'), (b'x-ratelimit-limit-tokens', b'60000'), (b'x-ratelimit-remaining-requests', b'9998'), (b'x-ratelimit-remaining-tokens', b'59611'), (b'x-ratelimit-reset-requests', b'16.417s'), (b'x-ratelimit-reset-tokens', b'389ms'), (b'x-request-id', b'req_3de085ba97e1b1a4d47604411822c429'), (b'CF-Cache-Status', b'DYNAMIC'), (b'Server', b'cloudflare'), (b'CF-RAY', b'88fd7d96ee49327d-ICN'), (b'Content-Encoding', b'gzip'), (b'alt-svc', b'h3=":443"; ma=86400')]) INFO:httpx:HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK" DEBUG:httpcore.http11:receive_response_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_body.complete DEBUG:httpcore.http11:response_closed.started DEBUG:httpcore.http11:response_closed.complete DEBUG:openai._base_client:HTTP Response: POST 
https://api.openai.com/v1/chat/completions "200 OK" Headers({'date': 'Fri, 07 Jun 2024 03:17:42 GMT', 'content-type': 'application/json', 'transfer-encoding': 'chunked', 'connection': 'keep-alive', 'openai-organization': 'user-89spyw5fyioq1wqsb6pwacpu', 'openai-processing-ms': '515', 'openai-version': '2020-10-01', 'strict-transport-security': 'max-age=15724800; includeSubDomains', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '60000', 'x-ratelimit-remaining-requests': '9998', 'x-ratelimit-remaining-tokens': '59611', 'x-ratelimit-reset-requests': '16.417s', 'x-ratelimit-reset-tokens': '389ms', 'x-request-id': 'req_3de085ba97e1b1a4d47604411822c429', 'cf-cache-status': 'DYNAMIC', 'server': 'cloudflare', 'cf-ray': '88fd7d96ee49327d-ICN', 'content-encoding': 'gzip', 'alt-svc': 'h3=":443"; ma=86400'}) DEBUG:openai._base_client:request_id: req_3de085ba97e1b1a4d47604411822c429 DEBUG:openai._base_client:Request options: {'method': 'post', 'url': '/embeddings', 'files': None, 'post_parser': <function Embeddings.create.<locals>.parser at 0x7f64dca33e20>, 'json_data': {'input': ['미코의 성격은?'], 'model': 'text-embedding-ada-002', 'encoding_format': 'base64'}} DEBUG:openai._base_client:Sending HTTP Request: POST https://api.openai.com/v1/embeddings DEBUG:httpcore.http11:send_request_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_headers.complete DEBUG:httpcore.http11:send_request_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_body.complete DEBUG:httpcore.http11:receive_response_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Date', b'Fri, 07 Jun 2024 03:17:42 GMT'), (b'Content-Type', b'application/json'), (b'Transfer-Encoding', b'chunked'), (b'Connection', b'keep-alive'), (b'access-control-allow-origin', b'*'), (b'openai-model', b'text-embedding-ada-002'), (b'openai-organization', 
b'user-89spyw5fyioq1wqsb6pwacpu'), (b'openai-processing-ms', b'19'), (b'openai-version', b'2020-10-01'), (b'strict-transport-security', b'max-age=15724800; includeSubDomains'), (b'x-ratelimit-limit-requests', b'3000'), (b'x-ratelimit-limit-tokens', b'1000000'), (b'x-ratelimit-remaining-requests', b'2999'), (b'x-ratelimit-remaining-tokens', b'999994'), (b'x-ratelimit-reset-requests', b'20ms'), (b'x-ratelimit-reset-tokens', b'0s'), (b'x-request-id', b'req_4531c9ce2feaa047646ee067c556b2a9'), (b'CF-Cache-Status', b'DYNAMIC'), (b'Server', b'cloudflare'), (b'CF-RAY', b'88fd7d9bfceb305b-ICN'), (b'Content-Encoding', b'gzip'), (b'alt-svc', b'h3=":443"; ma=86400')]) INFO:httpx:HTTP Request: POST https://api.openai.com/v1/embeddings "HTTP/1.1 200 OK" DEBUG:httpcore.http11:receive_response_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_body.complete DEBUG:httpcore.http11:response_closed.started DEBUG:httpcore.http11:response_closed.complete DEBUG:openai._base_client:HTTP Response: POST https://api.openai.com/v1/embeddings "200 OK" Headers({'date': 'Fri, 07 Jun 2024 03:17:42 GMT', 'content-type': 'application/json', 'transfer-encoding': 'chunked', 'connection': 'keep-alive', 'access-control-allow-origin': '*', 'openai-model': 'text-embedding-ada-002', 'openai-organization': 'user-89spyw5fyioq1wqsb6pwacpu', 'openai-processing-ms': '19', 'openai-version': '2020-10-01', 'strict-transport-security': 'max-age=15724800; includeSubDomains', 'x-ratelimit-limit-requests': '3000', 'x-ratelimit-limit-tokens': '1000000', 'x-ratelimit-remaining-requests': '2999', 'x-ratelimit-remaining-tokens': '999994', 'x-ratelimit-reset-requests': '20ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_4531c9ce2feaa047646ee067c556b2a9', 'cf-cache-status': 'DYNAMIC', 'server': 'cloudflare', 'cf-ray': '88fd7d9bfceb305b-ICN', 'content-encoding': 'gzip', 'alt-svc': 'h3=":443"; ma=86400'}) DEBUG:openai._base_client:request_id: req_4531c9ce2feaa047646ee067c556b2a9 
DEBUG:llama_index.core.indices.utils:> Top 2 nodes: > [Node a28f295b-4029-4d54-9d62-ae2b8413062a] [Similarity score: 0.825045] 제5장: 결전의 순간 미코와 료는 마침내 울프 코퍼레이션의 최상층에 도착해 CEO인 교활한 울프 박사와 대면한다. 울프 박사는 시민을 지배하려는 사악한 야망을 드러내며 자신... > [Node 4362bb3e-1179-44c9-9ccc-7d951cdf5a96] [Similarity score: 0.818595] 제7장: 새로운 시작 울프 코퍼레이션이 무너진 후, 미코와 료는 서로의 과거를 용서하고 다시 우정을 회복한다. 미코는 데이터카우리아를 그만두고 료와 함께 새로운 길을 걷기 ... DEBUG:openai._base_client:Request options: {'method': 'post', 'url': '/chat/completions', 'files': None, 'json_data': {'messages': [{'role': 'system', 'content': "You are an expert Q&A system that is trusted around the world.\nAlways answer the query using the provided context information, and not prior knowledge.\nSome rules to follow:\n1. Never directly reference the given context in your answer.\n2. Avoid statements like 'Based on the context, ...' or 'The context information ...' or anything along those lines."}, {'role': 'user', 'content': 'Context information is below.\n---------------------\nfile_path: /home/king/data/akazukin5.txt\n\n제5장: 결전의 순간\n\n미코와 료는 마침내 울프 코퍼레이션의 최상층에 도착해 CEO인 교활한 울프 박사와 대면한다. 울프 박사는 시민을 지배하려는 사악한 야망을 드러내며 자신의 압도적인 힘을 과시한다. 하지만 미코와 료는 서로를 도와가며 울프 박사와 싸우고 그의 약점을 찾아낸다.\n\nfile_path: /home/king/data/akazukin7.txt\n\n제7장: 새로운 시작\n\n울프 코퍼레이션이 무너진 후, 미코와 료는 서로의 과거를 용서하고 다시 우정을 회복한다. 미코는 데이터카우리아를 그만두고 료와 함께 새로운 길을 걷기 시작한다. 그들은 스스로의 힘으로 미래의 네오 도쿄를 더 나은 도시로 바꾸어 나갈 것을 다짐한다. 
이것은 미코와 료, 그리고 전뇌 빨간 망토의 새로운 모험의 시작이었다.\n---------------------\nGiven the context information and not prior knowledge, answer the query.\nQuery: 미코의 성격은?\nAnswer: '}], 'model': 'gpt-3.5-turbo', 'stream': False, 'temperature': 0.1}} DEBUG:openai._base_client:Sending HTTP Request: POST https://api.openai.com/v1/chat/completions DEBUG:httpcore.http11:send_request_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_headers.complete DEBUG:httpcore.http11:send_request_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:send_request_body.complete DEBUG:httpcore.http11:receive_response_headers.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Date', b'Fri, 07 Jun 2024 03:17:45 GMT'), (b'Content-Type', b'application/json'), (b'Transfer-Encoding', b'chunked'), (b'Connection', b'keep-alive'), (b'openai-organization', b'user-89spyw5fyioq1wqsb6pwacpu'), (b'openai-processing-ms', b'2368'), (b'openai-version', b'2020-10-01'), (b'strict-transport-security', b'max-age=15724800; includeSubDomains'), (b'x-ratelimit-limit-requests', b'10000'), (b'x-ratelimit-limit-tokens', b'60000'), (b'x-ratelimit-remaining-requests', b'9997'), (b'x-ratelimit-remaining-tokens', b'59616'), (b'x-ratelimit-reset-requests', b'23.966s'), (b'x-ratelimit-reset-tokens', b'384ms'), (b'x-request-id', b'req_d84a9069d17d013d58ebdbf190f7b793'), (b'CF-Cache-Status', b'DYNAMIC'), (b'Server', b'cloudflare'), (b'CF-RAY', b'88fd7d9d9fde327d-ICN'), (b'Content-Encoding', b'gzip'), (b'alt-svc', b'h3=":443"; ma=86400')]) INFO:httpx:HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK" DEBUG:httpcore.http11:receive_response_body.started request=<Request [b'POST']> DEBUG:httpcore.http11:receive_response_body.complete DEBUG:httpcore.http11:response_closed.started DEBUG:httpcore.http11:response_closed.complete DEBUG:openai._base_client:HTTP Response: POST 
https://api.openai.com/v1/chat/completions "200 OK" Headers({'date': 'Fri, 07 Jun 2024 03:17:45 GMT', 'content-type': 'application/json', 'transfer-encoding': 'chunked', 'connection': 'keep-alive', 'openai-organization': 'user-89spyw5fyioq1wqsb6pwacpu', 'openai-processing-ms': '2368', 'openai-version': '2020-10-01', 'strict-transport-security': 'max-age=15724800; includeSubDomains', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '60000', 'x-ratelimit-remaining-requests': '9997', 'x-ratelimit-remaining-tokens': '59616', 'x-ratelimit-reset-requests': '23.966s', 'x-ratelimit-reset-tokens': '384ms', 'x-request-id': 'req_d84a9069d17d013d58ebdbf190f7b793', 'cf-cache-status': 'DYNAMIC', 'server': 'cloudflare', 'cf-ray': '88fd7d9d9fde327d-ICN', 'content-encoding': 'gzip', 'alt-svc': 'h3=":443"; ma=86400'}) DEBUG:openai._base_client:request_id: req_d84a9069d17d013d58ebdbf190f7b793 미코의 소꿉친구 이름 : 료 울프 코퍼레이션 CEO 이름 : 교활한 울프 박사 미코의 성격은 : 미코는 울프 박사와의 결전에서 용감하고 결의가 강한 모습을 보여주며, 료와의 협력을 통해 약점을 찾아내는 등 지능적이고 창의적인 면을 보여준다. 이후 울프 코퍼레이션이 무너지고 나서는 료와의 우정을 회복하고 함께 미래를 향해 나아가는 모습을 보여주어 따뜻하고 헌신적인 성향을 보여준다. |
▶ requirements.txt
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 |
aiohttp==3.9.5 aiosignal==1.3.1 async-timeout==4.0.3 attrs==23.2.0 certifi==2024.6.2 charset-normalizer==3.3.2 dataclasses-json==0.5.14 frozenlist==1.4.1 fsspec==2024.6.0 greenlet==3.0.3 idna==3.7 langchain==0.0.181 llama-index==0.6.12 marshmallow==3.21.3 multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.10.0 numpy==1.26.4 openai==0.28.0 openapi-schema-pydantic==1.2.4 packaging==24.0 pandas==2.2.2 pydantic==1.10.15 python-dateutil==2.9.0.post0 pytz==2024.1 PyYAML==6.0.1 regex==2024.5.15 requests==2.32.3 six==1.16.0 SQLAlchemy==2.0.24 tenacity==8.3.0 tiktoken==0.7.0 tqdm==4.66.4 typing-inspect==0.8.0 typing_extensions==4.5.0 tzdata==2024.1 urllib3==1.26.18 yarl==1.9.4 |
※ pip install llama-index 명령을 실행했다.