"""Gradio chatbot that answers questions about a PDF deck via a RetrievalQA chain."""
import os

import gradio as gr
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.retrievers.multi_query import MultiQueryRetriever  # NOTE(review): imported but unused below
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

# Fail fast with a clear message when the API key is absent. The original
# `os.environ["OPENAI_API_KEY"] = os.getenv('OPENAI_API_KEY')` was a no-op
# when the key was set and raised an opaque TypeError when it was not.
if not os.getenv("OPENAI_API_KEY"):
    raise RuntimeError("OPENAI_API_KEY environment variable is not set")
| |
|
| | |
| | loader = PyPDFLoader("2020_emaster_keynote.pdf") |
| | pages = loader.load_and_split() |
| |
|
| | |
# Re-chunk the page documents into ~300-character pieces with a small
# overlap so retrieval context is not cut mid-sentence.
splitter = RecursiveCharacterTextSplitter(
    chunk_size=300,
    chunk_overlap=20,
    length_function=len,
    is_separator_regex=False,
)
texts = splitter.split_documents(pages)
| |
|
| | |
# Embed every chunk with OpenAI and index the vectors in an in-memory
# Chroma store that backs the retriever below.
embeddings_model = OpenAIEmbeddings()
db = Chroma.from_documents(documents=texts, embedding=embeddings_model)
| |
|
| | |
| | llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.5) |
| | qa_chain = RetrievalQA.from_chain_type(llm,retriever=db.as_retriever()) |
| |
|
| |
|
| | |
# --- Gradio UI -------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Image('images/emaster.png')

    # Static hint text shown above the chat window.
    gr.Text('''
    이런 질문 어때요?
    데이터베이스의 정의
    ''')
    chatbot = gr.Chatbot(label="정보처리산기사챗봇")
    msg = gr.Textbox(label="질문해주세요!")
    clear = gr.Button("대화 초기화")

    def respond(message, chat_history):
        """Answer *message* with the QA chain and append the turn to the history.

        Returns an empty string (clears the textbox) and the updated history.
        """
        answer = qa_chain({"query": message})["result"]
        chat_history.append((message, answer))
        return "", chat_history

    # Submit sends (textbox, history) in and receives (cleared textbox, history).
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    # Reset the transcript without queueing.
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch(debug=True, share=True)