# btc-chat-bot / chains.py — by atoye1 (v2 production, commit 04f9bf9)
from dotenv import load_dotenv
# langchain libraries
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQAWithSourcesChain
from langchain.prompts.chat import (
ChatPromptTemplate,
SystemMessagePromptTemplate,
HumanMessagePromptTemplate,
)
import openai
import os
# Project-local vector-store factories: one builds/loads a local store,
# the other connects to Pinecone (presumably remote — see those modules).
from vector_store import get_or_create_vector_store
from pinecone_vector_store import get_pinecone_store
# Load variables from a .env file, then hand the OpenAI key to the client.
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")
def generate_prompt_templates():
    """Build the chat prompt for the retrieval QA chain.

    Returns:
        ChatPromptTemplate: a system message that instructs the model to
        answer in Korean using the retrieved ``{summaries}`` context
        (Busan subway company regulations), followed by the human
        ``{question}`` message.
    """
    # NOTE: the quoted Korean fallback text is user-facing output and must be
    # kept verbatim. The stray "+ '\n'" that used to trail it was a leftover
    # concatenation fragment accidentally captured inside the string literal
    # (it was being sent to the model as prompt text) and has been removed.
    system_template = """Use the following pieces of context to answer the users question as long as possible. Provided context is regulations of subway company in busan. what users want to query is exact details or inferable information from regulations.
Given the following summaries of a long document and a question, create a final answer with references to the document.
If you don't know the answer, just say that "μ£„μ†‘ν•©λ‹ˆλ‹€. ν•΄λ‹Ήμ§ˆλ¬Έμ— λŒ€ν•œ 닡을 μ°Ύμ§€ λͺ»ν–ˆμŠ΅λ‹ˆλ‹€.😿\\n μ‚¬κ·œμ— ν¬ν•¨λœ λ‚΄μš©μ„ ꡬ체적으둜 λ²”μœ„λ₯Ό μ’ν˜€μ„œ μ§ˆλ¬Έν•΄μ£Όμ„Έμš”.πŸ€–\\n μ›ν•˜λŠ” 닡을 μ–»μ§€ λͺ»ν–ˆμ„ 경우 μ–΄λ–€ κ·œμ •μ„ λŒ€μƒμœΌλ‘œ μ§ˆλ¬Έν•˜λŠ”μ§€ λͺ…μ‹œν•΄μ„œ λ‹€μ‹œ μ§ˆλ¬Έν•΄ λ³΄μ„Έμš”! ", don't try to make up an answer.
----------------
{summaries}
You MUST answer in Korean:"""
    messages = [
        SystemMessagePromptTemplate.from_template(system_template),
        HumanMessagePromptTemplate.from_template("{question}"),
    ]
    return ChatPromptTemplate.from_messages(messages)
def generate_chain(pincone=True, *, k=4, model_name="gpt-3.5-turbo-16k",
                   temperature=0.3):
    """Create a RetrievalQAWithSourcesChain over the regulations vector store.

    Args:
        pincone: if True (default), use the Pinecone-backed store; otherwise
            use the locally built store. (The misspelling of "pinecone" is
            kept for backward compatibility with existing keyword callers.)
        k: number of documents the retriever fetches per query (default 4,
            the previously hard-coded value).
        model_name: OpenAI chat model to use (default the previously
            hard-coded "gpt-3.5-turbo-16k").
        temperature: sampling temperature for the LLM (default 0.3).

    Returns:
        RetrievalQAWithSourcesChain: configured with the "stuff" combine
        strategy, the custom Korean prompt, and source documents returned
        alongside the answer.
    """
    vector_store = get_pinecone_store() if pincone else get_or_create_vector_store()
    retriever = vector_store.as_retriever(search_kwargs={"k": k})
    llm = ChatOpenAI(model_name=model_name, temperature=temperature)
    return RetrievalQAWithSourcesChain.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=retriever,
        return_source_documents=True,
        chain_type_kwargs={"prompt": generate_prompt_templates()},
    )