File size: 1,171 Bytes
dcc1634
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
import os
from typing import Any, Dict, List, Tuple

import pinecone
from dotenv import load_dotenv
from langchain.chains import ConversationalRetrievalChain, RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Pinecone

from consts import INDEX_NAME
load_dotenv()

# Initialise the Pinecone client once at import time; all requests reuse
# this connection.  Credentials come from the environment (.env loaded above).
_pinecone_api_key = os.environ.get("PINECONE_API_KEY")
_pinecone_region = os.environ.get("PINECONE_ENVIRONMENT_REGION")
pinecone.init(api_key=_pinecone_api_key, environment=_pinecone_region)

def run_llm(query: str, chat_history: List[Tuple[str, str]]) -> Dict[str, Any]:
    """Answer *query* against the Pinecone index, using prior turns for context.

    Args:
        query: The user's question.
        chat_history: Prior (human, ai) message pairs; the chain uses them to
            rephrase follow-up questions into standalone ones.

    Returns:
        The chain's result dict — includes the answer and, because
        ``return_source_documents=True``, a ``"source_documents"`` list.
    """
    embeddings = OpenAIEmbeddings()
    # Connect to an index that was populated by a separate ingestion step.
    doc_search = Pinecone.from_existing_index(index_name=INDEX_NAME, embedding=embeddings)
    # temperature=0 keeps answers deterministic and grounded in retrieved docs.
    chat = ChatOpenAI(verbose=True, temperature=0)

    qa = ConversationalRetrievalChain.from_llm(
        llm=chat,
        retriever=doc_search.as_retriever(),
        return_source_documents=True,
    )
    return qa({"question": query, "chat_history": chat_history})

# if __name__ == "__main__":
#     print(run_llm(query="What is RetrievalQA Chain?", chat_history=[]))