File size: 1,288 Bytes
5bf66bd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
import os
from typing import Any, Dict, List, Optional

from dotenv import load_dotenv
from langchain.chains import ConversationalRetrievalChain
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.output_parsers import ResponseSchema
from langchain.output_parsers import StructuredOutputParser
from langchain.prompts import ChatPromptTemplate
from langchain.vectorstores import Chroma

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
load_dotenv()

# Directory holding the persisted Chroma vector index built in a prior step.
persist_directory = 'chroma/'

def run_llm(query: str, chat_history: Optional[List[Dict[str, Any]]] = None):
    """Answer *query* against the persisted Chroma index via a conversational
    retrieval chain.

    Args:
        query: The user question to answer.
        chat_history: Prior conversation exchanges supplying context for
            follow-up questions; defaults to an empty history.

    Returns:
        The chain's result mapping, which includes the generated answer and
        the retrieved source documents (``return_source_documents=True``).
    """
    # Fix for the shared-mutable-default pitfall: never use a list literal as
    # a default argument — create a fresh empty history per call instead.
    if chat_history is None:
        chat_history = []

    embeddings = OpenAIEmbeddings(openai_api_key=os.environ["OPENAI_API_KEY"])

    # Re-open the already-persisted vector store; no re-embedding of documents.
    vectorstore = Chroma(
        persist_directory=persist_directory,
        embedding_function=embeddings,
    )

    # temperature=0 for deterministic, retrieval-grounded answers.
    chat = ChatOpenAI(
        model="gpt-3.5-turbo-16k",
        verbose=True,
        temperature=0,
    )

    # MMR retrieval: fetch 20 candidate chunks, keep the 6 most diverse.
    qa = ConversationalRetrievalChain.from_llm(
        llm=chat,
        retriever=vectorstore.as_retriever(
            search_type="mmr",
            search_kwargs={"k": 6, "fetch_k": 20},
        ),
        return_source_documents=True,
    )

    return qa({"question": query, "chat_history": chat_history})

if __name__ == "__main__":
    # Manual smoke test: run a sample question with no prior chat history.
    result = run_llm(query="What is Venu 2?")
    print(result)