# ChatbotNico/backend/core.py
# primer commit (nesanchezo, 5bf66bd)
import os
from typing import Any, Dict, List
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.chains import RetrievalQA
from langchain.vectorstores import Chroma
from langchain.output_parsers import ResponseSchema
from langchain.output_parsers import StructuredOutputParser
from langchain.document_loaders import PyPDFLoader
from langchain.prompts import ChatPromptTemplate
from dotenv import load_dotenv
load_dotenv()  # load environment variables (e.g. OPENAI_API_KEY, read below) from a local .env file
# Directory where the Chroma vector store was persisted; run_llm re-opens it from here.
persist_directory = 'chroma/'
def run_llm(query: str, chat_history: List[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Answer *query* against the persisted Chroma vector store.

    Args:
        query: The user's question.
        chat_history: Prior conversation turns passed through to the chain;
            defaults to an empty history.

    Returns:
        The chain's output dict; since ``return_source_documents=True`` it
        includes the retrieved source documents alongside the answer.
    """
    # Fix: the original used a mutable default (`chat_history=[]`), which is
    # shared across calls and could accumulate state. Bind a fresh list here.
    if chat_history is None:
        chat_history = []
    embeddings = OpenAIEmbeddings(openai_api_key=os.environ["OPENAI_API_KEY"])
    # Re-open the Chroma index persisted at module-level `persist_directory`.
    new_vectorstore = Chroma(
        persist_directory=persist_directory, embedding_function=embeddings
    )
    chat = ChatOpenAI(
        model="gpt-3.5-turbo-16k",
        verbose=True,
        temperature=0,  # deterministic answers
    )
    # MMR retrieval: fetch 20 candidates, keep the 6 most diverse.
    qa = ConversationalRetrievalChain.from_llm(
        llm=chat,
        retriever=new_vectorstore.as_retriever(
            search_type="mmr", search_kwargs={"k": 6, "fetch_k": 20}
        ),
        return_source_documents=True,
    )
    return qa({"question": query, "chat_history": chat_history})
if __name__ == "__main__":
    # Manual smoke test: query the persisted index from the command line.
    result = run_llm(query="What is Venu 2?")
    print(result)