"""Bilingual (EN/ES) RAG chatbot over a local Chroma store, served via Gradio.

Requires environment variables:
    OPENAI_API_KEY - OpenAI API key used for embeddings and chat completion.
    user / pass    - optional basic-auth credentials for the Gradio UI.
"""
import os

import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.prompts import ChatPromptTemplate
from langchain.vectorstores import Chroma

# Add your OpenAI API key and (optionally) Gradio basic-auth credentials via
# environment variables so no secrets are hard-coded in the source.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
username = os.getenv("user")
passw = os.getenv("pass")

# for this example I used Alphabet Inc 10-K Report 2022
# https://www.sec.gov/Archives/edgar/data/1652044/000165204423000016/goog-20221231.htm
embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
db_chroma = Chroma(persist_directory="./chroma", embedding_function=embeddings)

PROMPT_TEMPLATE = """
Answer the question based only on the following context:
{context}
Provide a detailed answer.
Don’t justify your answers.
Don’t give information not mentioned in the CONTEXT INFORMATION.
Do not say "according to the context" or "mentioned in the context" or similar.
**Important: Respond in the same language as the question.**
- If the question is asked in English, your response must be in English.
- Si la pregunta está formulada en español, tu respuesta debe ser en español.
Question/Pregunta: {question}."""

prompt_template = ChatPromptTemplate.from_template(PROMPT_TEMPLATE)
model = ChatOpenAI(api_key=OPENAI_API_KEY, model='gpt-3.5-turbo-0125')


def get_response(query, history):
    """Answer *query* using the most relevant chunks retrieved from Chroma.

    Args:
        query: The user's question (English or Spanish).
        history: Chat history supplied by ``gr.ChatInterface`` (unused).

    Returns:
        The model's answer as a string.
    """
    # Retrieve the 10 most similar chunks (each paired with a score).
    docs_chroma = db_chroma.similarity_search_with_score(query, k=10)
    # Generate an answer based on the user query and retrieved context.
    context_text = "\n\n".join(doc.page_content for doc, _score in docs_chroma)
    prompt = prompt_template.format(context=context_text, question=query)
    # NOTE(review): ChatOpenAI.predict is deprecated in newer LangChain
    # releases in favor of invoke(); kept here to match the pinned API.
    return model.predict(prompt)


if __name__ == "__main__":
    gr.ChatInterface(
        fn=get_response,
        examples=[
            'CI/CD',
            'Kuberflow Pipeline',
            'Cloud Build Service',
            'TensorFlow Extended (TFX)',
        ],
        title='Machine Learning Chatbot',
    ).launch(
        # Gradio rejects an auth tuple containing None, so only enable
        # basic auth when both credentials are actually configured.
        auth=(username, passw) if username and passw else None,
        debug=True,
    )