File size: 1,930 Bytes
cc04109
 
 
 
 
 
 
63c4e77
6c32e36
63c4e77
cc04109
 
 
 
 
e35908e
cc04109
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
dfe8e33
cc04109
 
dfe8e33
cc04109
 
 
 
 
 
 
 
 
 
63c4e77
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
import os
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.prompts import ChatPromptTemplate
from langchain.chat_models import ChatOpenAI
import gradio as gr

# Credentials are read from the environment; nothing is hard-coded.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")  # add your OpenAI API Key
username = os.getenv("user")  # Gradio basic-auth username (used in launch(auth=...))
passw = os.getenv("pass")  # Gradio basic-auth password
# for this example I used Alphabet Inc 10-K Report 2022
# https://www.sec.gov/Archives/edgar/data/1652044/000165204423000016/goog-20221231.htm
# NOTE(review): the chat examples at the bottom of the file mention CI/CD and
# Kubeflow, which does not match the 10-K comment above — confirm which corpus
# the ./chroma index actually holds.

# Embedding function; must match the one used when the ./chroma index was built.
embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)

# Open the pre-built Chroma vector store persisted on disk at ./chroma.
db_chroma = Chroma(persist_directory="./chroma", embedding_function=embeddings)

# Bilingual (English/Spanish) RAG prompt: the model must answer strictly from
# the retrieved {context} and mirror the language of the {question}.
PROMPT_TEMPLATE = """
Answer the question based only on the following context:
{context}

Provide a detailed answer.
Don’t justify your answers.
Don’t give information not mentioned in the CONTEXT INFORMATION.
Do not say "according to the context" or "mentioned in the context" or similar.

**Important: Respond in the same language as the question.**
- If the question is asked in English, your response must be in English.
- Si la pregunta está formulada en español, tu respuesta debe ser en español.

Question/Pregunta:
{question}."""

prompt_template = ChatPromptTemplate.from_template(PROMPT_TEMPLATE)

# Chat model used to generate the final grounded answer.
model = ChatOpenAI(api_key=OPENAI_API_KEY, model='gpt-3.5-turbo-0125')

def get_response(query, history):
    """Answer *query* using context retrieved from the Chroma vector store.

    Parameters
    ----------
    query : str
        The user's question (English or Spanish).
    history : list
        Chat history supplied by gr.ChatInterface; unused here, but the
        parameter is required by the ChatInterface callback signature.

    Returns
    -------
    str
        The model's answer, grounded only in the retrieved context.
    """
    # Retrieve the k most similar chunks along with their relevance scores.
    docs_chroma = db_chroma.similarity_search_with_score(query, k=10)

    # Concatenate the retrieved chunks; the scores are not needed for the
    # prompt, so they are discarded. A generator avoids building an
    # intermediate list just to join it.
    context_text = "\n\n".join(doc.page_content for doc, _score in docs_chroma)

    # Fill the bilingual RAG prompt with the context and the question.
    prompt = prompt_template.format(context=context_text, question=query)

    # NOTE(review): ChatOpenAI.predict() is deprecated in newer LangChain
    # releases in favour of .invoke(...).content; kept as-is for
    # compatibility with the langchain version these imports target.
    return model.predict(prompt)


# Build the chat UI, then serve it behind HTTP basic auth.
chat_ui = gr.ChatInterface(
    fn=get_response,
    examples=['CI/CD', 'Kuberflow Pipeline', 'Cloud Build Service', 'TensorFlow Extended (TFX)'],
    title='Machine Learning Chatbot',
)
chat_ui.launch(auth=(username, passw), debug=True)