Americo committed on
Commit
cc04109
·
verified ·
1 Parent(s): 6a0c089

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -0
app.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.prompts import ChatPromptTemplate
from langchain.chat_models import ChatOpenAI
import gradio as gr

# OpenAI API key is read from the environment; set OPENAI_API_KEY before launch.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")  # add your OpenAI API Key
# for this example I used Alphabet Inc 10-K Report 2022
# https://www.sec.gov/Archives/edgar/data/1652044/000165204423000016/goog-20221231.htm

# Embedding model used both to index and to query the vector store.
embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)

# Load the persisted Chroma collection from ./chroma.
# BUG FIX: the original passed `embedding_function=embedding`, but the variable
# defined above is `embeddings` — that raised a NameError at startup.
db_chroma = Chroma(persist_directory="./chroma", embedding_function=embeddings)

# Bilingual (EN/ES) RAG prompt: answer strictly from the retrieved context,
# in the same language as the question.
PROMPT_TEMPLATE = """
Answer the question based only on the following context:
{context}

Provide a detailed answer.
Don’t justify your answers.
Don’t give information not mentioned in the CONTEXT INFORMATION.
Do not say "according to the context" or "mentioned in the context" or similar.

**Important: Respond in the same language as the question.**
- If the question is asked in English, your response must be in English.
- Si la pregunta está formulada en español, tu respuesta debe ser en español.

Question/Pregunta:
{question}."""

prompt_template = ChatPromptTemplate.from_template(PROMPT_TEMPLATE)

# Chat model that generates the final answer.
model = ChatOpenAI(api_key=OPENAI_API_KEY)
def get_response(query, history):
    """Answer *query* using context retrieved from the Chroma vector store.

    The *history* parameter is required by gr.ChatInterface but is not used.
    Returns the model's answer as plain text.
    """
    # Pull the 5 most similar chunks; each item is a (document, score) pair.
    scored_docs = db_chroma.similarity_search_with_score(query, k=5)

    # Concatenate the retrieved chunks into a single context string.
    context_text = "\n\n".join(doc.page_content for doc, _score in scored_docs)

    # Fill the bilingual RAG template and ask the chat model.
    prompt = prompt_template.format(context=context_text, question=query)
    return model.predict(prompt)
# Build the Gradio chat UI around get_response and start the server.
chat_ui = gr.ChatInterface(
    fn=get_response,
    examples=['CI/CD', 'Kuberflow Pipeline', 'Cloud Build Service', 'TensorFlow Extended (TFX)'],
    title='Machine Learning Certification Chatbot',
)
chat_ui.launch(debug=True)