samipshahdev committed on
Commit
f472a4f
·
0 Parent(s):

Duplicate from FyVa/chat

Browse files
Files changed (5) hide show
  1. .gitattributes +34 -0
  2. README.md +14 -0
  3. app.py +84 -0
  4. constants.py +4 -0
  5. requirements.txt +5 -0
.gitattributes ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tflite filter=lfs diff=lfs merge=lfs -text
29
+ *.tgz filter=lfs diff=lfs merge=lfs -text
30
+ *.wasm filter=lfs diff=lfs merge=lfs -text
31
+ *.xz filter=lfs diff=lfs merge=lfs -text
32
+ *.zip filter=lfs diff=lfs merge=lfs -text
33
+ *.zst filter=lfs diff=lfs merge=lfs -text
34
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Chat
3
+ emoji: 🔥
4
+ colorFrom: green
5
+ colorTo: blue
6
+ sdk: streamlit
7
+ sdk_version: 1.19.0
8
+ app_file: app.py
9
+ pinned: false
10
+ license: bsd
11
+ duplicated_from: FyVa/chat
12
+ ---
13
+
14
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Python file to serve as the frontend"""
2
+ import streamlit as st
3
+ from streamlit_chat import message
4
+
5
+ from langchain.chains import ConversationChain
6
+ from langchain.llms import OpenAI
7
+ import os
8
+ from langchain.embeddings.openai import OpenAIEmbeddings
9
+ from langchain.text_splitter import CharacterTextSplitter
10
+ from langchain.vectorstores import Pinecone
11
+ from langchain.document_loaders import TextLoader
12
+ import pinecone
13
+ from langchain.document_loaders import TextLoader
14
+ import streamlit as st
15
+ # import pandas as pd
16
+ from constants import INDEX_NAME, NAMESPACE,PINECONE_ENV
17
+ from langchain.chains import LLMChain
18
+ from langchain.prompts import PromptTemplate
19
+ from langchain.llms import OpenAI
20
+ from langchain.chains.question_answering import load_qa_chain
21
+
22
+
23
+
24
+ PINECONE_API_KEY= st.secrets["PINECONE_API_KEY"]
25
+ OPENAI_API_KEY = st.secrets["OPENAI_API_KEY"]
26
+
27
+ os.environ['OPENAI_API_KEY'] =OPENAI_API_KEY
28
+
29
+ # initialize pinecone
30
+ pinecone.init(
31
+ api_key=PINECONE_API_KEY, # find at app.pinecone.io
32
+ environment=PINECONE_ENV # next to api key in console
33
+ )
34
+
35
+ embeddings = OpenAIEmbeddings()
36
+
37
+ llm = OpenAI(temperature=0)
38
+
39
+ @st.cache_resource
40
+ def load_pinecone_existing_index(question):
41
+ pass
42
+ searchIndex = Pinecone.from_existing_index(index_name=INDEX_NAME,embedding = embeddings, namespace=NAMESPACE)
43
+ docsReturned = searchIndex.similarity_search(question, k=2)
44
+ return docsReturned
45
+
46
+ @st.cache_resource
47
+ def get_answer(question):
48
+ chain = load_qa_chain(llm, chain_type="stuff")
49
+ docs=load_pinecone_existing_index(question)
50
+ answer = chain.run(input_documents=docs, question=question)
51
+ return answer
52
+ # chain = load_qa_chain(llm, chain_type="stuff")
53
+ # answer = chain.run(input_documents=docs, question=QUERY)
54
+
55
+ # From here down is all the StreamLit UI.
56
+ st.set_page_config(page_title="FyVa", page_icon=":robot:")
57
+ st.header("FyVa")
58
+
59
+ if "generated" not in st.session_state:
60
+ st.session_state["generated"] = []
61
+
62
+ if "past" not in st.session_state:
63
+ st.session_state["past"] = []
64
+
65
+
66
+ def get_text():
67
+ input_text = st.text_input("You: ", "Hi,how are you.", key="input")
68
+ return input_text
69
+
70
+
71
+ user_input = get_text()
72
+
73
+ if user_input:
74
+ # output = chain.run(input=user_input)
75
+ output = get_answer(user_input)
76
+
77
+ st.session_state.past.append(user_input)
78
+ st.session_state.generated.append(output)
79
+
80
+ if st.session_state["generated"]:
81
+
82
+ for i in range(len(st.session_state["generated"]) - 1, -1, -1):
83
+ message(st.session_state["generated"][i], key=str(i))
84
+ message(st.session_state["past"][i], is_user=True, key=str(i) + "_user")
constants.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ INDEX_NAME = "arunachala"
2
+ NAMESPACE = "medine"
3
+ PINECONE_ENV = "asia-northeast1-gcp"
4
+ QUERY = 'What is the business model?'
requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ langchain==0.0.157
2
+ pinecone_client==2.2.1
3
+ streamlit==1.22.0
4
+ streamlit_chat==0.0.2.2
5
+ openai==0.27.6