Update app.py
Browse files
app.py
CHANGED
|
@@ -50,7 +50,7 @@ def process_file(index_name, dir):
|
|
| 50 |
|
| 51 |
#using openai embedding hence dim = 1536
|
| 52 |
pinecone.create_index(index_name, dimension=1536, metric="cosine")
|
| 53 |
-
time.sleep(5)
|
| 54 |
|
| 55 |
embeddings = OpenAIEmbeddings(openai_api_key=os.environ['OPENAI_API_KEY'])
|
| 56 |
splter = SpacyTextSplitter(chunk_size=1000,chunk_overlap=200)
|
|
@@ -122,25 +122,15 @@ def textGPT(text):
|
|
| 122 |
return chats
|
| 123 |
|
| 124 |
|
| 125 |
-
def
|
| 126 |
-
global messages
|
| 127 |
-
|
| 128 |
-
file_text = extract_text(file_obj.name)
|
| 129 |
-
text = prompt + "\n\n" + file_text
|
| 130 |
-
|
| 131 |
-
messages.append({"role": "user", "content": text})
|
| 132 |
-
|
| 133 |
-
response = openai.ChatCompletion.create(model="gpt-4", messages=messages)
|
| 134 |
-
|
| 135 |
-
system_message = response["choices"][0]["message"]
|
| 136 |
-
messages.append(system_message)
|
| 137 |
|
| 138 |
-
|
| 139 |
-
|
| 140 |
-
|
| 141 |
-
|
| 142 |
|
| 143 |
-
return
|
| 144 |
|
| 145 |
|
| 146 |
|
|
@@ -198,8 +188,8 @@ vector_server = gr.Interface(fn=process_file, inputs=["text", gr.inputs.File(fil
|
|
| 198 |
|
| 199 |
#audio = gr.Interface(fn=audioGPT, inputs=gr.Audio(source="microphone", type="filepath"), outputs="text")
|
| 200 |
#siri = gr.Interface(fn=siriGPT, inputs=gr.Audio(source="microphone", type="filepath"), outputs = "audio")
|
| 201 |
-
file = gr.Interface(fn=
|
| 202 |
-
demo = gr.TabbedInterface([role, text, file, chatHistory, vector_server, pinecone_tools], [ "roleChoice", "chatGPT", "
|
| 203 |
|
| 204 |
if __name__ == "__main__":
|
| 205 |
demo.launch(enable_queue=False, auth=lambda u, p: user_db.get(u) == p,
|
|
|
|
| 50 |
|
| 51 |
#using openai embedding hence dim = 1536
|
| 52 |
pinecone.create_index(index_name, dimension=1536, metric="cosine")
|
| 53 |
+
#time.sleep(5)
|
| 54 |
|
| 55 |
embeddings = OpenAIEmbeddings(openai_api_key=os.environ['OPENAI_API_KEY'])
|
| 56 |
splter = SpacyTextSplitter(chunk_size=1000,chunk_overlap=200)
|
|
|
|
| 122 |
return chats
|
| 123 |
|
| 124 |
|
| 125 |
+
def fileSearch(index_name, prompt):
    """Similarity-search a Pinecone index and return the top matches as text.

    Args:
        index_name: Name of an existing Pinecone index to query.
        prompt: Free-text query embedded with OpenAI embeddings.

    Returns:
        A formatted string listing up to the top 5 matching chunks.
    """
    init_pinecone()
    embeddings = OpenAIEmbeddings(openai_api_key=os.environ['OPENAI_API_KEY'])
    docsearch = Pinecone.from_existing_index(index_name, embeddings)
    # Request 5 results explicitly: similarity_search defaults to k=4,
    # so indexing docs[4] below would always raise IndexError.
    docs = docsearch.similarity_search(prompt, k=5)
    # Format however many results came back instead of assuming exactly 5.
    findings = "\n\n".join(
        f"{i}. {doc.page_content}" for i, doc in enumerate(docs[:5], start=1)
    )
    return "Top 5 findings are:\n\n" + findings + "\n\n"
|
| 134 |
|
| 135 |
|
| 136 |
|
|
|
|
| 188 |
|
| 189 |
#audio = gr.Interface(fn=audioGPT, inputs=gr.Audio(source="microphone", type="filepath"), outputs="text")
|
| 190 |
#siri = gr.Interface(fn=siriGPT, inputs=gr.Audio(source="microphone", type="filepath"), outputs = "audio")
|
| 191 |
+
# fileSearch returns a formatted results string, so the interface needs a
# text output component — outputs=None would silently discard the result.
file = gr.Interface(fn=fileSearch, inputs=["text", "text"], outputs="text", description="Enter file name and prompt")
demo = gr.TabbedInterface([role, text, file, chatHistory, vector_server, pinecone_tools], ["roleChoice", "chatGPT", "FileSearch", "ChatHistory", "VectorServer", "PineconeTools"])
|
| 193 |
|
| 194 |
if __name__ == "__main__":
|
| 195 |
demo.launch(enable_queue=False, auth=lambda u, p: user_db.get(u) == p,
|