Solshine committed on
Commit
f539dee
·
verified ·
1 Parent(s): 9f5a990

Create app.py

Browse files

https://github.com/techleadhd/chatgpt-retrieval/blob/main/chatgpt.py

Files changed (1) hide show
  1. app.py +57 -0
app.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Retrieval-augmented chat app: indexes documents under data/ and answers
questions via LangChain + OpenAI, served through a Gradio text interface.

Adapted from https://github.com/techleadhd/chatgpt-retrieval .
"""

import os
import sys

import gradio as gr
import openai  # noqa: F401 - langchain's OpenAI wrappers use this package
from langchain.chains import ConversationalRetrievalChain, RetrievalQA  # noqa: F401
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import DirectoryLoader, TextLoader  # noqa: F401
from langchain.embeddings import OpenAIEmbeddings
from langchain.indexes import VectorstoreIndexCreator
from langchain.indexes.vectorstore import VectorStoreIndexWrapper
from langchain.vectorstores import Chroma

import constants  # project-local module holding the OpenAI key

os.environ["OPENAI_API_KEY"] = constants.APIKEY

# Enable to save the index to disk & reuse it (for repeated queries on the same data).
PERSIST = False

if PERSIST and os.path.exists("persist"):
    # Reload the previously persisted Chroma index instead of re-embedding.
    print("Reusing index...\n")
    vectorstore = Chroma(
        persist_directory="persist", embedding_function=OpenAIEmbeddings()
    )
    index = VectorStoreIndexWrapper(vectorstore=vectorstore)
else:
    # loader = TextLoader("data/data.txt")  # Use this line if you only need data.txt
    loader = DirectoryLoader("data/")
    if PERSIST:
        index = VectorstoreIndexCreator(
            vectorstore_kwargs={"persist_directory": "persist"}
        ).from_loaders([loader])
    else:
        index = VectorstoreIndexCreator().from_loaders([loader])

chain = ConversationalRetrievalChain.from_llm(
    llm=ChatOpenAI(model="gpt-3.5-turbo"),
    retriever=index.vectorstore.as_retriever(search_kwargs={"k": 1}),
)

# Conversation memory shared across turns (list of (question, answer) tuples).
chat_history = []


def answer(question):
    """Run one retrieval-augmented QA turn and append it to chat_history.

    BUG FIX: the original wired gr.Interface to an undefined ``greet``
    function (NameError), and placed the Gradio launch after an infinite
    CLI loop so it could never run. This named function replaces both.

    :param question: the user's question (str).
    :returns: the model's answer (str).
    """
    result = chain({"question": question, "chat_history": chat_history})
    chat_history.append((question, result["answer"]))
    return result["answer"]


def _cli_loop(initial_query):
    """Original console REPL, kept for command-line use.

    :param initial_query: first question to ask, or None to prompt.
    """
    query = initial_query
    while True:
        if not query:
            query = input("Prompt: ")
        if query in ["quit", "q", "exit"]:
            sys.exit()
        print(answer(query))
        query = None


if __name__ == "__main__":
    if len(sys.argv) > 1:
        # Backward compatible: `python app.py "question"` behaves like the
        # original script and runs the console loop.
        _cli_loop(sys.argv[1])
    else:
        # Default path (e.g. a hosted Space): serve the Gradio UI.
        iface = gr.Interface(fn=answer, inputs="text", outputs="text")
        iface.launch()