Update app.py
Browse files
app.py
CHANGED
|
@@ -27,10 +27,15 @@ from langchain.llms import OpenAI
|
|
| 27 |
from langchain.document_loaders import PyPDFLoader
|
| 28 |
from langchain.indexes import VectorstoreIndexCreator
|
| 29 |
import tempfile
|
|
|
|
| 30 |
|
| 31 |
os.environ["OPENAI_API_KEY"] = os.environ['OpenApi_Key']
|
| 32 |
query1=" "
|
| 33 |
limit = 0
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
def loading_pdf():
    """Status-text callback shown while the uploaded PDF is being processed."""
    status_message = "Loading..."
    return status_message
|
| 36 |
|
|
@@ -95,24 +100,36 @@ def bot(history):
|
|
| 95 |
def infer(question):
|
| 96 |
global query1
|
| 97 |
global limit
|
|
|
|
|
|
|
| 98 |
openai.api_key = os.environ['OpenApi_Key']
|
| 99 |
prompt_text = question
|
| 100 |
-
if
|
| 101 |
-
|
| 102 |
-
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
|
| 114 |
-
|
| 115 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 116 |
|
| 117 |
css="""
|
| 118 |
#col-container { margin-left: auto; margin-right: auto;}
|
|
|
|
| 27 |
from langchain.document_loaders import PyPDFLoader
|
| 28 |
from langchain.indexes import VectorstoreIndexCreator
|
| 29 |
import tempfile
|
| 30 |
+
import time

# Bridge the deployment's secret name to the env var LangChain/OpenAI read.
os.environ["OPENAI_API_KEY"] = os.environ['OpenApi_Key']

# --- Shared conversation/rate-limit state mutated by infer() ---
query1 = " "       # running chat transcript (periodically summarised)
limit = 0          # questions answered since the last cooldown
st = 0             # time.time() stamp taken when the pause started
paused = False     # True while the usage limit is in force
waittime = 10.0    # cooldown duration in seconds
maxLimit = 5       # upper bound on questions per window
|
| 39 |
def loading_pdf():
    """Return the placeholder text displayed during PDF ingestion."""
    return "Loading..."
|
| 41 |
|
|
|
|
| 100 |
def infer(question):
    """Answer *question* through the conversational retrieval chain,
    enforcing a simple cooldown-based usage limit.

    Parameters:
        question (str): the user's latest message.

    Returns:
        str | None: the bot's answer; a usage-limit message while the
        cooldown is active; or ``None`` for an empty question or when the
        per-window question cap has been exceeded (original behaviour
        preserved — callers should tolerate ``None``).

    Side effects: mutates the module globals ``query1`` (transcript),
    ``limit`` (question counter), ``paused`` and ``st`` (cooldown state).
    """
    global query1
    global limit
    global st
    global paused

    openai.api_key = os.environ['OpenApi_Key']
    prompt_text = question

    if not paused:  # idiom fix: was `paused == False`
        if prompt_text:
            query1 = query1 + "\nUser: " + prompt_text + "\nBot: "
            if limit <= maxLimit:
                result = chain(query1)
                query1 = query1 + result['answer']
                # Compress the running transcript into a short summary so
                # the prompt stays small on subsequent turns.
                # Typos fixed in the system prompt: "latset" -> "latest",
                # "conversationin" -> "conversation in".
                query1 = openai.ChatCompletion.create(
                    model="gpt-3.5-turbo",
                    messages=[
                        {"role": "system", "content": "You are provided with chat history and latest conversation between user and bot. Summarise the history and latest conversation in minimum most tokens possible. Do not include greetings in the summary like hi, hello, etc."},
                        {"role": "user", "content": query1},
                    ],
                )["choices"][0]["message"]["content"].replace("'", "")
                limit += 1
                # NOTE(review): the pause triggers after 2 questions even
                # though maxLimit is 5 — confirm which threshold is intended.
                if limit == 2:
                    paused = True
                    st = time.time()
                return result['answer']
        # Empty question, or limit exceeded without pausing: the original
        # fell through and returned None implicitly; made explicit here.
        return None
    else:
        if time.time() - st > waittime:
            # Cooldown elapsed: lift the pause, reset the counter, retry.
            paused = False
            limit = 0
            return infer(question)
        else:
            return "Usage Limit reached :( Please visit https://edith.framer.ai/pricing to unlock unlimited access!"
|
| 132 |
+
|
| 133 |
|
| 134 |
css="""
|
| 135 |
#col-container { margin-left: auto; margin-right: auto;}
|