Spaces: Build error
Commit: "Update logic.py" — Browse files
File: logic.py (CHANGED)
|
@@ -1,4 +1,3 @@
|
|
| 1 |
-
#from google.colab import userdata
|
| 2 |
import kuzu
|
| 3 |
import logging
|
| 4 |
import sys
|
|
@@ -24,6 +23,11 @@ import numpy as np
|
|
| 24 |
import plotly.express as px
|
| 25 |
import umap
|
| 26 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
def load_index(token,name):
|
| 28 |
os.environ["OPENAI_API_KEY"] = token
|
| 29 |
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
|
|
@@ -38,6 +42,31 @@ def load_index(token,name):
|
|
| 38 |
return index
|
| 39 |
|
| 40 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 41 |
def get_index(links,token,name):
|
| 42 |
os.mkdir(name)
|
| 43 |
os.environ["OPENAI_API_KEY"] = token
|
|
@@ -111,6 +140,4 @@ def query_model(index,user_query):
|
|
| 111 |
)
|
| 112 |
|
| 113 |
response = query_engine.query(user_query)
|
| 114 |
-
return response
|
| 115 |
-
|
| 116 |
-
|
|
|
|
|
|
|
| 1 |
import kuzu
|
| 2 |
import logging
|
| 3 |
import sys
|
|
|
|
| 23 |
import plotly.express as px
|
| 24 |
import umap
|
| 25 |
|
| 26 |
+
|
| 27 |
+
def save_uploadedfile(uploadedfile):
    """Persist an uploaded file object into the local ``data`` directory.

    The object is expected to expose ``.name`` (target filename) and
    ``.getbuffer()`` (raw bytes) — the Streamlit UploadedFile contract.
    """
    destination = os.path.join("data", uploadedfile.name)
    payload = uploadedfile.getbuffer()
    with open(destination, "wb") as out:
        out.write(payload)
|
| 30 |
+
|
| 31 |
def load_index(token,name):
|
| 32 |
os.environ["OPENAI_API_KEY"] = token
|
| 33 |
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
|
|
|
|
| 42 |
return index
|
| 43 |
|
| 44 |
|
| 45 |
+
def get_index_pdf(token,name):
    """Build a knowledge-graph index over the documents in ``./data`` and persist it.

    Parameters
    ----------
    token : str
        OpenAI API key; exported via the environment for downstream clients.
    name : str
        Working-directory name; the Kuzu graph database is created under
        ``<name>/kg`` and the index storage under ``<name>/storage``.

    Returns
    -------
    KnowledgeGraphIndex
        The freshly built index (also persisted to disk).
    """
    documents = SimpleDirectoryReader("./data").load_data()
    # Debug aid: show what was loaded (original printed this twice; deduped).
    print(documents)
    # exist_ok avoids FileExistsError on re-runs — the original os.mkdir(name)
    # crashed the app whenever the directory already existed.
    os.makedirs(name, exist_ok=True)
    os.environ["OPENAI_API_KEY"] = token
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)

    db = kuzu.Database(name + "/kg")
    graph_store = KuzuGraphStore(db)
    llm = OpenAI(temperature=0, model="gpt-3.5-turbo", api_key=token)
    service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)
    storage_context = StorageContext.from_defaults(graph_store=graph_store)

    index = KnowledgeGraphIndex.from_documents(
        documents=documents,
        max_triplets_per_chunk=2,
        storage_context=storage_context,
        service_context=service_context,
        show_progress=True,
        include_embeddings=True,
    )
    index.storage_context.persist(name + "/storage")

    return index
|
| 69 |
+
|
| 70 |
def get_index(links,token,name):
|
| 71 |
os.mkdir(name)
|
| 72 |
os.environ["OPENAI_API_KEY"] = token
|
|
|
|
| 140 |
)
|
| 141 |
|
| 142 |
response = query_engine.query(user_query)
|
| 143 |
+
return response
|
|
|
|
|
|