syaikhipin committed on
Commit
1fead9a
·
verified ·
1 Parent(s): f6e57b3

Update logic.py

Browse files
Files changed (1) hide show
  1. logic.py +6 -4
logic.py CHANGED
@@ -2,7 +2,6 @@ import kuzu
2
  import logging
3
  import sys
4
  import os
5
- from llama_index.llms import OpenAI
6
  #import llama_index
7
  from llama_index.graph_stores import KuzuGraphStore
8
  from llama_index import (
@@ -34,11 +33,12 @@ def save_uploadedfile(uploadedfile):
34
 
35
  def load_index(token,name):
36
  os.environ["OPENAI_API_KEY"] = token
 
37
  logging.basicConfig(stream=sys.stdout, level=logging.INFO)
38
 
39
  db = kuzu.Database(name+"/kg")
40
  graph_store = KuzuGraphStore(db)
41
- llm = OpenAI(temperature=0, model="gpt-3.5-turbo", api_key=token, openai_api_base="https://ipin-copilot.onrender.com/v1")
42
 
43
  service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)
44
  storage_context = StorageContext.from_defaults(graph_store=graph_store,persist_dir=name+"/storage")
@@ -52,11 +52,12 @@ def get_index_pdf(token,name):
52
  print(documents)
53
  os.mkdir(name)
54
  os.environ["OPENAI_API_KEY"] = token
 
55
  logging.basicConfig(stream=sys.stdout, level=logging.INFO)
56
 
57
  db = kuzu.Database(name+"/kg")
58
  graph_store = KuzuGraphStore(db)
59
- llm = OpenAI(temperature=0, model="gpt-3.5-turbo", api_key=token, openai_api_base="https://ipin-copilot.onrender.com/v1")
60
  service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)
61
  storage_context = StorageContext.from_defaults(graph_store=graph_store)
62
 
@@ -74,6 +75,7 @@ def get_index_pdf(token,name):
74
  def get_index(links,token,name):
75
  os.mkdir(name)
76
  os.environ["OPENAI_API_KEY"] = token
 
77
  logging.basicConfig(stream=sys.stdout, level=logging.INFO)
78
 
79
  db = kuzu.Database(name+"/kg")
@@ -84,7 +86,7 @@ def get_index(links,token,name):
84
  links
85
  )
86
 
87
- llm = OpenAI(temperature=0, model="gpt-3.5-turbo", api_key=token, openai_api_base="https://ipin-copilot.onrender.com/v1")
88
  service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)
89
  storage_context = StorageContext.from_defaults(graph_store=graph_store)
90
 
 
2
  import logging
3
  import sys
4
  import os
 
5
  #import llama_index
6
  from llama_index.graph_stores import KuzuGraphStore
7
  from llama_index import (
 
33
 
34
  def load_index(token,name):
35
  os.environ["OPENAI_API_KEY"] = token
36
+ os.environ["OPENAI_API_BASE"] = base_url
37
  logging.basicConfig(stream=sys.stdout, level=logging.INFO)
38
 
39
  db = kuzu.Database(name+"/kg")
40
  graph_store = KuzuGraphStore(db)
41
+ llm = OpenAI(temperature=0, model="gpt-3.5-turbo", api_key=token, openai_api_base=base_url)
42
 
43
  service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)
44
  storage_context = StorageContext.from_defaults(graph_store=graph_store,persist_dir=name+"/storage")
 
52
  print(documents)
53
  os.mkdir(name)
54
  os.environ["OPENAI_API_KEY"] = token
55
+ os.environ["OPENAI_API_BASE"] = base_url
56
  logging.basicConfig(stream=sys.stdout, level=logging.INFO)
57
 
58
  db = kuzu.Database(name+"/kg")
59
  graph_store = KuzuGraphStore(db)
60
+ llm = OpenAI(temperature=0, model="gpt-3.5-turbo", api_key=token, openai_api_base=base_url)
61
  service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)
62
  storage_context = StorageContext.from_defaults(graph_store=graph_store)
63
 
 
75
  def get_index(links,token,name):
76
  os.mkdir(name)
77
  os.environ["OPENAI_API_KEY"] = token
78
+ os.environ["OPENAI_API_BASE"] = base_url
79
  logging.basicConfig(stream=sys.stdout, level=logging.INFO)
80
 
81
  db = kuzu.Database(name+"/kg")
 
86
  links
87
  )
88
 
89
+ llm = OpenAI(temperature=0, model="gpt-3.5-turbo", api_key=token, openai_api_base=base_url)
90
  service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)
91
  storage_context = StorageContext.from_defaults(graph_store=graph_store)
92