KyleIsaacs committed on
Commit
67452f6
·
1 Parent(s): df5389f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -24,7 +24,7 @@ embedding = HuggingFaceHubEmbeddings()
24
  loader = PyPDFLoader("sample.pdf")
25
  pages = loader.load_and_split()
26
 
27
- text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
28
  docs = text_splitter.split_documents(pages)
29
 
30
  vectorstore = Chroma.from_documents(documents=docs, embedding=embedding)
@@ -40,7 +40,7 @@ PROMPT = PromptTemplate(
40
  )
41
  chain_type_kwargs = {"prompt": PROMPT}
42
 
43
- llm = HuggingFaceHub(repo_id="google/flan-ul2", model_kwargs={"temperature":0.1, "max_new_tokens":150})
44
 
45
  qachain=RetrievalQA.from_chain_type(llm, retriever=vectorstore.as_retriever(), chain_type_kwargs=chain_type_kwargs)
46
  st.header("#CodeWars localGPT", divider='rainbow')
 
24
  loader = PyPDFLoader("sample.pdf")
25
  pages = loader.load_and_split()
26
 
27
+ text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
28
  docs = text_splitter.split_documents(pages)
29
 
30
  vectorstore = Chroma.from_documents(documents=docs, embedding=embedding)
 
40
  )
41
  chain_type_kwargs = {"prompt": PROMPT}
42
 
43
+ llm = HuggingFaceHub(repo_id="google/flan-ul2", model_kwargs={"temperature":0.4, "max_new_tokens":250})
44
 
45
  qachain=RetrievalQA.from_chain_type(llm, retriever=vectorstore.as_retriever(), chain_type_kwargs=chain_type_kwargs)
46
  st.header("#CodeWars localGPT", divider='rainbow')