IsmaeelPandey committed on
Commit
40b63ae
·
1 Parent(s): 43a1264

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -50,11 +50,10 @@ PROMPT = PromptTemplate(
50
  )
51
  chain_type_kwargs = {"prompt": PROMPT}
52
 
53
- llm = HuggingFaceHub(repo_id="mistralai/Mistral-7B-v0.1", model_kwargs={"temperature":0.1, "max_new_tokens":250})
54
 
55
- # qachain=RetrievalQA.from_chain_type(llm, retriever=vectorstore.as_retriever(), memory=memory, chain_type_kwargs=chain_type_kwargs)
56
 
57
- llm_chain = LLMChain(prompt=PROMPT , llm=llm)
58
 
59
  st.header("#CodeWars localGPT", divider='rainbow')
60
 
@@ -67,6 +66,6 @@ context = [] # the context stores a conversation history, you can use this to ma
67
  if(prompt):
68
  with st.chat_message(option):
69
  st.write(f"{datetime.datetime.now()} :red[{option}:] ", prompt)
70
- context = llm_chain.run(prompt)
71
 
72
  st.write(f"{datetime.datetime.now()}", context)
 
50
  )
51
  chain_type_kwargs = {"prompt": PROMPT}
52
 
53
+ llm = HuggingFaceHub(repo_id="NousResearch/Llama-2-13b-hf", model_kwargs={"temperature":0.1, "max_new_tokens":250})
54
 
55
+ qachain=RetrievalQA.from_chain_type(llm, retriever=vectorstore.as_retriever(), memory=memory, chain_type_kwargs=chain_type_kwargs)
56
 
 
57
 
58
  st.header("#CodeWars localGPT", divider='rainbow')
59
 
 
66
  if(prompt):
67
  with st.chat_message(option):
68
  st.write(f"{datetime.datetime.now()} :red[{option}:] ", prompt)
69
+ context = qachain({"query": user_input})
70
 
71
  st.write(f"{datetime.datetime.now()}", context)