istiak101 commited on
Commit
2e2603f
·
verified ·
1 Parent(s): e70a4cb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -56,8 +56,8 @@ def get_llama_response(query):
56
  # response = tokenizer.decode(outputs[0]["generated_text"], skip_special_tokens=True)
57
  pipe = pipeline(
58
  task="text-generation",
59
- model=model,
60
- tokenizer=tokenizer,
61
  max_new_tokens=128,
62
  return_full_text=False,
63
  )
 
56
  # response = tokenizer.decode(outputs[0]["generated_text"], skip_special_tokens=True)
57
  pipe = pipeline(
58
  task="text-generation",
59
+ model=st.session_state.llama_model,
60
+ tokenizer=st.session_state.llama_tokenizer,
61
  max_new_tokens=128,
62
  return_full_text=False,
63
  )