AnushkaTonk commited on
Commit
a5d5bf8
·
1 Parent(s): 0a6f2d0

updated tokenizer padding params in predict_and_explainations() function

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -61,14 +61,14 @@ explainer = LimeTextExplainer(class_names = class_names)
61
  def predict_and_explainations(model_choice, user_text):
62
  tokenizer, model, id2label = load_model_and_tokenizer(model_choice)
63
 
64
- input = tokenizer(user_text, return_tensors = "tf", truncation = True, padding = True, max_length = 128)
65
  logits = model(**input).logits
66
  probs = tf.nn.softmax(logits, axis = -1).numpy()[0]
67
  predicted_index = tf.argmax(probs).numpy()
68
  predictions = {id2label[i] : float(probs[i]) for i in range(len(probs))}
69
 
70
  def lime_predict(texts):
71
- batch = tokenizer(texts, return_tensors = "tf", padding = True, max_length = 128)
72
  logits = model(**batch).logits
73
  lime_output = tf.nn.softmax(logits, axis = 1).numpy()
74
  return lime_output
 
61
  def predict_and_explainations(model_choice, user_text):
62
  tokenizer, model, id2label = load_model_and_tokenizer(model_choice)
63
 
64
+ input = tokenizer(user_text, return_tensors = "tf", truncation = True, padding = "max_length", max_length = 128)
65
  logits = model(**input).logits
66
  probs = tf.nn.softmax(logits, axis = -1).numpy()[0]
67
  predicted_index = tf.argmax(probs).numpy()
68
  predictions = {id2label[i] : float(probs[i]) for i in range(len(probs))}
69
 
70
  def lime_predict(texts):
71
+ batch = tokenizer(texts, return_tensors = "tf", padding = "max_length", max_length = 128)
72
  logits = model(**batch).logits
73
  lime_output = tf.nn.softmax(logits, axis = 1).numpy()
74
  return lime_output