AshenR committed on
Commit
6e93504
·
verified ·
1 Parent(s): e78a3e1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -6
app.py CHANGED
@@ -36,7 +36,6 @@ def predict(test_query):
36
  # Add [CLS] at the front
37
  temp_token.append('[CLS]')
38
  token_list = tokenizer.tokenize(test_query)
39
- token_list
40
  for m,token in enumerate(token_list):
41
  temp_token.append(token)
42
  # Trim the token to fit the length requirement
@@ -48,13 +47,10 @@ def predict(test_query):
48
  input_ids = pad_sequences([tokenizer.convert_tokens_to_ids(txt) for txt in tokenized_texts],
49
  maxlen=max_len, dtype="long", truncating="post", padding="post")
50
  attention_masks = [[int(i>0) for i in ii] for ii in input_ids]
51
- attention_masks[0];
52
  segment_ids = [[0] * len(input_id) for input_id in input_ids]
53
- segment_ids[0];
54
  input_ids = torch.tensor(input_ids)
55
  attention_masks = torch.tensor(attention_masks)
56
  segment_ids = torch.tensor(segment_ids)
57
- import torch
58
 
59
  # Assuming you have defined your model and input_ids somewhere before this
60
  device = 'cuda' if torch.cuda.is_available() else 'cpu'
@@ -70,11 +66,9 @@ def predict(test_query):
70
  # Make logits into numpy type predict result
71
  # The predict result contain each token's all tags predict result
72
  predict_results = logits.detach().cpu().numpy()
73
- predict_results.shape
74
 
75
  from scipy.special import softmax
76
  result_arrays_soft = softmax(predict_results[0])
77
- result_arrays_soft[0]
78
 
79
  result_array = result_arrays_soft
80
  result_list = np.argmax(result_array,axis=-1)
 
36
  # Add [CLS] at the front
37
  temp_token.append('[CLS]')
38
  token_list = tokenizer.tokenize(test_query)
 
39
  for m,token in enumerate(token_list):
40
  temp_token.append(token)
41
  # Trim the token to fit the length requirement
 
47
  input_ids = pad_sequences([tokenizer.convert_tokens_to_ids(txt) for txt in tokenized_texts],
48
  maxlen=max_len, dtype="long", truncating="post", padding="post")
49
  attention_masks = [[int(i>0) for i in ii] for ii in input_ids]
 
50
  segment_ids = [[0] * len(input_id) for input_id in input_ids]
 
51
  input_ids = torch.tensor(input_ids)
52
  attention_masks = torch.tensor(attention_masks)
53
  segment_ids = torch.tensor(segment_ids)
 
54
 
55
  # Assuming you have defined your model and input_ids somewhere before this
56
  device = 'cuda' if torch.cuda.is_available() else 'cpu'
 
66
  # Make logits into numpy type predict result
67
  # The predict result contain each token's all tags predict result
68
  predict_results = logits.detach().cpu().numpy()
 
69
 
70
  from scipy.special import softmax
71
  result_arrays_soft = softmax(predict_results[0])
 
72
 
73
  result_array = result_arrays_soft
74
  result_list = np.argmax(result_array,axis=-1)