wangjin2000 committed on
Commit
d63575c
·
verified ·
1 Parent(s): 3175238

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -121,7 +121,7 @@ def finetune(base_model_path): #, train_dataset, test_dataset):
121
  return save_path
122
 
123
  def compute_pseudo_perplexity(model, tokenizer, protein_seq, binder_seq):
124
- sequence = protein_seq + binder_seq
125
  original_input = tokenizer.encode(sequence, return_tensors='pt').to(model.device)
126
  length_of_binder = len(binder_seq)
127
  print("original_input 125:",original_input)
@@ -132,7 +132,7 @@ def compute_pseudo_perplexity(model, tokenizer, protein_seq, binder_seq):
132
 
133
  print("masked_inputs tokens 129:",masked_inputs[torch.arange(length_of_binder), positions_to_mask])
134
  masked_inputs[torch.arange(length_of_binder), positions_to_mask] = tokenizer.mask_token_id
135
- print("masked_inputs tokens 131:",masked_inputs[torch.arange(length_of_binder), positions_to_mask],masked_inputs[torch.arange(length_of_binder))
136
  print("masked_inputs tokens 131:",masked_inputs)
137
 
138
  # Prepare labels for the masked tokens
 
121
  return save_path
122
 
123
  def compute_pseudo_perplexity(model, tokenizer, protein_seq, binder_seq):
124
+ sequence = protein_seq + binder_seq
125
  original_input = tokenizer.encode(sequence, return_tensors='pt').to(model.device)
126
  length_of_binder = len(binder_seq)
127
  print("original_input 125:",original_input)
 
132
 
133
  print("masked_inputs tokens 129:",masked_inputs[torch.arange(length_of_binder), positions_to_mask])
134
  masked_inputs[torch.arange(length_of_binder), positions_to_mask] = tokenizer.mask_token_id
135
+ print("masked_inputs tokens 131:",[torch.arange(length_of_binder), positions_to_mask],masked_inputs[torch.arange(length_of_binder), positions_to_mask])
136
  print("masked_inputs tokens 131:",masked_inputs)
137
 
138
  # Prepare labels for the masked tokens