Commit: debugging
Browse files — modeling_bert.py (+3 −1)

modeling_bert.py (CHANGED)
@@ -501,6 +501,9 @@ class BertForPreTraining(BertPreTrainedModel):
             )
         prediction_scores, seq_relationship_score = self.cls(sequence_output, pooled_output)

+        print(labels)
+        assert False
+
         if (
             self.dense_seq_output and labels is not None
         ):  # prediction_scores are already flattened
@@ -515,7 +518,6 @@ class BertForPreTraining(BertPreTrainedModel):
         else:
             masked_lm_loss = 0
         if next_sentence_label is not None:
-            assert False, "TODO: remove this assertion"
             next_sentence_loss = self.nsp_loss(
                 rearrange(seq_relationship_score, "... t -> (...) t"),
                 rearrange(next_sentence_label, "... -> (...)"),