Upload modeling_dplm.py with huggingface_hub
Browse files — modeling_dplm.py: +1 −2
modeling_dplm.py
CHANGED
@@ -430,9 +430,8 @@ def get_attention_mask(

Before:

    430          if attention_mask is None:
    431              flex_block_mask = None
    432          else:
    433  -           sequence_ids = torch.where(token_attention_mask, 1, -1)
    434              def mask_mod(batch_idx, head_idx, q_idx, kv_idx):
    435  -               return (
    436
    437              flex_block_mask = create_block_mask(
    438                  mask_mod,
After:

    430          if attention_mask is None:
    431              flex_block_mask = None
    432          else:
    433              def mask_mod(batch_idx, head_idx, q_idx, kv_idx):
    434  +               return (token_attention_mask[batch_idx, q_idx] == token_attention_mask[batch_idx, kv_idx]) & (token_attention_mask[batch_idx, q_idx] != 0)
    435
    436              flex_block_mask = create_block_mask(
    437                  mask_mod,