Fix behavior when batch_size=1 (#1)
Opened by kaitos255.
Files changed: modeling_plamo.py (+1, −1)
```diff
@@ -817,7 +817,7 @@ class ModifiedAttention(Attention):

 817
 818
 819  PLAMO_ATTENTION_CLASSES = {
-820      "sdpa":            [NOTE(review): the removed value was lost in page extraction — likely the ModifiedAttention class named in the hunk header; confirm against the repository]
+820      "sdpa": Attention,
 821  }
 822
 823
```