Commit: Update modeling_opt.py
Changed file: modeling_opt.py (+2 −1)
@@ -18,6 +18,7 @@
 from typing import List, Optional, Tuple, Union

 import torch
+
 import torch.nn.functional as F
 import torch.utils.checkpoint
 from torch import nn
@@ -724,7 +725,7 @@ class OPTDecoderLayer(nn.Module):

         self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](
             config=config, is_decoder=True)
-
+        print(self.self_attn)
         self.do_layer_norm_before = config.do_layer_norm_before
         self.dropout = config.dropout
         self.activation_fn = ACT2FN[config.activation_function]