Import DynamicLayer

#4
by pcuenq HF Staff - opened
Files changed (1) hide show
  1. modeling_bailing_moe_linear_v2.py +1 -1
modeling_bailing_moe_linear_v2.py CHANGED
@@ -28,7 +28,7 @@ import torch.nn.functional as F
28
  from torch import nn
29
 
30
  from transformers.activations import ACT2FN
31
- from transformers.cache_utils import Cache, DynamicCache
32
  from transformers.modeling_attn_mask_utils import (
33
  AttentionMaskConverter,
34
  _prepare_4d_attention_mask,
 
28
  from torch import nn
29
 
30
  from transformers.activations import ACT2FN
31
+ from transformers.cache_utils import Cache, DynamicCache, DynamicLayer
32
  from transformers.modeling_attn_mask_utils import (
33
  AttentionMaskConverter,
34
  _prepare_4d_attention_mask,