File size: 230 Bytes
0e9a03e |
1 2 3 4 5 6 7 |
from transformers.models.clip.modeling_clip import CLIPEncoderLayer
# Check if we can correctly grab dependencies with correct naming from all UPPERCASE old model
class FromUppercaseModelEncoderLayer(CLIPEncoderLayer):
    """Empty subclass of ``CLIPEncoderLayer`` with no overrides.

    Test fixture: verifies that dependencies are grabbed with correct
    naming when the parent comes from an all-UPPERCASE old model name
    (``CLIP`` -> ``FromUppercaseModel``).
    """

    pass
|