# Copyright (c) OpenMMLab. All rights reserved.
from typing import Optional

from mmengine.fileio import load
from mmengine.logging import MMLogger
from mmengine.runner.checkpoint import _load_checkpoint_with_prefix
from torch import Tensor, nn

from mmaction.registry import MODELS
from mmaction.utils import ConfigType, get_str_type
from .base import BaseHead


@MODELS.register_module()
class UniFormerHead(BaseHead):
    """Classification head for UniFormer. supports loading pretrained

    Kinetics-710 checkpoint to fine-tuning on other Kinetics dataset.



    A pytorch implement of: `UniFormerV2: Spatiotemporal

    Learning by Arming Image ViTs with Video UniFormer

    <https://arxiv.org/abs/2211.09552>`



    Args:

        num_classes (int): Number of classes to be classified.

        in_channels (int): Number of channels in input feature.

        loss_cls (dict or ConfigDict): Config for building loss.

            Defaults to `dict(type='CrossEntropyLoss')`.

        dropout_ratio (float): Probability of dropout layer.

            Defaults to : 0.0.

        channel_map (str, optional): Channel map file to selecting

            channels from pretrained head with extra channels.

            Defaults to None.

        init_cfg (dict or ConfigDict, optional): Config to control the

           initialization. Defaults to

           ``[

            dict(type='TruncNormal', layer='Linear', std=0.01)

           ]``.

        kwargs (dict, optional): Any keyword argument to be used to initialize

            the head.

    """

    def __init__(self,
                 num_classes: int,
                 in_channels: int,
                 loss_cls: ConfigType = dict(type='CrossEntropyLoss'),
                 dropout_ratio: float = 0.0,
                 channel_map: Optional[str] = None,
                 init_cfg: Optional[dict] = dict(
                     type='TruncNormal', layer='Linear', std=0.02),
                 **kwargs) -> None:
        super().__init__(
            num_classes, in_channels, loss_cls, init_cfg=init_cfg, **kwargs)
        self.channel_map = channel_map
        self.dropout_ratio = dropout_ratio

        if self.dropout_ratio != 0:
            self.dropout = nn.Dropout(p=self.dropout_ratio)
        else:
            self.dropout = None
        self.fc_cls = nn.Linear(self.in_channels, self.num_classes)

    def _select_channels(self, state_dict):
        """Keep only the channels listed in ``self.channel_map``."""
        selected_channels = load(self.channel_map)
        for key in state_dict:
            state_dict[key] = state_dict[key][selected_channels]

    def init_weights(self) -> None:
        """Initialize the parameters from scratch or from a pretrained
        Kinetics-710 classification head."""
        if get_str_type(self.init_cfg['type']) == 'Pretrained':
            assert self.channel_map is not None, \
                'Loading cls_head weights requires a channel map file.'
            logger = MMLogger.get_current_instance()
            pretrained = self.init_cfg['checkpoint']
            logger.info(f'load pretrained model from {pretrained}')
            state_dict = _load_checkpoint_with_prefix(
                'cls_head.', pretrained, map_location='cpu')
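            # ``channel_map`` lists the indices of the classes to keep, so
            # the Kinetics-710 head weights (which have extra output
            # channels) are pruned to match this head's ``num_classes``.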
            self._select_channels(state_dict)
            msg = self.load_state_dict(state_dict, strict=False)
            logger.info(msg)
        else:
            super().init_weights()

    def forward(self, x: Tensor, **kwargs) -> Tensor:
        """Defines the computation performed at every call.

        Args:
            x (Tensor): The input data.

        Returns:
            Tensor: The classification scores for input samples.
        """
        # [N, in_channels]
        if self.dropout is not None:
            x = self.dropout(x)
        # [N, in_channels]
        cls_score = self.fc_cls(x)
        # [N, num_classes]
        return cls_score
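

# ----------------------------------------------------------------------------
# Minimal usage sketch (illustrative only): the values below (Kinetics-400
# classes, a 768-dim pooled feature, a dropout ratio of 0.5) are assumptions
# for the example, not settings taken from a released UniFormerV2 config.
# ----------------------------------------------------------------------------
if __name__ == '__main__':
    import torch

    head = MODELS.build(
        dict(
            type='UniFormerHead',
            num_classes=400,  # e.g. Kinetics-400
            in_channels=768,  # must match the backbone output width
            dropout_ratio=0.5))
    head.init_weights()

    # The head expects a pooled backbone feature of shape [N, in_channels].
    feat = torch.randn(2, 768)
    cls_score = head(feat)  # [N, num_classes]
    assert cls_score.shape == (2, 400)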