import os
import yaml
import torch
from transformers import AlbertConfig, AlbertModel

class CustomAlbert(AlbertModel):
    def forward(self, *args, **kwargs):
        # Call the original forward method
        outputs = super().forward(*args, **kwargs)

        # Only return the last_hidden_state
        return outputs.last_hidden_state
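
# Note: the stock AlbertModel.forward returns a BaseModelOutputWithPooling;
# this subclass unwraps it, so callers get the (batch, seq_len, hidden_size)
# tensor directly, e.g. `hidden = model(input_ids)` instead of
# `model(input_ids).last_hidden_state`.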


def load_plbert(log_dir):
    # `log_dir` is expected to point directly at the PL-BERT YAML config file.
    config_path = log_dir
    with open(config_path) as f:
        plbert_config = yaml.safe_load(f)

    albert_base_configuration = AlbertConfig(**plbert_config['model_params'])
    bert = CustomAlbert(albert_base_configuration)

    # files = os.listdir(log_dir)
    # ckpts = []
    # for f in os.listdir(log_dir):
    #     if f.startswith("step_"): ckpts.append(f)

    # iters = [int(f.split('_')[-1].split('.')[0]) for f in ckpts if os.path.isfile(os.path.join(log_dir, f))]
    # iters = sorted(iters)[-1]

    # checkpoint = torch.load(log_dir + "/step_" + str(iters) + ".t7", map_location='cpu')
    # We just need to load the backbone
    # checkpoint = torch.load(os.path.join(log_dir, plbert_config['ckpt_path']), map_location='cpu')
    # print("Loaded PLBERT from:", os.path.join(log_dir, plbert_config['ckpt_path']))
    # state_dict = checkpoint['net']
    # from collections import OrderedDict
    # new_state_dict = OrderedDict()
    # for k, v in state_dict.items():
    #     name = k[7:] # remove `module.`
    #     if name.startswith('encoder.'):
    #         name = name[8:] # remove `encoder.`
    #         new_state_dict[name] = v
    # del new_state_dict["embeddings.position_ids"]
    # bert.load_state_dict(new_state_dict, strict=False)
    
    return bert
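

# Minimal usage sketch (illustrative, not part of the original module). It
# assumes the YAML config has a `model_params` section whose keys match
# AlbertConfig's keyword arguments; the path below is a hypothetical example.
if __name__ == "__main__":
    plbert = load_plbert("Utils/PLBERT/config.yml")  # hypothetical config path
    plbert.eval()

    # Dummy batch of token ids, shape (batch_size=1, seq_len=8).
    tokens = torch.zeros(1, 8, dtype=torch.long)
    with torch.no_grad():
        hidden = plbert(tokens)

    # CustomAlbert returns the hidden states directly:
    # shape (1, 8, plbert_config['model_params']['hidden_size']).
    print(hidden.shape)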