File size: 3,486 Bytes
6f89716
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
# --- Required imports ---
# Standard library
import glob
import os

# Third-party
import torch
import torch.nn as nn
from huggingface_hub import hf_hub_download
from peft import LoraConfig, get_peft_model
from safetensors.torch import load_file
from transformers import AutoModelForCausalLM

def load_safetensor_from_hf(repo_id, filename, repo_type="dataset"):
    """Load a safetensors file from the local Hugging Face Hub cache.

    Args:
        repo_id: Hub repository id (e.g. "user/repo").
        filename: Name of the .safetensors file inside the repo.
        repo_type: Hub repository type; defaults to "dataset".

    Returns:
        Dict mapping tensor names to tensors, as produced by
        ``safetensors.torch.load_file``.
    """
    # local_files_only=True: resolve from the on-disk cache only,
    # never hit the network. Raises if the file is not cached.
    cached_path = hf_hub_download(
        repo_id=repo_id,
        filename=filename,
        repo_type=repo_type,
        local_files_only=True,
    )
    return load_file(cached_path)

def load_pretrain(model, pretrain_ckpt_path):
    """Load pretrained weights from sharded .safetensors files into `model`.

    Searches `pretrain_ckpt_path` for files matching ``model*.safetensors``
    (checkpoints may be sharded across several files), merges them into one
    state dict, and loads it non-strictly so partially-matching checkpoints
    still load. Prints a summary of loaded / missing / unexpected keys.

    Args:
        model: ``torch.nn.Module`` to receive the weights (mutated in place).
        pretrain_ckpt_path: Directory containing the checkpoint shard files.

    Raises:
        FileNotFoundError: If no matching .safetensors file exists.
    """
    print(f"📂 Loading pretrained weights from: {str(pretrain_ckpt_path)}")

    # Search for safetensors shard files in the checkpoint directory.
    model_weight_path_pattern = os.path.join(str(pretrain_ckpt_path), "model*.safetensors")
    model_weight_paths = glob.glob(model_weight_path_pattern)

    if not model_weight_paths:
        raise FileNotFoundError(f"❌ Cannot find any .safetensors file in {str(pretrain_ckpt_path)}")

    # Load and merge all shards into a single CPU state dict.
    weights = {}
    for model_weight_path in model_weight_paths:
        print(f"📥 Loading weights from: {model_weight_path}")
        weights.update(load_file(model_weight_path, device="cpu"))

    # strict=False: tolerate keys present in only one of (model, checkpoint).
    result = model.load_state_dict(weights, strict=False)

    model_keys = set(model.state_dict().keys())
    loaded_keys = model_keys.intersection(weights.keys())
    print(f"✅ Loaded keys:      {len(loaded_keys)} / {len(model_keys)}")
    print(f"❌ Missing keys:     {len(result.missing_keys)}")
    print(f"⚠️ Unexpected keys:  {len(result.unexpected_keys)}")
        
        
class RepModel(nn.Module):
    """Backbone wrapped with LoRA adapters for image-feature extraction.

    Loads a pretrained model ('fg-clip-base'), attaches LoRA adapters to its
    attention and FFN projections via PEFT, and exposes the wrapped model's
    ``get_image_features`` through ``forward`` / ``get_image_feature``.
    """

    def __init__(self):
        super().__init__()
        # --- Model + LoRA configuration ---
        model_root = 'fg-clip-base'

        lora_config = LoraConfig(
            r=32,                             # Rank of LoRA matrices
            lora_alpha=64,                    # Scaling factor (≈ 2 × r)
            target_modules=["q_proj", "v_proj", "k_proj", "fc1", "fc2"],  # Attention + FFN
            lora_dropout=0.05,                # Dropout rate
            bias="none",                      # Do not tune bias
            task_type="FEATURE_EXTRACTION"    # LoRA mode; can also use "CAUSAL_LM"
        )

        # --- Load and wrap model ---
        target_model = AutoModelForCausalLM.from_pretrained(
            model_root,
            trust_remote_code=True
        )
        self.target_model = get_peft_model(target_model, lora_config)

        # (optional) print summary of trainable vs. frozen parameters
        self.target_model.print_trainable_parameters()

    def get_image_feature(self, point_map):
        """Return image features for a batch of point maps."""
        return self.target_model.get_image_features(point_map)

    def forward(self, data_dict):
        """Extract image features from ``data_dict['point_map']``.

        Args:
            data_dict: Dict with key 'point_map' — assumed shape
                (B, 32, 3, 224, 224) per the original comment; TODO confirm.

        Returns:
            Image features from the wrapped model.
        """
        point_map = data_dict['point_map']  # B, 32, 3, 224, 224

        # BUG FIX: the original computed the features and discarded them,
        # so forward() returned None. Return the result so callers (and
        # autograd) actually receive the computed features.
        return self.target_model.get_image_features(point_map)
         
# --- Load pretrained weights ---
# Guarded so importing this module does not trigger model construction,
# network access, or checkpoint I/O as a side effect.
if __name__ == "__main__":
    # NOTE(review): hard-coded machine-specific path — consider promoting
    # this to a CLI argument or config entry.
    ckpt_path = '/home/m50048399/transfered/ye_project/checkpoints/sceneverse_scannet_exp1_b64_Pretrain_all_scannet_training_run1/poma/ckpt'
    model = RepModel()
    load_pretrain(model, ckpt_path)