{
  "bos_token_id": 1,
  "conv_type": "llava_v1",
  "eos_token_id": 2,
  "imis_sam_checkpoint": "/data/huggingface/IMISNet-B.pth",
  "local_rank": 0,
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "lora_r": 8,
  "lora_target_modules": "q_proj,v_proj,gate_proj,up_proj,down_proj",
  "max_length": 4096,
  "model_max_length": 2048,
  "out_dim": 768,
  "pad_token_id": 0,
  "precision": "bf16",
  "pretrain_sam": null,
  "region_fea_adapter": false,
  "sam_img_size": 1024,
  "save_path": "/data/checkpoints/imis-stage4-loop-alllong-bs48-3e-5-5e-5-trainfcs-gradaccu5-vlmearlystop70-60/",
  "seg_token_idx": 32000,
  "sft_modules": "lm_head,embed_tokens,input_layernorm,post_attention_layernorm,mm_projector",
  "train_mask_decoder": false,
  "transformers_version": "4.31.0",
  "use_mm_start_end": true,
  "version": "/data/huggingface/llava-v1.5-7b",
  "vis_save_path": "./vis_output",
  "vision_pretrained": "/data/huggingface/sam-med2d_b.pth",
  "vision_tower": "/data/huggingface/clip-vit-large-patch14-336",
  "weight": "/data/IMIS_runs/imis-stage4-loop-alllong-bs48-3e-5-5e-5-trainfcs-gradaccu5-vlmearlystop70/ckpt_model_60/global_step60/mp_rank_00_model_states.pt"
}