{
  "model": "google/paligemma-3b-pt-224",
  "model_type": "paligemma",
  "model_revision": null,
  "task_type": "causal_lm",
  "torch_dtype": "float16",
  "attn_impl": null,
  "new_special_tokens": [],
  "num_labels": null,
  "problem_type": null,
  "rope_scaling": null,
  "device_map": null,
  "max_memory": {},
  "max_model_len": null,
  "local_repo_path": null,
  "init_strategy": null,
  "template": "paligemma",
  "system": null,
  "max_length": 2048,
  "truncation_strategy": "delete",
  "max_pixels": null,
  "agent_template": null,
  "norm_bbox": null,
  "use_chat_template": true,
  "padding_free": false,
  "padding_side": "right",
  "loss_scale": "default",
  "sequence_parallel_size": 1,
  "response_prefix": null,
  "template_backend": "swift",
  "dataset": [],
  "val_dataset": [],
  "split_dataset_ratio": 0.0,
  "data_seed": 42,
  "dataset_num_proc": 1,
  "load_from_cache_file": true,
  "dataset_shuffle": true,
  "val_dataset_shuffle": false,
  "streaming": false,
  "interleave_prob": null,
  "stopping_strategy": "first_exhausted",
  "shuffle_buffer_size": 1000,
  "download_mode": "reuse_dataset_if_exists",
  "columns": {},
  "strict": false,
  "remove_unused_columns": true,
  "model_name": null,
  "model_author": null,
  "custom_dataset_info": [],
  "quant_method": "bnb",
  "quant_bits": 4,
  "hqq_axis": null,
  "bnb_4bit_compute_dtype": "float16",
  "bnb_4bit_quant_type": "nf4",
  "bnb_4bit_use_double_quant": true,
  "bnb_4bit_quant_storage": null,
  "max_new_tokens": null,
  "temperature": null,
  "top_k": null,
  "top_p": null,
  "repetition_penalty": null,
  "num_beams": 1,
  "stream": false,
  "stop_words": [],
  "logprobs": false,
  "top_logprobs": null,
  "ckpt_dir": "/kaggle/working/output_Kvasir-VQA-x1/v0-20250902-124816/checkpoint-188",
  "lora_modules": [],
  "tuner_backend": "peft",
  "train_type": "lora",
  "adapters": [
    "/kaggle/working/output_Kvasir-VQA-x1/v0-20250902-124816/checkpoint-188"
  ],
  "external_plugins": [],
  "seed": 42,
  "model_kwargs": {},
  "load_args": true,
  "load_data_args": false,
  "packing": false,
  "lazy_tokenize": true,
  "cached_dataset": [],
  "custom_register_path": [],
  "use_hf": true,
  "hub_token": null,
  "ddp_timeout": 18000000,
  "ddp_backend": null,
  "ignore_args_error": false,
  "use_swift_lora": false,
  "merge_lora": false,
  "safe_serialization": true,
  "max_shard_size": "5GB",
  "output_dir": "/kaggle/working/output_Kvasir-VQA-x1/v0-20250902-124816/checkpoint-188-bnb-int4",
  "quant_n_samples": 256,
  "quant_batch_size": 1,
  "group_size": 128,
  "to_cached_dataset": false,
  "to_ollama": false,
  "to_mcore": false,
  "to_hf": false,
  "mcore_model": null,
  "mcore_adapters": [],
  "thread_count": null,
  "test_convert_precision": false,
  "push_to_hub": true,
  "hub_model_id": "Kvasir-VQA-x1-lora_250902-1244",
  "hub_private_repo": false,
  "commit_message": "update files",
  "to_peft_format": false,
  "exist_ok": false,
  "rank": -1,
  "local_rank": -1,
  "global_world_size": 1,
  "local_world_size": 1,
  "model_suffix": "paligemma-3b-pt-224",
  "model_info": "ModelInfo(model_type='paligemma', model_dir='/root/.cache/huggingface/hub/models--google--paligemma-3b-pt-224/snapshots/35e4f46485b4d07967e7e9935bc3786aad50687c', torch_dtype=torch.float16, max_model_len=8192, quant_method='bnb', quant_bits=4, rope_scaling=None, is_moe_model=False, config=None, task_type='causal_lm', num_labels=None)",
  "model_meta": "ModelMeta(model_type='paligemma', model_groups=[ModelGroup(models=[Model(ms_model_id='AI-ModelScope/paligemma-3b-pt-224', hf_model_id='google/paligemma-3b-pt-224', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma-3b-pt-448', hf_model_id='google/paligemma-3b-pt-448', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma-3b-pt-896', hf_model_id='google/paligemma-3b-pt-896', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[]), ModelGroup(models=[Model(ms_model_id='AI-ModelScope/paligemma-3b-mix-224', hf_model_id='google/paligemma-3b-mix-224', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma-3b-mix-448', hf_model_id='google/paligemma-3b-mix-448', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[]), ModelGroup(models=[Model(ms_model_id='AI-ModelScope/paligemma2-3b-pt-224', hf_model_id='google/paligemma2-3b-pt-224', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-3b-pt-448', hf_model_id='google/paligemma2-3b-pt-448', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-3b-pt-896', hf_model_id='google/paligemma2-3b-pt-896', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-10b-pt-224', hf_model_id='google/paligemma2-10b-pt-224', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-10b-pt-448', hf_model_id='google/paligemma2-10b-pt-448', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-10b-pt-896', hf_model_id='google/paligemma2-10b-pt-896', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-28b-pt-224', hf_model_id='google/paligemma2-28b-pt-224', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-28b-pt-448', hf_model_id='google/paligemma2-28b-pt-448', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-28b-pt-896', hf_model_id='google/paligemma2-28b-pt-896', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[]), ModelGroup(models=[Model(ms_model_id='AI-ModelScope/paligemma2-3b-ft-docci-448', hf_model_id='google/paligemma2-3b-ft-docci-448', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='AI-ModelScope/paligemma2-10b-ft-docci-448', hf_model_id='google/paligemma2-10b-ft-docci-448', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[])], template='paligemma', get_function=<function get_model_tokenizer_paligemma_vision at 0x7a9427329a80>, model_arch=MultiModelKeys(arch_name='llava_hf', embedding=None, module_list=None, lm_head=None, q_proj=None, k_proj=None, v_proj=None, o_proj=None, attention=None, mlp=None, down_proj=None, qkv_proj=None, qk_proj=None, qa_proj=None, qb_proj=None, kv_proj=None, kva_proj=None, kvb_proj=None, language_model=['model.language_model'], aligner=['model.multi_modal_projector'], vision_tower=['model.vision_tower'], generator=[]), architectures=['PaliGemmaForConditionalGeneration'], additional_saved_files=[], torch_dtype=None, is_multimodal=True, is_reward=False, task_type=None, ignore_patterns=None, requires=['transformers>=4.41'], tags=['vision'])",
  "model_dir": "/root/.cache/huggingface/hub/models--google--paligemma-3b-pt-224/snapshots/35e4f46485b4d07967e7e9935bc3786aad50687c",
  "hub": "<class 'swift.hub.hub.HFHub'>"
}