{
"model": "/data/minimax-dialogue/experiment/qwen/Qwen3-32B-long",
"model_type": "qwen3",
"model_revision": null,
"task_type": "causal_lm",
"torch_dtype": "bfloat16",
"attn_impl": null,
"num_labels": null,
"problem_type": null,
"rope_scaling": null,
"device_map": null,
"max_memory": {},
"local_repo_path": null,
"init_strategy": null,
"template": "qwen3",
"system": null,
"max_length": 2048,
"truncation_strategy": "delete",
"max_pixels": null,
"agent_template": null,
"norm_bbox": null,
"use_chat_template": true,
"padding_free": false,
"padding_side": "right",
"loss_scale": "default",
"sequence_parallel_size": 1,
"response_prefix": null,
"template_backend": "swift",
"dataset": [],
"val_dataset": [],
"split_dataset_ratio": 0.01,
"data_seed": 42,
"dataset_num_proc": 1,
"load_from_cache_file": true,
"dataset_shuffle": true,
"val_dataset_shuffle": false,
"streaming": false,
"interleave_prob": null,
"stopping_strategy": "first_exhausted",
"shuffle_buffer_size": 1000,
"download_mode": "reuse_dataset_if_exists",
"columns": {},
"strict": false,
"remove_unused_columns": true,
"model_name": null,
"model_author": null,
"custom_dataset_info": [],
"quant_method": null,
"quant_bits": null,
"hqq_axis": null,
"bnb_4bit_compute_dtype": "bfloat16",
"bnb_4bit_quant_type": "nf4",
"bnb_4bit_use_double_quant": true,
"bnb_4bit_quant_storage": null,
"max_new_tokens": null,
"temperature": null,
"top_k": null,
"top_p": null,
"repetition_penalty": null,
"num_beams": 1,
"stream": false,
"stop_words": [],
"logprobs": false,
"top_logprobs": null,
"ckpt_dir": "/data/minimax-dialogue/users/ado/082025project/final_roleplay_v2/ckpt/Qwen3-32B-with-systhink/v9-20251222-082607",
"lora_modules": [],
"tuner_backend": "peft",
"train_type": "lora",
"adapters": [],
"external_plugins": [],
"seed": 42,
"model_kwargs": {},
"load_args": true,
"load_data_args": false,
"packing": false,
"packing_cache": null,
"custom_register_path": [],
"use_hf": false,
"hub_token": null,
"ddp_timeout": 18000000,
"ddp_backend": null,
"ignore_args_error": false,
"use_swift_lora": false,
"merge_lora": false,
"safe_serialization": true,
"max_shard_size": "5GB",
"output_dir": "/data/minimax-dialogue/users/ado/082025project/final_roleplay_v2/ckpt/Qwen3-32B-with-systhink/v9-20251222-082607/hf_0882",
"quant_n_samples": 256,
"quant_batch_size": 1,
"group_size": 128,
"to_ollama": false,
"to_mcore": false,
"to_hf": true,
"mcore_model": "/data/minimax-dialogue/users/ado/082025project/final_roleplay_v2/ckpt/Qwen3-32B-with-systhink/v9-20251222-082607",
"thread_count": 7,
"test_convert_precision": false,
"push_to_hub": false,
"hub_model_id": null,
"hub_private_repo": false,
"commit_message": "update files",
"to_peft_format": false,
"exist_ok": false,
"rank": 0,
"local_rank": -1,
"global_world_size": 1,
"local_world_size": 1,
"model_suffix": "Qwen3-32B-long",
"model_info": "ModelInfo(model_type='qwen3', model_dir='/data/minimax-dialogue/experiment/qwen/Qwen3-32B-long', torch_dtype=torch.bfloat16, max_model_len=131072, quant_method=None, quant_bits=None, rope_scaling={'rope_type': 'yarn', 'factor': 4.0, 'original_max_position_embeddings': 32768}, config=None, task_type='causal_lm', num_labels=None)",
"model_meta": "ModelMeta(model_type='qwen3', model_groups=[ModelGroup(models=[Model(ms_model_id='Qwen/Qwen3-0.6B-Base', hf_model_id='Qwen/Qwen3-0.6B-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-1.7B-Base', hf_model_id='Qwen/Qwen3-1.7B-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-4B-Base', hf_model_id='Qwen/Qwen3-4B-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-8B-Base', hf_model_id='Qwen/Qwen3-8B-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-14B-Base', hf_model_id='Qwen/Qwen3-14B-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-0.6B', hf_model_id='Qwen/Qwen3-0.6B', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-1.7B', hf_model_id='Qwen/Qwen3-1.7B', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-4B', hf_model_id='Qwen/Qwen3-4B', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-8B', hf_model_id='Qwen/Qwen3-8B', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-14B', hf_model_id='Qwen/Qwen3-14B', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-32B', hf_model_id='Qwen/Qwen3-32B', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-0.6B-FP8', hf_model_id='Qwen/Qwen3-0.6B-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-1.7B-FP8', hf_model_id='Qwen/Qwen3-1.7B-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-4B-FP8', hf_model_id='Qwen/Qwen3-4B-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-8B-FP8', hf_model_id='Qwen/Qwen3-8B-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-14B-FP8', hf_model_id='Qwen/Qwen3-14B-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-32B-FP8', hf_model_id='Qwen/Qwen3-32B-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-4B-AWQ', hf_model_id='Qwen/Qwen3-4B-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-8B-AWQ', hf_model_id='Qwen/Qwen3-8B-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-14B-AWQ', hf_model_id='Qwen/Qwen3-14B-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-32B-AWQ', hf_model_id='Qwen/Qwen3-32B-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='swift/Qwen3-32B-AWQ', hf_model_id=None, model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[])], template='qwen3', get_function=<function get_model_tokenizer_with_flash_attn at 0x7fb81ee028c0>, model_arch='llama', architectures=['Qwen3ForCausalLM'], additional_saved_files=[], torch_dtype=None, is_multimodal=False, is_reward=False, task_type=None, ignore_patterns=None, requires=['transformers>=4.51'], tags=[])",
"model_dir": "/data/minimax-dialogue/experiment/qwen/Qwen3-32B-long",
"hub": "<class 'swift.hub.hub.MSHub'>"
}