{
  "lora_rank": 16,
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "lora_target_modules": ["q_proj", "k_proj", "v_proj", "o_proj"],
  "block_size": 16,
  "mask_token_id": 151669,
  "model_type": "qwen3",
  "base_model": "Qwen/Qwen3-8B"
}