{
  "attn_implementation": "flash_attention_2",
  "bos_token_id": 151643,
  "do_sample": true,
  "eos_token_id": [
    151645,
    151643
  ],
  "if_include_sam": false,
  "if_use_qwen_connector": true,
  "num_of_query": 64,
  "pad_token_id": 151643,
  "repetition_penalty": 1.05,
  "temperature": 1e-06,
  "transformers_version": "4.51.3",
  "use_cache": false
}