File size: 1,061 Bytes
716e409 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 |
import json
# Build a self-contained JSON payload for the Qwen2.5-7B HTP deployment:
# the three *_cfg entries start out as paths to JSON config files on disk
# and are replaced below by those files' full text contents, so the final
# private_data.json embeds everything inline.
private_data_version = {
    "device_type": "htp",
    "qnn_runtime_ver": "qnn223",
    "arch_type": "v75",
    "htp_backend_cfg": r"D:\LLM\Qwen2.5-7B-instruct-qnn299_qcs8550_4096\weight_shared_serialized_binaries_AR128_AR1\htp-backend.json",
    "htp_cfg": r"D:\LLM\Qwen2.5-7B-instruct-qnn299_qcs8550_4096\weight_shared_serialized_binaries_AR128_AR1\qwen2.5-7b-instruct-htp.json",
    "tokenizer_cfg": r"D:\LLM\Qwen2.5-7B-instruct-qnn299_qcs8550_4096\weight_shared_serialized_binaries_AR128_AR1\qwen2.5-7b-instruct-tokenizer.json",
}

# Inline each config file's contents in place of its path. A `with` block
# guarantees every file handle is closed (the original open().read() calls
# left handles open until garbage collection), and the loop removes the
# three copy-pasted read statements.
for key in ("htp_backend_cfg", "htp_cfg", "tokenizer_cfg"):
    with open(private_data_version[key], "r", encoding="utf-8") as cfg_file:
        private_data_version[key] = cfg_file.read()

# Sanity check: echo the tokenizer config so a human can eyeball the inlining.
print(private_data_version['tokenizer_cfg'])

# ensure_ascii=False keeps any non-ASCII text in the configs readable
# rather than \uXXXX-escaped.
with open("private_data.json", "w", encoding="utf-8") as f:
    json.dump(private_data_version, f, ensure_ascii=False)
|