diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..c6c18fe4d6549a20be806102f3a683d4431c2850 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,79 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+axmodels/feat_encoder.in_proj.axmodel filter=lfs diff=lfs merge=lfs -text
+axmodels/lm_to_dit_proj.axmodel filter=lfs diff=lfs merge=lfs -text
+axmodels/stop_predictor.axmodel filter=lfs diff=lfs merge=lfs -text
+axmodels/audio_vae.decoder.onnx filter=lfs diff=lfs merge=lfs -text
+axmodels/audio_vae.encoder.onnx filter=lfs diff=lfs merge=lfs -text
+axmodels/enc_to_lm_proj.axmodel filter=lfs diff=lfs merge=lfs -text
+axmodels/locdit.part1.axmodel filter=lfs diff=lfs merge=lfs -text
+axmodels/locdit.part3.axmodel filter=lfs diff=lfs merge=lfs -text
+axmodels/res_to_dit_proj.axmodel filter=lfs diff=lfs merge=lfs -text
+axmodels/feat_encoder.in_proj.onnx filter=lfs diff=lfs merge=lfs -text
+axmodels/feat_encoder.special_token.npy filter=lfs diff=lfs merge=lfs -text
+axmodels/fsq_layer.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l11_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l1_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l5_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l16_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l8_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l14_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l18_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l21_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l17_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l19_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l23_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l4_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l9_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l10_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l12_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l13_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/model.embed_tokens.weight.npy filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l15_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l2_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_post.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l3_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/model.embed_tokens.weight.float32.bin filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l0_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l20_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/model.embed_tokens.weight.bfloat16.bin filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l22_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l6_together.axmodel filter=lfs diff=lfs merge=lfs -text
+base_lm-axmodels/MiniCPMForCausalLM_p64_l7_together.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_decoder_estimator_decoder-axmodels/MiniCPMForCausalLM_p64_l3_together.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_decoder_estimator_decoder-axmodels/MiniCPMForCausalLM_post.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_decoder_estimator_decoder-axmodels/model.embed_tokens.weight.bfloat16.bin filter=lfs diff=lfs merge=lfs -text
+feat_decoder_estimator_decoder-axmodels/model.embed_tokens.weight.float32.bin filter=lfs diff=lfs merge=lfs -text
+feat_decoder_estimator_decoder-axmodels/model.embed_tokens.weight.npy filter=lfs diff=lfs merge=lfs -text
+feat_decoder_estimator_decoder-axmodels/MiniCPMForCausalLM_p64_l0_together.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_decoder_estimator_decoder-axmodels/MiniCPMForCausalLM_p64_l1_together.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_decoder_estimator_decoder-axmodels/MiniCPMForCausalLM_p64_l2_together.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_encoder_encoder-axmodels/MiniCPMForCausalLM_p64_l2_together.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_encoder_encoder-axmodels/MiniCPMForCausalLM_p64_l3_together.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_encoder_encoder-axmodels/MiniCPMForCausalLM_post.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_encoder_encoder-axmodels/model.embed_tokens.weight.bfloat16.bin filter=lfs diff=lfs merge=lfs -text
+feat_encoder_encoder-axmodels/model.embed_tokens.weight.float32.bin filter=lfs diff=lfs merge=lfs -text
+feat_encoder_encoder-axmodels/model.embed_tokens.weight.npy filter=lfs diff=lfs merge=lfs -text
+feat_encoder_encoder-axmodels/MiniCPMForCausalLM_p64_l0_together.axmodel filter=lfs diff=lfs merge=lfs -text
+feat_encoder_encoder-axmodels/MiniCPMForCausalLM_p64_l1_together.axmodel filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/MiniCPMForCausalLM_post.axmodel filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/model.embed_tokens.weight.bfloat16.bin filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/MiniCPMForCausalLM_p64_l4_together.axmodel filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/MiniCPMForCausalLM_p64_l5_together.axmodel filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/MiniCPMForCausalLM_p64_l2_together.axmodel filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/MiniCPMForCausalLM_p64_l3_together.axmodel filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/model.embed_tokens.weight.float32.bin filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/model.embed_tokens.weight.npy filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/MiniCPMForCausalLM_p64_l0_together.axmodel filter=lfs diff=lfs merge=lfs -text
+residual_lm-axmodels/MiniCPMForCausalLM_p64_l1_together.axmodel filter=lfs diff=lfs merge=lfs -text
+assets/en_woman1.txt filter=lfs diff=lfs merge=lfs -text
+assets/zh_man1.wav filter=lfs diff=lfs merge=lfs -text
+assets/zh_man2.mp3 filter=lfs diff=lfs merge=lfs -text
+assets/zh_man2.txt filter=lfs diff=lfs merge=lfs -text
+assets/zh_woman1.wav filter=lfs diff=lfs merge=lfs -text
+assets/en_man1.txt filter=lfs diff=lfs merge=lfs -text
+assets/en_woman1.mp3 filter=lfs diff=lfs merge=lfs -text
+assets/zh_man1.txt filter=lfs diff=lfs merge=lfs -text
+assets/zh_woman1.txt filter=lfs diff=lfs merge=lfs -text
+assets/en_man1.mp3 filter=lfs diff=lfs merge=lfs -text
diff --git a/README.md b/README.md
index 7be5fc7f47d5db027d120b8024982df93db95b74..d11278e49d4d68e411201a86f8d62348419d91f8 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,59 @@
----
-license: mit
----
+---
+license: mit
+language:
+- en
+- zh
+base_model:
+- VoxCPM
+pipeline_tag: text-to-speech
+library_name: transformers
+tags:
+- VoxCPM
+- Speech
+---
+
+# VoxCPM
+This version of VoxCPM has been converted to run on the Axera NPU using **w8a16** quantization.
+Compatible with Pulsar2 version: 4.2
+
+## Convert tools links:
+For those who are interested in model conversion, you can try to export the axmodel through the original repo:
+[VoxCPM official](https://github.com/OpenBMB/VoxCPM/)
+
+[Pulsar2 Link, How to Convert LLM from Huggingface to axmodel](https://pulsar2-docs.readthedocs.io/en/latest/appendix/build_llm.html)
+
+[AXera NPU HOST LLM Runtime](https://github.com/AXERA-TECH/VoxCPM.Axera)
+
+## Support Platform
+
+- AX650
+ - AX650N DEMO Board
+ - [M4N-Dock(爱芯派Pro)](https://wiki.sipeed.com/hardware/zh/maixIV/m4ndock/m4ndock.html)
+ - [M.2 Accelerator card](https://axcl-docs.readthedocs.io/zh-cn/latest/doc_guide_hardware.html)
+
+
+
+## How to use
+
+Download all files from this repository to the device
+### 1. Install packages
+
+#### 1. Install voxcpm axinfer package
+```
+git clone -b 1.0.4-axmode_infer https://github.com/techshoww/VoxCPM.git
+cd VoxCPM
+pip3 install .
+```
+
+#### 2. Download zipenhancer
+```
+pip3 install modelscope
+modelscope download --model iic/speech_zipenhancer_ans_multiloss_16k_base --local_dir iic/speech_zipenhancer_ans_multiloss_16k_base
+```
+
+### 2. Run on Axera Device
+Go to the root directory of this project and run:
+```
+python3 run_ax650.py
+```
+
diff --git a/VoxCPM-0.5B/config.json b/VoxCPM-0.5B/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1ffe017ebab475ffbdcb7c563d0c5147ca7dedce
--- /dev/null
+++ b/VoxCPM-0.5B/config.json
@@ -0,0 +1,52 @@
+{
+ "architecture": "voxcpm",
+ "lm_config": {
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_size": 1024,
+ "intermediate_size": 4096,
+ "max_position_embeddings": 32768,
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000,
+ "rope_scaling": {
+ "type": "longrope",
+ "long_factor": [1.0004360675811768, 1.0668443441390991, 1.1631425619125366, 1.3025742769241333, 1.5040205717086792, 1.7941505908966064, 2.2101221084594727, 2.802666664123535, 3.6389970779418945, 4.804192543029785, 6.39855432510376, 8.527148246765137, 11.277542114257812, 14.684998512268066, 18.69317054748535, 23.13019371032715, 27.72362518310547, 32.1606559753418, 36.168827056884766, 39.57627868652344, 42.32667541503906, 44.45526885986328, 46.04962921142578, 47.21482849121094, 48.05115509033203, 48.64370346069336, 49.05967712402344, 49.34980392456055, 49.551246643066406, 49.69068145751953, 49.78697967529297, 49.85338592529297],
+ "short_factor": [1.0004360675811768, 1.0668443441390991, 1.1631425619125366, 1.3025742769241333, 1.5040205717086792, 1.7941505908966064, 2.2101221084594727, 2.802666664123535, 3.6389970779418945, 4.804192543029785, 6.39855432510376, 8.527148246765137, 11.277542114257812, 14.684998512268066, 18.69317054748535, 23.13019371032715, 27.72362518310547, 32.1606559753418, 36.168827056884766, 39.57627868652344, 42.32667541503906, 44.45526885986328, 46.04962921142578, 47.21482849121094, 48.05115509033203, 48.64370346069336, 49.05967712402344, 49.34980392456055, 49.551246643066406, 49.69068145751953, 49.78697967529297, 49.85338592529297],
+ "original_max_position_embeddings": 32768
+ },
+ "vocab_size": 73448,
+ "scale_emb": 12,
+ "dim_model_base": 256,
+ "scale_depth": 1.4,
+ "use_mup": false
+ },
+ "patch_size": 2,
+ "feat_dim": 64,
+ "scalar_quantization_latent_dim": 256,
+ "scalar_quantization_scale": 9,
+ "residual_lm_num_layers": 6,
+ "encoder_config": {
+ "hidden_dim": 1024,
+ "ffn_dim": 4096,
+ "num_heads": 16,
+ "num_layers": 4
+ },
+ "dit_config": {
+ "hidden_dim": 1024,
+ "ffn_dim": 4096,
+ "num_heads": 16,
+ "num_layers": 4,
+ "cfm_config": {
+ "sigma_min": 1e-06,
+ "solver": "euler",
+ "t_scheduler": "log-norm",
+ "inference_cfg_rate": 2.0
+ }
+ },
+ "max_length": 4096,
+ "device": "cuda",
+ "dtype": "bfloat16"
+}
\ No newline at end of file
diff --git a/VoxCPM-0.5B/special_tokens_map.json b/VoxCPM-0.5B/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..8619dda6f3eb6d60d0a1bb274820054e46f41699
--- /dev/null
+++ b/VoxCPM-0.5B/special_tokens_map.json
@@ -0,0 +1,81 @@
+{
+ "additional_special_tokens": [
+ {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "<|tool_call|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "<|execute_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "<|execute_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ ],
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/VoxCPM-0.5B/tokenizer.json b/VoxCPM-0.5B/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..5405eda8fc32d18fb5a304cb1e292abf6c61dc3f
--- /dev/null
+++ b/VoxCPM-0.5B/tokenizer.json
@@ -0,0 +1,177952 @@
+{
+ "version": "1.0",
+ "truncation": null,
+ "padding": null,
+ "added_tokens": [
+ {
+ "id": 0,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 1,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 2,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73440,
+ "content": "<|im_end|>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73441,
+ "content": "<|im_start|>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73442,
+ "content": "<|tool_call|>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73443,
+ "content": "<|execute_start|>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73444,
+ "content": "<|execute_end|>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73445,
+ "content": "<|fim_prefix|>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73446,
+ "content": "<|fim_middle|>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73447,
+ "content": "<|fim_suffix|>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ }
+ ],
+ "normalizer": {
+ "type": "Sequence",
+ "normalizers": [
+ {
+ "type": "Prepend",
+ "prepend": "▁"
+ },
+ {
+ "type": "Replace",
+ "pattern": {
+ "String": " "
+ },
+ "content": "▁"
+ }
+ ]
+ },
+ "pre_tokenizer": null,
+ "post_processor": {
+ "type": "TemplateProcessing",
+ "single": [
+ {
+ "SpecialToken": {
+ "id": "",
+ "type_id": 0
+ }
+ },
+ {
+ "Sequence": {
+ "id": "A",
+ "type_id": 0
+ }
+ }
+ ],
+ "pair": [
+ {
+ "SpecialToken": {
+ "id": "",
+ "type_id": 0
+ }
+ },
+ {
+ "Sequence": {
+ "id": "A",
+ "type_id": 0
+ }
+ },
+ {
+ "SpecialToken": {
+ "id": "",
+ "type_id": 1
+ }
+ },
+ {
+ "Sequence": {
+ "id": "B",
+ "type_id": 1
+ }
+ }
+ ],
+ "special_tokens": {
+ "": {
+ "id": "",
+ "ids": [
+ 1
+ ],
+ "tokens": [
+ ""
+ ]
+ }
+ }
+ },
+ "decoder": {
+ "type": "Sequence",
+ "decoders": [
+ {
+ "type": "Replace",
+ "pattern": {
+ "String": "▁"
+ },
+ "content": " "
+ },
+ {
+ "type": "ByteFallback"
+ },
+ {
+ "type": "Fuse"
+ },
+ {
+ "type": "Strip",
+ "content": " ",
+ "start": 1,
+ "stop": 0
+ }
+ ]
+ },
+ "model": {
+ "type": "BPE",
+ "dropout": null,
+ "unk_token": "",
+ "continuing_subword_prefix": null,
+ "end_of_word_suffix": null,
+ "fuse_unk": true,
+ "byte_fallback": true,
+ "ignore_merges": false,
+ "vocab": {
+ "": 0,
+ "": 1,
+ "": 2,
+ "": 3,
+ "": 4,
+ "\n": 5,
+ "\t": 6,
+ "
": 7,
+ "
": 8,
+ "": 9,
+ "": 10,
+ "": 11,
+ "
": 12,
+ "": 13,
+ " | | ": 14,
+ "": 15,
+ "": 16,
+ "": 17,
+ "": 18,
+ "": 21,
+ "": 22,
+ "
": 23,
+ "": 24,
+ "": 25,
+ "": 26,
+ "": 27,
+ "": 28,
+ "": 29,
+ "": 30,
+ "": 31,
+ "": 32,
+ "
": 33,
+ "
": 34,
+ "
": 35,
+ "": 36,
+ "": 37,
+ "": 38,
+ "
": 39,
+ "": 40,
+ "": 41,
+ "
": 42,
+ "": 43,
+ "
": 44,
+ "
": 45,
+ "": 46,
+ "": 47,
+ "
": 48,
+ "": 49,
+ "": 50,
+ "": 51,
+ "0": 52,
+ "1": 53,
+ "2": 54,
+ "3": 55,
+ "4": 56,
+ "5": 57,
+ "6": 58,
+ "7": 59,
+ "8": 60,
+ "9": 61,
+ "+": 62,
+ "-": 63,
+ "=": 64,
+ ",": 65,
+ "。": 66,
+ "!": 67,
+ "?": 68,
+ "、": 69,
+ ":": 70,
+ "¥": 71,
+ ".": 72,
+ "!": 73,
+ "?": 74,
+ "...": 75,
+ "。。。": 76,
+ "。。。。。。": 77,
+ "《": 78,
+ "》": 79,
+ "【": 80,
+ "】": 81,
+ "『": 82,
+ "』": 83,
+ "```": 84,
+ "": 86,
+ "---": 87,
+ "": 88,
+ ";": 89,
+ ".": 90,
+ "=": 91,
+ "<": 92,
+ ">": 93,
+ "-": 94,
+ "+": 95,
+ "%": 96,
+ "‼": 97,
+ "㊣": 98,
+ "/": 99,
+ "|": 100,
+ "<|audio_start|>": 101,
+ "<|audio_end|>": 102,
+ "<|audio_prompt_start|>": 103,
+ "<|audio_prompt_end|>": 104,
+ "<|background|>": 105,
+ "<|/background|>": 106,
+ "<|characters|>": 107,
+ "<|/characters|>": 108,
+ "<|speaker_id|>": 109,
+ "<|/speaker_id|>": 110,
+ "<|span|>": 111,
+ "<|/span|>": 112,
+ "": 113,
+ "": 114,
+ "": 115,
+ "": 116,
+ "": 117,
+ "": 118,
+ "": 119,
+ "": 120,
+ "": 121,
+ "": 122,
+ "": 123,
+ "": 124,
+ "": 125,
+ "": 126,
+ "": 127,
+ "": 128,
+ "": 129,
+ "": 130,
+ "": 131,
+ "": 132,
+ "": 133,
+ "": 134,
+ "": 135,
+ "": 136,
+ "": 137,
+ "": 138,
+ "": 139,
+ "": 140,
+ "": 141,
+ "": 142,
+ "": 143,
+ "": 144,
+ "": 145,
+ "": 146,
+ "": 147,
+ "": 148,
+ "": 149,
+ "": 150,
+ "": 151,
+ "": 152,
+ "": 153,
+ "": 154,
+ "": 155,
+ "": 156,
+ "": 157,
+ "": 158,
+ "": 159,
+ "": 160,
+ "": 161,
+ "": 162,
+ "": 163,
+ "": 164,
+ "": 165,
+ "": 166,
+ "": 167,
+ "": 168,
+ "": 169,
+ "": 170,
+ "": 171,
+ "": 172,
+ "": 173,
+ "": 174,
+ "": 175,
+ "": 176,
+ "": 177,
+ "": 178,
+ "": 179,
+ "": 180,
+ "": 181,
+ "": 182,
+ "": 183,
+ "": 184,
+ "": 185,
+ "": 186,
+ "": 187,
+ "": 188,
+ "": 189,
+ "": 190,
+ "": 191,
+ "": 192,
+ "": 193,
+ "": 194,
+ "": 195,
+ "": 196,
+ "": 197,
+ "": 198,
+ "": 199,
+ "": 200,
+ "": 201,
+ "": 202,
+ "": 203,
+ "": 204,
+ "": 205,
+ "": 206,
+ "": 207,
+ "": 208,
+ "": 209,
+ "": 210,
+ "": 211,
+ "": 212,
+ "": 213,
+ "": 214,
+ "": 215,
+ "": 216,
+ "": 217,
+ "": 218,
+ "": 219,
+ "": 220,
+ "": 221,
+ "": 222,
+ "": 223,
+ "": 224,
+ "": 225,
+ "": 226,
+ "": 227,
+ "": 228,
+ "": 229,
+ "": 230,
+ "": 231,
+ "": 232,
+ "": 233,
+ "": 234,
+ "": 235,
+ "": 236,
+ "": 237,
+ "": 238,
+ "": 239,
+ "": 240,
+ "": 241,
+ "": 242,
+ "": 243,
+ "": 244,
+ "": 245,
+ "": 246,
+ "": 247,
+ "": 248,
+ "": 249,
+ "": 250,
+ "": 251,
+ "": 252,
+ "": 253,
+ "": 254,
+ "": 255,
+ "": 256,
+ "": 257,
+ "": 258,
+ "": 259,
+ "": 260,
+ "": 261,
+ "": 262,
+ "": 263,
+ "": 264,
+ "": 265,
+ "": 266,
+ "": 267,
+ "": 268,
+ "": 269,
+ "": 270,
+ "": 271,
+ "": 272,
+ "": 273,
+ "": 274,
+ "": 275,
+ "": 276,
+ "": 277,
+ "": 278,
+ "": 279,
+ "": 280,
+ "": 281,
+ "": 282,
+ "": 283,
+ "": 284,
+ "": 285,
+ "": 286,
+ "": 287,
+ "": 288,
+ "": 289,
+ "": 290,
+ "": 291,
+ "": 292,
+ "": 293,
+ "": 294,
+ "": 295,
+ "": 296,
+ "": 297,
+ "": 298,
+ "": 299,
+ "": 300,
+ "": 301,
+ "": 302,
+ "": 303,
+ "": 304,
+ "": 305,
+ "": 306,
+ "": 307,
+ "": 308,
+ "": 309,
+ "": 310,
+ "": 311,
+ "": 312,
+ "": 313,
+ "": 314,
+ "": 315,
+ "": 316,
+ "": 317,
+ "": 318,
+ "": 319,
+ "": 320,
+ "": 321,
+ "": 322,
+ "": 323,
+ "": 324,
+ "": 325,
+ "": 326,
+ "": 327,
+ "": 328,
+ "": 329,
+ "": 330,
+ "": 331,
+ "": 332,
+ "": 333,
+ "": 334,
+ "": 335,
+ "": 336,
+ "": 337,
+ "": 338,
+ "": 339,
+ "": 340,
+ "": 341,
+ "": 342,
+ "": 343,
+ "": 344,
+ "": 345,
+ "": 346,
+ "": 347,
+ "": 348,
+ "": 349,
+ "": 350,
+ "": 351,
+ "": 352,
+ "": 353,
+ "": 354,
+ "": 355,
+ "": 356,
+ "": 357,
+ "": 358,
+ "": 359,
+ "": 360,
+ "": 361,
+ "": 362,
+ "": 363,
+ "": 364,
+ "": 365,
+ "