Upload folder using huggingface_hub
Browse files- .gitattributes +1 -0
- README.md +24 -0
- added_tokens.json +107 -0
- chat_template.jinja +88 -0
- config.json +297 -0
- configuration_minicpmo.py +260 -0
- generation_config.json +12 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +0 -0
- modeling_minicpmo.py +0 -0
- modeling_navit_siglip.py +981 -0
- preprocessor_config.json +35 -0
- processing_minicpmo.py +1665 -0
- processor_config.json +89 -0
- special_tokens_map.json +580 -0
- tokenization_minicpmo_fast.py +120 -0
- tokenizer.json +3 -0
- tokenizer_config.json +6989 -0
- utils.py +2417 -0
- vocab.json +0 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: apache-2.0
|
| 3 |
+
pipeline_tag: any-to-any
|
| 4 |
+
library_name: transformers
|
| 5 |
+
tags:
|
| 6 |
+
- minicpm-o
|
| 7 |
+
- minicpm-v
|
| 8 |
+
- multimodal
|
| 9 |
+
- full-duplex
|
| 10 |
+
- mlx
|
| 11 |
+
---
|
| 12 |
+
|
| 13 |
+
# mlx-community/MiniCPM-o-4_5-4bit
|
| 14 |
+
This model was converted to MLX format from [`openbmb/MiniCPM-o-4_5`]() using mlx-vlm version **0.3.13**.
|
| 15 |
+
Refer to the [original model card](https://huggingface.co/openbmb/MiniCPM-o-4_5) for more details on the model.
|
| 16 |
+
## Use with mlx
|
| 17 |
+
|
| 18 |
+
```bash
|
| 19 |
+
pip install -U mlx-vlm
|
| 20 |
+
```
|
| 21 |
+
|
| 22 |
+
```bash
|
| 23 |
+
python -m mlx_vlm.generate --model mlx-community/MiniCPM-o-4_5-4bit --max-tokens 100 --temperature 0.0 --prompt "Describe this image." --image <path_to_image>
|
| 24 |
+
```
|
added_tokens.json
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"</answer>": 151686,
|
| 3 |
+
"</box>": 151674,
|
| 4 |
+
"</focus>": 151688,
|
| 5 |
+
"</image>": 151670,
|
| 6 |
+
"</image_id>": 151682,
|
| 7 |
+
"</image_save_to>": 151696,
|
| 8 |
+
"</line>": 151690,
|
| 9 |
+
"</perception>": 151692,
|
| 10 |
+
"</point>": 151678,
|
| 11 |
+
"</quad>": 151676,
|
| 12 |
+
"</ref>": 151672,
|
| 13 |
+
"</slice>": 151680,
|
| 14 |
+
"</source_image>": 151694,
|
| 15 |
+
"</think>": 151668,
|
| 16 |
+
"</tool_call>": 151658,
|
| 17 |
+
"</tool_response>": 151666,
|
| 18 |
+
"</unit>": 151684,
|
| 19 |
+
"<answer>": 151685,
|
| 20 |
+
"<box>": 151673,
|
| 21 |
+
"<focus>": 151687,
|
| 22 |
+
"<image>": 151669,
|
| 23 |
+
"<image_id>": 151681,
|
| 24 |
+
"<image_save_to>": 151695,
|
| 25 |
+
"<line>": 151689,
|
| 26 |
+
"<perception>": 151691,
|
| 27 |
+
"<point>": 151677,
|
| 28 |
+
"<quad>": 151675,
|
| 29 |
+
"<ref>": 151671,
|
| 30 |
+
"<slice>": 151679,
|
| 31 |
+
"<source_image>": 151693,
|
| 32 |
+
"<think>": 151667,
|
| 33 |
+
"<tool_call>": 151657,
|
| 34 |
+
"<tool_response>": 151665,
|
| 35 |
+
"<unit>": 151683,
|
| 36 |
+
"<|audio_end|>": 151699,
|
| 37 |
+
"<|audio_start|>": 151697,
|
| 38 |
+
"<|audio|>": 151698,
|
| 39 |
+
"<|box_end|>": 151649,
|
| 40 |
+
"<|box_start|>": 151648,
|
| 41 |
+
"<|emotion_end|>": 151711,
|
| 42 |
+
"<|emotion_start|>": 151710,
|
| 43 |
+
"<|endoftext|>": 151643,
|
| 44 |
+
"<|file_sep|>": 151664,
|
| 45 |
+
"<|fim_middle|>": 151660,
|
| 46 |
+
"<|fim_pad|>": 151662,
|
| 47 |
+
"<|fim_prefix|>": 151659,
|
| 48 |
+
"<|fim_suffix|>": 151661,
|
| 49 |
+
"<|im_end|>": 151645,
|
| 50 |
+
"<|im_start|>": 151644,
|
| 51 |
+
"<|image_pad|>": 151655,
|
| 52 |
+
"<|interrupt|>": 151707,
|
| 53 |
+
"<|listen|>": 151705,
|
| 54 |
+
"<|object_ref_end|>": 151647,
|
| 55 |
+
"<|object_ref_start|>": 151646,
|
| 56 |
+
"<|pitch_end|>": 151715,
|
| 57 |
+
"<|pitch_start|>": 151714,
|
| 58 |
+
"<|quad_end|>": 151651,
|
| 59 |
+
"<|quad_start|>": 151650,
|
| 60 |
+
"<|repo_name|>": 151663,
|
| 61 |
+
"<|speak|>": 151706,
|
| 62 |
+
"<|speed_end|>": 151713,
|
| 63 |
+
"<|speed_start|>": 151712,
|
| 64 |
+
"<|spk_bos|>": 151700,
|
| 65 |
+
"<|spk_eos|>": 151702,
|
| 66 |
+
"<|spk|>": 151701,
|
| 67 |
+
"<|turn_bos|>": 151716,
|
| 68 |
+
"<|timbre_10|>": 151726,
|
| 69 |
+
"<|timbre_11|>": 151727,
|
| 70 |
+
"<|timbre_12|>": 151728,
|
| 71 |
+
"<|timbre_13|>": 151729,
|
| 72 |
+
"<|timbre_14|>": 151730,
|
| 73 |
+
"<|timbre_15|>": 151731,
|
| 74 |
+
"<|timbre_16|>": 151732,
|
| 75 |
+
"<|timbre_17|>": 151733,
|
| 76 |
+
"<|timbre_18|>": 151734,
|
| 77 |
+
"<|timbre_19|>": 151735,
|
| 78 |
+
"<|turn_eos|>": 151717,
|
| 79 |
+
"<|timbre_20|>": 151736,
|
| 80 |
+
"<|timbre_21|>": 151737,
|
| 81 |
+
"<|timbre_22|>": 151738,
|
| 82 |
+
"<|timbre_23|>": 151739,
|
| 83 |
+
"<|timbre_24|>": 151740,
|
| 84 |
+
"<|timbre_25|>": 151741,
|
| 85 |
+
"<|timbre_26|>": 151742,
|
| 86 |
+
"<|timbre_27|>": 151743,
|
| 87 |
+
"<|timbre_28|>": 151744,
|
| 88 |
+
"<|timbre_29|>": 151745,
|
| 89 |
+
"<|chunk_eos|>": 151718,
|
| 90 |
+
"<|timbre_30|>": 151746,
|
| 91 |
+
"<|timbre_31|>": 151747,
|
| 92 |
+
"<|chunk_bos|>": 151719,
|
| 93 |
+
"<|chunk_tts_bos|>": 151720,
|
| 94 |
+
"<|chunk_tts_eos|>": 151721,
|
| 95 |
+
"<|tts_pad|>": 151722,
|
| 96 |
+
"<|timbre_7|>": 151723,
|
| 97 |
+
"<|timbre_8|>": 151724,
|
| 98 |
+
"<|timbre_9|>": 151725,
|
| 99 |
+
"<|tts_bos|>": 151703,
|
| 100 |
+
"<|tts_eos|>": 151704,
|
| 101 |
+
"<|vad_end|>": 151709,
|
| 102 |
+
"<|vad_start|>": 151708,
|
| 103 |
+
"<|video_pad|>": 151656,
|
| 104 |
+
"<|vision_end|>": 151653,
|
| 105 |
+
"<|vision_pad|>": 151654,
|
| 106 |
+
"<|vision_start|>": 151652
|
| 107 |
+
}
|
chat_template.jinja
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{%- if tools %}
|
| 2 |
+
{{- '<|im_start|>system\n' }}
|
| 3 |
+
{%- if messages[0].role == 'system' %}
|
| 4 |
+
{{- messages[0].content + '\n\n' }}
|
| 5 |
+
{%- endif %}
|
| 6 |
+
{{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
|
| 7 |
+
{%- for tool in tools %}
|
| 8 |
+
{{- "\n" }}
|
| 9 |
+
{{- tool | tojson }}
|
| 10 |
+
{%- endfor %}
|
| 11 |
+
{{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
|
| 12 |
+
{%- else %}
|
| 13 |
+
{%- if messages[0].role == 'system' %}
|
| 14 |
+
{{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
|
| 15 |
+
{%- endif %}
|
| 16 |
+
{%- endif %}
|
| 17 |
+
{%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
|
| 18 |
+
{%- for message in messages[::-1] %}
|
| 19 |
+
{%- set index = (messages|length - 1) - loop.index0 %}
|
| 20 |
+
{%- if ns.multi_step_tool and message.role == "user" and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}
|
| 21 |
+
{%- set ns.multi_step_tool = false %}
|
| 22 |
+
{%- set ns.last_query_index = index %}
|
| 23 |
+
{%- endif %}
|
| 24 |
+
{%- endfor %}
|
| 25 |
+
{%- for message in messages %}
|
| 26 |
+
{%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
|
| 27 |
+
{{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
|
| 28 |
+
{%- elif message.role == "assistant" %}
|
| 29 |
+
{%- set content = message.content %}
|
| 30 |
+
{%- set reasoning_content = '' %}
|
| 31 |
+
{%- if message.reasoning_content is defined and message.reasoning_content is not none %}
|
| 32 |
+
{%- set reasoning_content = message.reasoning_content %}
|
| 33 |
+
{%- else %}
|
| 34 |
+
{%- if '</think>' in message.content %}
|
| 35 |
+
{%- set content = message.content.split('</think>')[-1].lstrip('\n') %}
|
| 36 |
+
{%- set reasoning_content = message.content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
|
| 37 |
+
{%- endif %}
|
| 38 |
+
{%- endif %}
|
| 39 |
+
{%- if loop.index0 > ns.last_query_index %}
|
| 40 |
+
{%- if loop.last or (not loop.last and reasoning_content) %}
|
| 41 |
+
{{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}
|
| 42 |
+
{%- else %}
|
| 43 |
+
{{- '<|im_start|>' + message.role + '\n' + content }}
|
| 44 |
+
{%- endif %}
|
| 45 |
+
{%- else %}
|
| 46 |
+
{{- '<|im_start|>' + message.role + '\n' + content }}
|
| 47 |
+
{%- endif %}
|
| 48 |
+
{%- if message.tool_calls %}
|
| 49 |
+
{%- for tool_call in message.tool_calls %}
|
| 50 |
+
{%- if (loop.first and content) or (not loop.first) %}
|
| 51 |
+
{{- '\n' }}
|
| 52 |
+
{%- endif %}
|
| 53 |
+
{%- if tool_call.function %}
|
| 54 |
+
{%- set tool_call = tool_call.function %}
|
| 55 |
+
{%- endif %}
|
| 56 |
+
{{- '<tool_call>\n{"name": "' }}
|
| 57 |
+
{{- tool_call.name }}
|
| 58 |
+
{{- '", "arguments": ' }}
|
| 59 |
+
{%- if tool_call.arguments is string %}
|
| 60 |
+
{{- tool_call.arguments }}
|
| 61 |
+
{%- else %}
|
| 62 |
+
{{- tool_call.arguments | tojson }}
|
| 63 |
+
{%- endif %}
|
| 64 |
+
{{- '}\n</tool_call>' }}
|
| 65 |
+
{%- endfor %}
|
| 66 |
+
{%- endif %}
|
| 67 |
+
{{- '<|im_end|>\n' }}
|
| 68 |
+
{%- elif message.role == "tool" %}
|
| 69 |
+
{%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
|
| 70 |
+
{{- '<|im_start|>user' }}
|
| 71 |
+
{%- endif %}
|
| 72 |
+
{{- '\n<tool_response>\n' }}
|
| 73 |
+
{{- message.content }}
|
| 74 |
+
{{- '\n</tool_response>' }}
|
| 75 |
+
{%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
|
| 76 |
+
{{- '<|im_end|>\n' }}
|
| 77 |
+
{%- endif %}
|
| 78 |
+
{%- endif %}
|
| 79 |
+
{%- endfor %}
|
| 80 |
+
{%- if add_generation_prompt %}
|
| 81 |
+
{{- '<|im_start|>assistant\n' }}
|
| 82 |
+
{%- if enable_thinking is defined and enable_thinking is false %}
|
| 83 |
+
{{- '<think>\n\n</think>\n\n' }}
|
| 84 |
+
{%- endif %}
|
| 85 |
+
{%- if use_tts_template is defined and use_tts_template is true %}
|
| 86 |
+
{{- '<|tts_bos|>' }}
|
| 87 |
+
{%- endif %}
|
| 88 |
+
{%- endif %}
|
config.json
ADDED
|
@@ -0,0 +1,297 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"MiniCPMO"
|
| 4 |
+
],
|
| 5 |
+
"attention_bias": false,
|
| 6 |
+
"attention_dropout": 0.0,
|
| 7 |
+
"audio_chunk_length": 1.0,
|
| 8 |
+
"audio_config": {
|
| 9 |
+
"_attn_implementation_autoset": true,
|
| 10 |
+
"_name_or_path": "openai/whisper-medium",
|
| 11 |
+
"activation_dropout": 0.0,
|
| 12 |
+
"activation_function": "gelu",
|
| 13 |
+
"apply_spec_augment": false,
|
| 14 |
+
"architectures": [
|
| 15 |
+
"MiniCPMWhisperEncoder"
|
| 16 |
+
],
|
| 17 |
+
"attention_dropout": 0.0,
|
| 18 |
+
"begin_suppress_tokens": [
|
| 19 |
+
220,
|
| 20 |
+
50257
|
| 21 |
+
],
|
| 22 |
+
"bos_token_id": 50257,
|
| 23 |
+
"classifier_proj_size": 256,
|
| 24 |
+
"d_model": 1024,
|
| 25 |
+
"decoder_attention_heads": 16,
|
| 26 |
+
"decoder_ffn_dim": 4096,
|
| 27 |
+
"decoder_layerdrop": 0.0,
|
| 28 |
+
"decoder_layers": 24,
|
| 29 |
+
"decoder_start_token_id": 50258,
|
| 30 |
+
"dropout": 0.0,
|
| 31 |
+
"encoder_attention_heads": 16,
|
| 32 |
+
"encoder_ffn_dim": 4096,
|
| 33 |
+
"encoder_layerdrop": 0.0,
|
| 34 |
+
"encoder_layers": 24,
|
| 35 |
+
"eos_token_id": 50257,
|
| 36 |
+
"forced_decoder_ids": [
|
| 37 |
+
[
|
| 38 |
+
1,
|
| 39 |
+
50259
|
| 40 |
+
],
|
| 41 |
+
[
|
| 42 |
+
2,
|
| 43 |
+
50359
|
| 44 |
+
],
|
| 45 |
+
[
|
| 46 |
+
3,
|
| 47 |
+
50363
|
| 48 |
+
]
|
| 49 |
+
],
|
| 50 |
+
"init_std": 0.02,
|
| 51 |
+
"mask_feature_length": 10,
|
| 52 |
+
"mask_feature_min_masks": 0,
|
| 53 |
+
"mask_feature_prob": 0.0,
|
| 54 |
+
"mask_time_length": 10,
|
| 55 |
+
"mask_time_min_masks": 2,
|
| 56 |
+
"mask_time_prob": 0.05,
|
| 57 |
+
"max_length": 448,
|
| 58 |
+
"max_source_positions": 1500,
|
| 59 |
+
"max_target_positions": 448,
|
| 60 |
+
"median_filter_width": 7,
|
| 61 |
+
"model_type": "whisper",
|
| 62 |
+
"num_hidden_layers": 24,
|
| 63 |
+
"num_mel_bins": 80,
|
| 64 |
+
"pad_token_id": 50257,
|
| 65 |
+
"scale_embedding": false,
|
| 66 |
+
"suppress_tokens": [
|
| 67 |
+
1,
|
| 68 |
+
2,
|
| 69 |
+
7,
|
| 70 |
+
8,
|
| 71 |
+
9,
|
| 72 |
+
10,
|
| 73 |
+
14,
|
| 74 |
+
25,
|
| 75 |
+
26,
|
| 76 |
+
27,
|
| 77 |
+
28,
|
| 78 |
+
29,
|
| 79 |
+
31,
|
| 80 |
+
58,
|
| 81 |
+
59,
|
| 82 |
+
60,
|
| 83 |
+
61,
|
| 84 |
+
62,
|
| 85 |
+
63,
|
| 86 |
+
90,
|
| 87 |
+
91,
|
| 88 |
+
92,
|
| 89 |
+
93,
|
| 90 |
+
359,
|
| 91 |
+
503,
|
| 92 |
+
522,
|
| 93 |
+
542,
|
| 94 |
+
873,
|
| 95 |
+
893,
|
| 96 |
+
902,
|
| 97 |
+
918,
|
| 98 |
+
922,
|
| 99 |
+
931,
|
| 100 |
+
1350,
|
| 101 |
+
1853,
|
| 102 |
+
1982,
|
| 103 |
+
2460,
|
| 104 |
+
2627,
|
| 105 |
+
3246,
|
| 106 |
+
3253,
|
| 107 |
+
3268,
|
| 108 |
+
3536,
|
| 109 |
+
3846,
|
| 110 |
+
3961,
|
| 111 |
+
4183,
|
| 112 |
+
4667,
|
| 113 |
+
6585,
|
| 114 |
+
6647,
|
| 115 |
+
7273,
|
| 116 |
+
9061,
|
| 117 |
+
9383,
|
| 118 |
+
10428,
|
| 119 |
+
10929,
|
| 120 |
+
11938,
|
| 121 |
+
12033,
|
| 122 |
+
12331,
|
| 123 |
+
12562,
|
| 124 |
+
13793,
|
| 125 |
+
14157,
|
| 126 |
+
14635,
|
| 127 |
+
15265,
|
| 128 |
+
15618,
|
| 129 |
+
16553,
|
| 130 |
+
16604,
|
| 131 |
+
18362,
|
| 132 |
+
18956,
|
| 133 |
+
20075,
|
| 134 |
+
21675,
|
| 135 |
+
22520,
|
| 136 |
+
26130,
|
| 137 |
+
26161,
|
| 138 |
+
26435,
|
| 139 |
+
28279,
|
| 140 |
+
29464,
|
| 141 |
+
31650,
|
| 142 |
+
32302,
|
| 143 |
+
32470,
|
| 144 |
+
36865,
|
| 145 |
+
42863,
|
| 146 |
+
47425,
|
| 147 |
+
49870,
|
| 148 |
+
50254,
|
| 149 |
+
50258,
|
| 150 |
+
50358,
|
| 151 |
+
50359,
|
| 152 |
+
50360,
|
| 153 |
+
50361,
|
| 154 |
+
50362
|
| 155 |
+
],
|
| 156 |
+
"torch_dtype": "float32",
|
| 157 |
+
"use_cache": true,
|
| 158 |
+
"use_weighted_layer_sum": false,
|
| 159 |
+
"vocab_size": 51865
|
| 160 |
+
},
|
| 161 |
+
"audio_pool_step": 5,
|
| 162 |
+
"auto_map": {
|
| 163 |
+
"AutoConfig": "configuration_minicpmo.MiniCPMOConfig",
|
| 164 |
+
"AutoModel": "modeling_minicpmo.MiniCPMO",
|
| 165 |
+
"AutoModelForCausalLM": "modeling_minicpmo.MiniCPMO"
|
| 166 |
+
},
|
| 167 |
+
"batch_vision_input": true,
|
| 168 |
+
"bos_token_id": 151643,
|
| 169 |
+
"drop_vision_last_layer": false,
|
| 170 |
+
"eos_token_id": [
|
| 171 |
+
151645,
|
| 172 |
+
151643
|
| 173 |
+
],
|
| 174 |
+
"head_dim": 128,
|
| 175 |
+
"hidden_act": "silu",
|
| 176 |
+
"hidden_size": 4096,
|
| 177 |
+
"image_size": 448,
|
| 178 |
+
"init_audio": true,
|
| 179 |
+
"init_tts": true,
|
| 180 |
+
"init_vision": true,
|
| 181 |
+
"initializer_range": 0.02,
|
| 182 |
+
"intermediate_size": 12288,
|
| 183 |
+
"listen_speak_type": "asr",
|
| 184 |
+
"max_position_embeddings": 40960,
|
| 185 |
+
"max_window_layers": 36,
|
| 186 |
+
"model_type": "minicpmo",
|
| 187 |
+
"num_attention_heads": 32,
|
| 188 |
+
"num_hidden_layers": 36,
|
| 189 |
+
"num_key_value_heads": 8,
|
| 190 |
+
"patch_size": 14,
|
| 191 |
+
"quantization": {
|
| 192 |
+
"group_size": 64,
|
| 193 |
+
"bits": 4,
|
| 194 |
+
"mode": "affine"
|
| 195 |
+
},
|
| 196 |
+
"quantization_config": {
|
| 197 |
+
"group_size": 64,
|
| 198 |
+
"bits": 4,
|
| 199 |
+
"mode": "affine"
|
| 200 |
+
},
|
| 201 |
+
"query_num": 64,
|
| 202 |
+
"rms_norm_eps": 1e-06,
|
| 203 |
+
"rope_scaling": null,
|
| 204 |
+
"rope_theta": 1000000,
|
| 205 |
+
"slice_config": {
|
| 206 |
+
"max_slice_nums": 1,
|
| 207 |
+
"model_type": "minicpmv",
|
| 208 |
+
"patch_size": 14,
|
| 209 |
+
"scale_resolution": 448
|
| 210 |
+
},
|
| 211 |
+
"slice_mode": true,
|
| 212 |
+
"sliding_window": null,
|
| 213 |
+
"stream_input": true,
|
| 214 |
+
"tie_word_embeddings": false,
|
| 215 |
+
"transformers_version": "4.51.0",
|
| 216 |
+
"tts_config": {
|
| 217 |
+
"_attn_implementation_autoset": true,
|
| 218 |
+
"attention_type": "full_attention",
|
| 219 |
+
"attn_implementation": "sdpa",
|
| 220 |
+
"audio_bos_token_id": 151687,
|
| 221 |
+
"audio_tokenizer_sample_rate": 16000,
|
| 222 |
+
"audio_tokenizer_type": "s3tokenizer",
|
| 223 |
+
"aug_layer_loss_weight": false,
|
| 224 |
+
"aug_loss_weight": false,
|
| 225 |
+
"backbone_model": "llama",
|
| 226 |
+
"condition_type": "hidden_text_merge",
|
| 227 |
+
"cosyvoice_config_path": null,
|
| 228 |
+
"cosyvoice_model_dir": null,
|
| 229 |
+
"filter_tts_loss": false,
|
| 230 |
+
"hidden_act": "silu",
|
| 231 |
+
"hidden_size": 768,
|
| 232 |
+
"interleaved": false,
|
| 233 |
+
"intermediate_size": 3072,
|
| 234 |
+
"llm_dim": 4096,
|
| 235 |
+
"llm_dim_model_base": 256,
|
| 236 |
+
"llm_down_scale": false,
|
| 237 |
+
"llm_hidden_size": 4096,
|
| 238 |
+
"llm_intermediate_size": 768,
|
| 239 |
+
"long_weight": 0.1,
|
| 240 |
+
"max_position_embeddings": 4096,
|
| 241 |
+
"model_type": "minicpmtts",
|
| 242 |
+
"normalize_projected_hidden": true,
|
| 243 |
+
"num_attention_heads": 12,
|
| 244 |
+
"num_audio_tokens": 6562,
|
| 245 |
+
"num_hidden_layers": 20,
|
| 246 |
+
"num_key_value_heads": 12,
|
| 247 |
+
"num_mel_bins": 100,
|
| 248 |
+
"num_text_tokens": 152064,
|
| 249 |
+
"num_vq": 1,
|
| 250 |
+
"projector_type": "mlp",
|
| 251 |
+
"recomputed_chunks": 1,
|
| 252 |
+
"s3_stream_chunk_size": 25,
|
| 253 |
+
"s3_stream_generate": false,
|
| 254 |
+
"s3_stream_n_timesteps": 10,
|
| 255 |
+
"s3_stream_prelook_size": 3,
|
| 256 |
+
"short_weight": 0.1,
|
| 257 |
+
"streaming": false,
|
| 258 |
+
"streaming_audio_chunk_size": 50,
|
| 259 |
+
"streaming_sliding_window": false,
|
| 260 |
+
"streaming_sliding_window_audio_frame_rate": 50,
|
| 261 |
+
"streaming_sliding_window_audio_init_text_length": 10,
|
| 262 |
+
"streaming_sliding_window_audio_window_size": 300,
|
| 263 |
+
"streaming_sliding_window_average_speed": 5,
|
| 264 |
+
"streaming_sliding_window_fast_speed": 7,
|
| 265 |
+
"streaming_sliding_window_max_text_len": 500,
|
| 266 |
+
"streaming_sliding_window_slow_speed": 3,
|
| 267 |
+
"streaming_sliding_window_text_window_size": 50,
|
| 268 |
+
"streaming_text_chunk_max": 7,
|
| 269 |
+
"streaming_text_chunk_min": 3,
|
| 270 |
+
"streaming_text_reserved_len": 300,
|
| 271 |
+
"text_eos_token_id": 151692,
|
| 272 |
+
"tts_filter_loss_fix": false,
|
| 273 |
+
"use_llm_hidden_state": false,
|
| 274 |
+
"use_text": true,
|
| 275 |
+
"window_size": 2
|
| 276 |
+
},
|
| 277 |
+
"use_cache": true,
|
| 278 |
+
"use_image_id": true,
|
| 279 |
+
"use_sliding_window": false,
|
| 280 |
+
"version": "4.5",
|
| 281 |
+
"vision_batch_size": 16,
|
| 282 |
+
"vision_config": {
|
| 283 |
+
"_attn_implementation_autoset": true,
|
| 284 |
+
"attention_dropout": 0.0,
|
| 285 |
+
"hidden_act": "gelu_pytorch_tanh",
|
| 286 |
+
"hidden_size": 1152,
|
| 287 |
+
"image_size": 980,
|
| 288 |
+
"intermediate_size": 4304,
|
| 289 |
+
"layer_norm_eps": 1e-06,
|
| 290 |
+
"model_type": "siglip_vision_model",
|
| 291 |
+
"num_attention_heads": 16,
|
| 292 |
+
"num_channels": 3,
|
| 293 |
+
"num_hidden_layers": 27,
|
| 294 |
+
"patch_size": 14
|
| 295 |
+
},
|
| 296 |
+
"vocab_size": 151748
|
| 297 |
+
}
|
configuration_minicpmo.py
ADDED
|
@@ -0,0 +1,260 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
# Copyright 2026 The OpenBMB Team. All rights reserved.
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
|
| 17 |
+
import os
|
| 18 |
+
from typing import Union
|
| 19 |
+
|
| 20 |
+
from transformers import PretrainedConfig
|
| 21 |
+
from transformers import Qwen3Config
|
| 22 |
+
from transformers import WhisperConfig
|
| 23 |
+
from transformers.utils import logging
|
| 24 |
+
|
| 25 |
+
from .modeling_navit_siglip import SiglipVisionConfig
|
| 26 |
+
|
| 27 |
+
logger = logging.get_logger(__name__)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class MiniCPMVSliceConfig(PretrainedConfig):
|
| 31 |
+
model_type = "minicpmv"
|
| 32 |
+
|
| 33 |
+
def __init__(
|
| 34 |
+
self,
|
| 35 |
+
patch_size=14,
|
| 36 |
+
max_slice_nums=9,
|
| 37 |
+
scale_resolution=448,
|
| 38 |
+
**kwargs,
|
| 39 |
+
):
|
| 40 |
+
super().__init__(**kwargs)
|
| 41 |
+
self.patch_size = patch_size
|
| 42 |
+
self.max_slice_nums = max_slice_nums
|
| 43 |
+
self.scale_resolution = scale_resolution
|
| 44 |
+
|
| 45 |
+
@classmethod
|
| 46 |
+
def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig":
|
| 47 |
+
cls._set_token_in_kwargs(kwargs)
|
| 48 |
+
|
| 49 |
+
config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
|
| 50 |
+
|
| 51 |
+
if config_dict.get("model_type") == "minicpmv":
|
| 52 |
+
config_dict = config_dict["slice_config"]
|
| 53 |
+
|
| 54 |
+
if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
|
| 55 |
+
logger.warning(
|
| 56 |
+
f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
|
| 57 |
+
f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
return cls.from_dict(config_dict, **kwargs)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class MiniCPMTTSConfig(PretrainedConfig):
|
| 64 |
+
model_type = "minicpmtts"
|
| 65 |
+
|
| 66 |
+
def __init__(
|
| 67 |
+
self,
|
| 68 |
+
llm_dim: int = 2560,
|
| 69 |
+
llm_intermediate_size: int = 768,
|
| 70 |
+
llm_down_scale: bool = False,
|
| 71 |
+
llm_dim_model_base: int = 256,
|
| 72 |
+
projector_type: str = "mlp",
|
| 73 |
+
hidden_act: str = "silu",
|
| 74 |
+
aug_loss_weight: bool = False,
|
| 75 |
+
aug_layer_loss_weight: bool = False,
|
| 76 |
+
filter_tts_loss: bool = False,
|
| 77 |
+
tts_filter_loss_fix: bool = False,
|
| 78 |
+
long_weight: float = 0.1,
|
| 79 |
+
short_weight: float = 0.1,
|
| 80 |
+
hidden_size: int = 768,
|
| 81 |
+
intermediate_size: int = 3072,
|
| 82 |
+
num_attention_heads: int = 12,
|
| 83 |
+
num_hidden_layers: int = 20,
|
| 84 |
+
num_key_value_heads: int = 12,
|
| 85 |
+
max_position_embeddings: int = 4096,
|
| 86 |
+
num_audio_tokens: int = 4097,
|
| 87 |
+
num_text_tokens: int = 21178,
|
| 88 |
+
num_mel_bins: int = 100,
|
| 89 |
+
num_vq: int = 1,
|
| 90 |
+
use_llm_hidden_state: bool = False,
|
| 91 |
+
audio_bos_token_id: int = 21132,
|
| 92 |
+
text_eos_token_id: int = 21133,
|
| 93 |
+
use_text: bool = True,
|
| 94 |
+
streaming: bool = False,
|
| 95 |
+
streaming_text_chunk_min: int = 3,
|
| 96 |
+
streaming_text_chunk_max: int = 7,
|
| 97 |
+
streaming_text_reserved_len: int = 300,
|
| 98 |
+
streaming_audio_chunk_size: int = 50,
|
| 99 |
+
attn_implementation: str = "sdpa",
|
| 100 |
+
condition_type: str = "llm_hidden",
|
| 101 |
+
backbone_model: str = "llama",
|
| 102 |
+
audio_tokenizer_type: str = "wavtokenizer",
|
| 103 |
+
audio_tokenizer_sample_rate: int = 24000,
|
| 104 |
+
streaming_sliding_window: bool = False,
|
| 105 |
+
streaming_sliding_window_max_text_len: int = 500,
|
| 106 |
+
streaming_sliding_window_average_speed: int = 5,
|
| 107 |
+
streaming_sliding_window_fast_speed: int = 7,
|
| 108 |
+
streaming_sliding_window_slow_speed: int = 3,
|
| 109 |
+
streaming_sliding_window_audio_frame_rate: int = 50,
|
| 110 |
+
streaming_sliding_window_audio_init_text_length: int = 10,
|
| 111 |
+
streaming_sliding_window_audio_window_size: int = 300,
|
| 112 |
+
normalize_projected_hidden: bool = False,
|
| 113 |
+
interleaved: bool = False,
|
| 114 |
+
attention_type: str = "sliding_recompute",
|
| 115 |
+
recomputed_chunks: int = 1,
|
| 116 |
+
window_size: int = 2,
|
| 117 |
+
**kwargs,
|
| 118 |
+
):
|
| 119 |
+
super().__init__(**kwargs)
|
| 120 |
+
|
| 121 |
+
self.llm_dim = llm_dim
|
| 122 |
+
self.llm_hidden_size = llm_dim
|
| 123 |
+
self.llm_intermediate_size = llm_intermediate_size
|
| 124 |
+
self.llm_down_scale = llm_down_scale
|
| 125 |
+
self.llm_dim_model_base = llm_dim_model_base
|
| 126 |
+
self.projector_type = projector_type
|
| 127 |
+
self.aug_loss_weight = aug_loss_weight
|
| 128 |
+
self.aug_layer_loss_weight = aug_layer_loss_weight
|
| 129 |
+
self.tts_filter_loss_fix = tts_filter_loss_fix
|
| 130 |
+
self.filter_tts_loss = filter_tts_loss
|
| 131 |
+
self.long_weight = long_weight
|
| 132 |
+
self.short_weight = short_weight
|
| 133 |
+
self.hidden_act = hidden_act
|
| 134 |
+
|
| 135 |
+
self.hidden_size = hidden_size
|
| 136 |
+
self.intermediate_size = intermediate_size
|
| 137 |
+
self.num_attention_heads = num_attention_heads
|
| 138 |
+
self.num_hidden_layers = num_hidden_layers
|
| 139 |
+
self.num_key_value_heads = num_key_value_heads
|
| 140 |
+
self.max_position_embeddings = max_position_embeddings
|
| 141 |
+
self.num_audio_tokens = num_audio_tokens
|
| 142 |
+
self.num_text_tokens = num_text_tokens
|
| 143 |
+
self.num_mel_bins = num_mel_bins
|
| 144 |
+
self.num_vq = num_vq
|
| 145 |
+
self.use_llm_hidden_state = use_llm_hidden_state
|
| 146 |
+
self.audio_bos_token_id = audio_bos_token_id
|
| 147 |
+
self.text_eos_token_id = text_eos_token_id
|
| 148 |
+
self.use_text = use_text
|
| 149 |
+
self.streaming = streaming
|
| 150 |
+
self.streaming_text_chunk_min = streaming_text_chunk_min
|
| 151 |
+
self.streaming_text_chunk_max = streaming_text_chunk_max
|
| 152 |
+
self.streaming_text_reserved_len = streaming_text_reserved_len
|
| 153 |
+
self.streaming_audio_chunk_size = streaming_audio_chunk_size
|
| 154 |
+
self.attn_implementation = attn_implementation
|
| 155 |
+
self.condition_type = condition_type
|
| 156 |
+
self.backbone_model = backbone_model
|
| 157 |
+
self.audio_tokenizer_type = audio_tokenizer_type
|
| 158 |
+
self.audio_tokenizer_sample_rate = audio_tokenizer_sample_rate
|
| 159 |
+
|
| 160 |
+
self.streaming_sliding_window = streaming_sliding_window
|
| 161 |
+
self.streaming_sliding_window_max_text_len = streaming_sliding_window_max_text_len
|
| 162 |
+
self.streaming_sliding_window_average_speed = streaming_sliding_window_average_speed
|
| 163 |
+
self.streaming_sliding_window_fast_speed = streaming_sliding_window_fast_speed
|
| 164 |
+
self.streaming_sliding_window_slow_speed = streaming_sliding_window_slow_speed
|
| 165 |
+
self.streaming_sliding_window_audio_frame_rate = streaming_sliding_window_audio_frame_rate
|
| 166 |
+
self.streaming_sliding_window_audio_init_text_length = streaming_sliding_window_audio_init_text_length
|
| 167 |
+
self.streaming_sliding_window_audio_window_size = streaming_sliding_window_audio_window_size
|
| 168 |
+
|
| 169 |
+
self.normalize_projected_hidden = normalize_projected_hidden
|
| 170 |
+
|
| 171 |
+
self.interleaved = interleaved
|
| 172 |
+
self.attention_type = attention_type
|
| 173 |
+
self.recomputed_chunks = recomputed_chunks
|
| 174 |
+
self.window_size = window_size
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
class MiniCPMOConfig(Qwen3Config):
    """Top-level configuration for MiniCPM-o.

    Combines a Qwen3 language-model configuration (inherited) with sub-configs
    for the SigLIP vision tower, the Whisper audio encoder and the MiniCPM TTS
    head, plus the multimodal-specific hyperparameters (image slicing, query
    token count, audio pooling, streaming flags, ...).
    """

    model_type = "minicpmo"
    keys_to_ignore_at_inference = ["past_key_values"]

    # Defaults mirror HuggingFaceM4/siglip-so400m-14-980-flash-attn2-navit.
    default_vision_config = {
        "hidden_size": 1152,
        "image_size": 980,
        "intermediate_size": 4304,
        "model_type": "siglip",
        "num_attention_heads": 16,
        "num_hidden_layers": 27,
        "patch_size": 14,
    }

    def __init__(
        self,
        use_cache=True,
        query_num=64,
        image_size=448,
        drop_vision_last_layer=True,
        batch_vision_input=True,
        slice_config=None,
        vision_config=None,
        audio_config=None,
        tts_config=None,
        use_image_id=True,
        vision_batch_size=16,
        audio_pool_step=5,
        audio_chunk_length=1.0,
        stream_input=False,
        listen_speak_type="asr",
        init_vision=True,
        init_audio=True,
        init_tts=True,
        **kwargs,
    ):
        """Build the config; `vision_config`/`audio_config`/`tts_config` may
        each be None (use defaults), a plain dict, or an already-built config
        object. Remaining `kwargs` go to `Qwen3Config`."""
        # Generation / vision-resampler behaviour.
        self.use_cache = use_cache
        self.query_num = query_num
        self.image_size = image_size
        self.drop_vision_last_layer = drop_vision_last_layer
        self.batch_vision_input = batch_vision_input
        self.use_image_id = use_image_id
        self.vision_batch_size = vision_batch_size

        # Audio front-end behaviour.
        self.audio_pool_step = audio_pool_step
        self.audio_chunk_length = audio_chunk_length
        self.stream_input = stream_input
        self.listen_speak_type = listen_speak_type

        # Which sub-modules to actually instantiate at model init.
        self.init_vision = init_vision
        self.init_audio = init_audio
        self.init_tts = init_tts

        # Image slicing config (defaults to a single slice).
        slice_kwargs = {"max_slice_nums": 1} if slice_config is None else slice_config
        self.slice_config = MiniCPMVSliceConfig(**slice_kwargs)
        self.slice_mode = True

        # Same as HuggingFaceM4/siglip-so400m-14-980-flash-attn2-navit, with tgt_sizes added.
        # NOTE: a vision_config of any other type leaves the attribute unset,
        # matching the original behaviour.
        if isinstance(vision_config, SiglipVisionConfig):
            self.vision_config = vision_config
        elif isinstance(vision_config, dict):
            self.vision_config = SiglipVisionConfig(**vision_config)
        elif vision_config is None:
            self.vision_config = SiglipVisionConfig(**self.default_vision_config)
            logger.info("vision_config is None, using default vision config")

        if isinstance(audio_config, WhisperConfig):
            self.audio_config = audio_config
        elif isinstance(audio_config, dict):
            self.audio_config = WhisperConfig(**audio_config)
        elif audio_config is None:
            self.audio_config = WhisperConfig()

        if isinstance(tts_config, MiniCPMTTSConfig):
            self.tts_config = tts_config
        elif isinstance(tts_config, dict):
            self.tts_config = MiniCPMTTSConfig(**tts_config)
        elif tts_config is None:
            self.tts_config = MiniCPMTTSConfig()

        # Convenience mirror of the vision patch size.
        self.patch_size = self.vision_config.patch_size

        super().__init__(**kwargs)
|
generation_config.json
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"bos_token_id": 151643,
|
| 3 |
+
"do_sample": true,
|
| 4 |
+
"eos_token_id": [
|
| 5 |
+
151645,
|
| 6 |
+
151643
|
| 7 |
+
],
|
| 8 |
+
"pad_token_id": 151643,
|
| 9 |
+
"temperature": 0.6,
|
| 10 |
+
"top_k": 20,
|
| 11 |
+
"top_p": 0.95
|
| 12 |
+
}
|
model-00001-of-00002.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f90098983cc0aa9ebbe229df10c3de352f33a07ef57c321e76437bab5fa5a2c7
|
| 3 |
+
size 5361810143
|
model-00002-of-00002.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:94eec5685bbbdd2611070d789aa5e5beba9a8fcbd9ba6849762160a9813a0a1d
|
| 3 |
+
size 781665788
|
model.safetensors.index.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
modeling_minicpmo.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
modeling_navit_siglip.py
ADDED
|
@@ -0,0 +1,981 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2024 Google AI and The HuggingFace Team. All rights reserved.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""PyTorch Siglip model."""
|
| 16 |
+
# Copied from HuggingFaceM4/siglip-so400m-14-980-flash-attn2-navit and add tgt_sizes
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
import math
|
| 20 |
+
import os
|
| 21 |
+
import warnings
|
| 22 |
+
from dataclasses import dataclass
|
| 23 |
+
from typing import Optional
|
| 24 |
+
from typing import Tuple
|
| 25 |
+
from typing import Union
|
| 26 |
+
|
| 27 |
+
import numpy as np
|
| 28 |
+
import torch
|
| 29 |
+
import torch.nn.functional as F
|
| 30 |
+
import torch.utils.checkpoint
|
| 31 |
+
from torch import nn
|
| 32 |
+
from torch.nn.init import _calculate_fan_in_and_fan_out
|
| 33 |
+
from transformers.activations import ACT2FN
|
| 34 |
+
from transformers.configuration_utils import PretrainedConfig
|
| 35 |
+
from transformers.modeling_attn_mask_utils import _prepare_4d_attention_mask
|
| 36 |
+
from transformers.modeling_outputs import BaseModelOutput
|
| 37 |
+
from transformers.modeling_outputs import BaseModelOutputWithPooling
|
| 38 |
+
from transformers.modeling_utils import PreTrainedModel
|
| 39 |
+
from transformers.utils import add_start_docstrings
|
| 40 |
+
from transformers.utils import add_start_docstrings_to_model_forward
|
| 41 |
+
from transformers.utils import is_flash_attn_2_available
|
| 42 |
+
from transformers.utils import logging
|
| 43 |
+
from transformers.utils import ModelOutput
|
| 44 |
+
from transformers.utils import replace_return_docstrings
|
| 45 |
+
|
| 46 |
+
logger = logging.get_logger(__name__)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class SiglipVisionConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`SiglipVisionModel`]. It is used to instantiate a
    Siglip vision encoder according to the specified arguments, defining the model architecture. Instantiating a
    configuration with the defaults will yield a similar configuration to that of the vision encoder of the Siglip
    [google/siglip-base-patch16-224](https://huggingface.co/google/siglip-base-patch16-224) architecture.
    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.
    Args:
        hidden_size (`int`, *optional*, defaults to 768):
            Dimensionality of the encoder layers and the pooler layer.
        intermediate_size (`int`, *optional*, defaults to 3072):
            Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder.
        num_hidden_layers (`int`, *optional*, defaults to 12):
            Number of hidden layers in the Transformer encoder.
        num_attention_heads (`int`, *optional*, defaults to 12):
            Number of attention heads for each attention layer in the Transformer encoder.
        num_channels (`int`, *optional*, defaults to 3):
            Number of channels in the input images.
        image_size (`int`, *optional*, defaults to 224):
            The size (resolution) of each image.
        patch_size (`int`, *optional*, defaults to 16):
            The size (resolution) of each patch.
        hidden_act (`str` or `function`, *optional*, defaults to `"gelu_pytorch_tanh"`):
            The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
            `"relu"`, `"selu"`, `"gelu_new"` and `"quick_gelu"` are supported.
        layer_norm_eps (`float`, *optional*, defaults to 1e-06):
            The epsilon used by the layer normalization layers.
        attention_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.
    Example:
    ```python
    >>> from transformers import SiglipVisionConfig, SiglipVisionModel
    >>> # Initializing a SiglipVisionConfig with google/siglip-base-patch16-224 style configuration
    >>> configuration = SiglipVisionConfig()
    >>> # Initializing a SiglipVisionModel (with random weights) from the google/siglip-base-patch16-224 style configuration
    >>> model = SiglipVisionModel(configuration)
    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""

    model_type = "siglip_vision_model"

    def __init__(
        self,
        hidden_size=768,
        intermediate_size=3072,
        num_hidden_layers=12,
        num_attention_heads=12,
        num_channels=3,
        image_size=224,
        patch_size=16,
        hidden_act="gelu_pytorch_tanh",
        layer_norm_eps=1e-6,
        attention_dropout=0.0,
        **kwargs,
    ):
        super().__init__(**kwargs)

        # Transformer geometry.
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        # Input / patching geometry.
        self.num_channels = num_channels
        self.image_size = image_size
        self.patch_size = patch_size
        # Regularization and activations.
        self.attention_dropout = attention_dropout
        self.layer_norm_eps = layer_norm_eps
        self.hidden_act = hidden_act

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig":
        """Load the vision config, descending into `vision_config` when the
        checkpoint stores a composite SiglipConfig."""
        cls._set_token_in_kwargs(kwargs)

        config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)

        # Extract the vision section when loading from a full SiglipConfig.
        if config_dict.get("model_type") == "siglip":
            config_dict = config_dict["vision_config"]

        # Warn (but proceed) when instantiating from a different model type.
        if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
            logger.warning(
                f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
                f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
            )

        return cls.from_dict(config_dict, **kwargs)
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
_CHECKPOINT_FOR_DOC = "google/siglip-base-patch16-224"
|
| 139 |
+
|
| 140 |
+
SIGLIP_PRETRAINED_MODEL_ARCHIVE_LIST = [
|
| 141 |
+
"google/siglip-base-patch16-224",
|
| 142 |
+
# See all SigLIP models at https://huggingface.co/models?filter=siglip
|
| 143 |
+
]
|
| 144 |
+
|
| 145 |
+
if is_flash_attn_2_available():
|
| 146 |
+
from flash_attn import flash_attn_func
|
| 147 |
+
from flash_attn import flash_attn_varlen_func
|
| 148 |
+
from flash_attn.bert_padding import index_first_axis # noqa
|
| 149 |
+
from flash_attn.bert_padding import pad_input
|
| 150 |
+
from flash_attn.bert_padding import unpad_input
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
# Copied from transformers.models.llama.modeling_llama._get_unpad_data
|
| 154 |
+
def _get_unpad_data(attention_mask):
|
| 155 |
+
seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32)
|
| 156 |
+
indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten()
|
| 157 |
+
max_seqlen_in_batch = seqlens_in_batch.max().item()
|
| 158 |
+
cu_seqlens = F.pad(torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.torch.int32), (1, 0))
|
| 159 |
+
return (
|
| 160 |
+
indices,
|
| 161 |
+
cu_seqlens,
|
| 162 |
+
max_seqlen_in_batch,
|
| 163 |
+
)
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def _trunc_normal_(tensor, mean, std, a, b):
|
| 167 |
+
# Cut & paste from PyTorch official master until it's in a few official releases - RW
|
| 168 |
+
# Method based on https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf
|
| 169 |
+
def norm_cdf(x):
|
| 170 |
+
# Computes standard normal cumulative distribution function
|
| 171 |
+
return (1.0 + math.erf(x / math.sqrt(2.0))) / 2.0
|
| 172 |
+
|
| 173 |
+
if (mean < a - 2 * std) or (mean > b + 2 * std):
|
| 174 |
+
warnings.warn(
|
| 175 |
+
"mean is more than 2 std from [a, b] in nn.init.trunc_normal_. "
|
| 176 |
+
"The distribution of values may be incorrect.",
|
| 177 |
+
stacklevel=2,
|
| 178 |
+
)
|
| 179 |
+
|
| 180 |
+
# Values are generated by using a truncated uniform distribution and
|
| 181 |
+
# then using the inverse CDF for the normal distribution.
|
| 182 |
+
# Get upper and lower cdf values
|
| 183 |
+
l = norm_cdf((a - mean) / std)
|
| 184 |
+
u = norm_cdf((b - mean) / std)
|
| 185 |
+
|
| 186 |
+
# Uniformly fill tensor with values from [l, u], then translate to
|
| 187 |
+
# [2l-1, 2u-1].
|
| 188 |
+
tensor.uniform_(2 * l - 1, 2 * u - 1)
|
| 189 |
+
|
| 190 |
+
# Use inverse cdf transform for normal distribution to get truncated
|
| 191 |
+
# standard normal
|
| 192 |
+
if tensor.dtype in [torch.float16, torch.bfloat16]:
|
| 193 |
+
# The `erfinv_` op is not (yet?) defined in float16+cpu, bfloat16+gpu
|
| 194 |
+
og_dtype = tensor.dtype
|
| 195 |
+
tensor = tensor.to(torch.float32)
|
| 196 |
+
tensor.erfinv_()
|
| 197 |
+
tensor = tensor.to(og_dtype)
|
| 198 |
+
else:
|
| 199 |
+
tensor.erfinv_()
|
| 200 |
+
|
| 201 |
+
# Transform to proper mean, std
|
| 202 |
+
tensor.mul_(std * math.sqrt(2.0))
|
| 203 |
+
tensor.add_(mean)
|
| 204 |
+
|
| 205 |
+
# Clamp to ensure it's in the proper range
|
| 206 |
+
if tensor.dtype == torch.float16:
|
| 207 |
+
# The `clamp_` op is not (yet?) defined in float16+cpu
|
| 208 |
+
tensor = tensor.to(torch.float32)
|
| 209 |
+
tensor.clamp_(min=a, max=b)
|
| 210 |
+
tensor = tensor.to(torch.float16)
|
| 211 |
+
else:
|
| 212 |
+
tensor.clamp_(min=a, max=b)
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def trunc_normal_tf_(
    tensor: torch.Tensor,
    mean: float = 0.0,
    std: float = 1.0,
    a: float = -2.0,
    b: float = 2.0,
) -> torch.Tensor:
    r"""Fill `tensor` in place with a truncated normal, TF/JAX style.

    Values are effectively drawn from :math:`\mathcal{N}(\text{mean},
    \text{std}^2)` with values outside :math:`[a, b]` redrawn until they are
    within the bounds. The method used for generating the random values works
    best when :math:`a \leq \text{mean} \leq b`.

    NOTE: this 'tf' variant behaves closer to the Tensorflow / JAX impl where
    the bounds [a, b] are applied when sampling the normal distribution with
    mean=0, std=1.0, and the result is subsequently scaled and shifted by the
    mean and std args.

    Args:
        tensor: an n-dimensional `torch.Tensor`
        mean: the mean of the normal distribution
        std: the standard deviation of the normal distribution
        a: the minimum cutoff value
        b: the maximum cutoff value
    """
    with torch.no_grad():
        # Sample a standard truncated normal first, then scale and shift.
        _trunc_normal_(tensor, 0, 1.0, a, b)
        tensor.mul_(std)
        tensor.add_(mean)
| 241 |
+
|
| 242 |
+
|
| 243 |
+
def variance_scaling_(tensor, scale=1.0, mode="fan_in", distribution="normal"):
    """Initialize `tensor` in place with variance scaling (JAX/TF style).

    Args:
        tensor: tensor to fill in place.
        scale: multiplier applied to the base variance.
        mode: "fan_in", "fan_out" or "fan_avg" — which fan count normalizes
            the variance.
        distribution: "truncated_normal", "normal" or "uniform".

    Raises:
        ValueError: if `mode` or `distribution` is not one of the values above.
    """
    fan_in, fan_out = _calculate_fan_in_and_fan_out(tensor)
    if mode == "fan_in":
        denom = fan_in
    elif mode == "fan_out":
        denom = fan_out
    elif mode == "fan_avg":
        denom = (fan_in + fan_out) / 2
    else:
        # FIX: an unknown mode previously surfaced later as a confusing
        # `NameError: denom`; fail fast like the distribution branch does.
        raise ValueError(f"invalid mode {mode}")

    variance = scale / denom

    if distribution == "truncated_normal":
        # constant is stddev of standard normal truncated to (-2, 2)
        trunc_normal_tf_(tensor, std=math.sqrt(variance) / 0.87962566103423978)
    elif distribution == "normal":
        with torch.no_grad():
            tensor.normal_(std=math.sqrt(variance))
    elif distribution == "uniform":
        bound = math.sqrt(3 * variance)
        with torch.no_grad():
            tensor.uniform_(-bound, bound)
    else:
        raise ValueError(f"invalid distribution {distribution}")
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def lecun_normal_(tensor):
    """LeCun normal init: fan-in-scaled truncated normal, applied in place."""
    variance_scaling_(tensor, scale=1.0, mode="fan_in", distribution="truncated_normal")
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def default_flax_embed_init(tensor):
    """Flax's default embedding init: fan-in-scaled normal, applied in place."""
    variance_scaling_(tensor, scale=1.0, mode="fan_in", distribution="normal")
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
@dataclass
# Copied from transformers.models.clip.modeling_clip.CLIPVisionModelOutput with CLIP->Siglip
class SiglipVisionModelOutput(ModelOutput):
    """
    Base class for vision model's outputs that also contains image embeddings of the pooling of the last hidden states.
    Args:
        image_embeds (`torch.FloatTensor` of shape `(batch_size, output_dim)` *optional* returned when model is initialized with `with_projection=True`):
            The image embeddings obtained by applying the projection layer to the pooler_output.
        last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):
            Sequence of hidden-states at the output of the last layer of the model.
        hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
            Tuple of `torch.FloatTensor` (one for the output of the embeddings, if the model has an embedding layer, +
            one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`.
            Hidden-states of the model at the output of each layer plus the optional initial embedding outputs.
        attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
            sequence_length)`.
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention
            heads.
    """

    # All fields default to None; ModelOutput drops None entries from the
    # tuple/dict views, so only populated outputs are exposed to callers.
    image_embeds: Optional[torch.FloatTensor] = None
    last_hidden_state: torch.FloatTensor = None
    hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    attentions: Optional[Tuple[torch.FloatTensor]] = None
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
class SiglipVisionEmbeddings(nn.Module):
    """Patch + position embeddings for a NaViT-style SigLIP vision tower.

    Unlike vanilla SigLIP, each image's position ids are derived from its own
    patch grid (via the attention mask or explicit `tgt_sizes`), bucketized
    onto the fixed `num_patches_per_side x num_patches_per_side` position
    table — so variable-resolution inputs need no position interpolation.
    """

    def __init__(self, config: SiglipVisionConfig):
        super().__init__()
        self.config = config
        self.embed_dim = config.hidden_size
        self.image_size = config.image_size
        self.patch_size = config.patch_size

        # Non-overlapping patch projection: one conv step per patch.
        self.patch_embedding = nn.Conv2d(
            in_channels=config.num_channels,
            out_channels=self.embed_dim,
            kernel_size=self.patch_size,
            stride=self.patch_size,
            padding="valid",
        )

        self.num_patches_per_side = self.image_size // self.patch_size
        self.num_patches = self.num_patches_per_side**2
        self.num_positions = self.num_patches
        self.position_embedding = nn.Embedding(self.num_positions, self.embed_dim)

    def forward(
        self,
        pixel_values: torch.FloatTensor,
        patch_attention_mask: torch.BoolTensor,
        tgt_sizes: Optional[torch.IntTensor] = None,
    ) -> torch.Tensor:
        # pixel_values: (batch, channels, H, W); patch_attention_mask appears
        # to be a per-image boolean patch grid (batch, h_patches, w_patches)
        # given the [:, 0] / [0] indexing below — TODO confirm against caller.
        batch_size = pixel_values.size(0)

        # (B, C, H, W) -> (B, embed_dim, h, w) -> (B, h*w, embed_dim)
        patch_embeds = self.patch_embedding(pixel_values)
        embeddings = patch_embeds.flatten(2).transpose(1, 2)

        max_im_h, max_im_w = pixel_values.size(2), pixel_values.size(3)
        max_nb_patches_h, max_nb_patches_w = (
            max_im_h // self.patch_size,
            max_im_w // self.patch_size,
        )
        # Bucket edges partitioning [0, 1) into num_patches_per_side bins.
        boundaries = torch.arange(1 / self.num_patches_per_side, 1.0, 1 / self.num_patches_per_side)
        # Padded patches keep position id 0; only valid patches are filled in.
        position_ids = torch.full(
            size=(
                batch_size,
                max_nb_patches_h * max_nb_patches_w,
            ),
            fill_value=0,
        )

        for batch_idx, p_attn_mask in enumerate(patch_attention_mask):
            # Per-image valid patch-grid extent: explicit sizes if provided,
            # otherwise inferred from the mask's first column/row.
            if tgt_sizes is not None:
                nb_patches_h = tgt_sizes[batch_idx][0]
                nb_patches_w = tgt_sizes[batch_idx][1]
            else:
                nb_patches_h = p_attn_mask[:, 0].sum()
                nb_patches_w = p_attn_mask[0].sum()

            # Fractional coordinate of each valid patch in [0, 1); the -1e-6
            # keeps the endpoint out of the range.
            fractional_coords_h = torch.arange(0, 1 - 1e-6, 1 / nb_patches_h)
            fractional_coords_w = torch.arange(0, 1 - 1e-6, 1 / nb_patches_w)

            # Map fractional coordinates onto the fixed position grid.
            bucket_coords_h = torch.bucketize(fractional_coords_h, boundaries, right=True)
            bucket_coords_w = torch.bucketize(fractional_coords_w, boundaries, right=True)

            # Row-major flatten: pos = row * side + col.
            pos_ids = (bucket_coords_h[:, None] * self.num_patches_per_side + bucket_coords_w).flatten()
            # Boolean-mask assignment fills only the valid patch slots.
            position_ids[batch_idx][p_attn_mask.view(-1).cpu()] = pos_ids

        position_ids = position_ids.to(self.position_embedding.weight.device)

        embeddings = embeddings + self.position_embedding(position_ids)
        return embeddings
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
class SiglipAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    # Copied from transformers.models.clip.modeling_clip.CLIPAttention.__init__
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.embed_dim = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = self.embed_dim // self.num_heads
        if self.head_dim * self.num_heads != self.embed_dim:
            raise ValueError(
                f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:"
                f" {self.num_heads})."
            )
        self.scale = self.head_dim**-0.5
        self.dropout = config.attention_dropout

        # Creation order matters for parameter/state-dict ordering: k, v, q, out.
        self.k_proj = nn.Linear(self.embed_dim, self.embed_dim)
        self.v_proj = nn.Linear(self.embed_dim, self.embed_dim)
        self.q_proj = nn.Linear(self.embed_dim, self.embed_dim)
        self.out_proj = nn.Linear(self.embed_dim, self.embed_dim)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
        """Input shape: Batch x Time x Channel"""

        bsz, seq_len, _ = hidden_states.size()

        def _split_heads(projected: torch.Tensor) -> torch.Tensor:
            # (bsz, seq, embed) -> (bsz, num_heads, seq, head_dim)
            return projected.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2)

        queries = _split_heads(self.q_proj(hidden_states))
        keys = _split_heads(self.k_proj(hidden_states))
        values = _split_heads(self.v_proj(hidden_states))

        kv_len = keys.shape[-2]
        # Scaled dot-product scores: (bsz, heads, q_len, kv_len).
        scores = torch.matmul(queries, keys.transpose(2, 3)) * self.scale

        if scores.size() != (bsz, self.num_heads, seq_len, kv_len):
            raise ValueError(
                f"Attention weights should be of size {(bsz, self.num_heads, seq_len, kv_len)}, but is"
                f" {scores.size()}"
            )

        if attention_mask is not None:
            if attention_mask.size() != (bsz, 1, seq_len, kv_len):
                raise ValueError(
                    f"Attention mask should be of size {(bsz, 1, seq_len, kv_len)}, but is {attention_mask.size()}"
                )
            scores = scores + attention_mask

        # upcast attention to fp32
        probs = nn.functional.softmax(scores, dim=-1, dtype=torch.float32).to(queries.dtype)
        probs = nn.functional.dropout(probs, p=self.dropout, training=self.training)
        context = torch.matmul(probs, values)

        if context.size() != (bsz, self.num_heads, seq_len, self.head_dim):
            raise ValueError(
                f"`attn_output` should be of size {(bsz, self.num_heads, seq_len, self.head_dim)}, but is"
                f" {context.size()}"
            )

        # Merge heads back: (bsz, seq, embed_dim), then final projection.
        context = context.transpose(1, 2).contiguous()
        context = context.reshape(bsz, seq_len, self.embed_dim)

        attn_output = self.out_proj(context)

        # NOTE: attention weights are always returned, regardless of
        # `output_attentions`, matching the original implementation.
        return attn_output, probs
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
class SiglipFlashAttention2(SiglipAttention):
    """
    Siglip flash attention module. This module inherits from `SiglipAttention` as the weights of the module stay
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.is_causal = False  # Hack to make sure we don't use a causal mask

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Tuple[torch.Tensor]] = None,
        output_attentions: bool = False,
        use_cache: bool = False,
        **kwargs,
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
        # Flash attention kernels never materialize the attention matrix, so
        # attention weights cannot be returned; force the flag off.
        output_attentions = False

        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        # Flash attention requires the input to have the shape
        # batch_size x seq_length x head_dim x hidden_dim
        # therefore we just need to keep the original shape
        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)

        kv_seq_len = key_states.shape[-2]
        if past_key_value is not None:
            # NOTE(review): assumes `self.layer_idx` was set by the parent class when a
            # cache is used — confirm, it is not assigned anywhere in this module.
            kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx)
        # Rotary embeddings are intentionally disabled for this vision tower:
        # cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
        # query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids)

        # if past_key_value is not None:
        #     cache_kwargs = {"sin": sin, "cos": cos}  # Specific to RoPE models
        #     key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        # TODO: These transpose are quite inefficient but Flash Attention requires the layout [batch_size, sequence_length, num_heads, head_dim]. We would need to refactor the KV cache
        # to be able to avoid many of these transpose/reshape/view.
        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        # Dropout only applies at training time.
        dropout_rate = self.dropout if self.training else 0.0

        # In PEFT, usually we cast the layer norms in float32 for training stability reasons
        # therefore the input hidden states gets silently casted in float32. Hence, we need
        # cast them back in the correct dtype just to be sure everything works as expected.
        # This might slowdown training & inference so it is recommended to not cast the LayerNorms
        # in fp32. (LlamaRMSNorm handles it correctly)

        input_dtype = query_states.dtype
        if input_dtype == torch.float32:
            if torch.is_autocast_enabled():
                target_dtype = torch.get_autocast_gpu_dtype()
            # Handle the case where the model is quantized
            elif hasattr(self.config, "_pre_quantization_dtype"):
                target_dtype = self.config._pre_quantization_dtype
            else:
                target_dtype = self.q_proj.weight.dtype

            logger.warning_once(
                "The input hidden states seems to be silently casted in float32, this might be related to the fact"
                " you have upcasted embedding or layer norm layers in float32. We will cast back the input in"
                f" {target_dtype}."
            )

            query_states = query_states.to(target_dtype)
            key_states = key_states.to(target_dtype)
            value_states = value_states.to(target_dtype)

        attn_output = self._flash_attention_forward(
            query_states,
            key_states,
            value_states,
            attention_mask,
            q_len,
            dropout=dropout_rate,
        )

        # Merge heads back: (bsz, q_len, num_heads * head_dim) -> output projection.
        attn_output = attn_output.reshape(bsz, q_len, self.embed_dim).contiguous()
        attn_output = self.out_proj(attn_output)

        # `output_attentions` is always False here (forced above), so this is always None.
        if not output_attentions:
            attn_weights = None

        return attn_output, attn_weights

    def _flash_attention_forward(
        self,
        query_states,
        key_states,
        value_states,
        attention_mask,
        query_length,
        dropout=0.0,
        softmax_scale=None,
    ):
        """
        Calls the forward method of Flash Attention - if the input hidden states contain at least one padding token
        first unpad the input, then computes the attention scores and pad the final attention scores.
        Args:
            query_states (`torch.Tensor`):
                Input query states to be passed to Flash Attention API
            key_states (`torch.Tensor`):
                Input key states to be passed to Flash Attention API
            value_states (`torch.Tensor`):
                Input value states to be passed to Flash Attention API
            attention_mask (`torch.Tensor`):
                The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the
                position of padding tokens and 1 for the position of non-padding tokens.
            dropout (`int`, *optional*):
                Attention dropout
            softmax_scale (`float`, *optional*):
                The scaling of QK^T before applying softmax. Default to 1 / sqrt(head_dim)
        """

        # TODO: Remove the `query_length != 1` check once Flash Attention for RoCm is bumped to 2.1. For details, please see the comment in LlamaFlashAttention2 __init__.
        # `self.is_causal` is False for this class, so `causal` is always False here.
        causal = self.is_causal and query_length != 1

        # Contains at least one padding token in the sequence
        if attention_mask is not None:
            batch_size = query_states.shape[0]
            (
                query_states,
                key_states,
                value_states,
                indices_q,
                cu_seq_lens,
                max_seq_lens,
            ) = self._upad_input(query_states, key_states, value_states, attention_mask, query_length)

            cu_seqlens_q, cu_seqlens_k = cu_seq_lens
            max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens

            # Variable-length kernel over the packed (unpadded) sequences.
            attn_output_unpad = flash_attn_varlen_func(
                query_states,
                key_states,
                value_states,
                cu_seqlens_q=cu_seqlens_q,
                cu_seqlens_k=cu_seqlens_k,
                max_seqlen_q=max_seqlen_in_batch_q,
                max_seqlen_k=max_seqlen_in_batch_k,
                dropout_p=dropout,
                softmax_scale=softmax_scale,
                causal=causal,
            )

            # Scatter the packed rows back to the padded (batch, seq) layout.
            attn_output = pad_input(attn_output_unpad, indices_q, batch_size, query_length)
        else:
            attn_output = flash_attn_func(
                query_states,
                key_states,
                value_states,
                dropout,
                softmax_scale=softmax_scale,
                causal=causal,
            )

        return attn_output

    def _upad_input(self, query_layer, key_layer, value_layer, attention_mask, query_length):
        # Remove padding positions so flash-attn's varlen kernel only sees real tokens.
        indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data(attention_mask)
        batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape

        key_layer = index_first_axis(
            key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim),
            indices_k,
        )
        value_layer = index_first_axis(
            value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim),
            indices_k,
        )
        if query_length == kv_seq_len:
            # No cache: queries share the keys' unpadding metadata.
            query_layer = index_first_axis(
                query_layer.reshape(batch_size * kv_seq_len, self.num_heads, head_dim),
                indices_k,
            )
            cu_seqlens_q = cu_seqlens_k
            max_seqlen_in_batch_q = max_seqlen_in_batch_k
            indices_q = indices_k
        elif query_length == 1:
            # Decoding: exactly one query per batch element.
            max_seqlen_in_batch_q = 1
            cu_seqlens_q = torch.arange(
                batch_size + 1, dtype=torch.int32, device=query_layer.device
            )  # There is a memcpy here, that is very bad.
            indices_q = cu_seqlens_q[:-1]
            query_layer = query_layer.squeeze(1)
        else:
            # The -q_len: slice assumes left padding.
            attention_mask = attention_mask[:, -query_length:]
            query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input(query_layer, attention_mask)

        return (
            query_layer,
            key_layer,
            value_layer,
            indices_q,
            (cu_seqlens_q, cu_seqlens_k),
            (max_seqlen_in_batch_q, max_seqlen_in_batch_k),
        )
|
| 658 |
+
|
| 659 |
+
|
| 660 |
+
# Copied from transformers.models.clip.modeling_clip.CLIPMLP with CLIP->Siglip
class SiglipMLP(nn.Module):
    """Position-wise feed-forward block: hidden -> intermediate -> hidden with a configurable activation."""

    def __init__(self, config):
        super().__init__()
        self.config = config
        # Activation is resolved by name through the shared ACT2FN registry.
        self.activation_fn = ACT2FN[config.hidden_act]
        self.fc1 = nn.Linear(config.hidden_size, config.intermediate_size)
        self.fc2 = nn.Linear(config.intermediate_size, config.hidden_size)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        """Project up to the intermediate size, apply the activation, project back down."""
        return self.fc2(self.activation_fn(self.fc1(hidden_states)))
|
| 674 |
+
|
| 675 |
+
|
| 676 |
+
# Copied from transformers.models.clip.modeling_clip.CLIPEncoderLayer with CLIP->Siglip
class SiglipEncoderLayer(nn.Module):
    """One pre-norm transformer block: self-attention then MLP, each wrapped in a residual connection."""

    def __init__(self, config: SiglipVisionConfig):
        super().__init__()
        self.embed_dim = config.hidden_size
        self._use_flash_attention_2 = config._attn_implementation == "flash_attention_2"
        # Pick the attention implementation according to the config.
        if self._use_flash_attention_2:
            self.self_attn = SiglipFlashAttention2(config)
        else:
            self.self_attn = SiglipAttention(config)
        self.layer_norm1 = nn.LayerNorm(self.embed_dim, eps=config.layer_norm_eps)
        self.mlp = SiglipMLP(config)
        self.layer_norm2 = nn.LayerNorm(self.embed_dim, eps=config.layer_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: torch.Tensor,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.FloatTensor]:
        """
        Args:
            hidden_states (`torch.FloatTensor`):
                Input to the layer of shape `(batch, seq_len, embed_dim)`.
            attention_mask (`torch.FloatTensor`):
                Attention mask of shape `(batch, 1, q_len, k_v_seq_len)` where padding elements are indicated by very large negative values.
            output_attentions (`bool`, *optional*, defaults to `False`):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
        """
        # Self-attention sub-block (pre-norm, residual).
        attn_out, attn_weights = self.self_attn(
            hidden_states=self.layer_norm1(hidden_states),
            attention_mask=attention_mask,
            output_attentions=output_attentions,
        )
        hidden_states = hidden_states + attn_out

        # Feed-forward sub-block (pre-norm, residual).
        hidden_states = hidden_states + self.mlp(self.layer_norm2(hidden_states))

        if output_attentions:
            return (hidden_states, attn_weights)
        return (hidden_states,)
|
| 724 |
+
|
| 725 |
+
|
| 726 |
+
class SiglipPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = SiglipVisionConfig
    base_model_prefix = "siglip"
    supports_gradient_checkpointing = True

    def _init_weights(self, module):
        """Initialize the weights of one submodule, dispatched on its type."""

        if isinstance(module, SiglipVisionEmbeddings):
            # Position table: normal with std scaled by 1/sqrt(hidden width).
            width = self.config.hidden_size
            nn.init.normal_(module.position_embedding.weight, std=1 / np.sqrt(width))
        elif isinstance(module, nn.Embedding):
            default_flax_embed_init(module.weight)
        elif isinstance(module, SiglipAttention):
            # Standard-normal weights, zero biases, for all four projections.
            # (normal_ calls stay in q->k->v->out order, preserving the RNG stream.)
            for proj in (module.q_proj, module.k_proj, module.v_proj, module.out_proj):
                nn.init.normal_(proj.weight)
                nn.init.zeros_(proj.bias)
        elif isinstance(module, SiglipMLP):
            nn.init.normal_(module.fc1.weight)
            nn.init.normal_(module.fc2.weight)
            # Near-zero biases.
            nn.init.normal_(module.fc1.bias, std=1e-6)
            nn.init.normal_(module.fc2.bias, std=1e-6)
        elif isinstance(module, (nn.Linear, nn.Conv2d)):
            lecun_normal_(module.weight)
            if module.bias is not None:
                nn.init.zeros_(module.bias)
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
|
| 765 |
+
|
| 766 |
+
|
| 767 |
+
SIGLIP_START_DOCSTRING = r"""
|
| 768 |
+
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
|
| 769 |
+
library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
|
| 770 |
+
etc.)
|
| 771 |
+
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
|
| 772 |
+
Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
|
| 773 |
+
and behavior.
|
| 774 |
+
Parameters:
|
| 775 |
+
config ([`SiglipVisionConfig`]): Model configuration class with all the parameters of the model.
|
| 776 |
+
Initializing with a config file does not load the weights associated with the model, only the
|
| 777 |
+
configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
|
| 778 |
+
"""
|
| 779 |
+
|
| 780 |
+
|
| 781 |
+
SIGLIP_VISION_INPUTS_DOCSTRING = r"""
|
| 782 |
+
Args:
|
| 783 |
+
pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):
|
| 784 |
+
Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained using
|
| 785 |
+
[`AutoImageProcessor`]. See [`CLIPImageProcessor.__call__`] for details.
|
| 786 |
+
output_attentions (`bool`, *optional*):
|
| 787 |
+
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
|
| 788 |
+
tensors for more detail.
|
| 789 |
+
output_hidden_states (`bool`, *optional*):
|
| 790 |
+
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
|
| 791 |
+
more detail.
|
| 792 |
+
return_dict (`bool`, *optional*):
|
| 793 |
+
Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
|
| 794 |
+
"""
|
| 795 |
+
|
| 796 |
+
|
| 797 |
+
# Copied from transformers.models.clip.modeling_clip.CLIPEncoder with CLIP->Siglip
class SiglipEncoder(nn.Module):
    """
    Transformer encoder consisting of `config.num_hidden_layers` self attention layers. Each layer is a
    [`SiglipEncoderLayer`].
    Args:
        config: SiglipConfig
    """

    def __init__(self, config: SiglipVisionConfig):
        super().__init__()
        self.config = config
        self.layers = nn.ModuleList([SiglipEncoderLayer(config) for _ in range(config.num_hidden_layers)])
        # Flipped on by the enclosing PreTrainedModel's gradient-checkpointing API.
        self.gradient_checkpointing = False

    # Ignore copy
    def forward(
        self,
        inputs_embeds,
        attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BaseModelOutput]:
        r"""
        Args:
            inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):
                Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation.
                This is useful if you want more control over how to convert `input_ids` indices into associated vectors
                than the model's internal embedding lookup matrix.
            attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
                Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.
                [What are attention masks?](../glossary#attention-mask)
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            output_hidden_states (`bool`, *optional*):
                Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors
                for more detail.
            return_dict (`bool`, *optional*):
                Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
        """
        # Fall back to the config-level defaults for any unspecified flag.
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        encoder_states = () if output_hidden_states else None
        all_attentions = () if output_attentions else None

        hidden_states = inputs_embeds
        for encoder_layer in self.layers:
            # Record the layer *input* so `encoder_states` holds embeddings plus every layer output.
            if output_hidden_states:
                encoder_states = encoder_states + (hidden_states,)
            if self.gradient_checkpointing and self.training:
                # Recompute activations in the backward pass to save memory.
                layer_outputs = self._gradient_checkpointing_func(
                    encoder_layer.__call__,
                    hidden_states,
                    attention_mask,
                    output_attentions,
                )
            else:
                layer_outputs = encoder_layer(
                    hidden_states,
                    attention_mask,
                    output_attentions=output_attentions,
                )

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_attentions = all_attentions + (layer_outputs[1],)

        # Append the final layer's output as the last hidden state.
        if output_hidden_states:
            encoder_states = encoder_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states,
            hidden_states=encoder_states,
            attentions=all_attentions,
        )
|
| 883 |
+
|
| 884 |
+
|
| 885 |
+
@add_start_docstrings(
    """The vision model from SigLIP without any head or projection on top.""",
    SIGLIP_START_DOCSTRING,
)
class SiglipVisionTransformer(SiglipPreTrainedModel):
    # Standalone vision tower: patch embeddings -> transformer encoder -> final LayerNorm.
    config_class = SiglipVisionConfig
    main_input_name = "pixel_values"
    _supports_flash_attn_2 = True
    _no_split_modules = []

    def __init__(self, config: SiglipVisionConfig):
        super().__init__(config)
        self.config = config
        embed_dim = config.hidden_size

        self.embeddings = SiglipVisionEmbeddings(config)
        self.encoder = SiglipEncoder(config)
        self.post_layernorm = nn.LayerNorm(embed_dim, eps=config.layer_norm_eps)
        self._use_flash_attention_2 = config._attn_implementation == "flash_attention_2"

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self) -> nn.Module:
        return self.embeddings.patch_embedding

    @add_start_docstrings_to_model_forward(SIGLIP_VISION_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=SiglipVisionConfig)
    def forward(
        self,
        pixel_values,
        patch_attention_mask: Optional[torch.BoolTensor] = None,
        tgt_sizes: Optional[torch.IntTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BaseModelOutputWithPooling]:
        r"""
        Returns:
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        batch_size = pixel_values.size(0)
        # Without an explicit mask, every patch position is treated as valid.
        if patch_attention_mask is None:
            patch_attention_mask = torch.ones(
                size=(
                    batch_size,
                    pixel_values.size(2) // self.config.patch_size,
                    pixel_values.size(3) // self.config.patch_size,
                ),
                dtype=torch.bool,
                device=pixel_values.device,
            )

        # NOTE(review): `tgt_sizes` is presumably the per-image (height, width) in patches
        # used by the NaViT-style embedding — confirm against SiglipVisionEmbeddings.
        hidden_states = self.embeddings(
            pixel_values=pixel_values,
            patch_attention_mask=patch_attention_mask,
            tgt_sizes=tgt_sizes,
        )

        # Flatten the 2D patch grid mask to (batch, num_patches).
        patch_attention_mask = patch_attention_mask.view(batch_size, -1)
        # The call to `_upad_input` in `_flash_attention_forward` is expensive
        # So when the `patch_attention_mask` is full of 1s (i.e. attending to the whole sequence),
        # avoiding passing the attention_mask, which is equivalent to attending to the full sequence
        if not torch.any(~patch_attention_mask):
            attention_mask = None
        else:
            # Flash attention takes the raw 2D padding mask; eager attention
            # needs the expanded additive 4D mask.
            attention_mask = (
                _prepare_4d_attention_mask(patch_attention_mask, hidden_states.dtype)
                if not self._use_flash_attention_2
                else patch_attention_mask
            )

        encoder_outputs = self.encoder(
            inputs_embeds=hidden_states,
            attention_mask=attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        last_hidden_state = encoder_outputs[0]
        last_hidden_state = self.post_layernorm(last_hidden_state)

        # No pooling head in this model: pooler slot is always None.
        if not return_dict:
            return (last_hidden_state, None) + encoder_outputs[1:]

        return BaseModelOutputWithPooling(
            last_hidden_state=last_hidden_state,
            pooler_output=None,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )
|
preprocessor_config.json
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"image_processor_type": "MiniCPMVImageProcessor",
|
| 3 |
+
"feature_extractor_type": "MiniCPMAAudioProcessor",
|
| 4 |
+
"auto_map": {
|
| 5 |
+
"AutoProcessor": "processing_minicpmo.MiniCPMOProcessor",
|
| 6 |
+
"AutoImageProcessor": "processing_minicpmo.MiniCPMVImageProcessor",
|
| 7 |
+
"AutoFeatureExtractor": "processing_minicpmo.MiniCPMAAudioProcessor"
|
| 8 |
+
},
|
| 9 |
+
"processor_class": "MiniCPMOProcessor",
|
| 10 |
+
"max_slice_nums": 9,
|
| 11 |
+
"scale_resolution": 448,
|
| 12 |
+
"patch_size": 14,
|
| 13 |
+
"use_image_id": true,
|
| 14 |
+
"image_feature_size": 64,
|
| 15 |
+
"im_start": "<image>",
|
| 16 |
+
"im_end": "</image>",
|
| 17 |
+
"slice_start": "<slice>",
|
| 18 |
+
"slice_end": "</slice>",
|
| 19 |
+
"unk": "<unk>",
|
| 20 |
+
"im_id_start": "<image_id>",
|
| 21 |
+
"im_id_end": "</image_id>",
|
| 22 |
+
"slice_mode": true,
|
| 23 |
+
"audio_pool_step": 5,
|
| 24 |
+
"norm_mean": [
|
| 25 |
+
0.5,
|
| 26 |
+
0.5,
|
| 27 |
+
0.5
|
| 28 |
+
],
|
| 29 |
+
"norm_std": [
|
| 30 |
+
0.5,
|
| 31 |
+
0.5,
|
| 32 |
+
0.5
|
| 33 |
+
],
|
| 34 |
+
"version": 4.5
|
| 35 |
+
}
|
processing_minicpmo.py
ADDED
|
@@ -0,0 +1,1665 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
# Copyright 2026 The OpenBMB Team. All rights reserved.
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
|
| 17 |
+
import copy
|
| 18 |
+
import math
|
| 19 |
+
import re
|
| 20 |
+
from typing import Any
|
| 21 |
+
from typing import Dict
|
| 22 |
+
from typing import List
|
| 23 |
+
from typing import Optional
|
| 24 |
+
from typing import Tuple
|
| 25 |
+
from typing import Union
|
| 26 |
+
|
| 27 |
+
import numpy as np
|
| 28 |
+
import torch
|
| 29 |
+
from PIL import Image
|
| 30 |
+
from transformers import AutoImageProcessor
|
| 31 |
+
from transformers.audio_utils import spectrogram
|
| 32 |
+
from transformers.audio_utils import window_function
|
| 33 |
+
from transformers.image_processing_utils import BaseImageProcessor
|
| 34 |
+
from transformers.image_processing_utils import BatchFeature
|
| 35 |
+
from transformers.image_transforms import to_channel_dimension_format
|
| 36 |
+
from transformers.image_utils import ChannelDimension
|
| 37 |
+
from transformers.image_utils import ImageInput
|
| 38 |
+
from transformers.image_utils import infer_channel_dimension_format
|
| 39 |
+
from transformers.image_utils import is_torch_tensor
|
| 40 |
+
from transformers.image_utils import to_numpy_array
|
| 41 |
+
from transformers.image_utils import valid_images
|
| 42 |
+
from transformers.models.whisper.feature_extraction_whisper import WhisperFeatureExtractor
|
| 43 |
+
from transformers.processing_utils import ProcessorMixin
|
| 44 |
+
from transformers.tokenization_utils_base import PreTokenizedInput
|
| 45 |
+
from transformers.tokenization_utils_base import TextInput
|
| 46 |
+
from transformers.utils import is_torch_device
|
| 47 |
+
from transformers.utils import is_torch_dtype
|
| 48 |
+
from transformers.utils import requires_backends
|
| 49 |
+
from transformers.utils import TensorType
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def recursive_converter(converter, value):
    """Apply ``converter`` to ``value``, descending recursively into nested lists.

    Non-list values are handed to ``converter`` directly; lists are rebuilt
    element by element so the nesting structure is preserved exactly.
    """
    if not isinstance(value, list):
        return converter(value)
    return [recursive_converter(converter, item) for item in value]
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class MiniCPMOBatchFeature(BatchFeature):
    """Extend from BatchFeature for supporting various image size"""

    def __init__(self, data: Optional[Dict[str, Any]] = None, tensor_type: Union[None, str, TensorType] = None):
        # Unlike the parent class, tensor conversion is done with the
        # recursive converter below so nested per-image lists survive.
        super().__init__(data)
        self.convert_to_tensors(tensor_type=tensor_type)

    def convert_to_tensors(self, tensor_type: Optional[Union[str, TensorType]] = None):
        """Convert every stored value (including nested lists) to `tensor_type` in place.

        Returns self; a no-op when `tensor_type` is None.
        """
        if tensor_type is None:
            return self

        # Framework-specific predicate / constructor pair (torch, np, ...).
        is_tensor, as_tensor = self._get_is_as_tensor_fns(tensor_type)

        def converter(value):
            try:
                if not is_tensor(value):
                    tensor = as_tensor(value)
                    return tensor
                # NOTE(review): values that already satisfy is_tensor fall
                # through and this closure returns None — appears to rely on
                # inputs never being pre-converted; confirm before reuse.
            except:  # noqa E722
                # `key` is resolved from the enclosing for-loop below; the
                # closure is only ever invoked inside that loop, so it is
                # always bound when this branch runs.
                if key == "overflowing_values":
                    raise ValueError("Unable to create tensor returning overflowing values of different lengths. ")
                raise ValueError(
                    "Unable to create tensor, you should probably activate padding "
                    "with 'padding=True' to have batched tensors with the same length."
                )

        for key, value in self.items():
            self[key] = recursive_converter(converter, value)
        return self

    def to(self, *args, **kwargs) -> "MiniCPMOBatchFeature":
        """Cast/move tensors like `BatchFeature.to`, but recursing into nested lists.

        Only floating-point tensors are cast; integer tensors are at most
        moved to `device` so tokenizer LongTensors keep their dtype.
        """
        requires_backends(self, ["torch"])
        import torch

        def cast_tensor(v):
            # Non-tensors (e.g. PIL sizes, plain lists) pass through untouched.
            if not torch.is_tensor(v):
                return v

            if torch.is_floating_point(v):
                # Full cast: may change both dtype and device.
                return v.to(*args, **kwargs)
            elif device is not None:
                # `device` is resolved from the enclosing scope; it is assigned
                # below before this closure is first called.
                return v.to(device=device)
            else:
                return v

        new_data = {}
        device = kwargs.get("device")
        # Mirror BatchFeature.to: a single positional arg may be a dtype
        # (ignored for device purposes) or a device spec.
        if device is None and len(args) > 0:
            arg = args[0]
            if is_torch_dtype(arg):
                pass
            elif isinstance(arg, str) or is_torch_device(arg) or isinstance(arg, int):
                device = arg
            else:
                raise ValueError(f"Attempting to cast a BatchFeature to type {str(arg)}. This is not supported.")

        # We cast only floating point tensors to avoid issues with tokenizers casting `LongTensor` to `FloatTensor`
        for k, v in self.items():
            new_data[k] = recursive_converter(cast_tensor, v)
        self.data = new_data
        return self
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class MiniCPMVImageProcessor(BaseImageProcessor):
    # Adaptive image slicer: large images are split into a grid of
    # patch-aligned sub-images plus a down-sampled overview image, and the
    # matching "<image>...<slice>..." text placeholders are built here too.
    model_input_names = ["pixel_values"]

    def __init__(self, max_slice_nums=9, scale_resolution=448, patch_size=14, **kwargs):
        """Create the processor.

        Args:
            max_slice_nums: upper bound on the number of grid slices per image.
            scale_resolution: per-slice resolution budget (pixels per side).
            patch_size: ViT patch size; all output sizes are multiples of it.
            **kwargs: optional overrides for placeholder tokens, normalization
                statistics, slice mode, and version tag (popped below).
        """
        super().__init__(**kwargs)
        self.max_slice_nums = max_slice_nums
        self.scale_resolution = scale_resolution
        self.patch_size = patch_size
        # Whether to prefix each image placeholder with "<image_id>N</image_id>".
        self.use_image_id = kwargs.pop("use_image_id", False)
        # Number of placeholder (unk) tokens reserved per image / per slice.
        self.image_feature_size = kwargs.pop("image_feature_size", 64)
        self.im_start_token = kwargs.pop("im_start", "<image>")
        self.im_end_token = kwargs.pop("im_end", "</image>")
        self.slice_start_token = kwargs.pop("slice_start", "<slice>")
        self.slice_end_token = kwargs.pop("slice_end", "</slice>")
        self.unk_token = kwargs.pop("unk", "<unk>")
        self.im_id_start = kwargs.pop("im_id_start", "<image_id>")
        self.im_id_end = kwargs.pop("im_id_end", "</image_id>")
        # When False, each image is passed through whole (no grid slicing).
        self.slice_mode = kwargs.pop("slice_mode", True)

        # Channel-wise normalization applied after rescaling pixels to [0, 1].
        self.mean = np.array(kwargs.pop("norm_mean", [0.5, 0.5, 0.5]))
        self.std = np.array(kwargs.pop("norm_std", [0.5, 0.5, 0.5]))
        self.version = kwargs.pop("version", 2.0)

    @staticmethod
    def ensure_divide(length, patch_size):
        # Round `length` to the nearest multiple of `patch_size`, never
        # dropping below one full patch.
        return max(round(length / patch_size) * patch_size, patch_size)

    def find_best_resize(self, original_size, scale_resolution, patch_size, allow_upscale=False):
        # Pick a patch-aligned size that preserves the aspect ratio and keeps
        # the pixel count near scale_resolution**2. Images already within the
        # budget keep their size (modulo patch rounding) unless allow_upscale.
        width, height = original_size
        if (width * height > scale_resolution * scale_resolution) or allow_upscale:
            r = width / height
            height = int(scale_resolution / math.sqrt(r))
            width = int(height * r)
        best_width = self.ensure_divide(width, patch_size)
        best_height = self.ensure_divide(height, patch_size)
        return best_width, best_height

    def get_refine_size(self, original_size, grid, scale_resolution, patch_size, allow_upscale=False):
        # Full-image size such that, after splitting by `grid`, each cell is
        # itself a well-shaped, patch-aligned slice.
        width, height = original_size
        grid_x, grid_y = grid

        refine_width = self.ensure_divide(width, grid_x)
        refine_height = self.ensure_divide(height, grid_y)

        grid_width = refine_width / grid_x
        grid_height = refine_height / grid_y

        best_grid_size = self.find_best_resize(
            (grid_width, grid_height), scale_resolution, patch_size, allow_upscale=allow_upscale
        )
        refine_size = (best_grid_size[0] * grid_x, best_grid_size[1] * grid_y)
        return refine_size

    @staticmethod
    def split_to_patches(image, grid):
        # Crop `image` into a grid[0] x grid[1] matrix of equally sized PIL
        # patches, returned row by row (list of rows, each a list of patches).
        patches = []
        width, height = image.size
        grid_x = int(width / grid[0])
        grid_y = int(height / grid[1])
        for i in range(0, height, grid_y):
            images = []
            for j in range(0, width, grid_x):
                box = (j, i, j + grid_x, i + grid_y)
                patch = image.crop(box)
                images.append(patch)
            patches.append(images)
        return patches

    def slice_image(self, image, max_slice_nums=9, scale_resolution=448, patch_size=14, never_split=False):
        """Split one PIL image into an overview image plus optional grid patches.

        Returns:
            (source_image, patches, best_grid); `patches` is [] and
            `best_grid` is None when the image is small enough not to slice.
        """
        original_size = image.size
        source_image = None
        best_grid = self.get_sliced_grid(original_size, max_slice_nums, never_split)
        patches = []

        if best_grid is None:
            # dont need to slice, upsample
            best_size = self.find_best_resize(original_size, scale_resolution, patch_size, allow_upscale=True)
            source_image = image.resize(best_size, resample=Image.Resampling.BICUBIC)
        else:
            # source image, down-sampling and ensure divided by patch_size
            best_resize = self.find_best_resize(original_size, scale_resolution, patch_size)
            source_image = image.copy().resize(best_resize, resample=Image.Resampling.BICUBIC)
            refine_size = self.get_refine_size(
                original_size, best_grid, scale_resolution, patch_size, allow_upscale=True
            )
            refine_image = image.resize(refine_size, resample=Image.Resampling.BICUBIC)
            patches = self.split_to_patches(refine_image, best_grid)

        return source_image, patches, best_grid

    def get_grid_placeholder(self, grid):
        # Textual placeholder for a sliced image: one <slice>...</slice> span
        # per grid cell, cells of a row concatenated, rows joined by newlines.
        if grid is None:
            return ""
        slice_image_placeholder = (
            self.slice_start_token + self.unk_token * self.image_feature_size + self.slice_end_token
        )

        cols = grid[0]
        rows = grid[1]
        slices = []
        for i in range(rows):
            lines = []
            for j in range(cols):
                lines.append(slice_image_placeholder)
            slices.append("".join(lines))

        slice_placeholder = "\n".join(slices)
        return slice_placeholder

    def get_image_id_placeholder(self, idx=0):
        # "<image_id>idx</image_id>" tag used when use_image_id is enabled.
        return f"{self.im_id_start}{idx}{self.im_id_end}"

    def get_sliced_images(self, image, max_slice_nums=None):
        """Return the PIL images the model will see for one input image:
        the overview first, followed by the grid patches in row order."""
        slice_images = []

        if not self.slice_mode:
            return [image]

        max_slice_nums = self.max_slice_nums if max_slice_nums is None else int(max_slice_nums)
        assert max_slice_nums > 0
        source_image, patches, sliced_grid = self.slice_image(
            image, max_slice_nums, self.scale_resolution, self.patch_size  # default: 9 # default: 448 # default: 14
        )

        slice_images.append(source_image)
        if len(patches) > 0:
            for i in range(len(patches)):
                for j in range(len(patches[0])):
                    slice_images.append(patches[i][j])
        return slice_images

    def get_sliced_grid(self, image_size, max_slice_nums, nerver_split=False):
        # Choose the (cols, rows) grid whose aspect ratio best matches the
        # image, or None when the image is small enough to skip slicing.
        # NOTE(review): the parameter name "nerver_split" is a historical typo
        # kept for keyword-compatibility with existing callers.
        original_width, original_height = image_size
        log_ratio = math.log(original_width / original_height)
        # How many scale_resolution^2 tiles the image roughly amounts to.
        ratio = original_width * original_height / (self.scale_resolution * self.scale_resolution)
        multiple = min(math.ceil(ratio), max_slice_nums)
        if multiple <= 1 or nerver_split:
            return None
        # Consider slice counts around the ideal `multiple`, skipping 1 and
        # anything above the cap...
        candidate_split_grids_nums = []
        for i in [multiple - 1, multiple, multiple + 1]:
            if i == 1 or i > max_slice_nums:
                continue
            candidate_split_grids_nums.append(i)

        # ...then enumerate every factorization m x (n // m) of each count.
        candidate_grids = []
        for split_grids_nums in candidate_split_grids_nums:
            m = 1
            while m <= split_grids_nums:
                if split_grids_nums % m == 0:
                    candidate_grids.append([m, split_grids_nums // m])
                m += 1

        # Pick the grid whose log aspect ratio is closest to the image's.
        best_grid = [1, 1]
        min_error = float("inf")
        for grid in candidate_grids:
            error = abs(log_ratio - math.log(grid[0] / grid[1]))
            if error < min_error:
                best_grid = grid
                min_error = error

        return best_grid

    def get_slice_image_placeholder(self, image_size, image_idx=0, max_slice_nums=None, use_image_id=None):
        """Build the full text placeholder for one image of `image_size`:
        optional image-id tag, the overview span, and — in slice mode —
        the grid placeholder for its chosen slicing."""
        max_slice_nums = self.max_slice_nums if max_slice_nums is None else int(max_slice_nums)
        assert max_slice_nums > 0
        grid = self.get_sliced_grid(image_size=image_size, max_slice_nums=max_slice_nums)

        image_placeholder = self.im_start_token + self.unk_token * self.image_feature_size + self.im_end_token
        use_image_id = self.use_image_id if use_image_id is None else bool(use_image_id)
        if use_image_id:
            final_placeholder = self.get_image_id_placeholder(image_idx) + image_placeholder
        else:
            final_placeholder = image_placeholder

        if self.slice_mode:
            final_placeholder = final_placeholder + self.get_grid_placeholder(grid=grid)
        return final_placeholder

    @staticmethod
    def to_pil_image(image, rescale=None) -> Image.Image:
        """Converts `image` to a PIL Image. Optionally rescales it and puts the channel dimension back
        as the last axis if needed.

        Args:
            image (`Image.Image` or `numpy.ndarray` or `torch.Tensor`):
                The image to convert to the PIL Image format.
            rescale (`bool`, *optional*):
                whether to apply the scaling factor (to make pixel values integers between 0 and 255). Will
                default to `True` if the image type is a floating type, `False` otherwise.
        """
        if isinstance(image, Image.Image):
            return image
        if is_torch_tensor(image):
            image = image.numpy()

        if isinstance(image, np.ndarray):
            if rescale is None:
                # rescale default to the array being of floating type.
                rescale = isinstance(image.flat[0], np.floating)
            # If the channel as been moved to first dim, we put it back at the end.
            if image.ndim == 3 and image.shape[0] in [1, 3]:
                image = image.transpose(1, 2, 0)
            if rescale:
                image = image * 255
            image = image.astype(np.uint8)
            return Image.fromarray(image)
        # Unknown types are returned unchanged (caller's responsibility).
        return image

    def reshape_by_patch(self, image):
        # Unfold a [C, H, W] array into a flat patch sequence of shape
        # [C, patch_size, H * W / patch_size] — the serialized layout the
        # vision encoder consumes. Input and output are numpy arrays; torch
        # is used internally for unfold.
        image = torch.from_numpy(image)
        patch_size = self.patch_size
        patches = torch.nn.functional.unfold(image, (patch_size, patch_size), stride=(patch_size, patch_size))

        patches = patches.reshape(image.size(0), patch_size, patch_size, -1)
        patches = patches.permute(0, 1, 3, 2).reshape(image.size(0), patch_size, -1)
        return patches.numpy()

    def preprocess(
        self,
        images: Union[Image.Image, List[Image.Image], List[List[Image.Image]]],
        do_pad: Optional[bool] = True,  # NOTE(review): accepted but unused here — kept for API compatibility
        max_slice_nums: int = None,
        return_tensors: Optional[Union[str, TensorType]] = None,
        **kwargs,
    ) -> MiniCPMOBatchFeature:
        """Slice, normalize, and patch-serialize a (possibly nested) batch of images.

        Returns:
            MiniCPMOBatchFeature with per-batch lists:
            "pixel_values" (patch-serialized float arrays per slice),
            "image_sizes" (original PIL (width, height) per image), and
            "tgt_sizes" (per-slice (h_patches, w_patches), vstacked).
        """
        # Normalize input nesting to List[List[Image]] (batch of image lists).
        if isinstance(images, Image.Image):
            images_list = [[images]]
        elif isinstance(images[0], Image.Image):
            images_list = [images]
        else:
            images_list = images

        new_images_list = []
        image_sizes_list = []
        tgt_sizes_list = []

        for _images in images_list:
            # Empty batch entries keep their position with empty outputs.
            if _images is None or len(_images) == 0:
                new_images_list.append([])
                image_sizes_list.append([])
                tgt_sizes_list.append([])
                continue
            if not valid_images(_images):
                raise ValueError(
                    "Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, "
                    "torch.Tensor, tf.Tensor or jax.ndarray."
                )

            _images = [self.to_pil_image(image).convert("RGB") for image in _images]
            input_data_format = infer_channel_dimension_format(np.array(_images[0]))

            new_images = []
            image_sizes = [image.size for image in _images]
            tgt_sizes = []
            for image in _images:
                # Overview + grid slices for this image.
                image_patches = self.get_sliced_images(image, max_slice_nums)
                # Rescale to [0, 1], normalize, then move channels first.
                image_patches = [to_numpy_array(image).astype(np.float32) / 255 for image in image_patches]
                image_patches = [
                    self.normalize(image=image, mean=self.mean, std=self.std, input_data_format=input_data_format)
                    for image in image_patches
                ]
                image_patches = [
                    to_channel_dimension_format(image, ChannelDimension.FIRST, input_channel_dim=input_data_format)
                    for image in image_patches
                ]
                for slice_image in image_patches:
                    new_images.append(self.reshape_by_patch(slice_image))
                    # Patch-grid footprint of this slice: (h // P, w // P).
                    tgt_sizes.append(
                        np.array((slice_image.shape[1] // self.patch_size, slice_image.shape[2] // self.patch_size))
                    )

            if tgt_sizes:
                tgt_sizes = np.vstack(tgt_sizes)

            new_images_list.append(new_images)
            image_sizes_list.append(image_sizes)
            tgt_sizes_list.append(tgt_sizes)
        return MiniCPMOBatchFeature(
            data={"pixel_values": new_images_list, "image_sizes": image_sizes_list, "tgt_sizes": tgt_sizes_list},
            tensor_type=return_tensors,
        )
|
| 406 |
+
|
| 407 |
+
|
| 408 |
+
# Make the processor discoverable through AutoImageProcessor by class name.
AutoImageProcessor.register("MiniCPMVImageProcessor", MiniCPMVImageProcessor)
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
def chunk_audio(audio: np.ndarray, max_duration_seconds: int = 30, sample_rate: int = 16000) -> List[np.ndarray]:
    """Split a long waveform into consecutive chunks of bounded duration.

    Args:
        audio: 1-D waveform samples.
        max_duration_seconds: maximum duration of each chunk, in seconds.
        sample_rate: sampling rate of `audio`, in Hz.

    Returns:
        Consecutive slices of `audio`; the final chunk may be shorter. A
        waveform already within the limit is returned as a single-element
        list containing the original array.
    """
    limit = int(max_duration_seconds * sample_rate)

    # Short inputs pass through untouched (no slicing, no copy).
    if len(audio) <= limit:
        return [audio]

    return [audio[start : start + limit] for start in range(0, len(audio), limit)]
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
def process_audio_batch(
    audios: Union[np.ndarray, List[np.ndarray], List[List[np.ndarray]]],
    feature_extractor,
    sampling_rate: int = 16000,
    max_duration_seconds: int = 30,
    return_attention_mask: bool = True,
) -> Tuple[torch.Tensor, List[torch.Tensor]]:
    """Extract mel features for a (possibly nested) batch of waveforms.

    Each waveform is first split into chunks of at most `max_duration_seconds`
    and every chunk is run through `feature_extractor` (a
    WhisperFeatureExtractor-compatible callable).

    Args:
        audios: a single waveform, a list of waveforms, or a batch of lists.
        feature_extractor: callable producing "input_features" (and, when
            requested, "attention_mask") as torch tensors.
        sampling_rate: sampling rate of the waveforms, in Hz.
        max_duration_seconds: chunking limit passed to `chunk_audio`.
        return_attention_mask: when True, padded tails are trimmed using the
            extractor's attention mask; otherwise the full frame count is kept.

    Returns:
        (audio_features, audio_feature_lens_list) where audio_features is
        [num_chunks, n_mels, max_frames] (empty tensor when there is no audio)
        and audio_feature_lens_list holds one 1-D length tensor per batch.
    """
    # Normalize the input nesting into List[List[np.ndarray]].
    if isinstance(audios, np.ndarray):
        batches = [[audios]]
    elif len(audios) > 0 and isinstance(audios[0], np.ndarray):
        batches = [audios]
    else:
        batches = audios

    collected_features = []
    per_batch_lens = []

    for waveforms in batches:
        lens_in_batch = []

        for waveform in waveforms:
            for piece in chunk_audio(waveform, max_duration_seconds, sampling_rate):
                extracted = feature_extractor(
                    piece,
                    sampling_rate=sampling_rate,
                    return_tensors="pt",
                    padding="max_length",
                    return_attention_mask=return_attention_mask,
                )

                features = extracted["input_features"]  # [1, 80, frames]

                if return_attention_mask:
                    # Trim the zero-padded tail down to the real frame count.
                    valid = extracted["attention_mask"].sum(dim=1)  # Tensor([frames])
                    features = features[:, :, : valid[0]]
                    lens_in_batch.append(valid[0])
                else:
                    lens_in_batch.append(torch.tensor(features.shape[2]))

                collected_features.append(features.squeeze(0))  # [80, frames]

        per_batch_lens.append(torch.hstack(lens_in_batch) if len(lens_in_batch) > 0 else torch.tensor([]))

    # Pad every chunk along the time axis to the longest length.
    if collected_features:
        stacked = torch.nn.utils.rnn.pad_sequence(
            [feat.transpose(0, 1) for feat in collected_features], batch_first=True, padding_value=0.0
        ).transpose(
            1, 2
        )  # [batch, 80, max_frames]
    else:
        stacked = torch.tensor([])

    return stacked, per_batch_lens
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def regroup_audio_features(
    audio_features: torch.Tensor, audio_feature_lens: List[torch.Tensor], regroup_seconds: int, fps: int = 100
) -> Tuple[torch.Tensor, List[torch.Tensor]]:
    """Re-chunk extracted mel features into fixed-duration segments.

    The valid (unpadded) frames of every row are concatenated into one long
    strip, which is then cut into segments of `regroup_seconds * fps` frames.

    Args:
        audio_features: padded features, shape [batch, n_mels, frames].
        audio_feature_lens: per-batch tensors (or lists) of valid frame counts.
        regroup_seconds: desired segment duration, in seconds.
        fps: feature frames per second.

    Returns:
        (regrouped_features, regrouped_lens): a [N, n_mels, max_seg_frames]
        tensor and a single-element list holding each segment's valid length
        (int32); (empty tensor, []) when there is no valid audio.
    """
    # Flatten the per-batch length structure into one list of ints.
    valid_lens = []
    for entry in audio_feature_lens:
        if isinstance(entry, torch.Tensor):
            valid_lens.extend(entry.tolist())
        elif isinstance(entry, list):
            valid_lens.extend([int(v) for v in entry])

    if len(valid_lens) == 0:
        return torch.tensor([]), []

    # Strip padding from each row and join into one continuous strip.
    trimmed = [audio_features[idx, :, :length] for idx, length in enumerate(valid_lens)]  # [n_mels, L]
    full_feat = trimmed[0] if len(trimmed) == 1 else torch.cat(trimmed, dim=1)  # [n_mels, total_frames]

    # Cut the strip into segments of at most `frames_per_seg` frames.
    frames_per_seg = int(regroup_seconds * fps)
    segments = []
    for begin in range(0, full_feat.size(1), frames_per_seg):
        piece = full_feat[:, begin : begin + frames_per_seg]
        if piece.size(1) > 0:
            segments.append(piece)

    if len(segments) == 0:
        return torch.tensor([]), []

    # Pad along time and stack into a batch.
    seg_lens = [seg.size(1) for seg in segments]
    padded = torch.nn.utils.rnn.pad_sequence(
        [seg.transpose(0, 1) for seg in segments], batch_first=True, padding_value=0.0
    )  # [N, max_T, n_mels]

    padded = padded.transpose(1, 2)  # [N, n_mels, max_T]
    lens_tensor = torch.tensor(seg_lens, dtype=torch.int32, device=padded.device)

    return padded, [lens_tensor]
|
| 565 |
+
|
| 566 |
+
|
| 567 |
+
class MiniCPMAAudioProcessor(WhisperFeatureExtractor):
|
| 568 |
+
"""
|
| 569 |
+
On top of WhisperFeatureExtractor:
|
| 570 |
+
- support dynamic_log_norm (original max-8dB, adjustable dynamic_range_db)
|
| 571 |
+
- or fixed log_floor_db (e.g. -10dB)
|
| 572 |
+
- this is because we need to do streaming scheme, in which we can't do dynamic setting
|
| 573 |
+
- this can be modified in the middle, through set_dynamic_log_norm
|
| 574 |
+
Two paths (torch / numpy) keep consistent clipping and scaling order:
|
| 575 |
+
log10 -> (dynamic/fixed lower limit clipping) -> (+4)/4
|
| 576 |
+
"""
|
| 577 |
+
|
| 578 |
+
    def __init__(
        self,
        *args,
        dynamic_log_norm: bool = True,
        dynamic_range_db: float = 8.0,
        log_floor_db: float = -10.0,
        **kwargs,
    ):
        """Whisper feature extractor with configurable log-mel clipping.

        Args:
            dynamic_log_norm: when True, clip the log-mel spectrogram at
                (per-clip max - dynamic_range_db); when False, clip at the
                fixed `log_floor_db` floor (needed for streaming, where the
                clip's global max is not known in advance).
            dynamic_range_db: dynamic range in dB (dynamic mode only).
            log_floor_db: fixed lower bound in dB on the log10 scale
                (fixed mode only).
            *args, **kwargs: forwarded to WhisperFeatureExtractor.
        """
        super().__init__(*args, **kwargs)
        self.dynamic_log_norm = bool(dynamic_log_norm)
        self.dynamic_range_db = float(dynamic_range_db)
        self.log_floor_db = float(log_floor_db)
|
| 590 |
+
|
| 591 |
+
    def set_spac_log_norm(
        self,
        dynamic_range_db: Optional[float] = None,
        log_floor_db: Optional[float] = None,
        *,
        inplace: bool = True,
    ) -> "MiniCPMAAudioProcessor":
        """Hot update dynamic/fixed lower limit strategy.

        Passing `dynamic_range_db` switches the processor to dynamic mode
        (clip at max - dynamic_range_db); passing `log_floor_db` switches it
        to fixed-floor mode. When both are given, fixed mode wins because its
        branch runs last. `None` means keep that setting unchanged.

        Args:
            dynamic_range_db: dynamic range (dB), must be >= 0. None means keep unchanged.
            log_floor_db: fixed log floor (dB, usually <= 0 on the log10 scale). None means keep unchanged.
            inplace: True directly modify current instance; False return a shallow copy and modify on it.

        Returns:
            self or new instance (when inplace=False).
        """

        target = self if inplace else copy.copy(self)

        if dynamic_range_db is not None:
            val = float(dynamic_range_db)
            if val < 0:
                raise ValueError("dynamic_range_db must be >= 0.")
            target.dynamic_log_norm = True  # explicitly set the value to dynamic mode
            target.dynamic_range_db = val

        if log_floor_db is not None:
            val = float(log_floor_db)
            # usually log10(mel) maximum is not more than ~0dB, floor should be <= 0; here do loose validation
            if val > 0:
                raise ValueError("log_floor_db should be <= 0 (log10 scale).")
            target.dynamic_log_norm = False  # explicitly set the value to fixed lower limit mode
            target.log_floor_db = val

        return target
|
| 629 |
+
|
def _np_extract_fbank_features(self, waveform_batch: np.ndarray, device: str) -> np.ndarray:
    """NumPy log-mel extraction mirroring upstream Whisper, with configurable clipping.

    The upstream fixed ``max - 8 dB`` clamp is replaced by either a dynamic
    threshold (``dynamic_log_norm`` True) or a fixed floor (``log_floor_db``).

    Args:
        waveform_batch: batch of 1-D waveforms.
        device: must be "cpu"; GPU extraction requires the torch path.

    Returns:
        Array of log-mel spectrograms, one per input waveform.

    Raises:
        ValueError: if ``device`` is not "cpu".
    """
    if device != "cpu":
        raise ValueError(
            f"Got device `{device}` for feature extraction, but feature extraction on CUDA accelerator "
            "devices requires torch. Set device='cpu' or install torch."
        )

    batch_specs: List[np.ndarray] = []
    for signal in waveform_batch:
        # log10-scale mel spectrogram
        mel = spectrogram(
            signal,
            window_function(self.n_fft, "hann"),
            frame_length=self.n_fft,
            hop_length=self.hop_length,
            power=2.0,
            dither=self.dither,
            mel_filters=self.mel_filters,
            log_mel="log10",
        )
        # drop the last frame, matching upstream behaviour
        mel = mel[:, :-1]

        # clip: dynamic threshold relative to the max, or a fixed floor
        floor = (mel.max() - self.dynamic_range_db) if self.dynamic_log_norm else self.log_floor_db
        mel = np.maximum(mel, floor)

        # Whisper-style linear rescaling
        batch_specs.append((mel + 4.0) / 4.0)

    return np.array(batch_specs)
def _torch_extract_fbank_features(self, waveform: np.ndarray, device: str = "cpu") -> np.ndarray:
    """Torch log-mel extraction; mirrors `_np_extract_fbank_features` but can run on GPU.

    Applies the same configurable clipping (dynamic threshold or fixed floor)
    and Whisper-style rescaling, then returns a NumPy array (moved back to CPU
    first when computed on an accelerator).

    Raises:
        RuntimeError: if torch is not available.
    """
    if torch is None:
        raise RuntimeError("PyTorch is not installed, cannot compute STFT on GPU.")

    waveform = torch.from_numpy(waveform).to(device, torch.float32)
    window = torch.hann_window(self.n_fft, device=device)

    # optional dithering (matches the NumPy path's `dither` argument)
    if self.dither != 0.0:
        waveform = waveform + self.dither * torch.randn_like(waveform)

    stft = torch.stft(waveform, n_fft=self.n_fft, hop_length=self.hop_length, window=window, return_complex=True)
    # power spectrum; drop the last STFT frame to match the NumPy path
    magnitudes = stft[..., :-1].abs() ** 2

    # NOTE: the transpose-matmul below requires self.mel_filters of shape
    # [1 + n_fft // 2, n_mels] (frequency bins first) — the previous comment
    # claiming [n_mels, 1 + n_fft // 2] was inconsistent with this code.
    mel_filters = torch.from_numpy(self.mel_filters).to(device, torch.float32)
    mel_spec = mel_filters.T @ magnitudes  # [..., n_mels, T]

    log_spec = torch.clamp(mel_spec, min=1e-10).log10()  # values <= ~0 (log10 scale)

    if self.dynamic_log_norm:
        # clip at (max - dynamic_range_db); for batched input compute the max per sample
        if waveform.dim() == 2:
            max_val_t = log_spec.max(dim=2, keepdim=True)[0]  # over T
            max_val_bt = max_val_t.max(dim=1, keepdim=True)[0]  # over mel
            threshold = max_val_bt - self.dynamic_range_db
            log_spec = torch.maximum(log_spec, threshold)
        else:
            threshold = log_spec.max() - self.dynamic_range_db
            log_spec = torch.maximum(log_spec, threshold)
    else:
        # fixed-floor mode: clamp at log_floor_db
        floor_tensor = torch.tensor(self.log_floor_db, dtype=log_spec.dtype, device=log_spec.device)
        log_spec = torch.maximum(log_spec, floor_tensor)

    # Whisper-style linear rescaling, same as the NumPy path
    log_spec = (log_spec + 4.0) / 4.0

    if device != "cpu":
        log_spec = log_spec.detach().cpu()
    return log_spec.numpy()
def process(self, *args, **kwargs):
    """Convenience alias: forwards all positional and keyword arguments to ``__call__``."""
    return self.__call__(*args, **kwargs)
class StreamingMelProcessorExact:
    """Strictly offline-equivalent streaming Mel processor.

    - Accumulates all historical audio into ``self.buffer``; after each addition the
      same ``feature_extractor`` recomputes the mel over the whole (windowed) buffer.
    - Only emits "stable" frames: a frame is stable when its center does not depend
      on future (right) context, i.e. ``center + n_fft // 2 <= len(buffer)``.
    - Remaining frames are emitted at stream end (``flush``), keeping the output
      identical to a single offline full computation.

    Cost: every ``process`` call re-extracts features over the accumulated buffer
    (could be optimized to incremental extraction if needed).
    """

    def __init__(
        self,
        feature_extractor: MiniCPMAAudioProcessor,
        chunk_ms: int = 100,
        first_chunk_ms: Optional[int] = None,
        sample_rate: int = 16000,
        n_fft: int = 400,
        hop_length: int = 160,
        n_mels: int = 80,
        cnn_redundancy_ms: int = 10,  # CNN boundary redundancy in ms (10 ms == 1 frame at hop 160 / 16 kHz)
        # sliding window parameters (trigger mode)
        enable_sliding_window: bool = False,  # whether to enable the sliding window
        slide_trigger_seconds: float = 30.0,  # buffer length (s) that triggers a slide
        slide_stride_seconds: float = 10.0,  # how much (s) to drop per slide
    ):
        """Store configuration, derive sample/frame counts, and reset state."""
        self.feature_extractor = feature_extractor
        self.chunk_ms = chunk_ms
        self.first_chunk_ms = first_chunk_ms if first_chunk_ms is not None else chunk_ms
        self.sample_rate = sample_rate
        self.n_fft = n_fft
        self.hop_length = hop_length
        self.n_mels = n_mels

        self.chunk_samples = int(round(chunk_ms * sample_rate / 1000))
        self.chunk_frames = self.chunk_samples // hop_length
        # align the first chunk to hop_length to avoid frame-boundary issues
        hop = self.hop_length
        raw_first_samples = int(round(self.first_chunk_ms * sample_rate / 1000))
        aligned_first = max(hop, (raw_first_samples // hop) * hop)
        self.first_chunk_samples = aligned_first
        self.half_window = n_fft // 2  # right context required for a frame to be "stable"

        # redundancy converted from ms to samples to whole frames
        self.cnn_redundancy_ms = cnn_redundancy_ms
        self.cnn_redundancy_samples = int(cnn_redundancy_ms * sample_rate / 1000)
        self.cnn_redundancy_frames = max(0, self.cnn_redundancy_samples // hop_length)

        # sliding window configuration (trigger mode)
        self.enable_sliding_window = enable_sliding_window
        self.trigger_seconds = slide_trigger_seconds
        self.slide_seconds = slide_stride_seconds

        # shift/base bookkeeping in *global* frame coordinates
        self.left_samples_dropped = 0  # samples dropped from the left of the buffer
        self.base_T = 0  # global frame index corresponding to mel_full[:, :, 0]

        self.reset()

    def reset(self):
        """Clear all streaming state (buffer, counters, coordinate bases)."""
        self.buffer = np.zeros(0, dtype=np.float32)
        self.last_emitted_T = 0
        self.total_samples_processed = 0
        self.chunk_count = 0
        self.is_first = True
        self.left_samples_dropped = 0
        self.base_T = 0

    def get_chunk_size(self) -> int:
        """Return the expected size (samples) of the next input chunk."""
        return self.first_chunk_samples if self.is_first else self.chunk_samples

    def get_expected_output_frames(self) -> int:
        """Not supported for the exact processor."""
        raise NotImplementedError("get_expected_output_frames is not implemented")

    def _extract_full(self) -> torch.Tensor:
        """Extract mel features over the entire current buffer.

        Returns:
            Tensor of shape [1, n_mels, T].

        Raises:
            ValueError: if the buffer is shorter than ``n_fft`` (Whisper's STFT
                with center=True would otherwise fail with pad > input length).
        """
        if len(self.buffer) < self.n_fft:
            raise ValueError(f"buffer length is shorter than n_fft {len(self.buffer)} < {self.n_fft}")
        # For short buffers (< 5 s) a dynamic max-relative threshold is unreliable,
        # so switch the extractor to a fixed -10 dB floor; otherwise use the
        # Whisper-like dynamic range of 8 dB.
        # TODO: these values were chosen empirically; worth validating experimentally.
        if len(self.buffer) < 5 * self.sample_rate:
            self.feature_extractor.set_spac_log_norm(log_floor_db=-10)
        else:
            self.feature_extractor.set_spac_log_norm(dynamic_range_db=8)
        feats = self.feature_extractor(
            self.buffer,
            sampling_rate=self.sample_rate,
            return_tensors="pt",
            padding=False,
        )
        return feats.input_features  # [1, 80, T]

    def _stable_frames_count(self) -> int:
        """Number of frames whose right context fully fits in the current buffer.

        stable = floor((len(buffer) - half_window) / hop) + 1, clamped at 0.
        """
        L = int(self.buffer.shape[0])
        if L <= 0:
            return 0
        if L < self.half_window:
            return 0
        return max(0, (L - self.half_window) // self.hop_length + 1)

    def _maybe_slide_buffer(self):
        """Trigger-mode sliding window: once the buffer reaches the trigger
        threshold, drop a fixed stride of samples from its left edge and advance
        the global coordinate base accordingly."""
        if not self.enable_sliding_window:
            return

        sr = self.sample_rate
        hop = self.hop_length
        L = len(self.buffer)

        # convert seconds to samples
        trigger_samples = int(self.trigger_seconds * sr)
        stride_samples = int(self.slide_seconds * sr)

        # nothing to do until the trigger threshold is reached
        if L < trigger_samples:
            return

        # fixed slide of stride_samples per trigger
        drop = stride_samples

        # Do not drop left context still needed for future emission; in trigger
        # mode only a minimal guard window is protected.
        # NOTE(review): last_emitted_local is computed but never used below —
        # candidate for removal.
        last_emitted_local = self.last_emitted_T - self.base_T

        # protect at least the most recent second of audio for continuity
        min_keep_seconds = 1.0  # keep at least 1 second of data
        min_keep_samples = int(min_keep_seconds * sr)

        # guard_samples is the minimum amount that must remain in the buffer
        guard_samples = min(min_keep_samples, L - drop)

        # cap the drop at the safe boundary and align it to whole hops
        max_allowed_drop = max(0, L - guard_samples)
        drop = min(drop, max_allowed_drop)
        drop = (drop // hop) * hop

        if drop <= 0:
            return

        # actually drop samples and shift the global frame base
        self.buffer = self.buffer[drop:]
        self.left_samples_dropped += drop
        self.base_T += drop // hop

    def process(self, audio_chunk: np.ndarray, is_last_chunk: bool = False) -> Tuple[torch.Tensor, Dict]:
        """Append one chunk and emit the next batch of frames if enough are stable.

        Args:
            audio_chunk: new audio samples (float-convertible).
            is_last_chunk: force emission even if stability requirements are unmet.

        Returns:
            (mel_output, info) — mel_output is [1, n_mels, k] (possibly k == 0),
            info is a diagnostics dict in global frame coordinates.
        """
        self.chunk_count += 1
        # append to buffer
        if len(self.buffer) == 0:
            self.buffer = audio_chunk.astype(np.float32, copy=True)
        else:
            self.buffer = np.concatenate([self.buffer, audio_chunk.astype(np.float32, copy=True)])

        # sliding window processing (may shift base_T)
        self._maybe_slide_buffer()

        # full extraction over the current window
        mel_full = self._extract_full()
        T_full = mel_full.shape[-1]  # local frames in the current window
        stable_T = min(T_full, self._stable_frames_count())  # local stable frames
        stable_T_global = self.base_T + stable_T  # mapped to global frame coordinates

        # plan the core frames for this emission (global coordinates)
        core_start_g = self.last_emitted_T
        core_end_g = core_start_g + self.chunk_frames
        required_stable_g = core_end_g + self.cnn_redundancy_frames

        if stable_T_global >= required_stable_g or is_last_chunk:
            # widen the emitted span by the CNN redundancy on both sides
            emit_start_g = max(0, core_start_g - self.cnn_redundancy_frames)
            emit_end_g = core_end_g + self.cnn_redundancy_frames

            # global -> local index, clamped to the extracted window
            emit_start = max(0, emit_start_g - self.base_T)
            emit_end = emit_end_g - self.base_T
            emit_start = max(0, min(emit_start, T_full))
            emit_end = max(emit_start, min(emit_end, T_full))

            mel_output = mel_full[:, :, emit_start:emit_end]
            self.last_emitted_T = core_end_g  # advance only the core frame pointer (global)
        else:
            # not enough stable frames yet: emit an empty slice
            mel_output = mel_full[:, :, 0:0]

        self.total_samples_processed += len(audio_chunk)
        self.is_first = False

        info = {
            "type": "exact_chunk",
            "chunk_number": self.chunk_count,
            "emitted_frames": mel_output.shape[-1],
            "stable_T": stable_T,
            "T_full": T_full,
            "base_T": self.base_T,
            "stable_T_global": stable_T_global,
            "buffer_len_samples": int(self.buffer.shape[0]),
            "left_samples_dropped": self.left_samples_dropped,
            "core_start": core_start_g,  # global coordinates (field name kept for compatibility)
            "core_end": core_end_g,  # same as above
        }
        return mel_output, info

    def flush(self) -> torch.Tensor:
        """Emit all remaining un-emitted frames at stream end (global coordinates),
        guaranteeing the concatenated output matches the offline computation.

        NOTE(review): the empty-buffer return hard-codes 80 mel bins rather than
        using self.n_mels — confirm whether non-default n_mels is ever used.
        """
        if len(self.buffer) == 0:
            return torch.zeros(1, 80, 0)

        mel_full = self._extract_full()
        T_local = mel_full.shape[-1]
        T_global = self.base_T + T_local

        if self.last_emitted_T < T_global:
            start_l = max(0, self.last_emitted_T - self.base_T)
            tail = mel_full[:, :, start_l:]
            self.last_emitted_T = T_global
            return tail
        return mel_full[:, :, 0:0]

    def get_config(self) -> Dict:
        """Return the static configuration of this streaming processor."""
        return {
            "chunk_ms": self.chunk_ms,
            "first_chunk_ms": self.first_chunk_ms,
            "effective_first_chunk_ms": self.first_chunk_samples / self.sample_rate * 1000.0,
            "sample_rate": self.sample_rate,
            "n_fft": self.n_fft,
            "hop_length": self.hop_length,
            "cnn_redundancy_ms": self.cnn_redundancy_ms,
            "cnn_redundancy_frames": self.cnn_redundancy_frames,
            "enable_sliding_window": self.enable_sliding_window,
            "trigger_seconds": self.trigger_seconds,
            "slide_seconds": self.slide_seconds,
        }

    def get_state(self) -> Dict:
        """Return a lightweight view of the mutable state (no buffer contents)."""
        return {
            "chunk_count": self.chunk_count,
            "last_emitted_T": self.last_emitted_T,
            "total_samples_processed": self.total_samples_processed,
            "buffer_len": int(self.buffer.shape[0]),
            "base_T": self.base_T,
            "left_samples_dropped": self.left_samples_dropped,
        }

    def get_snapshot(self) -> Dict:
        """Get a complete state snapshot (including buffer) for fast-start recovery.

        Returns:
            A dictionary containing the full state; pass it to restore_snapshot.
        """
        buffer_copy = self.buffer.copy()
        snapshot = {
            "chunk_count": self.chunk_count,
            "last_emitted_T": self.last_emitted_T,
            "total_samples_processed": self.total_samples_processed,
            "buffer": buffer_copy,
            "base_T": self.base_T,
            "left_samples_dropped": self.left_samples_dropped,
            "is_first": self.is_first,
            # also capture the feature extractor's clipping mode, since
            # _extract_full mutates it and mel extraction must stay deterministic
            "fe_dynamic_log_norm": getattr(self.feature_extractor, "dynamic_log_norm", None),
            "fe_dynamic_range_db": getattr(self.feature_extractor, "dynamic_range_db", None),
            "fe_log_floor_db": getattr(self.feature_extractor, "log_floor_db", None),
        }

        return snapshot

    def restore_snapshot(self, snapshot: Dict) -> None:
        """Restore state from a snapshot produced by get_snapshot.

        Args:
            snapshot: the snapshot dictionary returned by get_snapshot.
        """
        # NOTE(review): prev_state is recorded but never used (perhaps intended
        # for logging) — candidate for removal.
        prev_state = {
            "chunk_count": self.chunk_count,
            "last_emitted_T": self.last_emitted_T,
            "buffer_len": len(self.buffer),
        }

        # restore mutable state
        self.chunk_count = snapshot["chunk_count"]
        self.last_emitted_T = snapshot["last_emitted_T"]
        self.total_samples_processed = snapshot["total_samples_processed"]
        self.buffer = snapshot["buffer"].copy()  # defensive copy of the buffer
        self.base_T = snapshot["base_T"]
        self.left_samples_dropped = snapshot["left_samples_dropped"]
        self.is_first = snapshot["is_first"]

        # restore the feature extractor's clipping mode (determinism of mel extraction)
        if snapshot.get("fe_dynamic_log_norm") is not None:
            self.feature_extractor.dynamic_log_norm = snapshot["fe_dynamic_log_norm"]
        if snapshot.get("fe_dynamic_range_db") is not None:
            self.feature_extractor.dynamic_range_db = snapshot["fe_dynamic_range_db"]
        if snapshot.get("fe_log_floor_db") is not None:
            self.feature_extractor.log_floor_db = snapshot["fe_log_floor_db"]
+
class MiniCPMOProcessor(ProcessorMixin):
|
| 1008 |
+
attributes = ["image_processor", "audio_processor", "tokenizer"]
|
| 1009 |
+
audio_processor_class = "AutoFeatureExtractor"
|
| 1010 |
+
image_processor_class = "AutoImageProcessor"
|
| 1011 |
+
tokenizer_class = "AutoTokenizer"
|
| 1012 |
+
|
def __init__(self, image_processor=None, audio_processor=None, tokenizer=None, **kwargs):
    """Build the composite processor.

    Args:
        image_processor: image preprocessor (AutoImageProcessor-compatible); optional.
        audio_processor: audio feature extractor (AutoFeatureExtractor-compatible); optional.
        tokenizer: text tokenizer; optional.
        **kwargs: extra options; ``audio_pool_step`` (default 5) sets the audio
            feature pooling step and must match ``config.audio_pool_step``.
    """
    super().__init__(image_processor, audio_processor, tokenizer)

    # processor version is carried by the image processor (None when absent)
    self.version = image_processor.version if image_processor else None
    # audio feature pooling step, needs to be consistent with config.audio_pool_step
    self.pool_step = kwargs.get("audio_pool_step", 5)

    # streaming mel processor exists only when audio processing is configured
    self._streaming_mel_processor = None
    if audio_processor is not None:
        self._init_streaming_processor()
def get_audio_placeholder(
    self,
    audio_lens: int,
    chunk_input: bool = True,
    chunk_length: int = 1,
) -> str:
    """Build the `<unk>`-based placeholder string standing in for audio embeddings.

    Public method used e.g. for vLLM integration.

    Args:
        audio_lens: audio length in samples.
        chunk_input: emit one start/end-delimited group per chunk instead of one group.
        chunk_length: chunk duration in seconds (used only when chunk_input is True).

    Returns:
        Placeholder text with one ``<unk>`` per pooled audio embedding, wrapped
        in the tokenizer's audio start/end markers.
    """
    pool_step = self.pool_step
    # samples -> fbank frames -> CNN frames (stride 2) -> pooled embedding count
    fbank_frames = math.ceil(audio_lens / self.audio_processor.hop_length)
    cnn_frames = (fbank_frames - 1) // 2 + 1
    num_embeds = (cnn_frames - pool_step) // pool_step + 1

    if not chunk_input:
        return self.tokenizer.audio_start + "<unk>" * num_embeds + self.tokenizer.audio_end

    # embeddings produced by one chunk of `chunk_length` seconds (100 fbank frames/s)
    chunk_fbank = int(chunk_length * 100)
    chunk_cnn = (chunk_fbank - 1) // 2 + 1
    embeds_per_chunk = (chunk_cnn - pool_step) // pool_step + 1
    n_chunks = (num_embeds + embeds_per_chunk - 1) // embeds_per_chunk

    pieces = []
    emitted = 0
    for _ in range(n_chunks):
        take = min(embeds_per_chunk, num_embeds - emitted)
        pieces.append(self.tokenizer.audio_start + "<unk>" * take + self.tokenizer.audio_end)
        emitted += take
    return "".join(pieces)
|
| 1066 |
+
def _init_streaming_processor(
|
| 1067 |
+
self,
|
| 1068 |
+
chunk_ms: int = 100,
|
| 1069 |
+
cnn_redundancy_ms: int = 0,
|
| 1070 |
+
*,
|
| 1071 |
+
mode: str = "exact",
|
| 1072 |
+
first_chunk_ms: Optional[int] = None,
|
| 1073 |
+
enable_sliding_window: bool = False,
|
| 1074 |
+
slide_trigger_seconds: float = 30.0,
|
| 1075 |
+
slide_stride_seconds: float = 10.0,
|
| 1076 |
+
):
|
| 1077 |
+
"""Initialize the streaming processor
|
| 1078 |
+
|
| 1079 |
+
Args:
|
| 1080 |
+
chunk_ms: Chunk size in milliseconds, also the sliding step.
|
| 1081 |
+
cnn_redundancy_ms: CNN boundary redundancy in milliseconds (before and after), 0 means standard mode.
|
| 1082 |
+
mode: streaming processing mode, currently only supports "exact"
|
| 1083 |
+
first_chunk_ms: the size of the first chunk (milliseconds), if not specified, it is the same as chunk_ms
|
| 1084 |
+
enable_sliding_window: whether to enable sliding window (trigger mode)
|
| 1085 |
+
slide_trigger_seconds: trigger threshold for sliding window in seconds
|
| 1086 |
+
slide_stride_seconds: stride for sliding window in seconds
|
| 1087 |
+
"""
|
| 1088 |
+
if mode == "exact":
|
| 1089 |
+
self._streaming_mel_processor = StreamingMelProcessorExact(
|
| 1090 |
+
feature_extractor=self.audio_processor,
|
| 1091 |
+
chunk_ms=chunk_ms,
|
| 1092 |
+
first_chunk_ms=first_chunk_ms,
|
| 1093 |
+
sample_rate=16000,
|
| 1094 |
+
cnn_redundancy_ms=cnn_redundancy_ms,
|
| 1095 |
+
enable_sliding_window=enable_sliding_window,
|
| 1096 |
+
slide_trigger_seconds=slide_trigger_seconds,
|
| 1097 |
+
slide_stride_seconds=slide_stride_seconds,
|
| 1098 |
+
)
|
| 1099 |
+
else:
|
| 1100 |
+
raise ValueError(f"Unsupported mode: {mode}, only 'exact' is supported")
|
| 1101 |
+
self._streaming_mode = mode if mode in ["exact"] else ("exact")
|
| 1102 |
+
|
| 1103 |
+
def set_streaming_mode(
|
| 1104 |
+
self,
|
| 1105 |
+
mode: str = "exact",
|
| 1106 |
+
chunk_ms: int = 100,
|
| 1107 |
+
cnn_redundancy_ms: int = 0,
|
| 1108 |
+
*,
|
| 1109 |
+
first_chunk_ms: Optional[int] = None,
|
| 1110 |
+
enable_sliding_window: bool = False,
|
| 1111 |
+
slide_trigger_seconds: float = 30.0,
|
| 1112 |
+
slide_stride_seconds: float = 10.0,
|
| 1113 |
+
):
|
| 1114 |
+
"""Set streaming processing mode
|
| 1115 |
+
|
| 1116 |
+
Args:
|
| 1117 |
+
mode: streaming processing mode, currently only supports "exact"
|
| 1118 |
+
chunk_ms: chunk size in milliseconds, also the sliding step.
|
| 1119 |
+
cnn_redundancy_ms: CNN boundary redundancy in milliseconds (before and after), 0 means standard mode.
|
| 1120 |
+
first_chunk_ms: the size of the first chunk (milliseconds), if not specified, it is the same as chunk_ms
|
| 1121 |
+
enable_sliding_window: whether to enable sliding window (trigger mode)
|
| 1122 |
+
slide_trigger_seconds: trigger threshold for sliding window in seconds
|
| 1123 |
+
slide_stride_seconds: stride for sliding window in seconds
|
| 1124 |
+
"""
|
| 1125 |
+
if self.audio_processor is None:
|
| 1126 |
+
raise ValueError("audio_processor is not set, cannot initialize the streaming processor")
|
| 1127 |
+
self._init_streaming_processor(
|
| 1128 |
+
chunk_ms=chunk_ms,
|
| 1129 |
+
cnn_redundancy_ms=cnn_redundancy_ms,
|
| 1130 |
+
mode=mode,
|
| 1131 |
+
first_chunk_ms=first_chunk_ms,
|
| 1132 |
+
enable_sliding_window=enable_sliding_window,
|
| 1133 |
+
slide_trigger_seconds=slide_trigger_seconds,
|
| 1134 |
+
slide_stride_seconds=slide_stride_seconds,
|
| 1135 |
+
)
|
| 1136 |
+
|
| 1137 |
+
def process_image(
|
| 1138 |
+
self,
|
| 1139 |
+
images: Optional[ImageInput] = None,
|
| 1140 |
+
do_pad: bool = True,
|
| 1141 |
+
max_slice_nums: int = 1,
|
| 1142 |
+
return_tensors: str = "pt",
|
| 1143 |
+
) -> MiniCPMOBatchFeature:
|
| 1144 |
+
"""Process image data
|
| 1145 |
+
|
| 1146 |
+
Args:
|
| 1147 |
+
images: input images
|
| 1148 |
+
do_pad: whether to pad
|
| 1149 |
+
max_slice_nums: maximum number of slices
|
| 1150 |
+
return_tensors: return tensor type
|
| 1151 |
+
Returns:
|
| 1152 |
+
MiniCPMOBatchFeature object
|
| 1153 |
+
"""
|
| 1154 |
+
if images is None:
|
| 1155 |
+
return MiniCPMOBatchFeature(data={"pixel_values": [[]], "image_sizes": [[]], "tgt_sizes": [[]]})
|
| 1156 |
+
|
| 1157 |
+
result = self.image_processor(
|
| 1158 |
+
images, do_pad=do_pad, max_slice_nums=max_slice_nums, return_tensors=return_tensors
|
| 1159 |
+
)
|
| 1160 |
+
|
| 1161 |
+
model_inputs = {
|
| 1162 |
+
"pixel_values": result.get("pixel_values", [[]]),
|
| 1163 |
+
"image_sizes": result.get("image_sizes", [[]]),
|
| 1164 |
+
"tgt_sizes": result.get("tgt_sizes", [[]]),
|
| 1165 |
+
}
|
| 1166 |
+
|
| 1167 |
+
return MiniCPMOBatchFeature(data=model_inputs)
|
| 1168 |
+
|
| 1169 |
+
def process_audio(
|
| 1170 |
+
self,
|
| 1171 |
+
audios: Optional[Union[np.ndarray, List[np.ndarray]]] = None,
|
| 1172 |
+
sampling_rate: int = 16000,
|
| 1173 |
+
regroup_to_seconds: Optional[int] = None,
|
| 1174 |
+
fps: int = 100,
|
| 1175 |
+
) -> MiniCPMOBatchFeature:
|
| 1176 |
+
"""Process audio data in batch
|
| 1177 |
+
|
| 1178 |
+
Args:
|
| 1179 |
+
audios: audio data
|
| 1180 |
+
sampling_rate: sampling rate
|
| 1181 |
+
regroup_to_seconds: regroup duration in seconds
|
| 1182 |
+
fps: frames per second
|
| 1183 |
+
Returns:
|
| 1184 |
+
MiniCPMOBatchFeature object
|
| 1185 |
+
"""
|
| 1186 |
+
if audios is None:
|
| 1187 |
+
return MiniCPMOBatchFeature(data={"audio_features": [], "audio_feature_lens": []})
|
| 1188 |
+
|
| 1189 |
+
audio_features, audio_feature_lens = process_audio_batch(
|
| 1190 |
+
audios=audios,
|
| 1191 |
+
feature_extractor=self.audio_processor,
|
| 1192 |
+
sampling_rate=sampling_rate,
|
| 1193 |
+
max_duration_seconds=30,
|
| 1194 |
+
return_attention_mask=True,
|
| 1195 |
+
)
|
| 1196 |
+
|
| 1197 |
+
if regroup_to_seconds is not None and len(audio_features) > 0:
|
| 1198 |
+
audio_features, audio_feature_lens = regroup_audio_features(
|
| 1199 |
+
audio_features=audio_features,
|
| 1200 |
+
audio_feature_lens=audio_feature_lens,
|
| 1201 |
+
regroup_seconds=regroup_to_seconds,
|
| 1202 |
+
fps=fps,
|
| 1203 |
+
)
|
| 1204 |
+
|
| 1205 |
+
model_inputs = {"audio_features": audio_features, "audio_feature_lens": audio_feature_lens}
|
| 1206 |
+
|
| 1207 |
+
return MiniCPMOBatchFeature(data=model_inputs)
|
| 1208 |
+
|
| 1209 |
+
def process_audio_streaming(
|
| 1210 |
+
self,
|
| 1211 |
+
audio_chunk: np.ndarray,
|
| 1212 |
+
reset: bool = False,
|
| 1213 |
+
return_batch_feature: bool = False,
|
| 1214 |
+
is_last_chunk: bool = False,
|
| 1215 |
+
) -> Union[Tuple[torch.Tensor, dict], MiniCPMOBatchFeature]:
|
| 1216 |
+
"""Process audio chunk in streaming
|
| 1217 |
+
|
| 1218 |
+
Args:
|
| 1219 |
+
audio_chunk: audio data chunk (any audio, e.g. first process 125ms, then process 100ms)
|
| 1220 |
+
reset: whether to reset the processor state
|
| 1221 |
+
return_batch_feature: whether to return MiniCPMOBatchFeature format (consistent with process_audio)
|
| 1222 |
+
Returns:
|
| 1223 |
+
If return_batch_feature=False:
|
| 1224 |
+
(audio_features, info)
|
| 1225 |
+
- audio_features: [1, 80, n_frames] mel features
|
| 1226 |
+
- info: processing information dictionary
|
| 1227 |
+
If return_batch_feature=True:
|
| 1228 |
+
MiniCPMOBatchFeature object, containing:
|
| 1229 |
+
- audio_features: [1, 80, n_frames] mel features
|
| 1230 |
+
- audio_feature_lens: [tensor([n_frames])]
|
| 1231 |
+
- info: processing information (as an extra attribute)
|
| 1232 |
+
"""
|
| 1233 |
+
if self._streaming_mel_processor is None:
|
| 1234 |
+
raise ValueError("Streaming processor not initialized, please ensure audio_processor is set")
|
| 1235 |
+
|
| 1236 |
+
if reset:
|
| 1237 |
+
self._streaming_mel_processor.reset()
|
| 1238 |
+
|
| 1239 |
+
# process chunk
|
| 1240 |
+
mel_features, info = self._streaming_mel_processor.process(audio_chunk, is_last_chunk=is_last_chunk)
|
| 1241 |
+
|
| 1242 |
+
# determine the return format based on the parameters
|
| 1243 |
+
if return_batch_feature:
|
| 1244 |
+
# return the format consistent with process_audio
|
| 1245 |
+
# note: info returns emitted_frames, which represents the actual output frames
|
| 1246 |
+
n_frames = info.get("emitted_frames", mel_features.shape[-1])
|
| 1247 |
+
model_inputs = {
|
| 1248 |
+
"audio_features": mel_features,
|
| 1249 |
+
"audio_feature_lens": [torch.tensor([n_frames])],
|
| 1250 |
+
"streaming_info": info, # add streaming processing information
|
| 1251 |
+
}
|
| 1252 |
+
return MiniCPMOBatchFeature(data=model_inputs)
|
| 1253 |
+
else:
|
| 1254 |
+
return mel_features, info
|
| 1255 |
+
|
| 1256 |
+
def reset_streaming(self):
|
| 1257 |
+
if self._streaming_mel_processor is not None:
|
| 1258 |
+
self._streaming_mel_processor.reset()
|
| 1259 |
+
|
| 1260 |
+
def get_streaming_chunk_size(self) -> int:
|
| 1261 |
+
if self._streaming_mel_processor is None:
|
| 1262 |
+
raise ValueError("Streaming processor not initialized")
|
| 1263 |
+
return self._streaming_mel_processor.get_chunk_size()
|
| 1264 |
+
|
| 1265 |
+
def configure_streaming(
|
| 1266 |
+
self,
|
| 1267 |
+
chunk_ms: int = 100,
|
| 1268 |
+
enable_sliding_window: bool = False,
|
| 1269 |
+
slide_trigger_seconds: float = 30.0,
|
| 1270 |
+
slide_stride_seconds: float = 10.0,
|
| 1271 |
+
):
|
| 1272 |
+
"""Configure streaming processor parameters
|
| 1273 |
+
|
| 1274 |
+
Args:
|
| 1275 |
+
chunk_ms: chunk size in milliseconds
|
| 1276 |
+
enable_sliding_window: whether to enable sliding window (trigger mode)
|
| 1277 |
+
slide_trigger_seconds: trigger threshold for sliding window in seconds
|
| 1278 |
+
slide_stride_seconds: stride for sliding window in seconds
|
| 1279 |
+
"""
|
| 1280 |
+
if self.audio_processor is None:
|
| 1281 |
+
raise ValueError("audio_processor is not set")
|
| 1282 |
+
|
| 1283 |
+
self._init_streaming_processor(
|
| 1284 |
+
chunk_ms=chunk_ms,
|
| 1285 |
+
enable_sliding_window=enable_sliding_window,
|
| 1286 |
+
slide_trigger_seconds=slide_trigger_seconds,
|
| 1287 |
+
slide_stride_seconds=slide_stride_seconds,
|
| 1288 |
+
)
|
| 1289 |
+
|
| 1290 |
+
def get_streaming_config(self) -> dict:
|
| 1291 |
+
if self._streaming_mel_processor is None:
|
| 1292 |
+
return {}
|
| 1293 |
+
return self._streaming_mel_processor.get_config()
|
| 1294 |
+
|
| 1295 |
+
def get_streaming_state(self) -> dict:
|
| 1296 |
+
if self._streaming_mel_processor is None:
|
| 1297 |
+
return {}
|
| 1298 |
+
return self._streaming_mel_processor.get_state()
|
| 1299 |
+
|
| 1300 |
+
def get_streaming_snapshot(self) -> dict:
|
| 1301 |
+
if self._streaming_mel_processor is None:
|
| 1302 |
+
return {}
|
| 1303 |
+
return self._streaming_mel_processor.get_snapshot()
|
| 1304 |
+
|
| 1305 |
+
def restore_streaming_snapshot(self, snapshot: dict) -> None:
|
| 1306 |
+
if self._streaming_mel_processor is None:
|
| 1307 |
+
return
|
| 1308 |
+
if not snapshot:
|
| 1309 |
+
return
|
| 1310 |
+
self._streaming_mel_processor.restore_snapshot(snapshot)
|
| 1311 |
+
|
| 1312 |
+
def __call__(
|
| 1313 |
+
self,
|
| 1314 |
+
text: Union[TextInput, PreTokenizedInput, List[TextInput], List[PreTokenizedInput]],
|
| 1315 |
+
images: ImageInput = None,
|
| 1316 |
+
audios: Union[np.ndarray, List[np.ndarray], List[List[np.ndarray]]] = None,
|
| 1317 |
+
audio_parts: Optional[list] = None,
|
| 1318 |
+
max_length: Optional[int] = None,
|
| 1319 |
+
do_pad: Optional[bool] = True,
|
| 1320 |
+
max_slice_nums: int = None,
|
| 1321 |
+
use_image_id: bool = True,
|
| 1322 |
+
stream_input: bool = False,
|
| 1323 |
+
return_tensors: Optional[Union[str, TensorType]] = TensorType.PYTORCH,
|
| 1324 |
+
sampling_rate: Optional[int] = 16000,
|
| 1325 |
+
online_streaming: bool = False,
|
| 1326 |
+
audio_chunk_idx: int = 0,
|
| 1327 |
+
is_last_chunk: bool = False,
|
| 1328 |
+
**kwargs,
|
| 1329 |
+
) -> MiniCPMOBatchFeature:
|
| 1330 |
+
if images is not None:
|
| 1331 |
+
image_inputs = self.process_image(
|
| 1332 |
+
images=images, do_pad=do_pad, max_slice_nums=max_slice_nums, return_tensors=return_tensors
|
| 1333 |
+
)
|
| 1334 |
+
else:
|
| 1335 |
+
image_inputs = None
|
| 1336 |
+
|
| 1337 |
+
audio_features, audio_feature_lens, audio_phs = self.audio_feature_extract(
|
| 1338 |
+
audios,
|
| 1339 |
+
audio_parts,
|
| 1340 |
+
stream_input,
|
| 1341 |
+
sampling_rate,
|
| 1342 |
+
online_streaming=online_streaming,
|
| 1343 |
+
is_last_chunk=is_last_chunk,
|
| 1344 |
+
)
|
| 1345 |
+
|
| 1346 |
+
model_inputs = self._convert_omni_to_inputs(
|
| 1347 |
+
image_inputs,
|
| 1348 |
+
audio_phs,
|
| 1349 |
+
text,
|
| 1350 |
+
max_slice_nums=max_slice_nums,
|
| 1351 |
+
use_image_id=use_image_id,
|
| 1352 |
+
max_length=max_length,
|
| 1353 |
+
**kwargs,
|
| 1354 |
+
)
|
| 1355 |
+
|
| 1356 |
+
model_inputs["audio_features"] = audio_features
|
| 1357 |
+
model_inputs["audio_feature_lens"] = audio_feature_lens
|
| 1358 |
+
|
| 1359 |
+
result = MiniCPMOBatchFeature(data={**model_inputs})
|
| 1360 |
+
|
| 1361 |
+
if online_streaming:
|
| 1362 |
+
result.use_extra_context = True
|
| 1363 |
+
result.prefix_extra_frames = 0 if audio_chunk_idx == 0 else 2
|
| 1364 |
+
result.suffix_extra_frames = 2
|
| 1365 |
+
result.chunk_idx = audio_chunk_idx
|
| 1366 |
+
|
| 1367 |
+
return result
|
| 1368 |
+
|
| 1369 |
+
def audio_feature_extract(
|
| 1370 |
+
self,
|
| 1371 |
+
audios: Union[np.ndarray, List[np.ndarray], List[List[np.ndarray]], None] = None,
|
| 1372 |
+
audio_parts: Optional[list] = None,
|
| 1373 |
+
stream_input: Optional[bool] = False,
|
| 1374 |
+
sampling_rate: Optional[int] = None,
|
| 1375 |
+
chunk_length: Optional[int] = 1,
|
| 1376 |
+
online_streaming: bool = False,
|
| 1377 |
+
is_last_chunk: bool = False,
|
| 1378 |
+
**kwargs,
|
| 1379 |
+
):
|
| 1380 |
+
if audios is None:
|
| 1381 |
+
return [], [], []
|
| 1382 |
+
|
| 1383 |
+
if isinstance(audios, np.ndarray):
|
| 1384 |
+
audios_list = [[audios]]
|
| 1385 |
+
elif isinstance(audios[0], np.ndarray):
|
| 1386 |
+
audios_list = [audios]
|
| 1387 |
+
else:
|
| 1388 |
+
audios_list = audios
|
| 1389 |
+
|
| 1390 |
+
if audio_parts is not None:
|
| 1391 |
+
assert len(audio_parts) == len(audios_list)
|
| 1392 |
+
for parts, audios in zip(audio_parts, audios_list):
|
| 1393 |
+
assert len(parts) == len(audios)
|
| 1394 |
+
|
| 1395 |
+
audio_feature_lens_list = []
|
| 1396 |
+
audio_ph_list = []
|
| 1397 |
+
audio_features_all = []
|
| 1398 |
+
|
| 1399 |
+
# audio placeholder not dependent on audio_parts
|
| 1400 |
+
for audios in audios_list:
|
| 1401 |
+
if audios:
|
| 1402 |
+
audio_ph_list.append(
|
| 1403 |
+
[
|
| 1404 |
+
self.get_audio_placeholder(len(a), chunk_input=stream_input, chunk_length=chunk_length)
|
| 1405 |
+
for a in audios
|
| 1406 |
+
]
|
| 1407 |
+
)
|
| 1408 |
+
else:
|
| 1409 |
+
audio_ph_list.append([])
|
| 1410 |
+
|
| 1411 |
+
for idx, audios in enumerate(audios_list):
|
| 1412 |
+
if audio_parts is not None:
|
| 1413 |
+
# same audio part merge
|
| 1414 |
+
audio_part = audio_parts[idx]
|
| 1415 |
+
merge_audio = []
|
| 1416 |
+
cur_audio = []
|
| 1417 |
+
for aid, (part, audio) in enumerate(zip(audio_part, audios)):
|
| 1418 |
+
if aid == 0 or audio_part[aid] == audio_part[aid - 1]:
|
| 1419 |
+
cur_audio.append(audio)
|
| 1420 |
+
else:
|
| 1421 |
+
merge_audio.append(np.hstack(cur_audio))
|
| 1422 |
+
cur_audio = [audio]
|
| 1423 |
+
if cur_audio:
|
| 1424 |
+
merge_audio.append(np.hstack(cur_audio))
|
| 1425 |
+
else:
|
| 1426 |
+
merge_audio = audios
|
| 1427 |
+
|
| 1428 |
+
# If the audio exceeds 30 seconds, split it into chunks every 30 seconds.
|
| 1429 |
+
final_merge_audio = []
|
| 1430 |
+
max_audio_inp_len = 30 * sampling_rate
|
| 1431 |
+
for audio in merge_audio:
|
| 1432 |
+
if len(audio) <= max_audio_inp_len:
|
| 1433 |
+
final_merge_audio.append(audio)
|
| 1434 |
+
else:
|
| 1435 |
+
for i in range(math.ceil(len(audio) / max_audio_inp_len)):
|
| 1436 |
+
final_merge_audio.append(audio[i * max_audio_inp_len : (i + 1) * max_audio_inp_len])
|
| 1437 |
+
|
| 1438 |
+
audio_feature_lens = []
|
| 1439 |
+
|
| 1440 |
+
if audios:
|
| 1441 |
+
if online_streaming:
|
| 1442 |
+
# online streaming: only support single audio, directly use process_audio_streaming return format
|
| 1443 |
+
assert (
|
| 1444 |
+
len(final_merge_audio) == 1
|
| 1445 |
+
), f"online streaming mode only supports single audio, currently there are {len(final_merge_audio)}"
|
| 1446 |
+
audio = final_merge_audio[0]
|
| 1447 |
+
result = self.process_audio_streaming(
|
| 1448 |
+
audio, reset=False, return_batch_feature=True, is_last_chunk=is_last_chunk
|
| 1449 |
+
)
|
| 1450 |
+
audio_features_all.append(
|
| 1451 |
+
result["audio_features"].squeeze(0)
|
| 1452 |
+
) # [1, 80, T] -> [80, T], keep consistent with batch processing
|
| 1453 |
+
audio_feature_lens_list.append(result["audio_feature_lens"][0])
|
| 1454 |
+
else:
|
| 1455 |
+
# batch processing
|
| 1456 |
+
audio_inputs = self.audio_processor(
|
| 1457 |
+
final_merge_audio,
|
| 1458 |
+
sampling_rate=sampling_rate,
|
| 1459 |
+
return_attention_mask=True,
|
| 1460 |
+
padding="max_length",
|
| 1461 |
+
return_tensors="pt",
|
| 1462 |
+
**kwargs,
|
| 1463 |
+
)
|
| 1464 |
+
audio_feature = audio_inputs["input_features"]
|
| 1465 |
+
actual_lens = audio_inputs["attention_mask"].sum(dim=1)
|
| 1466 |
+
|
| 1467 |
+
for feat, lens in zip(audio_feature, actual_lens):
|
| 1468 |
+
audio_features_all.append(feat[:, :lens])
|
| 1469 |
+
audio_feature_lens.append(lens)
|
| 1470 |
+
|
| 1471 |
+
audio_feature_lens = torch.hstack(audio_feature_lens)
|
| 1472 |
+
audio_feature_lens_list.append(audio_feature_lens)
|
| 1473 |
+
else:
|
| 1474 |
+
audio_feature_lens_list.append([])
|
| 1475 |
+
|
| 1476 |
+
if audio_features_all:
|
| 1477 |
+
audio_features = [i.permute(1, 0) for i in audio_features_all]
|
| 1478 |
+
audio_features = torch.nn.utils.rnn.pad_sequence(
|
| 1479 |
+
audio_features, batch_first=True, padding_value=0.0
|
| 1480 |
+
).permute(0, 2, 1)
|
| 1481 |
+
else:
|
| 1482 |
+
audio_features = []
|
| 1483 |
+
|
| 1484 |
+
return audio_features, audio_feature_lens_list, audio_ph_list
|
| 1485 |
+
|
| 1486 |
+
def _convert(self, input_str, max_inp_length: Optional[int] = None):
|
| 1487 |
+
old_input_ids = self.tokenizer.encode(input_str)
|
| 1488 |
+
|
| 1489 |
+
listen_token_id = self.tokenizer.convert_tokens_to_ids("<|listen|>")
|
| 1490 |
+
input_ids = []
|
| 1491 |
+
for token in old_input_ids:
|
| 1492 |
+
if token != listen_token_id:
|
| 1493 |
+
input_ids.append(token)
|
| 1494 |
+
|
| 1495 |
+
if max_inp_length is not None:
|
| 1496 |
+
input_ids = input_ids[:max_inp_length]
|
| 1497 |
+
input_ids = torch.tensor(input_ids, dtype=torch.int32)
|
| 1498 |
+
|
| 1499 |
+
## image bound
|
| 1500 |
+
start_cond = (input_ids == self.tokenizer.im_start_id) | (input_ids == self.tokenizer.slice_start_id)
|
| 1501 |
+
end_cond = (input_ids == self.tokenizer.im_end_id) | (input_ids == self.tokenizer.slice_end_id)
|
| 1502 |
+
|
| 1503 |
+
image_start_idx = torch.where(start_cond)[0]
|
| 1504 |
+
image_start_idx += 1
|
| 1505 |
+
image_end_idx = torch.where(end_cond)[0]
|
| 1506 |
+
|
| 1507 |
+
valid_image_nums = max(len(image_start_idx), len(image_end_idx))
|
| 1508 |
+
|
| 1509 |
+
image_bounds = torch.hstack(
|
| 1510 |
+
[
|
| 1511 |
+
image_start_idx[:valid_image_nums].unsqueeze(-1),
|
| 1512 |
+
image_end_idx[:valid_image_nums].unsqueeze(-1),
|
| 1513 |
+
]
|
| 1514 |
+
)
|
| 1515 |
+
|
| 1516 |
+
## audio bound
|
| 1517 |
+
audio_start_idx = torch.where(input_ids == self.tokenizer.audio_start_id)[0]
|
| 1518 |
+
audio_end_idx = torch.where(input_ids == self.tokenizer.audio_end_id)[0]
|
| 1519 |
+
assert len(audio_start_idx) == len(audio_end_idx)
|
| 1520 |
+
audio_bounds = torch.hstack([(audio_start_idx + 1).unsqueeze(-1), audio_end_idx.unsqueeze(-1)])
|
| 1521 |
+
|
| 1522 |
+
spk_start_idx = torch.where(input_ids == self.tokenizer.spk_start_id)[0]
|
| 1523 |
+
spk_end_idx = torch.where(input_ids == self.tokenizer.spk_end_id)[0]
|
| 1524 |
+
assert len(spk_start_idx) == len(spk_end_idx)
|
| 1525 |
+
spk_bounds = torch.hstack([(spk_start_idx + 1).unsqueeze(-1), spk_end_idx.unsqueeze(-1)])
|
| 1526 |
+
|
| 1527 |
+
return input_ids, image_bounds, audio_bounds, spk_bounds
|
| 1528 |
+
|
| 1529 |
+
def _convert_omni_to_inputs(
|
| 1530 |
+
self,
|
| 1531 |
+
images,
|
| 1532 |
+
audio_phs,
|
| 1533 |
+
texts: Union[str, List[str]],
|
| 1534 |
+
truncation=None,
|
| 1535 |
+
max_length=None,
|
| 1536 |
+
max_slice_nums=None,
|
| 1537 |
+
use_image_id=None,
|
| 1538 |
+
return_tensors=None,
|
| 1539 |
+
**kwargs,
|
| 1540 |
+
):
|
| 1541 |
+
if images is None and audio_phs is None:
|
| 1542 |
+
model_inputs = self.tokenizer(
|
| 1543 |
+
texts, return_tensors=return_tensors, truncation=truncation, max_length=max_length, **kwargs
|
| 1544 |
+
)
|
| 1545 |
+
return MiniCPMOBatchFeature(data={**model_inputs})
|
| 1546 |
+
|
| 1547 |
+
image_pattern = "<image>./</image>"
|
| 1548 |
+
audio_pattern = "<audio>./</audio>"
|
| 1549 |
+
split_pattern = f"({image_pattern}|{audio_pattern})"
|
| 1550 |
+
|
| 1551 |
+
if isinstance(texts, str):
|
| 1552 |
+
texts = [texts]
|
| 1553 |
+
|
| 1554 |
+
bs = len(texts)
|
| 1555 |
+
if images is not None:
|
| 1556 |
+
images, image_sizes, tgt_sizes = images["pixel_values"], images["image_sizes"], images["tgt_sizes"]
|
| 1557 |
+
else:
|
| 1558 |
+
images, image_sizes, tgt_sizes = [[]] * bs, [[]] * bs, [[]] * bs
|
| 1559 |
+
|
| 1560 |
+
input_ids_list = []
|
| 1561 |
+
image_bounds_list = []
|
| 1562 |
+
audio_bounds_list = []
|
| 1563 |
+
spk_bounds_list = []
|
| 1564 |
+
|
| 1565 |
+
for index, text in enumerate(texts):
|
| 1566 |
+
text_chunks = re.split(split_pattern, text)
|
| 1567 |
+
|
| 1568 |
+
image_tags = re.findall(image_pattern, text)
|
| 1569 |
+
audio_tags = re.findall(audio_pattern, text)
|
| 1570 |
+
|
| 1571 |
+
if image_tags:
|
| 1572 |
+
assert images is not None
|
| 1573 |
+
assert len(image_tags) == len(image_sizes[index])
|
| 1574 |
+
if audio_tags:
|
| 1575 |
+
assert audio_phs is not None
|
| 1576 |
+
assert len(audio_tags) == len(audio_phs[index])
|
| 1577 |
+
|
| 1578 |
+
image_id = 0
|
| 1579 |
+
audio_id = 0
|
| 1580 |
+
for i, chunk in enumerate(text_chunks):
|
| 1581 |
+
if chunk == image_pattern:
|
| 1582 |
+
image_placeholder = self.image_processor.get_slice_image_placeholder(
|
| 1583 |
+
image_sizes[index][image_id], image_id, max_slice_nums, use_image_id
|
| 1584 |
+
)
|
| 1585 |
+
image_id += 1
|
| 1586 |
+
text_chunks[i] = image_placeholder
|
| 1587 |
+
elif chunk == audio_pattern:
|
| 1588 |
+
audio_placeholder = audio_phs[index][audio_id]
|
| 1589 |
+
audio_id += 1
|
| 1590 |
+
text_chunks[i] = audio_placeholder
|
| 1591 |
+
|
| 1592 |
+
final_text = "".join(text_chunks)
|
| 1593 |
+
input_ids, image_bounds, audio_bounds, spk_bounds = self._convert(final_text, max_length)
|
| 1594 |
+
|
| 1595 |
+
input_ids_list.append(input_ids)
|
| 1596 |
+
image_bounds_list.append(image_bounds)
|
| 1597 |
+
audio_bounds_list.append(audio_bounds)
|
| 1598 |
+
spk_bounds_list.append(spk_bounds)
|
| 1599 |
+
|
| 1600 |
+
padded_input_ids, padding_lengths = self.pad(input_ids_list, padding_side="left")
|
| 1601 |
+
attention_mask = torch.ones_like(padded_input_ids, dtype=torch.bool)
|
| 1602 |
+
for i, length in enumerate(padding_lengths):
|
| 1603 |
+
image_bounds_list[i] = image_bounds_list[i] + length
|
| 1604 |
+
audio_bounds_list[i] = audio_bounds_list[i] + length
|
| 1605 |
+
spk_bounds_list[i] = spk_bounds_list[i] + length
|
| 1606 |
+
attention_mask[i, :length] = False
|
| 1607 |
+
|
| 1608 |
+
data = {
|
| 1609 |
+
"input_ids": padded_input_ids,
|
| 1610 |
+
"attention_mask": attention_mask,
|
| 1611 |
+
"pixel_values": images,
|
| 1612 |
+
"image_sizes": image_sizes,
|
| 1613 |
+
"image_bound": image_bounds_list,
|
| 1614 |
+
"tgt_sizes": tgt_sizes,
|
| 1615 |
+
"audio_bounds": audio_bounds_list,
|
| 1616 |
+
"spk_bounds": spk_bounds_list,
|
| 1617 |
+
}
|
| 1618 |
+
|
| 1619 |
+
return data
|
| 1620 |
+
|
| 1621 |
+
def pad(self, inputs, max_length=None, padding_value=0, padding_side="left"):
|
| 1622 |
+
items = []
|
| 1623 |
+
if isinstance(inputs[0], list):
|
| 1624 |
+
assert isinstance(inputs[0][0], torch.Tensor)
|
| 1625 |
+
for it in inputs:
|
| 1626 |
+
for tr in it:
|
| 1627 |
+
items.append(tr)
|
| 1628 |
+
else:
|
| 1629 |
+
assert isinstance(inputs[0], torch.Tensor)
|
| 1630 |
+
items = inputs
|
| 1631 |
+
|
| 1632 |
+
batch_size = len(items)
|
| 1633 |
+
shape = items[0].shape
|
| 1634 |
+
dim = len(shape)
|
| 1635 |
+
assert dim <= 2
|
| 1636 |
+
if max_length is None:
|
| 1637 |
+
max_length = 0
|
| 1638 |
+
max_length = max(max_length, max(item.shape[-1] for item in items))
|
| 1639 |
+
min_length = min(item.shape[-1] for item in items)
|
| 1640 |
+
dtype = items[0].dtype
|
| 1641 |
+
|
| 1642 |
+
if dim == 0:
|
| 1643 |
+
return torch.stack([item for item in items], dim=0), [0]
|
| 1644 |
+
elif dim == 1:
|
| 1645 |
+
if max_length == min_length:
|
| 1646 |
+
return torch.stack([item for item in items], dim=0), [0] * batch_size
|
| 1647 |
+
tensor = torch.zeros((batch_size, max_length), dtype=dtype) + padding_value
|
| 1648 |
+
else:
|
| 1649 |
+
tensor = torch.zeros((batch_size, max_length, shape[-1]), dtype=dtype) + padding_value
|
| 1650 |
+
|
| 1651 |
+
padding_length = []
|
| 1652 |
+
for i, item in enumerate(items):
|
| 1653 |
+
if dim == 1:
|
| 1654 |
+
if padding_side == "left":
|
| 1655 |
+
tensor[i, -len(item) :] = item.clone()
|
| 1656 |
+
else:
|
| 1657 |
+
tensor[i, : len(item)] = item.clone()
|
| 1658 |
+
elif dim == 2:
|
| 1659 |
+
if padding_side == "left":
|
| 1660 |
+
tensor[i, -len(item) :, :] = item.clone()
|
| 1661 |
+
else:
|
| 1662 |
+
tensor[i, : len(item), :] = item.clone()
|
| 1663 |
+
padding_length.append(tensor.shape[-1] - len(item))
|
| 1664 |
+
|
| 1665 |
+
return tensor, padding_length
|
processor_config.json
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"audio_processor": {
|
| 3 |
+
"audio_pool_step": 5,
|
| 4 |
+
"auto_map": {
|
| 5 |
+
"AutoFeatureExtractor": "processing_minicpmo.MiniCPMAAudioProcessor",
|
| 6 |
+
"AutoImageProcessor": "processing_minicpmo.MiniCPMVImageProcessor",
|
| 7 |
+
"AutoProcessor": "processing_minicpmo.MiniCPMOProcessor"
|
| 8 |
+
},
|
| 9 |
+
"chunk_length": 30,
|
| 10 |
+
"dither": 0.0,
|
| 11 |
+
"feature_extractor_type": "WhisperFeatureExtractor",
|
| 12 |
+
"feature_size": 80,
|
| 13 |
+
"hop_length": 160,
|
| 14 |
+
"im_end": "</image>",
|
| 15 |
+
"im_id_end": "</image_id>",
|
| 16 |
+
"im_id_start": "<image_id>",
|
| 17 |
+
"im_start": "<image>",
|
| 18 |
+
"image_feature_size": 64,
|
| 19 |
+
"image_processor_type": "MiniCPMVImageProcessor",
|
| 20 |
+
"max_slice_nums": 9,
|
| 21 |
+
"n_fft": 400,
|
| 22 |
+
"n_samples": 480000,
|
| 23 |
+
"nb_max_frames": 3000,
|
| 24 |
+
"norm_mean": [
|
| 25 |
+
0.5,
|
| 26 |
+
0.5,
|
| 27 |
+
0.5
|
| 28 |
+
],
|
| 29 |
+
"norm_std": [
|
| 30 |
+
0.5,
|
| 31 |
+
0.5,
|
| 32 |
+
0.5
|
| 33 |
+
],
|
| 34 |
+
"padding_side": "right",
|
| 35 |
+
"padding_value": 0.0,
|
| 36 |
+
"patch_size": 14,
|
| 37 |
+
"return_attention_mask": false,
|
| 38 |
+
"sampling_rate": 16000,
|
| 39 |
+
"scale_resolution": 448,
|
| 40 |
+
"slice_end": "</slice>",
|
| 41 |
+
"slice_mode": true,
|
| 42 |
+
"slice_start": "<slice>",
|
| 43 |
+
"unk": "<unk>",
|
| 44 |
+
"use_image_id": true,
|
| 45 |
+
"version": 4.5
|
| 46 |
+
},
|
| 47 |
+
"image_processor": {
|
| 48 |
+
"audio_pool_step": 5,
|
| 49 |
+
"auto_map": {
|
| 50 |
+
"AutoFeatureExtractor": "processing_minicpmo.MiniCPMAAudioProcessor",
|
| 51 |
+
"AutoImageProcessor": "processing_minicpmo.MiniCPMVImageProcessor",
|
| 52 |
+
"AutoProcessor": "processing_minicpmo.MiniCPMOProcessor"
|
| 53 |
+
},
|
| 54 |
+
"im_end": "</image>",
|
| 55 |
+
"im_id_end": "</image_id>",
|
| 56 |
+
"im_id_start": "<image_id>",
|
| 57 |
+
"im_start": "<image>",
|
| 58 |
+
"image_feature_size": 64,
|
| 59 |
+
"image_processor_type": "MiniCPMOImageProcessor",
|
| 60 |
+
"max_slice_nums": 9,
|
| 61 |
+
"mean": [
|
| 62 |
+
[
|
| 63 |
+
[
|
| 64 |
+
0.5,
|
| 65 |
+
0.5,
|
| 66 |
+
0.5
|
| 67 |
+
]
|
| 68 |
+
]
|
| 69 |
+
],
|
| 70 |
+
"patch_size": 14,
|
| 71 |
+
"scale_resolution": 448,
|
| 72 |
+
"slice_end": "</slice>",
|
| 73 |
+
"slice_mode": true,
|
| 74 |
+
"slice_start": "<slice>",
|
| 75 |
+
"std": [
|
| 76 |
+
[
|
| 77 |
+
[
|
| 78 |
+
0.5,
|
| 79 |
+
0.5,
|
| 80 |
+
0.5
|
| 81 |
+
]
|
| 82 |
+
]
|
| 83 |
+
],
|
| 84 |
+
"unk": "<unk>",
|
| 85 |
+
"use_image_id": true,
|
| 86 |
+
"version": 4.5
|
| 87 |
+
},
|
| 88 |
+
"processor_class": "MiniCPMOProcessor"
|
| 89 |
+
}
|
special_tokens_map.json
ADDED
|
@@ -0,0 +1,580 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"additional_special_tokens": [
|
| 3 |
+
{
|
| 4 |
+
"content": "<unk>",
|
| 5 |
+
"lstrip": false,
|
| 6 |
+
"normalized": false,
|
| 7 |
+
"rstrip": false,
|
| 8 |
+
"single_word": false
|
| 9 |
+
},
|
| 10 |
+
{
|
| 11 |
+
"content": "<image>",
|
| 12 |
+
"lstrip": false,
|
| 13 |
+
"normalized": false,
|
| 14 |
+
"rstrip": false,
|
| 15 |
+
"single_word": false
|
| 16 |
+
},
|
| 17 |
+
{
|
| 18 |
+
"content": "</image>",
|
| 19 |
+
"lstrip": false,
|
| 20 |
+
"normalized": false,
|
| 21 |
+
"rstrip": false,
|
| 22 |
+
"single_word": false
|
| 23 |
+
},
|
| 24 |
+
{
|
| 25 |
+
"content": "<ref>",
|
| 26 |
+
"lstrip": false,
|
| 27 |
+
"normalized": false,
|
| 28 |
+
"rstrip": false,
|
| 29 |
+
"single_word": false
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"content": "</ref>",
|
| 33 |
+
"lstrip": false,
|
| 34 |
+
"normalized": false,
|
| 35 |
+
"rstrip": false,
|
| 36 |
+
"single_word": false
|
| 37 |
+
},
|
| 38 |
+
{
|
| 39 |
+
"content": "<box>",
|
| 40 |
+
"lstrip": false,
|
| 41 |
+
"normalized": false,
|
| 42 |
+
"rstrip": false,
|
| 43 |
+
"single_word": false
|
| 44 |
+
},
|
| 45 |
+
{
|
| 46 |
+
"content": "</box>",
|
| 47 |
+
"lstrip": false,
|
| 48 |
+
"normalized": false,
|
| 49 |
+
"rstrip": false,
|
| 50 |
+
"single_word": false
|
| 51 |
+
},
|
| 52 |
+
{
|
| 53 |
+
"content": "<quad>",
|
| 54 |
+
"lstrip": false,
|
| 55 |
+
"normalized": false,
|
| 56 |
+
"rstrip": false,
|
| 57 |
+
"single_word": false
|
| 58 |
+
},
|
| 59 |
+
{
|
| 60 |
+
"content": "</quad>",
|
| 61 |
+
"lstrip": false,
|
| 62 |
+
"normalized": false,
|
| 63 |
+
"rstrip": false,
|
| 64 |
+
"single_word": false
|
| 65 |
+
},
|
| 66 |
+
{
|
| 67 |
+
"content": "<point>",
|
| 68 |
+
"lstrip": false,
|
| 69 |
+
"normalized": false,
|
| 70 |
+
"rstrip": false,
|
| 71 |
+
"single_word": false
|
| 72 |
+
},
|
| 73 |
+
{
|
| 74 |
+
"content": "</point>",
|
| 75 |
+
"lstrip": false,
|
| 76 |
+
"normalized": false,
|
| 77 |
+
"rstrip": false,
|
| 78 |
+
"single_word": false
|
| 79 |
+
},
|
| 80 |
+
{
|
| 81 |
+
"content": "<slice>",
|
| 82 |
+
"lstrip": false,
|
| 83 |
+
"normalized": false,
|
| 84 |
+
"rstrip": false,
|
| 85 |
+
"single_word": false
|
| 86 |
+
},
|
| 87 |
+
{
|
| 88 |
+
"content": "</slice>",
|
| 89 |
+
"lstrip": false,
|
| 90 |
+
"normalized": false,
|
| 91 |
+
"rstrip": false,
|
| 92 |
+
"single_word": false
|
| 93 |
+
},
|
| 94 |
+
{
|
| 95 |
+
"content": "<image_id>",
|
| 96 |
+
"lstrip": false,
|
| 97 |
+
"normalized": false,
|
| 98 |
+
"rstrip": false,
|
| 99 |
+
"single_word": false
|
| 100 |
+
},
|
| 101 |
+
{
|
| 102 |
+
"content": "</image_id>",
|
| 103 |
+
"lstrip": false,
|
| 104 |
+
"normalized": false,
|
| 105 |
+
"rstrip": false,
|
| 106 |
+
"single_word": false
|
| 107 |
+
},
|
| 108 |
+
{
|
| 109 |
+
"content": "<unit>",
|
| 110 |
+
"lstrip": false,
|
| 111 |
+
"normalized": false,
|
| 112 |
+
"rstrip": false,
|
| 113 |
+
"single_word": false
|
| 114 |
+
},
|
| 115 |
+
{
|
| 116 |
+
"content": "</unit>",
|
| 117 |
+
"lstrip": false,
|
| 118 |
+
"normalized": false,
|
| 119 |
+
"rstrip": false,
|
| 120 |
+
"single_word": false
|
| 121 |
+
},
|
| 122 |
+
{
|
| 123 |
+
"content": "<answer>",
|
| 124 |
+
"lstrip": false,
|
| 125 |
+
"normalized": false,
|
| 126 |
+
"rstrip": false,
|
| 127 |
+
"single_word": false
|
| 128 |
+
},
|
| 129 |
+
{
|
| 130 |
+
"content": "</answer>",
|
| 131 |
+
"lstrip": false,
|
| 132 |
+
"normalized": false,
|
| 133 |
+
"rstrip": false,
|
| 134 |
+
"single_word": false
|
| 135 |
+
},
|
| 136 |
+
{
|
| 137 |
+
"content": "<focus>",
|
| 138 |
+
"lstrip": false,
|
| 139 |
+
"normalized": false,
|
| 140 |
+
"rstrip": false,
|
| 141 |
+
"single_word": false
|
| 142 |
+
},
|
| 143 |
+
{
|
| 144 |
+
"content": "</focus>",
|
| 145 |
+
"lstrip": false,
|
| 146 |
+
"normalized": false,
|
| 147 |
+
"rstrip": false,
|
| 148 |
+
"single_word": false
|
| 149 |
+
},
|
| 150 |
+
{
|
| 151 |
+
"content": "<line>",
|
| 152 |
+
"lstrip": false,
|
| 153 |
+
"normalized": false,
|
| 154 |
+
"rstrip": false,
|
| 155 |
+
"single_word": false
|
| 156 |
+
},
|
| 157 |
+
{
|
| 158 |
+
"content": "</line>",
|
| 159 |
+
"lstrip": false,
|
| 160 |
+
"normalized": false,
|
| 161 |
+
"rstrip": false,
|
| 162 |
+
"single_word": false
|
| 163 |
+
},
|
| 164 |
+
{
|
| 165 |
+
"content": "<perception>",
|
| 166 |
+
"lstrip": false,
|
| 167 |
+
"normalized": false,
|
| 168 |
+
"rstrip": false,
|
| 169 |
+
"single_word": false
|
| 170 |
+
},
|
| 171 |
+
{
|
| 172 |
+
"content": "</perception>",
|
| 173 |
+
"lstrip": false,
|
| 174 |
+
"normalized": false,
|
| 175 |
+
"rstrip": false,
|
| 176 |
+
"single_word": false
|
| 177 |
+
},
|
| 178 |
+
{
|
| 179 |
+
"content": "<source_image>",
|
| 180 |
+
"lstrip": false,
|
| 181 |
+
"normalized": false,
|
| 182 |
+
"rstrip": false,
|
| 183 |
+
"single_word": false
|
| 184 |
+
},
|
| 185 |
+
{
|
| 186 |
+
"content": "</source_image>",
|
| 187 |
+
"lstrip": false,
|
| 188 |
+
"normalized": false,
|
| 189 |
+
"rstrip": false,
|
| 190 |
+
"single_word": false
|
| 191 |
+
},
|
| 192 |
+
{
|
| 193 |
+
"content": "<image_save_to>",
|
| 194 |
+
"lstrip": false,
|
| 195 |
+
"normalized": false,
|
| 196 |
+
"rstrip": false,
|
| 197 |
+
"single_word": false
|
| 198 |
+
},
|
| 199 |
+
{
|
| 200 |
+
"content": "</image_save_to>",
|
| 201 |
+
"lstrip": false,
|
| 202 |
+
"normalized": false,
|
| 203 |
+
"rstrip": false,
|
| 204 |
+
"single_word": false
|
| 205 |
+
},
|
| 206 |
+
{
|
| 207 |
+
"content": "<|audio_start|>",
|
| 208 |
+
"lstrip": false,
|
| 209 |
+
"normalized": false,
|
| 210 |
+
"rstrip": false,
|
| 211 |
+
"single_word": false
|
| 212 |
+
},
|
| 213 |
+
{
|
| 214 |
+
"content": "<|audio|>",
|
| 215 |
+
"lstrip": false,
|
| 216 |
+
"normalized": false,
|
| 217 |
+
"rstrip": false,
|
| 218 |
+
"single_word": false
|
| 219 |
+
},
|
| 220 |
+
{
|
| 221 |
+
"content": "<|audio_end|>",
|
| 222 |
+
"lstrip": false,
|
| 223 |
+
"normalized": false,
|
| 224 |
+
"rstrip": false,
|
| 225 |
+
"single_word": false
|
| 226 |
+
},
|
| 227 |
+
{
|
| 228 |
+
"content": "<|spk_bos|>",
|
| 229 |
+
"lstrip": false,
|
| 230 |
+
"normalized": false,
|
| 231 |
+
"rstrip": false,
|
| 232 |
+
"single_word": false
|
| 233 |
+
},
|
| 234 |
+
{
|
| 235 |
+
"content": "<|spk|>",
|
| 236 |
+
"lstrip": false,
|
| 237 |
+
"normalized": false,
|
| 238 |
+
"rstrip": false,
|
| 239 |
+
"single_word": false
|
| 240 |
+
},
|
| 241 |
+
{
|
| 242 |
+
"content": "<|spk_eos|>",
|
| 243 |
+
"lstrip": false,
|
| 244 |
+
"normalized": false,
|
| 245 |
+
"rstrip": false,
|
| 246 |
+
"single_word": false
|
| 247 |
+
},
|
| 248 |
+
{
|
| 249 |
+
"content": "<|tts_bos|>",
|
| 250 |
+
"lstrip": false,
|
| 251 |
+
"normalized": false,
|
| 252 |
+
"rstrip": false,
|
| 253 |
+
"single_word": false
|
| 254 |
+
},
|
| 255 |
+
{
|
| 256 |
+
"content": "<|tts_eos|>",
|
| 257 |
+
"lstrip": false,
|
| 258 |
+
"normalized": false,
|
| 259 |
+
"rstrip": false,
|
| 260 |
+
"single_word": false
|
| 261 |
+
},
|
| 262 |
+
{
|
| 263 |
+
"content": "<|listen|>",
|
| 264 |
+
"lstrip": false,
|
| 265 |
+
"normalized": false,
|
| 266 |
+
"rstrip": false,
|
| 267 |
+
"single_word": false
|
| 268 |
+
},
|
| 269 |
+
{
|
| 270 |
+
"content": "<|speak|>",
|
| 271 |
+
"lstrip": false,
|
| 272 |
+
"normalized": false,
|
| 273 |
+
"rstrip": false,
|
| 274 |
+
"single_word": false
|
| 275 |
+
},
|
| 276 |
+
{
|
| 277 |
+
"content": "<|interrupt|>",
|
| 278 |
+
"lstrip": false,
|
| 279 |
+
"normalized": false,
|
| 280 |
+
"rstrip": false,
|
| 281 |
+
"single_word": false
|
| 282 |
+
},
|
| 283 |
+
{
|
| 284 |
+
"content": "<|vad_start|>",
|
| 285 |
+
"lstrip": false,
|
| 286 |
+
"normalized": false,
|
| 287 |
+
"rstrip": false,
|
| 288 |
+
"single_word": false
|
| 289 |
+
},
|
| 290 |
+
{
|
| 291 |
+
"content": "<|vad_end|>",
|
| 292 |
+
"lstrip": false,
|
| 293 |
+
"normalized": false,
|
| 294 |
+
"rstrip": false,
|
| 295 |
+
"single_word": false
|
| 296 |
+
},
|
| 297 |
+
{
|
| 298 |
+
"content": "<|emotion_start|>",
|
| 299 |
+
"lstrip": false,
|
| 300 |
+
"normalized": false,
|
| 301 |
+
"rstrip": false,
|
| 302 |
+
"single_word": false
|
| 303 |
+
},
|
| 304 |
+
{
|
| 305 |
+
"content": "<|emotion_end|>",
|
| 306 |
+
"lstrip": false,
|
| 307 |
+
"normalized": false,
|
| 308 |
+
"rstrip": false,
|
| 309 |
+
"single_word": false
|
| 310 |
+
},
|
| 311 |
+
{
|
| 312 |
+
"content": "<|speed_start|>",
|
| 313 |
+
"lstrip": false,
|
| 314 |
+
"normalized": false,
|
| 315 |
+
"rstrip": false,
|
| 316 |
+
"single_word": false
|
| 317 |
+
},
|
| 318 |
+
{
|
| 319 |
+
"content": "<|speed_end|>",
|
| 320 |
+
"lstrip": false,
|
| 321 |
+
"normalized": false,
|
| 322 |
+
"rstrip": false,
|
| 323 |
+
"single_word": false
|
| 324 |
+
},
|
| 325 |
+
{
|
| 326 |
+
"content": "<|pitch_start|>",
|
| 327 |
+
"lstrip": false,
|
| 328 |
+
"normalized": false,
|
| 329 |
+
"rstrip": false,
|
| 330 |
+
"single_word": false
|
| 331 |
+
},
|
| 332 |
+
{
|
| 333 |
+
"content": "<|pitch_end|>",
|
| 334 |
+
"lstrip": false,
|
| 335 |
+
"normalized": false,
|
| 336 |
+
"rstrip": false,
|
| 337 |
+
"single_word": false
|
| 338 |
+
},
|
| 339 |
+
{
|
| 340 |
+
"content": "<|turn_bos|>",
|
| 341 |
+
"lstrip": false,
|
| 342 |
+
"normalized": false,
|
| 343 |
+
"rstrip": false,
|
| 344 |
+
"single_word": false
|
| 345 |
+
},
|
| 346 |
+
{
|
| 347 |
+
"content": "<|turn_eos|>",
|
| 348 |
+
"lstrip": false,
|
| 349 |
+
"normalized": false,
|
| 350 |
+
"rstrip": false,
|
| 351 |
+
"single_word": false
|
| 352 |
+
},
|
| 353 |
+
{
|
| 354 |
+
"content": "<|chunk_eos|>",
|
| 355 |
+
"lstrip": false,
|
| 356 |
+
"normalized": false,
|
| 357 |
+
"rstrip": false,
|
| 358 |
+
"single_word": false
|
| 359 |
+
},
|
| 360 |
+
{
|
| 361 |
+
"content": "<|chunk_bos|>",
|
| 362 |
+
"lstrip": false,
|
| 363 |
+
"normalized": false,
|
| 364 |
+
"rstrip": false,
|
| 365 |
+
"single_word": false
|
| 366 |
+
},
|
| 367 |
+
{
|
| 368 |
+
"content": "<|chunk_tts_bos|>",
|
| 369 |
+
"lstrip": false,
|
| 370 |
+
"normalized": false,
|
| 371 |
+
"rstrip": false,
|
| 372 |
+
"single_word": false
|
| 373 |
+
},
|
| 374 |
+
{
|
| 375 |
+
"content": "<|chunk_tts_eos|>",
|
| 376 |
+
"lstrip": false,
|
| 377 |
+
"normalized": false,
|
| 378 |
+
"rstrip": false,
|
| 379 |
+
"single_word": false
|
| 380 |
+
},
|
| 381 |
+
{
|
| 382 |
+
"content": "<|tts_pad|>",
|
| 383 |
+
"lstrip": false,
|
| 384 |
+
"normalized": false,
|
| 385 |
+
"rstrip": false,
|
| 386 |
+
"single_word": false
|
| 387 |
+
},
|
| 388 |
+
{
|
| 389 |
+
"content": "<|timbre_7|>",
|
| 390 |
+
"lstrip": false,
|
| 391 |
+
"normalized": false,
|
| 392 |
+
"rstrip": false,
|
| 393 |
+
"single_word": false
|
| 394 |
+
},
|
| 395 |
+
{
|
| 396 |
+
"content": "<|timbre_8|>",
|
| 397 |
+
"lstrip": false,
|
| 398 |
+
"normalized": false,
|
| 399 |
+
"rstrip": false,
|
| 400 |
+
"single_word": false
|
| 401 |
+
},
|
| 402 |
+
{
|
| 403 |
+
"content": "<|timbre_9|>",
|
| 404 |
+
"lstrip": false,
|
| 405 |
+
"normalized": false,
|
| 406 |
+
"rstrip": false,
|
| 407 |
+
"single_word": false
|
| 408 |
+
},
|
| 409 |
+
{
|
| 410 |
+
"content": "<|timbre_10|>",
|
| 411 |
+
"lstrip": false,
|
| 412 |
+
"normalized": false,
|
| 413 |
+
"rstrip": false,
|
| 414 |
+
"single_word": false
|
| 415 |
+
},
|
| 416 |
+
{
|
| 417 |
+
"content": "<|timbre_11|>",
|
| 418 |
+
"lstrip": false,
|
| 419 |
+
"normalized": false,
|
| 420 |
+
"rstrip": false,
|
| 421 |
+
"single_word": false
|
| 422 |
+
},
|
| 423 |
+
{
|
| 424 |
+
"content": "<|timbre_12|>",
|
| 425 |
+
"lstrip": false,
|
| 426 |
+
"normalized": false,
|
| 427 |
+
"rstrip": false,
|
| 428 |
+
"single_word": false
|
| 429 |
+
},
|
| 430 |
+
{
|
| 431 |
+
"content": "<|timbre_13|>",
|
| 432 |
+
"lstrip": false,
|
| 433 |
+
"normalized": false,
|
| 434 |
+
"rstrip": false,
|
| 435 |
+
"single_word": false
|
| 436 |
+
},
|
| 437 |
+
{
|
| 438 |
+
"content": "<|timbre_14|>",
|
| 439 |
+
"lstrip": false,
|
| 440 |
+
"normalized": false,
|
| 441 |
+
"rstrip": false,
|
| 442 |
+
"single_word": false
|
| 443 |
+
},
|
| 444 |
+
{
|
| 445 |
+
"content": "<|timbre_15|>",
|
| 446 |
+
"lstrip": false,
|
| 447 |
+
"normalized": false,
|
| 448 |
+
"rstrip": false,
|
| 449 |
+
"single_word": false
|
| 450 |
+
},
|
| 451 |
+
{
|
| 452 |
+
"content": "<|timbre_16|>",
|
| 453 |
+
"lstrip": false,
|
| 454 |
+
"normalized": false,
|
| 455 |
+
"rstrip": false,
|
| 456 |
+
"single_word": false
|
| 457 |
+
},
|
| 458 |
+
{
|
| 459 |
+
"content": "<|timbre_17|>",
|
| 460 |
+
"lstrip": false,
|
| 461 |
+
"normalized": false,
|
| 462 |
+
"rstrip": false,
|
| 463 |
+
"single_word": false
|
| 464 |
+
},
|
| 465 |
+
{
|
| 466 |
+
"content": "<|timbre_18|>",
|
| 467 |
+
"lstrip": false,
|
| 468 |
+
"normalized": false,
|
| 469 |
+
"rstrip": false,
|
| 470 |
+
"single_word": false
|
| 471 |
+
},
|
| 472 |
+
{
|
| 473 |
+
"content": "<|timbre_19|>",
|
| 474 |
+
"lstrip": false,
|
| 475 |
+
"normalized": false,
|
| 476 |
+
"rstrip": false,
|
| 477 |
+
"single_word": false
|
| 478 |
+
},
|
| 479 |
+
{
|
| 480 |
+
"content": "<|timbre_20|>",
|
| 481 |
+
"lstrip": false,
|
| 482 |
+
"normalized": false,
|
| 483 |
+
"rstrip": false,
|
| 484 |
+
"single_word": false
|
| 485 |
+
},
|
| 486 |
+
{
|
| 487 |
+
"content": "<|timbre_21|>",
|
| 488 |
+
"lstrip": false,
|
| 489 |
+
"normalized": false,
|
| 490 |
+
"rstrip": false,
|
| 491 |
+
"single_word": false
|
| 492 |
+
},
|
| 493 |
+
{
|
| 494 |
+
"content": "<|timbre_22|>",
|
| 495 |
+
"lstrip": false,
|
| 496 |
+
"normalized": false,
|
| 497 |
+
"rstrip": false,
|
| 498 |
+
"single_word": false
|
| 499 |
+
},
|
| 500 |
+
{
|
| 501 |
+
"content": "<|timbre_23|>",
|
| 502 |
+
"lstrip": false,
|
| 503 |
+
"normalized": false,
|
| 504 |
+
"rstrip": false,
|
| 505 |
+
"single_word": false
|
| 506 |
+
},
|
| 507 |
+
{
|
| 508 |
+
"content": "<|timbre_24|>",
|
| 509 |
+
"lstrip": false,
|
| 510 |
+
"normalized": false,
|
| 511 |
+
"rstrip": false,
|
| 512 |
+
"single_word": false
|
| 513 |
+
},
|
| 514 |
+
{
|
| 515 |
+
"content": "<|timbre_25|>",
|
| 516 |
+
"lstrip": false,
|
| 517 |
+
"normalized": false,
|
| 518 |
+
"rstrip": false,
|
| 519 |
+
"single_word": false
|
| 520 |
+
},
|
| 521 |
+
{
|
| 522 |
+
"content": "<|timbre_26|>",
|
| 523 |
+
"lstrip": false,
|
| 524 |
+
"normalized": false,
|
| 525 |
+
"rstrip": false,
|
| 526 |
+
"single_word": false
|
| 527 |
+
},
|
| 528 |
+
{
|
| 529 |
+
"content": "<|timbre_27|>",
|
| 530 |
+
"lstrip": false,
|
| 531 |
+
"normalized": false,
|
| 532 |
+
"rstrip": false,
|
| 533 |
+
"single_word": false
|
| 534 |
+
},
|
| 535 |
+
{
|
| 536 |
+
"content": "<|timbre_28|>",
|
| 537 |
+
"lstrip": false,
|
| 538 |
+
"normalized": false,
|
| 539 |
+
"rstrip": false,
|
| 540 |
+
"single_word": false
|
| 541 |
+
},
|
| 542 |
+
{
|
| 543 |
+
"content": "<|timbre_29|>",
|
| 544 |
+
"lstrip": false,
|
| 545 |
+
"normalized": false,
|
| 546 |
+
"rstrip": false,
|
| 547 |
+
"single_word": false
|
| 548 |
+
},
|
| 549 |
+
{
|
| 550 |
+
"content": "<|timbre_30|>",
|
| 551 |
+
"lstrip": false,
|
| 552 |
+
"normalized": false,
|
| 553 |
+
"rstrip": false,
|
| 554 |
+
"single_word": false
|
| 555 |
+
},
|
| 556 |
+
{
|
| 557 |
+
"content": "<|timbre_31|>",
|
| 558 |
+
"lstrip": false,
|
| 559 |
+
"normalized": false,
|
| 560 |
+
"rstrip": false,
|
| 561 |
+
"single_word": false
|
| 562 |
+
}
|
| 563 |
+
],
|
| 564 |
+
"bos_token": "<|im_start|>",
|
| 565 |
+
"eos_token": {
|
| 566 |
+
"content": "<|im_end|>",
|
| 567 |
+
"lstrip": false,
|
| 568 |
+
"normalized": false,
|
| 569 |
+
"rstrip": false,
|
| 570 |
+
"single_word": false
|
| 571 |
+
},
|
| 572 |
+
"pad_token": {
|
| 573 |
+
"content": "<|endoftext|>",
|
| 574 |
+
"lstrip": false,
|
| 575 |
+
"normalized": false,
|
| 576 |
+
"rstrip": false,
|
| 577 |
+
"single_word": false
|
| 578 |
+
},
|
| 579 |
+
"unk_token": "<unk>"
|
| 580 |
+
}
|
tokenization_minicpmo_fast.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
# Copyright 2026 The OpenBMB Team. All rights reserved.
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
|
| 17 |
+
from typing import List
|
| 18 |
+
|
| 19 |
+
from transformers import Qwen2TokenizerFast
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class MiniCPMOTokenizerFast(Qwen2TokenizerFast):
    """Fast tokenizer for MiniCPM-o.

    Extends ``Qwen2TokenizerFast`` with the multimodal marker tokens used by
    MiniCPM-o (image / reference-box / slice markers, audio and TTS markers)
    plus convenience ``*_id`` properties that resolve those markers to ids.
    """

    def __init__(self, **kwargs):
        # Optional list of token ids the model must never emit; popped before
        # forwarding the remaining kwargs to the base tokenizer.
        self._bad_token_ids = kwargs.pop("bad_token_ids", [])

        super().__init__(**kwargs)

        # --- image-related marker tokens ---
        self.im_start = "<image>"
        self.im_end = "</image>"
        self.ref_start = "<ref>"
        self.ref_end = "</ref>"
        self.box_start = "<box>"
        self.box_end = "</box>"
        self.quad_start = "<quad>"
        self.quad_end = "</quad>"
        self.slice_start = "<slice>"
        self.slice_end = "</slice>"
        self.im_id_start = "<image_id>"
        self.im_id_end = "</image_id>"

        # --- audio / speech-synthesis marker tokens ---
        self.audio_start = "<|audio_start|>"
        self.audio_end = "<|audio_end|>"
        self.spk_start = "<|spk_bos|>"
        self.spk_end = "<|spk_eos|>"
        self.tts_start = "<|tts_bos|>"
        self.tts_end = "<|tts_eos|>"

    def _token_id(self, token: str) -> int:
        # Shared lookup used by the ``*_id`` properties below.
        return self.convert_tokens_to_ids(token)

    @property
    def eos_id(self):
        """Alias for ``eos_token_id``."""
        return self.eos_token_id

    @property
    def bos_id(self):
        """Alias for ``bos_token_id``."""
        return self.bos_token_id

    @property
    def unk_id(self):
        """Alias for ``unk_token_id``."""
        return self.unk_token_id

    @property
    def im_start_id(self):
        """Token id of the image-start marker."""
        return self._token_id(self.im_start)

    @property
    def im_end_id(self):
        """Token id of the image-end marker."""
        return self._token_id(self.im_end)

    @property
    def slice_start_id(self):
        """Token id of the slice-start marker."""
        return self._token_id(self.slice_start)

    @property
    def slice_end_id(self):
        """Token id of the slice-end marker."""
        return self._token_id(self.slice_end)

    @property
    def im_id_start_id(self):
        """Token id of the image-id-start marker."""
        return self._token_id(self.im_id_start)

    @property
    def im_id_end_id(self):
        """Token id of the image-id-end marker."""
        return self._token_id(self.im_id_end)

    @property
    def audio_start_id(self):
        """Token id of the audio-start marker."""
        return self._token_id(self.audio_start)

    @property
    def audio_end_id(self):
        """Token id of the audio-end marker."""
        return self._token_id(self.audio_end)

    @property
    def spk_start_id(self):
        """Token id of the speaker-begin marker."""
        return self._token_id(self.spk_start)

    @property
    def spk_end_id(self):
        """Token id of the speaker-end marker."""
        return self._token_id(self.spk_end)

    @property
    def tts_start_id(self):
        """Token id of the TTS-begin marker."""
        return self._token_id(self.tts_start)

    @property
    def tts_end_id(self):
        """Token id of the TTS-end marker."""
        return self._token_id(self.tts_end)

    @staticmethod
    def escape(text: str) -> str:
        """No-op escape hook kept for API compatibility."""
        return text

    @staticmethod
    def unescape(text: str) -> str:
        """No-op unescape hook kept for API compatibility."""
        return text

    @property
    def bad_token_ids(self) -> List[int]:
        """Token ids that should be suppressed during generation."""
        return self._bad_token_ids
|
tokenizer.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:66664f87759d9e829e7ef0ded96976727374dcd7ca6f3ae9bfe89bbda541e5af
|
| 3 |
+
size 11437708
|
tokenizer_config.json
ADDED
|
@@ -0,0 +1,6989 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"add_prefix_space": false,
|
| 3 |
+
"auto_map": {
|
| 4 |
+
"AutoTokenizer": [
|
| 5 |
+
"tokenization_minicpmo_fast.MiniCPMOTokenizerFast",
|
| 6 |
+
null
|
| 7 |
+
]
|
| 8 |
+
},
|
| 9 |
+
"backend": "tokenizers",
|
| 10 |
+
"bad_token_ids": [
|
| 11 |
+
7,
|
| 12 |
+
8,
|
| 13 |
+
94,
|
| 14 |
+
95,
|
| 15 |
+
96,
|
| 16 |
+
97,
|
| 17 |
+
98,
|
| 18 |
+
99,
|
| 19 |
+
100,
|
| 20 |
+
101,
|
| 21 |
+
102,
|
| 22 |
+
103,
|
| 23 |
+
104,
|
| 24 |
+
105,
|
| 25 |
+
106,
|
| 26 |
+
107,
|
| 27 |
+
108,
|
| 28 |
+
109,
|
| 29 |
+
110,
|
| 30 |
+
111,
|
| 31 |
+
112,
|
| 32 |
+
113,
|
| 33 |
+
114,
|
| 34 |
+
115,
|
| 35 |
+
116,
|
| 36 |
+
117,
|
| 37 |
+
118,
|
| 38 |
+
119,
|
| 39 |
+
120,
|
| 40 |
+
121,
|
| 41 |
+
122,
|
| 42 |
+
123,
|
| 43 |
+
124,
|
| 44 |
+
125,
|
| 45 |
+
126,
|
| 46 |
+
127,
|
| 47 |
+
128,
|
| 48 |
+
129,
|
| 49 |
+
130,
|
| 50 |
+
131,
|
| 51 |
+
132,
|
| 52 |
+
133,
|
| 53 |
+
134,
|
| 54 |
+
135,
|
| 55 |
+
136,
|
| 56 |
+
137,
|
| 57 |
+
138,
|
| 58 |
+
139,
|
| 59 |
+
140,
|
| 60 |
+
141,
|
| 61 |
+
142,
|
| 62 |
+
143,
|
| 63 |
+
144,
|
| 64 |
+
145,
|
| 65 |
+
146,
|
| 66 |
+
147,
|
| 67 |
+
148,
|
| 68 |
+
149,
|
| 69 |
+
150,
|
| 70 |
+
151,
|
| 71 |
+
152,
|
| 72 |
+
153,
|
| 73 |
+
154,
|
| 74 |
+
155,
|
| 75 |
+
156,
|
| 76 |
+
157,
|
| 77 |
+
158,
|
| 78 |
+
159,
|
| 79 |
+
160,
|
| 80 |
+
161,
|
| 81 |
+
162,
|
| 82 |
+
163,
|
| 83 |
+
164,
|
| 84 |
+
165,
|
| 85 |
+
166,
|
| 86 |
+
167,
|
| 87 |
+
168,
|
| 88 |
+
169,
|
| 89 |
+
170,
|
| 90 |
+
171,
|
| 91 |
+
172,
|
| 92 |
+
173,
|
| 93 |
+
174,
|
| 94 |
+
175,
|
| 95 |
+
176,
|
| 96 |
+
177,
|
| 97 |
+
178,
|
| 98 |
+
179,
|
| 99 |
+
180,
|
| 100 |
+
181,
|
| 101 |
+
182,
|
| 102 |
+
183,
|
| 103 |
+
184,
|
| 104 |
+
185,
|
| 105 |
+
186,
|
| 106 |
+
187,
|
| 107 |
+
198,
|
| 108 |
+
201,
|
| 109 |
+
222,
|
| 110 |
+
223,
|
| 111 |
+
224,
|
| 112 |
+
225,
|
| 113 |
+
226,
|
| 114 |
+
227,
|
| 115 |
+
228,
|
| 116 |
+
229,
|
| 117 |
+
230,
|
| 118 |
+
231,
|
| 119 |
+
232,
|
| 120 |
+
233,
|
| 121 |
+
234,
|
| 122 |
+
235,
|
| 123 |
+
236,
|
| 124 |
+
237,
|
| 125 |
+
238,
|
| 126 |
+
239,
|
| 127 |
+
240,
|
| 128 |
+
241,
|
| 129 |
+
242,
|
| 130 |
+
243,
|
| 131 |
+
244,
|
| 132 |
+
245,
|
| 133 |
+
246,
|
| 134 |
+
247,
|
| 135 |
+
248,
|
| 136 |
+
249,
|
| 137 |
+
250,
|
| 138 |
+
251,
|
| 139 |
+
252,
|
| 140 |
+
253,
|
| 141 |
+
254,
|
| 142 |
+
255,
|
| 143 |
+
271,
|
| 144 |
+
280,
|
| 145 |
+
317,
|
| 146 |
+
319,
|
| 147 |
+
320,
|
| 148 |
+
340,
|
| 149 |
+
341,
|
| 150 |
+
345,
|
| 151 |
+
368,
|
| 152 |
+
378,
|
| 153 |
+
382,
|
| 154 |
+
397,
|
| 155 |
+
401,
|
| 156 |
+
445,
|
| 157 |
+
456,
|
| 158 |
+
463,
|
| 159 |
+
492,
|
| 160 |
+
510,
|
| 161 |
+
515,
|
| 162 |
+
532,
|
| 163 |
+
543,
|
| 164 |
+
555,
|
| 165 |
+
568,
|
| 166 |
+
593,
|
| 167 |
+
624,
|
| 168 |
+
626,
|
| 169 |
+
630,
|
| 170 |
+
636,
|
| 171 |
+
692,
|
| 172 |
+
698,
|
| 173 |
+
699,
|
| 174 |
+
701,
|
| 175 |
+
715,
|
| 176 |
+
735,
|
| 177 |
+
736,
|
| 178 |
+
741,
|
| 179 |
+
751,
|
| 180 |
+
756,
|
| 181 |
+
797,
|
| 182 |
+
863,
|
| 183 |
+
871,
|
| 184 |
+
873,
|
| 185 |
+
876,
|
| 186 |
+
881,
|
| 187 |
+
899,
|
| 188 |
+
921,
|
| 189 |
+
935,
|
| 190 |
+
955,
|
| 191 |
+
972,
|
| 192 |
+
982,
|
| 193 |
+
1005,
|
| 194 |
+
1006,
|
| 195 |
+
1010,
|
| 196 |
+
1019,
|
| 197 |
+
1022,
|
| 198 |
+
1066,
|
| 199 |
+
1106,
|
| 200 |
+
1138,
|
| 201 |
+
1141,
|
| 202 |
+
1153,
|
| 203 |
+
1155,
|
| 204 |
+
1157,
|
| 205 |
+
1171,
|
| 206 |
+
1188,
|
| 207 |
+
1193,
|
| 208 |
+
1213,
|
| 209 |
+
1215,
|
| 210 |
+
1218,
|
| 211 |
+
1248,
|
| 212 |
+
1255,
|
| 213 |
+
1259,
|
| 214 |
+
1264,
|
| 215 |
+
1277,
|
| 216 |
+
1278,
|
| 217 |
+
1295,
|
| 218 |
+
1305,
|
| 219 |
+
1326,
|
| 220 |
+
1337,
|
| 221 |
+
1339,
|
| 222 |
+
1394,
|
| 223 |
+
1406,
|
| 224 |
+
1423,
|
| 225 |
+
1428,
|
| 226 |
+
1433,
|
| 227 |
+
1439,
|
| 228 |
+
1445,
|
| 229 |
+
1447,
|
| 230 |
+
1476,
|
| 231 |
+
1500,
|
| 232 |
+
1505,
|
| 233 |
+
1507,
|
| 234 |
+
1540,
|
| 235 |
+
1548,
|
| 236 |
+
1554,
|
| 237 |
+
1572,
|
| 238 |
+
1583,
|
| 239 |
+
1625,
|
| 240 |
+
1647,
|
| 241 |
+
1648,
|
| 242 |
+
1666,
|
| 243 |
+
1683,
|
| 244 |
+
1686,
|
| 245 |
+
1711,
|
| 246 |
+
1719,
|
| 247 |
+
1727,
|
| 248 |
+
1747,
|
| 249 |
+
1755,
|
| 250 |
+
1771,
|
| 251 |
+
1781,
|
| 252 |
+
1789,
|
| 253 |
+
1823,
|
| 254 |
+
1827,
|
| 255 |
+
1837,
|
| 256 |
+
1871,
|
| 257 |
+
1883,
|
| 258 |
+
1926,
|
| 259 |
+
1939,
|
| 260 |
+
1956,
|
| 261 |
+
2012,
|
| 262 |
+
2026,
|
| 263 |
+
2075,
|
| 264 |
+
2099,
|
| 265 |
+
2129,
|
| 266 |
+
2139,
|
| 267 |
+
2140,
|
| 268 |
+
2146,
|
| 269 |
+
2194,
|
| 270 |
+
2217,
|
| 271 |
+
2219,
|
| 272 |
+
2226,
|
| 273 |
+
2235,
|
| 274 |
+
2242,
|
| 275 |
+
2262,
|
| 276 |
+
2278,
|
| 277 |
+
2289,
|
| 278 |
+
2303,
|
| 279 |
+
2306,
|
| 280 |
+
2315,
|
| 281 |
+
2333,
|
| 282 |
+
2357,
|
| 283 |
+
2376,
|
| 284 |
+
2394,
|
| 285 |
+
2398,
|
| 286 |
+
2399,
|
| 287 |
+
2440,
|
| 288 |
+
2457,
|
| 289 |
+
2467,
|
| 290 |
+
2490,
|
| 291 |
+
2529,
|
| 292 |
+
2533,
|
| 293 |
+
2546,
|
| 294 |
+
2553,
|
| 295 |
+
2558,
|
| 296 |
+
2561,
|
| 297 |
+
2570,
|
| 298 |
+
2586,
|
| 299 |
+
2592,
|
| 300 |
+
2597,
|
| 301 |
+
2601,
|
| 302 |
+
2605,
|
| 303 |
+
2741,
|
| 304 |
+
2751,
|
| 305 |
+
2759,
|
| 306 |
+
2761,
|
| 307 |
+
2775,
|
| 308 |
+
2785,
|
| 309 |
+
2791,
|
| 310 |
+
2815,
|
| 311 |
+
2822,
|
| 312 |
+
2834,
|
| 313 |
+
2837,
|
| 314 |
+
2858,
|
| 315 |
+
2877,
|
| 316 |
+
2879,
|
| 317 |
+
2920,
|
| 318 |
+
2960,
|
| 319 |
+
3011,
|
| 320 |
+
3023,
|
| 321 |
+
3025,
|
| 322 |
+
3032,
|
| 323 |
+
3044,
|
| 324 |
+
3071,
|
| 325 |
+
3079,
|
| 326 |
+
3148,
|
| 327 |
+
3153,
|
| 328 |
+
3189,
|
| 329 |
+
3190,
|
| 330 |
+
3203,
|
| 331 |
+
3211,
|
| 332 |
+
3219,
|
| 333 |
+
3224,
|
| 334 |
+
3237,
|
| 335 |
+
3268,
|
| 336 |
+
3269,
|
| 337 |
+
3276,
|
| 338 |
+
3284,
|
| 339 |
+
3299,
|
| 340 |
+
3302,
|
| 341 |
+
3315,
|
| 342 |
+
3325,
|
| 343 |
+
3374,
|
| 344 |
+
3396,
|
| 345 |
+
3402,
|
| 346 |
+
3407,
|
| 347 |
+
3456,
|
| 348 |
+
3471,
|
| 349 |
+
3475,
|
| 350 |
+
3485,
|
| 351 |
+
3489,
|
| 352 |
+
3490,
|
| 353 |
+
3507,
|
| 354 |
+
3515,
|
| 355 |
+
3518,
|
| 356 |
+
3534,
|
| 357 |
+
3554,
|
| 358 |
+
3568,
|
| 359 |
+
3576,
|
| 360 |
+
3584,
|
| 361 |
+
3593,
|
| 362 |
+
3609,
|
| 363 |
+
3610,
|
| 364 |
+
3622,
|
| 365 |
+
3623,
|
| 366 |
+
3634,
|
| 367 |
+
3646,
|
| 368 |
+
3648,
|
| 369 |
+
3659,
|
| 370 |
+
3679,
|
| 371 |
+
3715,
|
| 372 |
+
3724,
|
| 373 |
+
3733,
|
| 374 |
+
3747,
|
| 375 |
+
3755,
|
| 376 |
+
3759,
|
| 377 |
+
3764,
|
| 378 |
+
3789,
|
| 379 |
+
3804,
|
| 380 |
+
3860,
|
| 381 |
+
3870,
|
| 382 |
+
3876,
|
| 383 |
+
3879,
|
| 384 |
+
3913,
|
| 385 |
+
3932,
|
| 386 |
+
3964,
|
| 387 |
+
3975,
|
| 388 |
+
3989,
|
| 389 |
+
3997,
|
| 390 |
+
4010,
|
| 391 |
+
4015,
|
| 392 |
+
4035,
|
| 393 |
+
4071,
|
| 394 |
+
4080,
|
| 395 |
+
4140,
|
| 396 |
+
4167,
|
| 397 |
+
4175,
|
| 398 |
+
4192,
|
| 399 |
+
4199,
|
| 400 |
+
4257,
|
| 401 |
+
4277,
|
| 402 |
+
4283,
|
| 403 |
+
4291,
|
| 404 |
+
4292,
|
| 405 |
+
4319,
|
| 406 |
+
4333,
|
| 407 |
+
4346,
|
| 408 |
+
4380,
|
| 409 |
+
4386,
|
| 410 |
+
4390,
|
| 411 |
+
4397,
|
| 412 |
+
4424,
|
| 413 |
+
4430,
|
| 414 |
+
4432,
|
| 415 |
+
4455,
|
| 416 |
+
4456,
|
| 417 |
+
4461,
|
| 418 |
+
4474,
|
| 419 |
+
4539,
|
| 420 |
+
4546,
|
| 421 |
+
4555,
|
| 422 |
+
4557,
|
| 423 |
+
4567,
|
| 424 |
+
4579,
|
| 425 |
+
4590,
|
| 426 |
+
4609,
|
| 427 |
+
4610,
|
| 428 |
+
4639,
|
| 429 |
+
4660,
|
| 430 |
+
4667,
|
| 431 |
+
4710,
|
| 432 |
+
4723,
|
| 433 |
+
4806,
|
| 434 |
+
4811,
|
| 435 |
+
4821,
|
| 436 |
+
4857,
|
| 437 |
+
4860,
|
| 438 |
+
4866,
|
| 439 |
+
4875,
|
| 440 |
+
4877,
|
| 441 |
+
4891,
|
| 442 |
+
4894,
|
| 443 |
+
4919,
|
| 444 |
+
4921,
|
| 445 |
+
4930,
|
| 446 |
+
4945,
|
| 447 |
+
4956,
|
| 448 |
+
4957,
|
| 449 |
+
4967,
|
| 450 |
+
4973,
|
| 451 |
+
5031,
|
| 452 |
+
5038,
|
| 453 |
+
5065,
|
| 454 |
+
5078,
|
| 455 |
+
5125,
|
| 456 |
+
5130,
|
| 457 |
+
5133,
|
| 458 |
+
5134,
|
| 459 |
+
5140,
|
| 460 |
+
5194,
|
| 461 |
+
5210,
|
| 462 |
+
5231,
|
| 463 |
+
5267,
|
| 464 |
+
5304,
|
| 465 |
+
5349,
|
| 466 |
+
5378,
|
| 467 |
+
5384,
|
| 468 |
+
5393,
|
| 469 |
+
5410,
|
| 470 |
+
5422,
|
| 471 |
+
5432,
|
| 472 |
+
5433,
|
| 473 |
+
5434,
|
| 474 |
+
5438,
|
| 475 |
+
5453,
|
| 476 |
+
5455,
|
| 477 |
+
5463,
|
| 478 |
+
5468,
|
| 479 |
+
5484,
|
| 480 |
+
5502,
|
| 481 |
+
5513,
|
| 482 |
+
5550,
|
| 483 |
+
5563,
|
| 484 |
+
5564,
|
| 485 |
+
5581,
|
| 486 |
+
5613,
|
| 487 |
+
5618,
|
| 488 |
+
5620,
|
| 489 |
+
5661,
|
| 490 |
+
5680,
|
| 491 |
+
5684,
|
| 492 |
+
5691,
|
| 493 |
+
5731,
|
| 494 |
+
5758,
|
| 495 |
+
5783,
|
| 496 |
+
5784,
|
| 497 |
+
5801,
|
| 498 |
+
5808,
|
| 499 |
+
5809,
|
| 500 |
+
5860,
|
| 501 |
+
5872,
|
| 502 |
+
5894,
|
| 503 |
+
5895,
|
| 504 |
+
5900,
|
| 505 |
+
5902,
|
| 506 |
+
5905,
|
| 507 |
+
5907,
|
| 508 |
+
5928,
|
| 509 |
+
5929,
|
| 510 |
+
5945,
|
| 511 |
+
5953,
|
| 512 |
+
5957,
|
| 513 |
+
5959,
|
| 514 |
+
5960,
|
| 515 |
+
5966,
|
| 516 |
+
5969,
|
| 517 |
+
5993,
|
| 518 |
+
6011,
|
| 519 |
+
6060,
|
| 520 |
+
6098,
|
| 521 |
+
6107,
|
| 522 |
+
6110,
|
| 523 |
+
6114,
|
| 524 |
+
6138,
|
| 525 |
+
6142,
|
| 526 |
+
6163,
|
| 527 |
+
6201,
|
| 528 |
+
6204,
|
| 529 |
+
6211,
|
| 530 |
+
6228,
|
| 531 |
+
6235,
|
| 532 |
+
6253,
|
| 533 |
+
6260,
|
| 534 |
+
6274,
|
| 535 |
+
6320,
|
| 536 |
+
6332,
|
| 537 |
+
6336,
|
| 538 |
+
6360,
|
| 539 |
+
6390,
|
| 540 |
+
6413,
|
| 541 |
+
6416,
|
| 542 |
+
6421,
|
| 543 |
+
6425,
|
| 544 |
+
6475,
|
| 545 |
+
6518,
|
| 546 |
+
6522,
|
| 547 |
+
6557,
|
| 548 |
+
6561,
|
| 549 |
+
6564,
|
| 550 |
+
6567,
|
| 551 |
+
6571,
|
| 552 |
+
6599,
|
| 553 |
+
6606,
|
| 554 |
+
6620,
|
| 555 |
+
6636,
|
| 556 |
+
6684,
|
| 557 |
+
6699,
|
| 558 |
+
6734,
|
| 559 |
+
6762,
|
| 560 |
+
6779,
|
| 561 |
+
6796,
|
| 562 |
+
6856,
|
| 563 |
+
6870,
|
| 564 |
+
6881,
|
| 565 |
+
6882,
|
| 566 |
+
6901,
|
| 567 |
+
6903,
|
| 568 |
+
6913,
|
| 569 |
+
6926,
|
| 570 |
+
6933,
|
| 571 |
+
6958,
|
| 572 |
+
6965,
|
| 573 |
+
6987,
|
| 574 |
+
7021,
|
| 575 |
+
7026,
|
| 576 |
+
7045,
|
| 577 |
+
7088,
|
| 578 |
+
7115,
|
| 579 |
+
7129,
|
| 580 |
+
7144,
|
| 581 |
+
7158,
|
| 582 |
+
7165,
|
| 583 |
+
7195,
|
| 584 |
+
7213,
|
| 585 |
+
7229,
|
| 586 |
+
7235,
|
| 587 |
+
7252,
|
| 588 |
+
7257,
|
| 589 |
+
7287,
|
| 590 |
+
7307,
|
| 591 |
+
7311,
|
| 592 |
+
7317,
|
| 593 |
+
7318,
|
| 594 |
+
7320,
|
| 595 |
+
7331,
|
| 596 |
+
7333,
|
| 597 |
+
7356,
|
| 598 |
+
7360,
|
| 599 |
+
7367,
|
| 600 |
+
7368,
|
| 601 |
+
7392,
|
| 602 |
+
7457,
|
| 603 |
+
7492,
|
| 604 |
+
7502,
|
| 605 |
+
7511,
|
| 606 |
+
7520,
|
| 607 |
+
7534,
|
| 608 |
+
7536,
|
| 609 |
+
7537,
|
| 610 |
+
7547,
|
| 611 |
+
7552,
|
| 612 |
+
7569,
|
| 613 |
+
7588,
|
| 614 |
+
7613,
|
| 615 |
+
7620,
|
| 616 |
+
7624,
|
| 617 |
+
7631,
|
| 618 |
+
7635,
|
| 619 |
+
7644,
|
| 620 |
+
7694,
|
| 621 |
+
7705,
|
| 622 |
+
7723,
|
| 623 |
+
7731,
|
| 624 |
+
7805,
|
| 625 |
+
7832,
|
| 626 |
+
7835,
|
| 627 |
+
7851,
|
| 628 |
+
7885,
|
| 629 |
+
7908,
|
| 630 |
+
8006,
|
| 631 |
+
8013,
|
| 632 |
+
8022,
|
| 633 |
+
8032,
|
| 634 |
+
8068,
|
| 635 |
+
8079,
|
| 636 |
+
8096,
|
| 637 |
+
8097,
|
| 638 |
+
8119,
|
| 639 |
+
8132,
|
| 640 |
+
8154,
|
| 641 |
+
8158,
|
| 642 |
+
8204,
|
| 643 |
+
8244,
|
| 644 |
+
8268,
|
| 645 |
+
8283,
|
| 646 |
+
8306,
|
| 647 |
+
8324,
|
| 648 |
+
8361,
|
| 649 |
+
8389,
|
| 650 |
+
8390,
|
| 651 |
+
8401,
|
| 652 |
+
8488,
|
| 653 |
+
8508,
|
| 654 |
+
8519,
|
| 655 |
+
8525,
|
| 656 |
+
8533,
|
| 657 |
+
8563,
|
| 658 |
+
8588,
|
| 659 |
+
8616,
|
| 660 |
+
8620,
|
| 661 |
+
8623,
|
| 662 |
+
8637,
|
| 663 |
+
8680,
|
| 664 |
+
8739,
|
| 665 |
+
8754,
|
| 666 |
+
8762,
|
| 667 |
+
8797,
|
| 668 |
+
8803,
|
| 669 |
+
8805,
|
| 670 |
+
8824,
|
| 671 |
+
8839,
|
| 672 |
+
8843,
|
| 673 |
+
8902,
|
| 674 |
+
8908,
|
| 675 |
+
8917,
|
| 676 |
+
8945,
|
| 677 |
+
8956,
|
| 678 |
+
8972,
|
| 679 |
+
8997,
|
| 680 |
+
8998,
|
| 681 |
+
9035,
|
| 682 |
+
9049,
|
| 683 |
+
9098,
|
| 684 |
+
9139,
|
| 685 |
+
9147,
|
| 686 |
+
9206,
|
| 687 |
+
9231,
|
| 688 |
+
9248,
|
| 689 |
+
9260,
|
| 690 |
+
9272,
|
| 691 |
+
9284,
|
| 692 |
+
9304,
|
| 693 |
+
9321,
|
| 694 |
+
9338,
|
| 695 |
+
9340,
|
| 696 |
+
9365,
|
| 697 |
+
9401,
|
| 698 |
+
9404,
|
| 699 |
+
9469,
|
| 700 |
+
9470,
|
| 701 |
+
9531,
|
| 702 |
+
9533,
|
| 703 |
+
9541,
|
| 704 |
+
9563,
|
| 705 |
+
9568,
|
| 706 |
+
9577,
|
| 707 |
+
9604,
|
| 708 |
+
9607,
|
| 709 |
+
9623,
|
| 710 |
+
9638,
|
| 711 |
+
9639,
|
| 712 |
+
9640,
|
| 713 |
+
9697,
|
| 714 |
+
9719,
|
| 715 |
+
9722,
|
| 716 |
+
9730,
|
| 717 |
+
9739,
|
| 718 |
+
9743,
|
| 719 |
+
9747,
|
| 720 |
+
9758,
|
| 721 |
+
9764,
|
| 722 |
+
9791,
|
| 723 |
+
9828,
|
| 724 |
+
9843,
|
| 725 |
+
9849,
|
| 726 |
+
9879,
|
| 727 |
+
9900,
|
| 728 |
+
9909,
|
| 729 |
+
9912,
|
| 730 |
+
9921,
|
| 731 |
+
9940,
|
| 732 |
+
9957,
|
| 733 |
+
9973,
|
| 734 |
+
9992,
|
| 735 |
+
10040,
|
| 736 |
+
10075,
|
| 737 |
+
10079,
|
| 738 |
+
10081,
|
| 739 |
+
10086,
|
| 740 |
+
10108,
|
| 741 |
+
10116,
|
| 742 |
+
10131,
|
| 743 |
+
10133,
|
| 744 |
+
10149,
|
| 745 |
+
10170,
|
| 746 |
+
10171,
|
| 747 |
+
10203,
|
| 748 |
+
10236,
|
| 749 |
+
10239,
|
| 750 |
+
10278,
|
| 751 |
+
10293,
|
| 752 |
+
10297,
|
| 753 |
+
10340,
|
| 754 |
+
10343,
|
| 755 |
+
10346,
|
| 756 |
+
10368,
|
| 757 |
+
10370,
|
| 758 |
+
10384,
|
| 759 |
+
10389,
|
| 760 |
+
10407,
|
| 761 |
+
10417,
|
| 762 |
+
10419,
|
| 763 |
+
10444,
|
| 764 |
+
10448,
|
| 765 |
+
10452,
|
| 766 |
+
10478,
|
| 767 |
+
10483,
|
| 768 |
+
10556,
|
| 769 |
+
10569,
|
| 770 |
+
10583,
|
| 771 |
+
10595,
|
| 772 |
+
10624,
|
| 773 |
+
10639,
|
| 774 |
+
10662,
|
| 775 |
+
10663,
|
| 776 |
+
10683,
|
| 777 |
+
10697,
|
| 778 |
+
10699,
|
| 779 |
+
10722,
|
| 780 |
+
10727,
|
| 781 |
+
10731,
|
| 782 |
+
10741,
|
| 783 |
+
10764,
|
| 784 |
+
10768,
|
| 785 |
+
10874,
|
| 786 |
+
10896,
|
| 787 |
+
10907,
|
| 788 |
+
10937,
|
| 789 |
+
10939,
|
| 790 |
+
10941,
|
| 791 |
+
10947,
|
| 792 |
+
10952,
|
| 793 |
+
10959,
|
| 794 |
+
10961,
|
| 795 |
+
10977,
|
| 796 |
+
10996,
|
| 797 |
+
11014,
|
| 798 |
+
11022,
|
| 799 |
+
11024,
|
| 800 |
+
11043,
|
| 801 |
+
11057,
|
| 802 |
+
11120,
|
| 803 |
+
11125,
|
| 804 |
+
11132,
|
| 805 |
+
11142,
|
| 806 |
+
11146,
|
| 807 |
+
11162,
|
| 808 |
+
11166,
|
| 809 |
+
11173,
|
| 810 |
+
11195,
|
| 811 |
+
11248,
|
| 812 |
+
11322,
|
| 813 |
+
11328,
|
| 814 |
+
11334,
|
| 815 |
+
11336,
|
| 816 |
+
11378,
|
| 817 |
+
11384,
|
| 818 |
+
11436,
|
| 819 |
+
11462,
|
| 820 |
+
11481,
|
| 821 |
+
11484,
|
| 822 |
+
11488,
|
| 823 |
+
11512,
|
| 824 |
+
11520,
|
| 825 |
+
11534,
|
| 826 |
+
11557,
|
| 827 |
+
11559,
|
| 828 |
+
11583,
|
| 829 |
+
11600,
|
| 830 |
+
11614,
|
| 831 |
+
11620,
|
| 832 |
+
11621,
|
| 833 |
+
11648,
|
| 834 |
+
11688,
|
| 835 |
+
11700,
|
| 836 |
+
11730,
|
| 837 |
+
11743,
|
| 838 |
+
11776,
|
| 839 |
+
11781,
|
| 840 |
+
11801,
|
| 841 |
+
11805,
|
| 842 |
+
11843,
|
| 843 |
+
11858,
|
| 844 |
+
11884,
|
| 845 |
+
11895,
|
| 846 |
+
11970,
|
| 847 |
+
11973,
|
| 848 |
+
11974,
|
| 849 |
+
11985,
|
| 850 |
+
11993,
|
| 851 |
+
11995,
|
| 852 |
+
12006,
|
| 853 |
+
12022,
|
| 854 |
+
12024,
|
| 855 |
+
12047,
|
| 856 |
+
12078,
|
| 857 |
+
12084,
|
| 858 |
+
12113,
|
| 859 |
+
12130,
|
| 860 |
+
12143,
|
| 861 |
+
12145,
|
| 862 |
+
12148,
|
| 863 |
+
12186,
|
| 864 |
+
12236,
|
| 865 |
+
12259,
|
| 866 |
+
12266,
|
| 867 |
+
12274,
|
| 868 |
+
12279,
|
| 869 |
+
12286,
|
| 870 |
+
12298,
|
| 871 |
+
12306,
|
| 872 |
+
12310,
|
| 873 |
+
12317,
|
| 874 |
+
12367,
|
| 875 |
+
12381,
|
| 876 |
+
12397,
|
| 877 |
+
12410,
|
| 878 |
+
12431,
|
| 879 |
+
12437,
|
| 880 |
+
12470,
|
| 881 |
+
12489,
|
| 882 |
+
12491,
|
| 883 |
+
12504,
|
| 884 |
+
12512,
|
| 885 |
+
12520,
|
| 886 |
+
12526,
|
| 887 |
+
12530,
|
| 888 |
+
12531,
|
| 889 |
+
12533,
|
| 890 |
+
12573,
|
| 891 |
+
12584,
|
| 892 |
+
12585,
|
| 893 |
+
12587,
|
| 894 |
+
12602,
|
| 895 |
+
12619,
|
| 896 |
+
12628,
|
| 897 |
+
12663,
|
| 898 |
+
12668,
|
| 899 |
+
12706,
|
| 900 |
+
12760,
|
| 901 |
+
12787,
|
| 902 |
+
12797,
|
| 903 |
+
12798,
|
| 904 |
+
12805,
|
| 905 |
+
12806,
|
| 906 |
+
12832,
|
| 907 |
+
12860,
|
| 908 |
+
12861,
|
| 909 |
+
12892,
|
| 910 |
+
12918,
|
| 911 |
+
12924,
|
| 912 |
+
12950,
|
| 913 |
+
12951,
|
| 914 |
+
12954,
|
| 915 |
+
13003,
|
| 916 |
+
13005,
|
| 917 |
+
13053,
|
| 918 |
+
13056,
|
| 919 |
+
13058,
|
| 920 |
+
13087,
|
| 921 |
+
13092,
|
| 922 |
+
13106,
|
| 923 |
+
13116,
|
| 924 |
+
13135,
|
| 925 |
+
13148,
|
| 926 |
+
13165,
|
| 927 |
+
13174,
|
| 928 |
+
13201,
|
| 929 |
+
13204,
|
| 930 |
+
13211,
|
| 931 |
+
13246,
|
| 932 |
+
13268,
|
| 933 |
+
13274,
|
| 934 |
+
13285,
|
| 935 |
+
13287,
|
| 936 |
+
13321,
|
| 937 |
+
13338,
|
| 938 |
+
13344,
|
| 939 |
+
13345,
|
| 940 |
+
13362,
|
| 941 |
+
13426,
|
| 942 |
+
13441,
|
| 943 |
+
13453,
|
| 944 |
+
13456,
|
| 945 |
+
13465,
|
| 946 |
+
13493,
|
| 947 |
+
13512,
|
| 948 |
+
13519,
|
| 949 |
+
13531,
|
| 950 |
+
13555,
|
| 951 |
+
13558,
|
| 952 |
+
13567,
|
| 953 |
+
13645,
|
| 954 |
+
13698,
|
| 955 |
+
13744,
|
| 956 |
+
13749,
|
| 957 |
+
13836,
|
| 958 |
+
13838,
|
| 959 |
+
13846,
|
| 960 |
+
13861,
|
| 961 |
+
13869,
|
| 962 |
+
13887,
|
| 963 |
+
13913,
|
| 964 |
+
13921,
|
| 965 |
+
13941,
|
| 966 |
+
13949,
|
| 967 |
+
13976,
|
| 968 |
+
13982,
|
| 969 |
+
13988,
|
| 970 |
+
14001,
|
| 971 |
+
14013,
|
| 972 |
+
14015,
|
| 973 |
+
14031,
|
| 974 |
+
14088,
|
| 975 |
+
14090,
|
| 976 |
+
14094,
|
| 977 |
+
14108,
|
| 978 |
+
14146,
|
| 979 |
+
14160,
|
| 980 |
+
14161,
|
| 981 |
+
14167,
|
| 982 |
+
14171,
|
| 983 |
+
14208,
|
| 984 |
+
14217,
|
| 985 |
+
14223,
|
| 986 |
+
14273,
|
| 987 |
+
14280,
|
| 988 |
+
14288,
|
| 989 |
+
14326,
|
| 990 |
+
14328,
|
| 991 |
+
14333,
|
| 992 |
+
14362,
|
| 993 |
+
14368,
|
| 994 |
+
14382,
|
| 995 |
+
14414,
|
| 996 |
+
14426,
|
| 997 |
+
14437,
|
| 998 |
+
14442,
|
| 999 |
+
14451,
|
| 1000 |
+
14467,
|
| 1001 |
+
14498,
|
| 1002 |
+
14512,
|
| 1003 |
+
14520,
|
| 1004 |
+
14546,
|
| 1005 |
+
14559,
|
| 1006 |
+
14573,
|
| 1007 |
+
14599,
|
| 1008 |
+
14618,
|
| 1009 |
+
14621,
|
| 1010 |
+
14654,
|
| 1011 |
+
14669,
|
| 1012 |
+
14675,
|
| 1013 |
+
14676,
|
| 1014 |
+
14687,
|
| 1015 |
+
14702,
|
| 1016 |
+
14705,
|
| 1017 |
+
14711,
|
| 1018 |
+
14731,
|
| 1019 |
+
14750,
|
| 1020 |
+
14808,
|
| 1021 |
+
14818,
|
| 1022 |
+
14922,
|
| 1023 |
+
14925,
|
| 1024 |
+
14929,
|
| 1025 |
+
14936,
|
| 1026 |
+
14942,
|
| 1027 |
+
14959,
|
| 1028 |
+
14979,
|
| 1029 |
+
14998,
|
| 1030 |
+
15005,
|
| 1031 |
+
15014,
|
| 1032 |
+
15028,
|
| 1033 |
+
15046,
|
| 1034 |
+
15047,
|
| 1035 |
+
15049,
|
| 1036 |
+
15063,
|
| 1037 |
+
15075,
|
| 1038 |
+
15076,
|
| 1039 |
+
15078,
|
| 1040 |
+
15084,
|
| 1041 |
+
15087,
|
| 1042 |
+
15096,
|
| 1043 |
+
15127,
|
| 1044 |
+
15143,
|
| 1045 |
+
15146,
|
| 1046 |
+
15157,
|
| 1047 |
+
15197,
|
| 1048 |
+
15213,
|
| 1049 |
+
15224,
|
| 1050 |
+
15225,
|
| 1051 |
+
15238,
|
| 1052 |
+
15240,
|
| 1053 |
+
15253,
|
| 1054 |
+
15290,
|
| 1055 |
+
15297,
|
| 1056 |
+
15299,
|
| 1057 |
+
15306,
|
| 1058 |
+
15329,
|
| 1059 |
+
15348,
|
| 1060 |
+
15362,
|
| 1061 |
+
15374,
|
| 1062 |
+
15393,
|
| 1063 |
+
15397,
|
| 1064 |
+
15414,
|
| 1065 |
+
15424,
|
| 1066 |
+
15434,
|
| 1067 |
+
15436,
|
| 1068 |
+
15441,
|
| 1069 |
+
15452,
|
| 1070 |
+
15483,
|
| 1071 |
+
15514,
|
| 1072 |
+
15538,
|
| 1073 |
+
15573,
|
| 1074 |
+
15600,
|
| 1075 |
+
15617,
|
| 1076 |
+
15620,
|
| 1077 |
+
15639,
|
| 1078 |
+
15674,
|
| 1079 |
+
15675,
|
| 1080 |
+
15677,
|
| 1081 |
+
15694,
|
| 1082 |
+
15716,
|
| 1083 |
+
15746,
|
| 1084 |
+
15752,
|
| 1085 |
+
15761,
|
| 1086 |
+
15766,
|
| 1087 |
+
15785,
|
| 1088 |
+
15793,
|
| 1089 |
+
15798,
|
| 1090 |
+
15799,
|
| 1091 |
+
15827,
|
| 1092 |
+
15842,
|
| 1093 |
+
15852,
|
| 1094 |
+
15861,
|
| 1095 |
+
15865,
|
| 1096 |
+
15896,
|
| 1097 |
+
15899,
|
| 1098 |
+
15902,
|
| 1099 |
+
15906,
|
| 1100 |
+
15912,
|
| 1101 |
+
15921,
|
| 1102 |
+
15927,
|
| 1103 |
+
15956,
|
| 1104 |
+
15979,
|
| 1105 |
+
15984,
|
| 1106 |
+
15989,
|
| 1107 |
+
16018,
|
| 1108 |
+
16043,
|
| 1109 |
+
16057,
|
| 1110 |
+
16060,
|
| 1111 |
+
16075,
|
| 1112 |
+
16094,
|
| 1113 |
+
16117,
|
| 1114 |
+
16159,
|
| 1115 |
+
16160,
|
| 1116 |
+
16179,
|
| 1117 |
+
16218,
|
| 1118 |
+
16235,
|
| 1119 |
+
16240,
|
| 1120 |
+
16264,
|
| 1121 |
+
16315,
|
| 1122 |
+
16324,
|
| 1123 |
+
16360,
|
| 1124 |
+
16361,
|
| 1125 |
+
16368,
|
| 1126 |
+
16418,
|
| 1127 |
+
16425,
|
| 1128 |
+
16448,
|
| 1129 |
+
16466,
|
| 1130 |
+
16499,
|
| 1131 |
+
16511,
|
| 1132 |
+
16512,
|
| 1133 |
+
16518,
|
| 1134 |
+
16630,
|
| 1135 |
+
16633,
|
| 1136 |
+
16640,
|
| 1137 |
+
16667,
|
| 1138 |
+
16707,
|
| 1139 |
+
16738,
|
| 1140 |
+
16747,
|
| 1141 |
+
16751,
|
| 1142 |
+
16760,
|
| 1143 |
+
16778,
|
| 1144 |
+
16813,
|
| 1145 |
+
16817,
|
| 1146 |
+
16825,
|
| 1147 |
+
16860,
|
| 1148 |
+
16863,
|
| 1149 |
+
16864,
|
| 1150 |
+
16885,
|
| 1151 |
+
16909,
|
| 1152 |
+
16913,
|
| 1153 |
+
16921,
|
| 1154 |
+
16930,
|
| 1155 |
+
16968,
|
| 1156 |
+
16975,
|
| 1157 |
+
16978,
|
| 1158 |
+
16980,
|
| 1159 |
+
17008,
|
| 1160 |
+
17016,
|
| 1161 |
+
17027,
|
| 1162 |
+
17036,
|
| 1163 |
+
17049,
|
| 1164 |
+
17055,
|
| 1165 |
+
17057,
|
| 1166 |
+
17084,
|
| 1167 |
+
17094,
|
| 1168 |
+
17096,
|
| 1169 |
+
17118,
|
| 1170 |
+
17135,
|
| 1171 |
+
17140,
|
| 1172 |
+
17156,
|
| 1173 |
+
17158,
|
| 1174 |
+
17185,
|
| 1175 |
+
17199,
|
| 1176 |
+
17223,
|
| 1177 |
+
17255,
|
| 1178 |
+
17260,
|
| 1179 |
+
17262,
|
| 1180 |
+
17277,
|
| 1181 |
+
17284,
|
| 1182 |
+
17351,
|
| 1183 |
+
17383,
|
| 1184 |
+
17400,
|
| 1185 |
+
17401,
|
| 1186 |
+
17409,
|
| 1187 |
+
17419,
|
| 1188 |
+
17429,
|
| 1189 |
+
17435,
|
| 1190 |
+
17462,
|
| 1191 |
+
17467,
|
| 1192 |
+
17476,
|
| 1193 |
+
17477,
|
| 1194 |
+
17483,
|
| 1195 |
+
17525,
|
| 1196 |
+
17550,
|
| 1197 |
+
17554,
|
| 1198 |
+
17560,
|
| 1199 |
+
17566,
|
| 1200 |
+
17587,
|
| 1201 |
+
17591,
|
| 1202 |
+
17607,
|
| 1203 |
+
17609,
|
| 1204 |
+
17621,
|
| 1205 |
+
17622,
|
| 1206 |
+
17634,
|
| 1207 |
+
17641,
|
| 1208 |
+
17642,
|
| 1209 |
+
17643,
|
| 1210 |
+
17671,
|
| 1211 |
+
17690,
|
| 1212 |
+
17701,
|
| 1213 |
+
17705,
|
| 1214 |
+
17706,
|
| 1215 |
+
17729,
|
| 1216 |
+
17730,
|
| 1217 |
+
17745,
|
| 1218 |
+
17767,
|
| 1219 |
+
17783,
|
| 1220 |
+
17794,
|
| 1221 |
+
17815,
|
| 1222 |
+
17825,
|
| 1223 |
+
17826,
|
| 1224 |
+
17830,
|
| 1225 |
+
17844,
|
| 1226 |
+
17848,
|
| 1227 |
+
17868,
|
| 1228 |
+
17894,
|
| 1229 |
+
17918,
|
| 1230 |
+
17928,
|
| 1231 |
+
17935,
|
| 1232 |
+
17973,
|
| 1233 |
+
17981,
|
| 1234 |
+
17989,
|
| 1235 |
+
18005,
|
| 1236 |
+
18059,
|
| 1237 |
+
18082,
|
| 1238 |
+
18113,
|
| 1239 |
+
18137,
|
| 1240 |
+
18140,
|
| 1241 |
+
18205,
|
| 1242 |
+
18208,
|
| 1243 |
+
18221,
|
| 1244 |
+
18236,
|
| 1245 |
+
18238,
|
| 1246 |
+
18259,
|
| 1247 |
+
18268,
|
| 1248 |
+
18285,
|
| 1249 |
+
18292,
|
| 1250 |
+
18334,
|
| 1251 |
+
18363,
|
| 1252 |
+
18364,
|
| 1253 |
+
18381,
|
| 1254 |
+
18390,
|
| 1255 |
+
18396,
|
| 1256 |
+
18421,
|
| 1257 |
+
18459,
|
| 1258 |
+
18505,
|
| 1259 |
+
18507,
|
| 1260 |
+
18535,
|
| 1261 |
+
18543,
|
| 1262 |
+
18544,
|
| 1263 |
+
18552,
|
| 1264 |
+
18556,
|
| 1265 |
+
18574,
|
| 1266 |
+
18584,
|
| 1267 |
+
18585,
|
| 1268 |
+
18591,
|
| 1269 |
+
18611,
|
| 1270 |
+
18639,
|
| 1271 |
+
18682,
|
| 1272 |
+
18722,
|
| 1273 |
+
18733,
|
| 1274 |
+
18742,
|
| 1275 |
+
18751,
|
| 1276 |
+
18754,
|
| 1277 |
+
18769,
|
| 1278 |
+
18772,
|
| 1279 |
+
18797,
|
| 1280 |
+
18811,
|
| 1281 |
+
18850,
|
| 1282 |
+
18893,
|
| 1283 |
+
18913,
|
| 1284 |
+
18914,
|
| 1285 |
+
18949,
|
| 1286 |
+
18959,
|
| 1287 |
+
18978,
|
| 1288 |
+
19011,
|
| 1289 |
+
19065,
|
| 1290 |
+
19066,
|
| 1291 |
+
19088,
|
| 1292 |
+
19107,
|
| 1293 |
+
19113,
|
| 1294 |
+
19123,
|
| 1295 |
+
19129,
|
| 1296 |
+
19134,
|
| 1297 |
+
19141,
|
| 1298 |
+
19144,
|
| 1299 |
+
19153,
|
| 1300 |
+
19184,
|
| 1301 |
+
19235,
|
| 1302 |
+
19238,
|
| 1303 |
+
19239,
|
| 1304 |
+
19245,
|
| 1305 |
+
19261,
|
| 1306 |
+
19306,
|
| 1307 |
+
19324,
|
| 1308 |
+
19328,
|
| 1309 |
+
19343,
|
| 1310 |
+
19347,
|
| 1311 |
+
19356,
|
| 1312 |
+
19376,
|
| 1313 |
+
19377,
|
| 1314 |
+
19385,
|
| 1315 |
+
19421,
|
| 1316 |
+
19457,
|
| 1317 |
+
19468,
|
| 1318 |
+
19475,
|
| 1319 |
+
19494,
|
| 1320 |
+
19506,
|
| 1321 |
+
19513,
|
| 1322 |
+
19514,
|
| 1323 |
+
19536,
|
| 1324 |
+
19546,
|
| 1325 |
+
19549,
|
| 1326 |
+
19564,
|
| 1327 |
+
19587,
|
| 1328 |
+
19595,
|
| 1329 |
+
19611,
|
| 1330 |
+
19630,
|
| 1331 |
+
19676,
|
| 1332 |
+
19687,
|
| 1333 |
+
19704,
|
| 1334 |
+
19707,
|
| 1335 |
+
19713,
|
| 1336 |
+
19738,
|
| 1337 |
+
19741,
|
| 1338 |
+
19778,
|
| 1339 |
+
19788,
|
| 1340 |
+
19799,
|
| 1341 |
+
19814,
|
| 1342 |
+
19821,
|
| 1343 |
+
19886,
|
| 1344 |
+
19896,
|
| 1345 |
+
19901,
|
| 1346 |
+
19916,
|
| 1347 |
+
19930,
|
| 1348 |
+
19946,
|
| 1349 |
+
19956,
|
| 1350 |
+
19973,
|
| 1351 |
+
19987,
|
| 1352 |
+
20024,
|
| 1353 |
+
20026,
|
| 1354 |
+
20047,
|
| 1355 |
+
20078,
|
| 1356 |
+
20098,
|
| 1357 |
+
20103,
|
| 1358 |
+
20136,
|
| 1359 |
+
20206,
|
| 1360 |
+
20225,
|
| 1361 |
+
20232,
|
| 1362 |
+
20235,
|
| 1363 |
+
20258,
|
| 1364 |
+
20267,
|
| 1365 |
+
20281,
|
| 1366 |
+
20289,
|
| 1367 |
+
20305,
|
| 1368 |
+
20307,
|
| 1369 |
+
20318,
|
| 1370 |
+
20320,
|
| 1371 |
+
20338,
|
| 1372 |
+
20354,
|
| 1373 |
+
20356,
|
| 1374 |
+
20375,
|
| 1375 |
+
20382,
|
| 1376 |
+
20386,
|
| 1377 |
+
20388,
|
| 1378 |
+
20390,
|
| 1379 |
+
20398,
|
| 1380 |
+
20418,
|
| 1381 |
+
20447,
|
| 1382 |
+
20472,
|
| 1383 |
+
20484,
|
| 1384 |
+
20571,
|
| 1385 |
+
20574,
|
| 1386 |
+
20582,
|
| 1387 |
+
20591,
|
| 1388 |
+
20625,
|
| 1389 |
+
20635,
|
| 1390 |
+
20698,
|
| 1391 |
+
20707,
|
| 1392 |
+
20709,
|
| 1393 |
+
20711,
|
| 1394 |
+
20741,
|
| 1395 |
+
20747,
|
| 1396 |
+
20749,
|
| 1397 |
+
20758,
|
| 1398 |
+
20770,
|
| 1399 |
+
20778,
|
| 1400 |
+
20779,
|
| 1401 |
+
20833,
|
| 1402 |
+
20862,
|
| 1403 |
+
20867,
|
| 1404 |
+
20879,
|
| 1405 |
+
20885,
|
| 1406 |
+
20912,
|
| 1407 |
+
20919,
|
| 1408 |
+
20941,
|
| 1409 |
+
20996,
|
| 1410 |
+
21005,
|
| 1411 |
+
21012,
|
| 1412 |
+
21035,
|
| 1413 |
+
21054,
|
| 1414 |
+
21082,
|
| 1415 |
+
21096,
|
| 1416 |
+
21103,
|
| 1417 |
+
21128,
|
| 1418 |
+
21135,
|
| 1419 |
+
21168,
|
| 1420 |
+
21174,
|
| 1421 |
+
21182,
|
| 1422 |
+
21200,
|
| 1423 |
+
21205,
|
| 1424 |
+
21227,
|
| 1425 |
+
21228,
|
| 1426 |
+
21230,
|
| 1427 |
+
21238,
|
| 1428 |
+
21246,
|
| 1429 |
+
21253,
|
| 1430 |
+
21280,
|
| 1431 |
+
21293,
|
| 1432 |
+
21298,
|
| 1433 |
+
21358,
|
| 1434 |
+
21370,
|
| 1435 |
+
21406,
|
| 1436 |
+
21421,
|
| 1437 |
+
21457,
|
| 1438 |
+
21518,
|
| 1439 |
+
21530,
|
| 1440 |
+
21549,
|
| 1441 |
+
21560,
|
| 1442 |
+
21562,
|
| 1443 |
+
21579,
|
| 1444 |
+
21613,
|
| 1445 |
+
21620,
|
| 1446 |
+
21636,
|
| 1447 |
+
21650,
|
| 1448 |
+
21654,
|
| 1449 |
+
21656,
|
| 1450 |
+
21668,
|
| 1451 |
+
21672,
|
| 1452 |
+
21675,
|
| 1453 |
+
21680,
|
| 1454 |
+
21686,
|
| 1455 |
+
21689,
|
| 1456 |
+
21696,
|
| 1457 |
+
21702,
|
| 1458 |
+
21704,
|
| 1459 |
+
21731,
|
| 1460 |
+
21783,
|
| 1461 |
+
21796,
|
| 1462 |
+
21798,
|
| 1463 |
+
21849,
|
| 1464 |
+
21865,
|
| 1465 |
+
21905,
|
| 1466 |
+
21906,
|
| 1467 |
+
21929,
|
| 1468 |
+
21949,
|
| 1469 |
+
21956,
|
| 1470 |
+
21972,
|
| 1471 |
+
21974,
|
| 1472 |
+
21987,
|
| 1473 |
+
22009,
|
| 1474 |
+
22022,
|
| 1475 |
+
22025,
|
| 1476 |
+
22042,
|
| 1477 |
+
22050,
|
| 1478 |
+
22071,
|
| 1479 |
+
22074,
|
| 1480 |
+
22078,
|
| 1481 |
+
22084,
|
| 1482 |
+
22085,
|
| 1483 |
+
22116,
|
| 1484 |
+
22129,
|
| 1485 |
+
22158,
|
| 1486 |
+
22165,
|
| 1487 |
+
22224,
|
| 1488 |
+
22225,
|
| 1489 |
+
22247,
|
| 1490 |
+
22297,
|
| 1491 |
+
22312,
|
| 1492 |
+
22322,
|
| 1493 |
+
22336,
|
| 1494 |
+
22337,
|
| 1495 |
+
22343,
|
| 1496 |
+
22345,
|
| 1497 |
+
22367,
|
| 1498 |
+
22370,
|
| 1499 |
+
22428,
|
| 1500 |
+
22438,
|
| 1501 |
+
22460,
|
| 1502 |
+
22498,
|
| 1503 |
+
22525,
|
| 1504 |
+
22558,
|
| 1505 |
+
22580,
|
| 1506 |
+
22597,
|
| 1507 |
+
22607,
|
| 1508 |
+
22612,
|
| 1509 |
+
22614,
|
| 1510 |
+
22649,
|
| 1511 |
+
22663,
|
| 1512 |
+
22677,
|
| 1513 |
+
22689,
|
| 1514 |
+
22701,
|
| 1515 |
+
22712,
|
| 1516 |
+
22726,
|
| 1517 |
+
22746,
|
| 1518 |
+
22754,
|
| 1519 |
+
22762,
|
| 1520 |
+
22768,
|
| 1521 |
+
22788,
|
| 1522 |
+
22796,
|
| 1523 |
+
22842,
|
| 1524 |
+
22857,
|
| 1525 |
+
22859,
|
| 1526 |
+
22861,
|
| 1527 |
+
22886,
|
| 1528 |
+
22895,
|
| 1529 |
+
22940,
|
| 1530 |
+
22956,
|
| 1531 |
+
22983,
|
| 1532 |
+
22987,
|
| 1533 |
+
22988,
|
| 1534 |
+
22991,
|
| 1535 |
+
23014,
|
| 1536 |
+
23032,
|
| 1537 |
+
23052,
|
| 1538 |
+
23053,
|
| 1539 |
+
23083,
|
| 1540 |
+
23093,
|
| 1541 |
+
23135,
|
| 1542 |
+
23147,
|
| 1543 |
+
23158,
|
| 1544 |
+
23174,
|
| 1545 |
+
23231,
|
| 1546 |
+
23254,
|
| 1547 |
+
23272,
|
| 1548 |
+
23315,
|
| 1549 |
+
23341,
|
| 1550 |
+
23351,
|
| 1551 |
+
23359,
|
| 1552 |
+
23369,
|
| 1553 |
+
23373,
|
| 1554 |
+
23398,
|
| 1555 |
+
23421,
|
| 1556 |
+
23426,
|
| 1557 |
+
23431,
|
| 1558 |
+
23433,
|
| 1559 |
+
23439,
|
| 1560 |
+
23443,
|
| 1561 |
+
23444,
|
| 1562 |
+
23459,
|
| 1563 |
+
23474,
|
| 1564 |
+
23478,
|
| 1565 |
+
23479,
|
| 1566 |
+
23482,
|
| 1567 |
+
23509,
|
| 1568 |
+
23515,
|
| 1569 |
+
23530,
|
| 1570 |
+
23547,
|
| 1571 |
+
23569,
|
| 1572 |
+
23571,
|
| 1573 |
+
23585,
|
| 1574 |
+
23599,
|
| 1575 |
+
23600,
|
| 1576 |
+
23625,
|
| 1577 |
+
23637,
|
| 1578 |
+
23669,
|
| 1579 |
+
23687,
|
| 1580 |
+
23754,
|
| 1581 |
+
23758,
|
| 1582 |
+
23781,
|
| 1583 |
+
23794,
|
| 1584 |
+
23866,
|
| 1585 |
+
23872,
|
| 1586 |
+
23884,
|
| 1587 |
+
23891,
|
| 1588 |
+
23894,
|
| 1589 |
+
23912,
|
| 1590 |
+
23934,
|
| 1591 |
+
24008,
|
| 1592 |
+
24011,
|
| 1593 |
+
24021,
|
| 1594 |
+
24023,
|
| 1595 |
+
24032,
|
| 1596 |
+
24041,
|
| 1597 |
+
24045,
|
| 1598 |
+
24071,
|
| 1599 |
+
24076,
|
| 1600 |
+
24077,
|
| 1601 |
+
24097,
|
| 1602 |
+
24123,
|
| 1603 |
+
24135,
|
| 1604 |
+
24144,
|
| 1605 |
+
24147,
|
| 1606 |
+
24148,
|
| 1607 |
+
24159,
|
| 1608 |
+
24183,
|
| 1609 |
+
24197,
|
| 1610 |
+
24212,
|
| 1611 |
+
24216,
|
| 1612 |
+
24226,
|
| 1613 |
+
24254,
|
| 1614 |
+
24265,
|
| 1615 |
+
24305,
|
| 1616 |
+
24312,
|
| 1617 |
+
24316,
|
| 1618 |
+
24320,
|
| 1619 |
+
24333,
|
| 1620 |
+
24337,
|
| 1621 |
+
24338,
|
| 1622 |
+
24345,
|
| 1623 |
+
24352,
|
| 1624 |
+
24365,
|
| 1625 |
+
24375,
|
| 1626 |
+
24377,
|
| 1627 |
+
24389,
|
| 1628 |
+
24391,
|
| 1629 |
+
24429,
|
| 1630 |
+
24437,
|
| 1631 |
+
24447,
|
| 1632 |
+
24460,
|
| 1633 |
+
24476,
|
| 1634 |
+
24485,
|
| 1635 |
+
24497,
|
| 1636 |
+
24520,
|
| 1637 |
+
24539,
|
| 1638 |
+
24616,
|
| 1639 |
+
24697,
|
| 1640 |
+
24727,
|
| 1641 |
+
24734,
|
| 1642 |
+
24735,
|
| 1643 |
+
24749,
|
| 1644 |
+
24750,
|
| 1645 |
+
24772,
|
| 1646 |
+
24796,
|
| 1647 |
+
24816,
|
| 1648 |
+
24817,
|
| 1649 |
+
24825,
|
| 1650 |
+
24827,
|
| 1651 |
+
24831,
|
| 1652 |
+
24840,
|
| 1653 |
+
24850,
|
| 1654 |
+
24853,
|
| 1655 |
+
24864,
|
| 1656 |
+
24881,
|
| 1657 |
+
24948,
|
| 1658 |
+
24974,
|
| 1659 |
+
24978,
|
| 1660 |
+
24985,
|
| 1661 |
+
25010,
|
| 1662 |
+
25011,
|
| 1663 |
+
25035,
|
| 1664 |
+
25046,
|
| 1665 |
+
25049,
|
| 1666 |
+
25089,
|
| 1667 |
+
25107,
|
| 1668 |
+
25110,
|
| 1669 |
+
25125,
|
| 1670 |
+
25138,
|
| 1671 |
+
25145,
|
| 1672 |
+
25162,
|
| 1673 |
+
25190,
|
| 1674 |
+
25209,
|
| 1675 |
+
25249,
|
| 1676 |
+
25289,
|
| 1677 |
+
25315,
|
| 1678 |
+
25319,
|
| 1679 |
+
25321,
|
| 1680 |
+
25340,
|
| 1681 |
+
25346,
|
| 1682 |
+
25389,
|
| 1683 |
+
25401,
|
| 1684 |
+
25409,
|
| 1685 |
+
25435,
|
| 1686 |
+
25447,
|
| 1687 |
+
25452,
|
| 1688 |
+
25464,
|
| 1689 |
+
25467,
|
| 1690 |
+
25468,
|
| 1691 |
+
25490,
|
| 1692 |
+
25495,
|
| 1693 |
+
25501,
|
| 1694 |
+
25521,
|
| 1695 |
+
25526,
|
| 1696 |
+
25534,
|
| 1697 |
+
25547,
|
| 1698 |
+
25556,
|
| 1699 |
+
25571,
|
| 1700 |
+
25589,
|
| 1701 |
+
25592,
|
| 1702 |
+
25624,
|
| 1703 |
+
25639,
|
| 1704 |
+
25644,
|
| 1705 |
+
25703,
|
| 1706 |
+
25731,
|
| 1707 |
+
25733,
|
| 1708 |
+
25746,
|
| 1709 |
+
25759,
|
| 1710 |
+
25772,
|
| 1711 |
+
25781,
|
| 1712 |
+
25786,
|
| 1713 |
+
25797,
|
| 1714 |
+
25805,
|
| 1715 |
+
25821,
|
| 1716 |
+
25829,
|
| 1717 |
+
25865,
|
| 1718 |
+
25870,
|
| 1719 |
+
25885,
|
| 1720 |
+
25890,
|
| 1721 |
+
25897,
|
| 1722 |
+
25906,
|
| 1723 |
+
25910,
|
| 1724 |
+
25912,
|
| 1725 |
+
25917,
|
| 1726 |
+
25923,
|
| 1727 |
+
25928,
|
| 1728 |
+
25931,
|
| 1729 |
+
25933,
|
| 1730 |
+
25956,
|
| 1731 |
+
25974,
|
| 1732 |
+
26027,
|
| 1733 |
+
26043,
|
| 1734 |
+
26072,
|
| 1735 |
+
26074,
|
| 1736 |
+
26082,
|
| 1737 |
+
26087,
|
| 1738 |
+
26097,
|
| 1739 |
+
26126,
|
| 1740 |
+
26169,
|
| 1741 |
+
26188,
|
| 1742 |
+
26230,
|
| 1743 |
+
26238,
|
| 1744 |
+
26245,
|
| 1745 |
+
26254,
|
| 1746 |
+
26259,
|
| 1747 |
+
26280,
|
| 1748 |
+
26285,
|
| 1749 |
+
26311,
|
| 1750 |
+
26336,
|
| 1751 |
+
26384,
|
| 1752 |
+
26405,
|
| 1753 |
+
26409,
|
| 1754 |
+
26432,
|
| 1755 |
+
26442,
|
| 1756 |
+
26454,
|
| 1757 |
+
26469,
|
| 1758 |
+
26487,
|
| 1759 |
+
26496,
|
| 1760 |
+
26506,
|
| 1761 |
+
26525,
|
| 1762 |
+
26546,
|
| 1763 |
+
26556,
|
| 1764 |
+
26566,
|
| 1765 |
+
26578,
|
| 1766 |
+
26579,
|
| 1767 |
+
26601,
|
| 1768 |
+
26609,
|
| 1769 |
+
26637,
|
| 1770 |
+
26690,
|
| 1771 |
+
26717,
|
| 1772 |
+
26731,
|
| 1773 |
+
26771,
|
| 1774 |
+
26772,
|
| 1775 |
+
26782,
|
| 1776 |
+
26794,
|
| 1777 |
+
26809,
|
| 1778 |
+
26828,
|
| 1779 |
+
26840,
|
| 1780 |
+
26850,
|
| 1781 |
+
26853,
|
| 1782 |
+
26856,
|
| 1783 |
+
26861,
|
| 1784 |
+
26903,
|
| 1785 |
+
26919,
|
| 1786 |
+
26927,
|
| 1787 |
+
26974,
|
| 1788 |
+
26999,
|
| 1789 |
+
27007,
|
| 1790 |
+
27010,
|
| 1791 |
+
27027,
|
| 1792 |
+
27042,
|
| 1793 |
+
27050,
|
| 1794 |
+
27062,
|
| 1795 |
+
27070,
|
| 1796 |
+
27085,
|
| 1797 |
+
27095,
|
| 1798 |
+
27113,
|
| 1799 |
+
27119,
|
| 1800 |
+
27126,
|
| 1801 |
+
27144,
|
| 1802 |
+
27151,
|
| 1803 |
+
27166,
|
| 1804 |
+
27184,
|
| 1805 |
+
27189,
|
| 1806 |
+
27191,
|
| 1807 |
+
27209,
|
| 1808 |
+
27214,
|
| 1809 |
+
27220,
|
| 1810 |
+
27249,
|
| 1811 |
+
27260,
|
| 1812 |
+
27271,
|
| 1813 |
+
27275,
|
| 1814 |
+
27301,
|
| 1815 |
+
27311,
|
| 1816 |
+
27316,
|
| 1817 |
+
27325,
|
| 1818 |
+
27352,
|
| 1819 |
+
27365,
|
| 1820 |
+
27398,
|
| 1821 |
+
27424,
|
| 1822 |
+
27427,
|
| 1823 |
+
27435,
|
| 1824 |
+
27438,
|
| 1825 |
+
27439,
|
| 1826 |
+
27443,
|
| 1827 |
+
27486,
|
| 1828 |
+
27487,
|
| 1829 |
+
27506,
|
| 1830 |
+
27510,
|
| 1831 |
+
27517,
|
| 1832 |
+
27538,
|
| 1833 |
+
27568,
|
| 1834 |
+
27580,
|
| 1835 |
+
27584,
|
| 1836 |
+
27610,
|
| 1837 |
+
27614,
|
| 1838 |
+
27631,
|
| 1839 |
+
27640,
|
| 1840 |
+
27654,
|
| 1841 |
+
27659,
|
| 1842 |
+
27668,
|
| 1843 |
+
27676,
|
| 1844 |
+
27701,
|
| 1845 |
+
27704,
|
| 1846 |
+
27757,
|
| 1847 |
+
27762,
|
| 1848 |
+
27766,
|
| 1849 |
+
27767,
|
| 1850 |
+
27771,
|
| 1851 |
+
27772,
|
| 1852 |
+
27818,
|
| 1853 |
+
27845,
|
| 1854 |
+
27855,
|
| 1855 |
+
27866,
|
| 1856 |
+
27876,
|
| 1857 |
+
27884,
|
| 1858 |
+
27901,
|
| 1859 |
+
27907,
|
| 1860 |
+
27978,
|
| 1861 |
+
27982,
|
| 1862 |
+
27999,
|
| 1863 |
+
28019,
|
| 1864 |
+
28038,
|
| 1865 |
+
28075,
|
| 1866 |
+
28094,
|
| 1867 |
+
28107,
|
| 1868 |
+
28114,
|
| 1869 |
+
28116,
|
| 1870 |
+
28127,
|
| 1871 |
+
28145,
|
| 1872 |
+
28154,
|
| 1873 |
+
28168,
|
| 1874 |
+
28175,
|
| 1875 |
+
28197,
|
| 1876 |
+
28207,
|
| 1877 |
+
28247,
|
| 1878 |
+
28261,
|
| 1879 |
+
28266,
|
| 1880 |
+
28272,
|
| 1881 |
+
28281,
|
| 1882 |
+
28311,
|
| 1883 |
+
28325,
|
| 1884 |
+
28329,
|
| 1885 |
+
28336,
|
| 1886 |
+
28348,
|
| 1887 |
+
28372,
|
| 1888 |
+
28375,
|
| 1889 |
+
28382,
|
| 1890 |
+
28389,
|
| 1891 |
+
28394,
|
| 1892 |
+
28406,
|
| 1893 |
+
28429,
|
| 1894 |
+
28447,
|
| 1895 |
+
28489,
|
| 1896 |
+
28520,
|
| 1897 |
+
28565,
|
| 1898 |
+
28570,
|
| 1899 |
+
28581,
|
| 1900 |
+
28653,
|
| 1901 |
+
28654,
|
| 1902 |
+
28665,
|
| 1903 |
+
28669,
|
| 1904 |
+
28671,
|
| 1905 |
+
28673,
|
| 1906 |
+
28680,
|
| 1907 |
+
28706,
|
| 1908 |
+
28712,
|
| 1909 |
+
28743,
|
| 1910 |
+
28757,
|
| 1911 |
+
28864,
|
| 1912 |
+
28875,
|
| 1913 |
+
28880,
|
| 1914 |
+
28907,
|
| 1915 |
+
28918,
|
| 1916 |
+
28927,
|
| 1917 |
+
28958,
|
| 1918 |
+
28974,
|
| 1919 |
+
28978,
|
| 1920 |
+
28987,
|
| 1921 |
+
29043,
|
| 1922 |
+
29076,
|
| 1923 |
+
29083,
|
| 1924 |
+
29084,
|
| 1925 |
+
29089,
|
| 1926 |
+
29110,
|
| 1927 |
+
29113,
|
| 1928 |
+
29122,
|
| 1929 |
+
29128,
|
| 1930 |
+
29150,
|
| 1931 |
+
29184,
|
| 1932 |
+
29191,
|
| 1933 |
+
29209,
|
| 1934 |
+
29220,
|
| 1935 |
+
29237,
|
| 1936 |
+
29240,
|
| 1937 |
+
29254,
|
| 1938 |
+
29324,
|
| 1939 |
+
29333,
|
| 1940 |
+
29345,
|
| 1941 |
+
29355,
|
| 1942 |
+
29359,
|
| 1943 |
+
29376,
|
| 1944 |
+
29389,
|
| 1945 |
+
29465,
|
| 1946 |
+
29487,
|
| 1947 |
+
29529,
|
| 1948 |
+
29555,
|
| 1949 |
+
29562,
|
| 1950 |
+
29592,
|
| 1951 |
+
29605,
|
| 1952 |
+
29616,
|
| 1953 |
+
29621,
|
| 1954 |
+
29633,
|
| 1955 |
+
29636,
|
| 1956 |
+
29675,
|
| 1957 |
+
29710,
|
| 1958 |
+
29721,
|
| 1959 |
+
29766,
|
| 1960 |
+
29768,
|
| 1961 |
+
29771,
|
| 1962 |
+
29773,
|
| 1963 |
+
29776,
|
| 1964 |
+
29811,
|
| 1965 |
+
29816,
|
| 1966 |
+
29833,
|
| 1967 |
+
29836,
|
| 1968 |
+
29879,
|
| 1969 |
+
29893,
|
| 1970 |
+
29932,
|
| 1971 |
+
29941,
|
| 1972 |
+
29955,
|
| 1973 |
+
29963,
|
| 1974 |
+
29975,
|
| 1975 |
+
29976,
|
| 1976 |
+
30006,
|
| 1977 |
+
30008,
|
| 1978 |
+
30018,
|
| 1979 |
+
30034,
|
| 1980 |
+
30085,
|
| 1981 |
+
30122,
|
| 1982 |
+
30139,
|
| 1983 |
+
30154,
|
| 1984 |
+
30212,
|
| 1985 |
+
30218,
|
| 1986 |
+
30247,
|
| 1987 |
+
30260,
|
| 1988 |
+
30263,
|
| 1989 |
+
30274,
|
| 1990 |
+
30323,
|
| 1991 |
+
30325,
|
| 1992 |
+
30354,
|
| 1993 |
+
30375,
|
| 1994 |
+
30376,
|
| 1995 |
+
30389,
|
| 1996 |
+
30390,
|
| 1997 |
+
30395,
|
| 1998 |
+
30407,
|
| 1999 |
+
30409,
|
| 2000 |
+
30424,
|
| 2001 |
+
30438,
|
| 2002 |
+
30458,
|
| 2003 |
+
30463,
|
| 2004 |
+
30467,
|
| 2005 |
+
30469,
|
| 2006 |
+
30475,
|
| 2007 |
+
30484,
|
| 2008 |
+
30493,
|
| 2009 |
+
30520,
|
| 2010 |
+
30523,
|
| 2011 |
+
30535,
|
| 2012 |
+
30558,
|
| 2013 |
+
30584,
|
| 2014 |
+
30625,
|
| 2015 |
+
30631,
|
| 2016 |
+
30634,
|
| 2017 |
+
30638,
|
| 2018 |
+
30645,
|
| 2019 |
+
30678,
|
| 2020 |
+
30695,
|
| 2021 |
+
30723,
|
| 2022 |
+
30734,
|
| 2023 |
+
30736,
|
| 2024 |
+
30749,
|
| 2025 |
+
30750,
|
| 2026 |
+
30757,
|
| 2027 |
+
30779,
|
| 2028 |
+
30793,
|
| 2029 |
+
30828,
|
| 2030 |
+
30831,
|
| 2031 |
+
30838,
|
| 2032 |
+
30850,
|
| 2033 |
+
30873,
|
| 2034 |
+
30885,
|
| 2035 |
+
30916,
|
| 2036 |
+
30917,
|
| 2037 |
+
30948,
|
| 2038 |
+
30949,
|
| 2039 |
+
30958,
|
| 2040 |
+
30981,
|
| 2041 |
+
30991,
|
| 2042 |
+
31159,
|
| 2043 |
+
31200,
|
| 2044 |
+
31218,
|
| 2045 |
+
31225,
|
| 2046 |
+
31239,
|
| 2047 |
+
31257,
|
| 2048 |
+
31278,
|
| 2049 |
+
31286,
|
| 2050 |
+
31295,
|
| 2051 |
+
31296,
|
| 2052 |
+
31305,
|
| 2053 |
+
31307,
|
| 2054 |
+
31338,
|
| 2055 |
+
31356,
|
| 2056 |
+
31368,
|
| 2057 |
+
31411,
|
| 2058 |
+
31418,
|
| 2059 |
+
31424,
|
| 2060 |
+
31436,
|
| 2061 |
+
31470,
|
| 2062 |
+
31482,
|
| 2063 |
+
31483,
|
| 2064 |
+
31487,
|
| 2065 |
+
31501,
|
| 2066 |
+
31540,
|
| 2067 |
+
31567,
|
| 2068 |
+
31587,
|
| 2069 |
+
31601,
|
| 2070 |
+
31673,
|
| 2071 |
+
31688,
|
| 2072 |
+
31706,
|
| 2073 |
+
31707,
|
| 2074 |
+
31715,
|
| 2075 |
+
31723,
|
| 2076 |
+
31729,
|
| 2077 |
+
31732,
|
| 2078 |
+
31740,
|
| 2079 |
+
31764,
|
| 2080 |
+
31781,
|
| 2081 |
+
31797,
|
| 2082 |
+
31813,
|
| 2083 |
+
31816,
|
| 2084 |
+
31856,
|
| 2085 |
+
31870,
|
| 2086 |
+
31884,
|
| 2087 |
+
31888,
|
| 2088 |
+
31906,
|
| 2089 |
+
31911,
|
| 2090 |
+
31931,
|
| 2091 |
+
31933,
|
| 2092 |
+
31951,
|
| 2093 |
+
31975,
|
| 2094 |
+
31980,
|
| 2095 |
+
31982,
|
| 2096 |
+
32013,
|
| 2097 |
+
32014,
|
| 2098 |
+
32028,
|
| 2099 |
+
32057,
|
| 2100 |
+
32068,
|
| 2101 |
+
32083,
|
| 2102 |
+
32087,
|
| 2103 |
+
32090,
|
| 2104 |
+
32148,
|
| 2105 |
+
32162,
|
| 2106 |
+
32181,
|
| 2107 |
+
32184,
|
| 2108 |
+
32203,
|
| 2109 |
+
32208,
|
| 2110 |
+
32224,
|
| 2111 |
+
32254,
|
| 2112 |
+
32263,
|
| 2113 |
+
32268,
|
| 2114 |
+
32279,
|
| 2115 |
+
32295,
|
| 2116 |
+
32314,
|
| 2117 |
+
32326,
|
| 2118 |
+
32343,
|
| 2119 |
+
32356,
|
| 2120 |
+
32373,
|
| 2121 |
+
32386,
|
| 2122 |
+
32423,
|
| 2123 |
+
32431,
|
| 2124 |
+
32450,
|
| 2125 |
+
32465,
|
| 2126 |
+
32484,
|
| 2127 |
+
32495,
|
| 2128 |
+
32511,
|
| 2129 |
+
32544,
|
| 2130 |
+
32545,
|
| 2131 |
+
32548,
|
| 2132 |
+
32574,
|
| 2133 |
+
32585,
|
| 2134 |
+
32596,
|
| 2135 |
+
32598,
|
| 2136 |
+
32602,
|
| 2137 |
+
32609,
|
| 2138 |
+
32616,
|
| 2139 |
+
32623,
|
| 2140 |
+
32624,
|
| 2141 |
+
32636,
|
| 2142 |
+
32647,
|
| 2143 |
+
32691,
|
| 2144 |
+
32735,
|
| 2145 |
+
32737,
|
| 2146 |
+
32756,
|
| 2147 |
+
32798,
|
| 2148 |
+
32805,
|
| 2149 |
+
32843,
|
| 2150 |
+
32868,
|
| 2151 |
+
32872,
|
| 2152 |
+
32881,
|
| 2153 |
+
32926,
|
| 2154 |
+
32968,
|
| 2155 |
+
32985,
|
| 2156 |
+
32989,
|
| 2157 |
+
32991,
|
| 2158 |
+
32996,
|
| 2159 |
+
33010,
|
| 2160 |
+
33013,
|
| 2161 |
+
33016,
|
| 2162 |
+
33021,
|
| 2163 |
+
33028,
|
| 2164 |
+
33030,
|
| 2165 |
+
33077,
|
| 2166 |
+
33084,
|
| 2167 |
+
33089,
|
| 2168 |
+
33093,
|
| 2169 |
+
33104,
|
| 2170 |
+
33130,
|
| 2171 |
+
33152,
|
| 2172 |
+
33157,
|
| 2173 |
+
33173,
|
| 2174 |
+
33179,
|
| 2175 |
+
33199,
|
| 2176 |
+
33293,
|
| 2177 |
+
33294,
|
| 2178 |
+
33333,
|
| 2179 |
+
33351,
|
| 2180 |
+
33352,
|
| 2181 |
+
33397,
|
| 2182 |
+
33413,
|
| 2183 |
+
33418,
|
| 2184 |
+
33419,
|
| 2185 |
+
33424,
|
| 2186 |
+
33436,
|
| 2187 |
+
33440,
|
| 2188 |
+
33442,
|
| 2189 |
+
33478,
|
| 2190 |
+
33484,
|
| 2191 |
+
33498,
|
| 2192 |
+
33543,
|
| 2193 |
+
33546,
|
| 2194 |
+
33552,
|
| 2195 |
+
33556,
|
| 2196 |
+
33579,
|
| 2197 |
+
33593,
|
| 2198 |
+
33621,
|
| 2199 |
+
33626,
|
| 2200 |
+
33641,
|
| 2201 |
+
33645,
|
| 2202 |
+
33660,
|
| 2203 |
+
33666,
|
| 2204 |
+
33673,
|
| 2205 |
+
33687,
|
| 2206 |
+
33694,
|
| 2207 |
+
33711,
|
| 2208 |
+
33734,
|
| 2209 |
+
33739,
|
| 2210 |
+
33761,
|
| 2211 |
+
33766,
|
| 2212 |
+
33783,
|
| 2213 |
+
33794,
|
| 2214 |
+
33800,
|
| 2215 |
+
33804,
|
| 2216 |
+
33810,
|
| 2217 |
+
33826,
|
| 2218 |
+
33862,
|
| 2219 |
+
33871,
|
| 2220 |
+
33916,
|
| 2221 |
+
33928,
|
| 2222 |
+
33929,
|
| 2223 |
+
33933,
|
| 2224 |
+
33943,
|
| 2225 |
+
33947,
|
| 2226 |
+
33949,
|
| 2227 |
+
33955,
|
| 2228 |
+
33959,
|
| 2229 |
+
33986,
|
| 2230 |
+
33987,
|
| 2231 |
+
33989,
|
| 2232 |
+
33999,
|
| 2233 |
+
34007,
|
| 2234 |
+
34010,
|
| 2235 |
+
34026,
|
| 2236 |
+
34052,
|
| 2237 |
+
34081,
|
| 2238 |
+
34082,
|
| 2239 |
+
34083,
|
| 2240 |
+
34135,
|
| 2241 |
+
34141,
|
| 2242 |
+
34143,
|
| 2243 |
+
34149,
|
| 2244 |
+
34184,
|
| 2245 |
+
34203,
|
| 2246 |
+
34232,
|
| 2247 |
+
34297,
|
| 2248 |
+
34299,
|
| 2249 |
+
34300,
|
| 2250 |
+
34321,
|
| 2251 |
+
34322,
|
| 2252 |
+
34327,
|
| 2253 |
+
34331,
|
| 2254 |
+
34332,
|
| 2255 |
+
34341,
|
| 2256 |
+
34369,
|
| 2257 |
+
34403,
|
| 2258 |
+
34410,
|
| 2259 |
+
34420,
|
| 2260 |
+
34425,
|
| 2261 |
+
34429,
|
| 2262 |
+
34480,
|
| 2263 |
+
34483,
|
| 2264 |
+
34499,
|
| 2265 |
+
34546,
|
| 2266 |
+
34577,
|
| 2267 |
+
34583,
|
| 2268 |
+
34590,
|
| 2269 |
+
34600,
|
| 2270 |
+
34622,
|
| 2271 |
+
34629,
|
| 2272 |
+
34642,
|
| 2273 |
+
34649,
|
| 2274 |
+
34670,
|
| 2275 |
+
34690,
|
| 2276 |
+
34759,
|
| 2277 |
+
34764,
|
| 2278 |
+
34773,
|
| 2279 |
+
34793,
|
| 2280 |
+
34799,
|
| 2281 |
+
34811,
|
| 2282 |
+
34812,
|
| 2283 |
+
34820,
|
| 2284 |
+
34821,
|
| 2285 |
+
34829,
|
| 2286 |
+
34864,
|
| 2287 |
+
34866,
|
| 2288 |
+
34885,
|
| 2289 |
+
34895,
|
| 2290 |
+
34923,
|
| 2291 |
+
34939,
|
| 2292 |
+
34944,
|
| 2293 |
+
34973,
|
| 2294 |
+
34985,
|
| 2295 |
+
34992,
|
| 2296 |
+
35017,
|
| 2297 |
+
35049,
|
| 2298 |
+
35055,
|
| 2299 |
+
35063,
|
| 2300 |
+
35065,
|
| 2301 |
+
35066,
|
| 2302 |
+
35075,
|
| 2303 |
+
35099,
|
| 2304 |
+
35106,
|
| 2305 |
+
35115,
|
| 2306 |
+
35117,
|
| 2307 |
+
35125,
|
| 2308 |
+
35146,
|
| 2309 |
+
35173,
|
| 2310 |
+
35178,
|
| 2311 |
+
35184,
|
| 2312 |
+
35190,
|
| 2313 |
+
35199,
|
| 2314 |
+
35200,
|
| 2315 |
+
35208,
|
| 2316 |
+
35219,
|
| 2317 |
+
35229,
|
| 2318 |
+
35248,
|
| 2319 |
+
35274,
|
| 2320 |
+
35289,
|
| 2321 |
+
35293,
|
| 2322 |
+
35297,
|
| 2323 |
+
35306,
|
| 2324 |
+
35311,
|
| 2325 |
+
35330,
|
| 2326 |
+
35334,
|
| 2327 |
+
35335,
|
| 2328 |
+
35358,
|
| 2329 |
+
35392,
|
| 2330 |
+
35393,
|
| 2331 |
+
35394,
|
| 2332 |
+
35410,
|
| 2333 |
+
35419,
|
| 2334 |
+
35420,
|
| 2335 |
+
35429,
|
| 2336 |
+
35438,
|
| 2337 |
+
35452,
|
| 2338 |
+
35460,
|
| 2339 |
+
35496,
|
| 2340 |
+
35521,
|
| 2341 |
+
35528,
|
| 2342 |
+
35537,
|
| 2343 |
+
35538,
|
| 2344 |
+
35553,
|
| 2345 |
+
35582,
|
| 2346 |
+
35635,
|
| 2347 |
+
35636,
|
| 2348 |
+
35653,
|
| 2349 |
+
35674,
|
| 2350 |
+
35676,
|
| 2351 |
+
35677,
|
| 2352 |
+
35680,
|
| 2353 |
+
35687,
|
| 2354 |
+
35696,
|
| 2355 |
+
35712,
|
| 2356 |
+
35718,
|
| 2357 |
+
35720,
|
| 2358 |
+
35721,
|
| 2359 |
+
35755,
|
| 2360 |
+
35786,
|
| 2361 |
+
35811,
|
| 2362 |
+
35829,
|
| 2363 |
+
35833,
|
| 2364 |
+
35850,
|
| 2365 |
+
35855,
|
| 2366 |
+
35864,
|
| 2367 |
+
35866,
|
| 2368 |
+
35888,
|
| 2369 |
+
35898,
|
| 2370 |
+
35963,
|
| 2371 |
+
36001,
|
| 2372 |
+
36014,
|
| 2373 |
+
36046,
|
| 2374 |
+
36073,
|
| 2375 |
+
36087,
|
| 2376 |
+
36088,
|
| 2377 |
+
36097,
|
| 2378 |
+
36109,
|
| 2379 |
+
36131,
|
| 2380 |
+
36168,
|
| 2381 |
+
36170,
|
| 2382 |
+
36174,
|
| 2383 |
+
36180,
|
| 2384 |
+
36197,
|
| 2385 |
+
36206,
|
| 2386 |
+
36207,
|
| 2387 |
+
36246,
|
| 2388 |
+
36260,
|
| 2389 |
+
36288,
|
| 2390 |
+
36289,
|
| 2391 |
+
36308,
|
| 2392 |
+
36323,
|
| 2393 |
+
36330,
|
| 2394 |
+
36334,
|
| 2395 |
+
36360,
|
| 2396 |
+
36363,
|
| 2397 |
+
36368,
|
| 2398 |
+
36384,
|
| 2399 |
+
36456,
|
| 2400 |
+
36474,
|
| 2401 |
+
36484,
|
| 2402 |
+
36502,
|
| 2403 |
+
36514,
|
| 2404 |
+
36548,
|
| 2405 |
+
36577,
|
| 2406 |
+
36606,
|
| 2407 |
+
36609,
|
| 2408 |
+
36610,
|
| 2409 |
+
36618,
|
| 2410 |
+
36622,
|
| 2411 |
+
36624,
|
| 2412 |
+
36652,
|
| 2413 |
+
36669,
|
| 2414 |
+
36677,
|
| 2415 |
+
36695,
|
| 2416 |
+
36713,
|
| 2417 |
+
36715,
|
| 2418 |
+
36720,
|
| 2419 |
+
36778,
|
| 2420 |
+
36781,
|
| 2421 |
+
36800,
|
| 2422 |
+
36818,
|
| 2423 |
+
36825,
|
| 2424 |
+
36827,
|
| 2425 |
+
36845,
|
| 2426 |
+
36876,
|
| 2427 |
+
36882,
|
| 2428 |
+
36884,
|
| 2429 |
+
36902,
|
| 2430 |
+
36914,
|
| 2431 |
+
36915,
|
| 2432 |
+
36928,
|
| 2433 |
+
36944,
|
| 2434 |
+
36951,
|
| 2435 |
+
36968,
|
| 2436 |
+
36978,
|
| 2437 |
+
36979,
|
| 2438 |
+
36984,
|
| 2439 |
+
37023,
|
| 2440 |
+
37038,
|
| 2441 |
+
37048,
|
| 2442 |
+
37051,
|
| 2443 |
+
37064,
|
| 2444 |
+
37076,
|
| 2445 |
+
37084,
|
| 2446 |
+
37096,
|
| 2447 |
+
37124,
|
| 2448 |
+
37148,
|
| 2449 |
+
37157,
|
| 2450 |
+
37178,
|
| 2451 |
+
37195,
|
| 2452 |
+
37204,
|
| 2453 |
+
37235,
|
| 2454 |
+
37254,
|
| 2455 |
+
37258,
|
| 2456 |
+
37265,
|
| 2457 |
+
37270,
|
| 2458 |
+
37275,
|
| 2459 |
+
37283,
|
| 2460 |
+
37289,
|
| 2461 |
+
37334,
|
| 2462 |
+
37336,
|
| 2463 |
+
37341,
|
| 2464 |
+
37365,
|
| 2465 |
+
37389,
|
| 2466 |
+
37396,
|
| 2467 |
+
37403,
|
| 2468 |
+
37440,
|
| 2469 |
+
37441,
|
| 2470 |
+
37445,
|
| 2471 |
+
37472,
|
| 2472 |
+
37485,
|
| 2473 |
+
37490,
|
| 2474 |
+
37509,
|
| 2475 |
+
37544,
|
| 2476 |
+
37595,
|
| 2477 |
+
37621,
|
| 2478 |
+
37640,
|
| 2479 |
+
37663,
|
| 2480 |
+
37673,
|
| 2481 |
+
37692,
|
| 2482 |
+
37698,
|
| 2483 |
+
37701,
|
| 2484 |
+
37713,
|
| 2485 |
+
37719,
|
| 2486 |
+
37770,
|
| 2487 |
+
37790,
|
| 2488 |
+
37792,
|
| 2489 |
+
37807,
|
| 2490 |
+
37833,
|
| 2491 |
+
37844,
|
| 2492 |
+
37846,
|
| 2493 |
+
37859,
|
| 2494 |
+
37880,
|
| 2495 |
+
37890,
|
| 2496 |
+
37913,
|
| 2497 |
+
37927,
|
| 2498 |
+
37945,
|
| 2499 |
+
37959,
|
| 2500 |
+
37960,
|
| 2501 |
+
37994,
|
| 2502 |
+
38036,
|
| 2503 |
+
38085,
|
| 2504 |
+
38088,
|
| 2505 |
+
38104,
|
| 2506 |
+
38177,
|
| 2507 |
+
38178,
|
| 2508 |
+
38192,
|
| 2509 |
+
38209,
|
| 2510 |
+
38210,
|
| 2511 |
+
38225,
|
| 2512 |
+
38249,
|
| 2513 |
+
38250,
|
| 2514 |
+
38291,
|
| 2515 |
+
38320,
|
| 2516 |
+
38330,
|
| 2517 |
+
38356,
|
| 2518 |
+
38382,
|
| 2519 |
+
38386,
|
| 2520 |
+
38408,
|
| 2521 |
+
38419,
|
| 2522 |
+
38427,
|
| 2523 |
+
38433,
|
| 2524 |
+
38436,
|
| 2525 |
+
38441,
|
| 2526 |
+
38445,
|
| 2527 |
+
38459,
|
| 2528 |
+
38464,
|
| 2529 |
+
38497,
|
| 2530 |
+
38502,
|
| 2531 |
+
38522,
|
| 2532 |
+
38523,
|
| 2533 |
+
38535,
|
| 2534 |
+
38547,
|
| 2535 |
+
38564,
|
| 2536 |
+
38577,
|
| 2537 |
+
38584,
|
| 2538 |
+
38592,
|
| 2539 |
+
38607,
|
| 2540 |
+
38608,
|
| 2541 |
+
38609,
|
| 2542 |
+
38615,
|
| 2543 |
+
38646,
|
| 2544 |
+
38657,
|
| 2545 |
+
38669,
|
| 2546 |
+
38679,
|
| 2547 |
+
38691,
|
| 2548 |
+
38705,
|
| 2549 |
+
38707,
|
| 2550 |
+
38732,
|
| 2551 |
+
38738,
|
| 2552 |
+
38739,
|
| 2553 |
+
38772,
|
| 2554 |
+
38776,
|
| 2555 |
+
38802,
|
| 2556 |
+
38812,
|
| 2557 |
+
38837,
|
| 2558 |
+
38842,
|
| 2559 |
+
38852,
|
| 2560 |
+
38866,
|
| 2561 |
+
38881,
|
| 2562 |
+
38903,
|
| 2563 |
+
38911,
|
| 2564 |
+
38915,
|
| 2565 |
+
38942,
|
| 2566 |
+
38958,
|
| 2567 |
+
38962,
|
| 2568 |
+
38966,
|
| 2569 |
+
38981,
|
| 2570 |
+
39002,
|
| 2571 |
+
39004,
|
| 2572 |
+
39012,
|
| 2573 |
+
39019,
|
| 2574 |
+
39024,
|
| 2575 |
+
39025,
|
| 2576 |
+
39047,
|
| 2577 |
+
39071,
|
| 2578 |
+
39098,
|
| 2579 |
+
39137,
|
| 2580 |
+
39144,
|
| 2581 |
+
39147,
|
| 2582 |
+
39175,
|
| 2583 |
+
39193,
|
| 2584 |
+
39205,
|
| 2585 |
+
39206,
|
| 2586 |
+
39226,
|
| 2587 |
+
39276,
|
| 2588 |
+
39278,
|
| 2589 |
+
39290,
|
| 2590 |
+
39315,
|
| 2591 |
+
39317,
|
| 2592 |
+
39321,
|
| 2593 |
+
39327,
|
| 2594 |
+
39341,
|
| 2595 |
+
39365,
|
| 2596 |
+
39366,
|
| 2597 |
+
39417,
|
| 2598 |
+
39444,
|
| 2599 |
+
39464,
|
| 2600 |
+
39467,
|
| 2601 |
+
39503,
|
| 2602 |
+
39514,
|
| 2603 |
+
39524,
|
| 2604 |
+
39528,
|
| 2605 |
+
39555,
|
| 2606 |
+
39567,
|
| 2607 |
+
39569,
|
| 2608 |
+
39573,
|
| 2609 |
+
39595,
|
| 2610 |
+
39635,
|
| 2611 |
+
39647,
|
| 2612 |
+
39648,
|
| 2613 |
+
39663,
|
| 2614 |
+
39678,
|
| 2615 |
+
39680,
|
| 2616 |
+
39692,
|
| 2617 |
+
39724,
|
| 2618 |
+
39729,
|
| 2619 |
+
39734,
|
| 2620 |
+
39749,
|
| 2621 |
+
39769,
|
| 2622 |
+
39774,
|
| 2623 |
+
39778,
|
| 2624 |
+
39781,
|
| 2625 |
+
39789,
|
| 2626 |
+
39790,
|
| 2627 |
+
39805,
|
| 2628 |
+
39809,
|
| 2629 |
+
39821,
|
| 2630 |
+
39831,
|
| 2631 |
+
39834,
|
| 2632 |
+
39839,
|
| 2633 |
+
39865,
|
| 2634 |
+
39866,
|
| 2635 |
+
39887,
|
| 2636 |
+
39902,
|
| 2637 |
+
39908,
|
| 2638 |
+
39928,
|
| 2639 |
+
39945,
|
| 2640 |
+
39952,
|
| 2641 |
+
39955,
|
| 2642 |
+
39969,
|
| 2643 |
+
39974,
|
| 2644 |
+
40005,
|
| 2645 |
+
40007,
|
| 2646 |
+
40023,
|
| 2647 |
+
40044,
|
| 2648 |
+
40046,
|
| 2649 |
+
40056,
|
| 2650 |
+
40057,
|
| 2651 |
+
40071,
|
| 2652 |
+
40087,
|
| 2653 |
+
40148,
|
| 2654 |
+
40201,
|
| 2655 |
+
40250,
|
| 2656 |
+
40254,
|
| 2657 |
+
40268,
|
| 2658 |
+
40304,
|
| 2659 |
+
40317,
|
| 2660 |
+
40318,
|
| 2661 |
+
40330,
|
| 2662 |
+
40337,
|
| 2663 |
+
40401,
|
| 2664 |
+
40419,
|
| 2665 |
+
40494,
|
| 2666 |
+
40539,
|
| 2667 |
+
40548,
|
| 2668 |
+
40556,
|
| 2669 |
+
40571,
|
| 2670 |
+
40583,
|
| 2671 |
+
40589,
|
| 2672 |
+
40610,
|
| 2673 |
+
40612,
|
| 2674 |
+
40617,
|
| 2675 |
+
40625,
|
| 2676 |
+
40629,
|
| 2677 |
+
40642,
|
| 2678 |
+
40650,
|
| 2679 |
+
40653,
|
| 2680 |
+
40654,
|
| 2681 |
+
40666,
|
| 2682 |
+
40691,
|
| 2683 |
+
40714,
|
| 2684 |
+
40722,
|
| 2685 |
+
40723,
|
| 2686 |
+
40725,
|
| 2687 |
+
40732,
|
| 2688 |
+
40742,
|
| 2689 |
+
40747,
|
| 2690 |
+
40764,
|
| 2691 |
+
40771,
|
| 2692 |
+
40804,
|
| 2693 |
+
40840,
|
| 2694 |
+
40845,
|
| 2695 |
+
40877,
|
| 2696 |
+
40901,
|
| 2697 |
+
40919,
|
| 2698 |
+
40938,
|
| 2699 |
+
40949,
|
| 2700 |
+
40964,
|
| 2701 |
+
41025,
|
| 2702 |
+
41037,
|
| 2703 |
+
41040,
|
| 2704 |
+
41075,
|
| 2705 |
+
41091,
|
| 2706 |
+
41117,
|
| 2707 |
+
41129,
|
| 2708 |
+
41153,
|
| 2709 |
+
41165,
|
| 2710 |
+
41169,
|
| 2711 |
+
41191,
|
| 2712 |
+
41196,
|
| 2713 |
+
41197,
|
| 2714 |
+
41233,
|
| 2715 |
+
41236,
|
| 2716 |
+
41237,
|
| 2717 |
+
41263,
|
| 2718 |
+
41289,
|
| 2719 |
+
41295,
|
| 2720 |
+
41320,
|
| 2721 |
+
41344,
|
| 2722 |
+
41364,
|
| 2723 |
+
41399,
|
| 2724 |
+
41401,
|
| 2725 |
+
41405,
|
| 2726 |
+
41422,
|
| 2727 |
+
41446,
|
| 2728 |
+
41453,
|
| 2729 |
+
41458,
|
| 2730 |
+
41479,
|
| 2731 |
+
41491,
|
| 2732 |
+
41516,
|
| 2733 |
+
41529,
|
| 2734 |
+
41546,
|
| 2735 |
+
41553,
|
| 2736 |
+
41573,
|
| 2737 |
+
41577,
|
| 2738 |
+
41578,
|
| 2739 |
+
41583,
|
| 2740 |
+
41596,
|
| 2741 |
+
41620,
|
| 2742 |
+
41636,
|
| 2743 |
+
41649,
|
| 2744 |
+
41655,
|
| 2745 |
+
41693,
|
| 2746 |
+
41694,
|
| 2747 |
+
41749,
|
| 2748 |
+
41785,
|
| 2749 |
+
41794,
|
| 2750 |
+
41838,
|
| 2751 |
+
41843,
|
| 2752 |
+
41853,
|
| 2753 |
+
41864,
|
| 2754 |
+
41892,
|
| 2755 |
+
41902,
|
| 2756 |
+
41909,
|
| 2757 |
+
41920,
|
| 2758 |
+
41922,
|
| 2759 |
+
41940,
|
| 2760 |
+
41942,
|
| 2761 |
+
41952,
|
| 2762 |
+
41958,
|
| 2763 |
+
41998,
|
| 2764 |
+
42012,
|
| 2765 |
+
42013,
|
| 2766 |
+
42015,
|
| 2767 |
+
42059,
|
| 2768 |
+
42071,
|
| 2769 |
+
42073,
|
| 2770 |
+
42078,
|
| 2771 |
+
42080,
|
| 2772 |
+
42083,
|
| 2773 |
+
42109,
|
| 2774 |
+
42132,
|
| 2775 |
+
42144,
|
| 2776 |
+
42154,
|
| 2777 |
+
42179,
|
| 2778 |
+
42181,
|
| 2779 |
+
42182,
|
| 2780 |
+
42194,
|
| 2781 |
+
42197,
|
| 2782 |
+
42236,
|
| 2783 |
+
42252,
|
| 2784 |
+
42273,
|
| 2785 |
+
42303,
|
| 2786 |
+
42307,
|
| 2787 |
+
42311,
|
| 2788 |
+
42342,
|
| 2789 |
+
42344,
|
| 2790 |
+
42363,
|
| 2791 |
+
42372,
|
| 2792 |
+
42384,
|
| 2793 |
+
42392,
|
| 2794 |
+
42398,
|
| 2795 |
+
42419,
|
| 2796 |
+
42450,
|
| 2797 |
+
42455,
|
| 2798 |
+
42476,
|
| 2799 |
+
42519,
|
| 2800 |
+
42521,
|
| 2801 |
+
42534,
|
| 2802 |
+
42574,
|
| 2803 |
+
42595,
|
| 2804 |
+
42604,
|
| 2805 |
+
42618,
|
| 2806 |
+
42625,
|
| 2807 |
+
42718,
|
| 2808 |
+
42732,
|
| 2809 |
+
42735,
|
| 2810 |
+
42788,
|
| 2811 |
+
42795,
|
| 2812 |
+
42833,
|
| 2813 |
+
42849,
|
| 2814 |
+
42871,
|
| 2815 |
+
42892,
|
| 2816 |
+
42902,
|
| 2817 |
+
42999,
|
| 2818 |
+
43002,
|
| 2819 |
+
43005,
|
| 2820 |
+
43036,
|
| 2821 |
+
43057,
|
| 2822 |
+
43060,
|
| 2823 |
+
43146,
|
| 2824 |
+
43153,
|
| 2825 |
+
43163,
|
| 2826 |
+
43175,
|
| 2827 |
+
43196,
|
| 2828 |
+
43202,
|
| 2829 |
+
43234,
|
| 2830 |
+
43270,
|
| 2831 |
+
43284,
|
| 2832 |
+
43305,
|
| 2833 |
+
43317,
|
| 2834 |
+
43333,
|
| 2835 |
+
43336,
|
| 2836 |
+
43341,
|
| 2837 |
+
43349,
|
| 2838 |
+
43355,
|
| 2839 |
+
43373,
|
| 2840 |
+
43385,
|
| 2841 |
+
43397,
|
| 2842 |
+
43410,
|
| 2843 |
+
43448,
|
| 2844 |
+
43459,
|
| 2845 |
+
43471,
|
| 2846 |
+
43472,
|
| 2847 |
+
43488,
|
| 2848 |
+
43493,
|
| 2849 |
+
43501,
|
| 2850 |
+
43502,
|
| 2851 |
+
43507,
|
| 2852 |
+
43547,
|
| 2853 |
+
43552,
|
| 2854 |
+
43559,
|
| 2855 |
+
43575,
|
| 2856 |
+
43606,
|
| 2857 |
+
43608,
|
| 2858 |
+
43614,
|
| 2859 |
+
43644,
|
| 2860 |
+
43664,
|
| 2861 |
+
43691,
|
| 2862 |
+
43716,
|
| 2863 |
+
43720,
|
| 2864 |
+
43722,
|
| 2865 |
+
43724,
|
| 2866 |
+
43738,
|
| 2867 |
+
43752,
|
| 2868 |
+
43759,
|
| 2869 |
+
43779,
|
| 2870 |
+
43839,
|
| 2871 |
+
43853,
|
| 2872 |
+
43872,
|
| 2873 |
+
43886,
|
| 2874 |
+
43926,
|
| 2875 |
+
43946,
|
| 2876 |
+
43952,
|
| 2877 |
+
44004,
|
| 2878 |
+
44014,
|
| 2879 |
+
44044,
|
| 2880 |
+
44048,
|
| 2881 |
+
44051,
|
| 2882 |
+
44054,
|
| 2883 |
+
44062,
|
| 2884 |
+
44075,
|
| 2885 |
+
44098,
|
| 2886 |
+
44099,
|
| 2887 |
+
44103,
|
| 2888 |
+
44104,
|
| 2889 |
+
44122,
|
| 2890 |
+
44132,
|
| 2891 |
+
44142,
|
| 2892 |
+
44147,
|
| 2893 |
+
44155,
|
| 2894 |
+
44163,
|
| 2895 |
+
44194,
|
| 2896 |
+
44212,
|
| 2897 |
+
44258,
|
| 2898 |
+
44292,
|
| 2899 |
+
44307,
|
| 2900 |
+
44311,
|
| 2901 |
+
44316,
|
| 2902 |
+
44342,
|
| 2903 |
+
44360,
|
| 2904 |
+
44364,
|
| 2905 |
+
44383,
|
| 2906 |
+
44386,
|
| 2907 |
+
44388,
|
| 2908 |
+
44401,
|
| 2909 |
+
44415,
|
| 2910 |
+
44416,
|
| 2911 |
+
44481,
|
| 2912 |
+
44490,
|
| 2913 |
+
44504,
|
| 2914 |
+
44568,
|
| 2915 |
+
44608,
|
| 2916 |
+
44611,
|
| 2917 |
+
44635,
|
| 2918 |
+
44651,
|
| 2919 |
+
44654,
|
| 2920 |
+
44660,
|
| 2921 |
+
44665,
|
| 2922 |
+
44680,
|
| 2923 |
+
44702,
|
| 2924 |
+
44706,
|
| 2925 |
+
44720,
|
| 2926 |
+
44723,
|
| 2927 |
+
44726,
|
| 2928 |
+
44732,
|
| 2929 |
+
44735,
|
| 2930 |
+
44746,
|
| 2931 |
+
44749,
|
| 2932 |
+
44752,
|
| 2933 |
+
44784,
|
| 2934 |
+
44811,
|
| 2935 |
+
44818,
|
| 2936 |
+
44824,
|
| 2937 |
+
44828,
|
| 2938 |
+
44832,
|
| 2939 |
+
44834,
|
| 2940 |
+
44841,
|
| 2941 |
+
44851,
|
| 2942 |
+
44862,
|
| 2943 |
+
44888,
|
| 2944 |
+
44891,
|
| 2945 |
+
44955,
|
| 2946 |
+
44957,
|
| 2947 |
+
44959,
|
| 2948 |
+
44965,
|
| 2949 |
+
44970,
|
| 2950 |
+
44973,
|
| 2951 |
+
44993,
|
| 2952 |
+
45014,
|
| 2953 |
+
45016,
|
| 2954 |
+
45023,
|
| 2955 |
+
45027,
|
| 2956 |
+
45029,
|
| 2957 |
+
45030,
|
| 2958 |
+
45033,
|
| 2959 |
+
45076,
|
| 2960 |
+
45100,
|
| 2961 |
+
45104,
|
| 2962 |
+
45119,
|
| 2963 |
+
45128,
|
| 2964 |
+
45130,
|
| 2965 |
+
45164,
|
| 2966 |
+
45169,
|
| 2967 |
+
45175,
|
| 2968 |
+
45219,
|
| 2969 |
+
45244,
|
| 2970 |
+
45313,
|
| 2971 |
+
45320,
|
| 2972 |
+
45335,
|
| 2973 |
+
45349,
|
| 2974 |
+
45375,
|
| 2975 |
+
45385,
|
| 2976 |
+
45407,
|
| 2977 |
+
45426,
|
| 2978 |
+
45432,
|
| 2979 |
+
45434,
|
| 2980 |
+
45438,
|
| 2981 |
+
45443,
|
| 2982 |
+
45456,
|
| 2983 |
+
45538,
|
| 2984 |
+
45575,
|
| 2985 |
+
45608,
|
| 2986 |
+
45609,
|
| 2987 |
+
45611,
|
| 2988 |
+
45626,
|
| 2989 |
+
45632,
|
| 2990 |
+
45638,
|
| 2991 |
+
45649,
|
| 2992 |
+
45675,
|
| 2993 |
+
45676,
|
| 2994 |
+
45705,
|
| 2995 |
+
45722,
|
| 2996 |
+
45748,
|
| 2997 |
+
45752,
|
| 2998 |
+
45806,
|
| 2999 |
+
45807,
|
| 3000 |
+
45811,
|
| 3001 |
+
45814,
|
| 3002 |
+
45830,
|
| 3003 |
+
45850,
|
| 3004 |
+
45858,
|
| 3005 |
+
45875,
|
| 3006 |
+
45881,
|
| 3007 |
+
45904,
|
| 3008 |
+
45924,
|
| 3009 |
+
45929,
|
| 3010 |
+
45961,
|
| 3011 |
+
45982,
|
| 3012 |
+
46006,
|
| 3013 |
+
46018,
|
| 3014 |
+
46021,
|
| 3015 |
+
46030,
|
| 3016 |
+
46046,
|
| 3017 |
+
46091,
|
| 3018 |
+
46151,
|
| 3019 |
+
46165,
|
| 3020 |
+
46186,
|
| 3021 |
+
46203,
|
| 3022 |
+
46215,
|
| 3023 |
+
46218,
|
| 3024 |
+
46225,
|
| 3025 |
+
46232,
|
| 3026 |
+
46251,
|
| 3027 |
+
46255,
|
| 3028 |
+
46259,
|
| 3029 |
+
46277,
|
| 3030 |
+
46282,
|
| 3031 |
+
46338,
|
| 3032 |
+
46341,
|
| 3033 |
+
46346,
|
| 3034 |
+
46353,
|
| 3035 |
+
46356,
|
| 3036 |
+
46373,
|
| 3037 |
+
46418,
|
| 3038 |
+
46426,
|
| 3039 |
+
46434,
|
| 3040 |
+
46442,
|
| 3041 |
+
46452,
|
| 3042 |
+
46500,
|
| 3043 |
+
46505,
|
| 3044 |
+
46519,
|
| 3045 |
+
46542,
|
| 3046 |
+
46548,
|
| 3047 |
+
46556,
|
| 3048 |
+
46577,
|
| 3049 |
+
46602,
|
| 3050 |
+
46605,
|
| 3051 |
+
46607,
|
| 3052 |
+
46644,
|
| 3053 |
+
46649,
|
| 3054 |
+
46656,
|
| 3055 |
+
46729,
|
| 3056 |
+
46739,
|
| 3057 |
+
46744,
|
| 3058 |
+
46750,
|
| 3059 |
+
46789,
|
| 3060 |
+
46796,
|
| 3061 |
+
46800,
|
| 3062 |
+
46801,
|
| 3063 |
+
46826,
|
| 3064 |
+
46827,
|
| 3065 |
+
46832,
|
| 3066 |
+
46851,
|
| 3067 |
+
46871,
|
| 3068 |
+
46873,
|
| 3069 |
+
46896,
|
| 3070 |
+
46903,
|
| 3071 |
+
46914,
|
| 3072 |
+
46964,
|
| 3073 |
+
46986,
|
| 3074 |
+
46998,
|
| 3075 |
+
47022,
|
| 3076 |
+
47065,
|
| 3077 |
+
47080,
|
| 3078 |
+
47101,
|
| 3079 |
+
47135,
|
| 3080 |
+
47144,
|
| 3081 |
+
47152,
|
| 3082 |
+
47184,
|
| 3083 |
+
47189,
|
| 3084 |
+
47207,
|
| 3085 |
+
47215,
|
| 3086 |
+
47218,
|
| 3087 |
+
47221,
|
| 3088 |
+
47254,
|
| 3089 |
+
47266,
|
| 3090 |
+
47300,
|
| 3091 |
+
47326,
|
| 3092 |
+
47336,
|
| 3093 |
+
47362,
|
| 3094 |
+
47369,
|
| 3095 |
+
47378,
|
| 3096 |
+
47379,
|
| 3097 |
+
47383,
|
| 3098 |
+
47415,
|
| 3099 |
+
47423,
|
| 3100 |
+
47446,
|
| 3101 |
+
47449,
|
| 3102 |
+
47453,
|
| 3103 |
+
47455,
|
| 3104 |
+
47456,
|
| 3105 |
+
47486,
|
| 3106 |
+
47489,
|
| 3107 |
+
47494,
|
| 3108 |
+
47503,
|
| 3109 |
+
47505,
|
| 3110 |
+
47527,
|
| 3111 |
+
47531,
|
| 3112 |
+
47552,
|
| 3113 |
+
47563,
|
| 3114 |
+
47603,
|
| 3115 |
+
47614,
|
| 3116 |
+
47616,
|
| 3117 |
+
47617,
|
| 3118 |
+
47620,
|
| 3119 |
+
47665,
|
| 3120 |
+
47683,
|
| 3121 |
+
47717,
|
| 3122 |
+
47726,
|
| 3123 |
+
47729,
|
| 3124 |
+
47760,
|
| 3125 |
+
47761,
|
| 3126 |
+
47785,
|
| 3127 |
+
47843,
|
| 3128 |
+
47857,
|
| 3129 |
+
47867,
|
| 3130 |
+
47893,
|
| 3131 |
+
47895,
|
| 3132 |
+
47911,
|
| 3133 |
+
47915,
|
| 3134 |
+
47936,
|
| 3135 |
+
47966,
|
| 3136 |
+
47972,
|
| 3137 |
+
47979,
|
| 3138 |
+
47989,
|
| 3139 |
+
48018,
|
| 3140 |
+
48030,
|
| 3141 |
+
48046,
|
| 3142 |
+
48059,
|
| 3143 |
+
48064,
|
| 3144 |
+
48076,
|
| 3145 |
+
48085,
|
| 3146 |
+
48108,
|
| 3147 |
+
48109,
|
| 3148 |
+
48115,
|
| 3149 |
+
48116,
|
| 3150 |
+
48139,
|
| 3151 |
+
48148,
|
| 3152 |
+
48174,
|
| 3153 |
+
48230,
|
| 3154 |
+
48242,
|
| 3155 |
+
48269,
|
| 3156 |
+
48271,
|
| 3157 |
+
48272,
|
| 3158 |
+
48281,
|
| 3159 |
+
48312,
|
| 3160 |
+
48320,
|
| 3161 |
+
48321,
|
| 3162 |
+
48330,
|
| 3163 |
+
48340,
|
| 3164 |
+
48364,
|
| 3165 |
+
48396,
|
| 3166 |
+
48405,
|
| 3167 |
+
48426,
|
| 3168 |
+
48443,
|
| 3169 |
+
48455,
|
| 3170 |
+
48458,
|
| 3171 |
+
48462,
|
| 3172 |
+
48503,
|
| 3173 |
+
48527,
|
| 3174 |
+
48531,
|
| 3175 |
+
48533,
|
| 3176 |
+
48566,
|
| 3177 |
+
48571,
|
| 3178 |
+
48591,
|
| 3179 |
+
48603,
|
| 3180 |
+
48622,
|
| 3181 |
+
48643,
|
| 3182 |
+
48644,
|
| 3183 |
+
48645,
|
| 3184 |
+
48651,
|
| 3185 |
+
48654,
|
| 3186 |
+
48656,
|
| 3187 |
+
48666,
|
| 3188 |
+
48668,
|
| 3189 |
+
48683,
|
| 3190 |
+
48690,
|
| 3191 |
+
48721,
|
| 3192 |
+
48724,
|
| 3193 |
+
48727,
|
| 3194 |
+
48746,
|
| 3195 |
+
48749,
|
| 3196 |
+
48754,
|
| 3197 |
+
48800,
|
| 3198 |
+
48814,
|
| 3199 |
+
48817,
|
| 3200 |
+
48840,
|
| 3201 |
+
48885,
|
| 3202 |
+
48898,
|
| 3203 |
+
48937,
|
| 3204 |
+
48949,
|
| 3205 |
+
48950,
|
| 3206 |
+
48953,
|
| 3207 |
+
48962,
|
| 3208 |
+
48967,
|
| 3209 |
+
48974,
|
| 3210 |
+
48997,
|
| 3211 |
+
49038,
|
| 3212 |
+
49048,
|
| 3213 |
+
49071,
|
| 3214 |
+
49088,
|
| 3215 |
+
49092,
|
| 3216 |
+
49100,
|
| 3217 |
+
49106,
|
| 3218 |
+
49116,
|
| 3219 |
+
49128,
|
| 3220 |
+
49129,
|
| 3221 |
+
49155,
|
| 3222 |
+
49166,
|
| 3223 |
+
49173,
|
| 3224 |
+
49174,
|
| 3225 |
+
49196,
|
| 3226 |
+
49217,
|
| 3227 |
+
49235,
|
| 3228 |
+
49237,
|
| 3229 |
+
49245,
|
| 3230 |
+
49248,
|
| 3231 |
+
49254,
|
| 3232 |
+
49270,
|
| 3233 |
+
49289,
|
| 3234 |
+
49323,
|
| 3235 |
+
49348,
|
| 3236 |
+
49408,
|
| 3237 |
+
49420,
|
| 3238 |
+
49422,
|
| 3239 |
+
49434,
|
| 3240 |
+
49475,
|
| 3241 |
+
49493,
|
| 3242 |
+
49542,
|
| 3243 |
+
49548,
|
| 3244 |
+
49555,
|
| 3245 |
+
49577,
|
| 3246 |
+
49589,
|
| 3247 |
+
49595,
|
| 3248 |
+
49602,
|
| 3249 |
+
49618,
|
| 3250 |
+
49622,
|
| 3251 |
+
49624,
|
| 3252 |
+
49669,
|
| 3253 |
+
49725,
|
| 3254 |
+
49747,
|
| 3255 |
+
49760,
|
| 3256 |
+
49777,
|
| 3257 |
+
49810,
|
| 3258 |
+
49854,
|
| 3259 |
+
49884,
|
| 3260 |
+
49897,
|
| 3261 |
+
49901,
|
| 3262 |
+
49917,
|
| 3263 |
+
49923,
|
| 3264 |
+
49943,
|
| 3265 |
+
49957,
|
| 3266 |
+
49962,
|
| 3267 |
+
49987,
|
| 3268 |
+
50048,
|
| 3269 |
+
50075,
|
| 3270 |
+
50102,
|
| 3271 |
+
50111,
|
| 3272 |
+
50119,
|
| 3273 |
+
50121,
|
| 3274 |
+
50134,
|
| 3275 |
+
50165,
|
| 3276 |
+
50177,
|
| 3277 |
+
50188,
|
| 3278 |
+
50236,
|
| 3279 |
+
50245,
|
| 3280 |
+
50294,
|
| 3281 |
+
50305,
|
| 3282 |
+
50316,
|
| 3283 |
+
50320,
|
| 3284 |
+
50347,
|
| 3285 |
+
50362,
|
| 3286 |
+
50432,
|
| 3287 |
+
50474,
|
| 3288 |
+
50496,
|
| 3289 |
+
50510,
|
| 3290 |
+
50514,
|
| 3291 |
+
50524,
|
| 3292 |
+
50534,
|
| 3293 |
+
50549,
|
| 3294 |
+
50593,
|
| 3295 |
+
50599,
|
| 3296 |
+
50600,
|
| 3297 |
+
50608,
|
| 3298 |
+
50640,
|
| 3299 |
+
50680,
|
| 3300 |
+
50713,
|
| 3301 |
+
50714,
|
| 3302 |
+
50718,
|
| 3303 |
+
50721,
|
| 3304 |
+
50724,
|
| 3305 |
+
50746,
|
| 3306 |
+
50758,
|
| 3307 |
+
50760,
|
| 3308 |
+
50803,
|
| 3309 |
+
50805,
|
| 3310 |
+
50807,
|
| 3311 |
+
50841,
|
| 3312 |
+
50869,
|
| 3313 |
+
50886,
|
| 3314 |
+
50919,
|
| 3315 |
+
50928,
|
| 3316 |
+
50938,
|
| 3317 |
+
50940,
|
| 3318 |
+
50948,
|
| 3319 |
+
50950,
|
| 3320 |
+
50963,
|
| 3321 |
+
50970,
|
| 3322 |
+
50978,
|
| 3323 |
+
50989,
|
| 3324 |
+
50994,
|
| 3325 |
+
51012,
|
| 3326 |
+
51027,
|
| 3327 |
+
51028,
|
| 3328 |
+
51030,
|
| 3329 |
+
51048,
|
| 3330 |
+
51067,
|
| 3331 |
+
51076,
|
| 3332 |
+
51088,
|
| 3333 |
+
51100,
|
| 3334 |
+
51113,
|
| 3335 |
+
51118,
|
| 3336 |
+
51124,
|
| 3337 |
+
51126,
|
| 3338 |
+
51128,
|
| 3339 |
+
51141,
|
| 3340 |
+
51200,
|
| 3341 |
+
51264,
|
| 3342 |
+
51275,
|
| 3343 |
+
51278,
|
| 3344 |
+
51308,
|
| 3345 |
+
51356,
|
| 3346 |
+
51363,
|
| 3347 |
+
51364,
|
| 3348 |
+
51377,
|
| 3349 |
+
51386,
|
| 3350 |
+
51387,
|
| 3351 |
+
51414,
|
| 3352 |
+
51418,
|
| 3353 |
+
51423,
|
| 3354 |
+
51427,
|
| 3355 |
+
51442,
|
| 3356 |
+
51461,
|
| 3357 |
+
51480,
|
| 3358 |
+
51494,
|
| 3359 |
+
51497,
|
| 3360 |
+
51506,
|
| 3361 |
+
51507,
|
| 3362 |
+
51519,
|
| 3363 |
+
51622,
|
| 3364 |
+
51632,
|
| 3365 |
+
51658,
|
| 3366 |
+
51666,
|
| 3367 |
+
51669,
|
| 3368 |
+
51672,
|
| 3369 |
+
51687,
|
| 3370 |
+
51746,
|
| 3371 |
+
51750,
|
| 3372 |
+
51752,
|
| 3373 |
+
51754,
|
| 3374 |
+
51787,
|
| 3375 |
+
51796,
|
| 3376 |
+
51814,
|
| 3377 |
+
51820,
|
| 3378 |
+
51837,
|
| 3379 |
+
51877,
|
| 3380 |
+
51902,
|
| 3381 |
+
51912,
|
| 3382 |
+
51921,
|
| 3383 |
+
51944,
|
| 3384 |
+
51946,
|
| 3385 |
+
52011,
|
| 3386 |
+
52027,
|
| 3387 |
+
52031,
|
| 3388 |
+
52039,
|
| 3389 |
+
52054,
|
| 3390 |
+
52072,
|
| 3391 |
+
52073,
|
| 3392 |
+
52080,
|
| 3393 |
+
52133,
|
| 3394 |
+
52154,
|
| 3395 |
+
52169,
|
| 3396 |
+
52187,
|
| 3397 |
+
52195,
|
| 3398 |
+
52240,
|
| 3399 |
+
52252,
|
| 3400 |
+
52262,
|
| 3401 |
+
52268,
|
| 3402 |
+
52285,
|
| 3403 |
+
52294,
|
| 3404 |
+
52324,
|
| 3405 |
+
52338,
|
| 3406 |
+
52347,
|
| 3407 |
+
52372,
|
| 3408 |
+
52386,
|
| 3409 |
+
52388,
|
| 3410 |
+
52397,
|
| 3411 |
+
52408,
|
| 3412 |
+
52411,
|
| 3413 |
+
52438,
|
| 3414 |
+
52444,
|
| 3415 |
+
52462,
|
| 3416 |
+
52463,
|
| 3417 |
+
52474,
|
| 3418 |
+
52481,
|
| 3419 |
+
52489,
|
| 3420 |
+
52506,
|
| 3421 |
+
52516,
|
| 3422 |
+
52527,
|
| 3423 |
+
52531,
|
| 3424 |
+
52574,
|
| 3425 |
+
52599,
|
| 3426 |
+
52607,
|
| 3427 |
+
52610,
|
| 3428 |
+
52613,
|
| 3429 |
+
52617,
|
| 3430 |
+
52620,
|
| 3431 |
+
52623,
|
| 3432 |
+
52629,
|
| 3433 |
+
52664,
|
| 3434 |
+
52684,
|
| 3435 |
+
52699,
|
| 3436 |
+
52707,
|
| 3437 |
+
52714,
|
| 3438 |
+
52720,
|
| 3439 |
+
52731,
|
| 3440 |
+
52792,
|
| 3441 |
+
52797,
|
| 3442 |
+
52798,
|
| 3443 |
+
52806,
|
| 3444 |
+
52818,
|
| 3445 |
+
52859,
|
| 3446 |
+
52887,
|
| 3447 |
+
52976,
|
| 3448 |
+
52981,
|
| 3449 |
+
52991,
|
| 3450 |
+
53005,
|
| 3451 |
+
53006,
|
| 3452 |
+
53025,
|
| 3453 |
+
53075,
|
| 3454 |
+
53099,
|
| 3455 |
+
53111,
|
| 3456 |
+
53126,
|
| 3457 |
+
53136,
|
| 3458 |
+
53196,
|
| 3459 |
+
53278,
|
| 3460 |
+
53279,
|
| 3461 |
+
53304,
|
| 3462 |
+
53321,
|
| 3463 |
+
53322,
|
| 3464 |
+
53372,
|
| 3465 |
+
53418,
|
| 3466 |
+
53430,
|
| 3467 |
+
53437,
|
| 3468 |
+
53440,
|
| 3469 |
+
53496,
|
| 3470 |
+
53497,
|
| 3471 |
+
53505,
|
| 3472 |
+
53556,
|
| 3473 |
+
53560,
|
| 3474 |
+
53589,
|
| 3475 |
+
53599,
|
| 3476 |
+
53600,
|
| 3477 |
+
53614,
|
| 3478 |
+
53632,
|
| 3479 |
+
53675,
|
| 3480 |
+
53722,
|
| 3481 |
+
53736,
|
| 3482 |
+
53740,
|
| 3483 |
+
53751,
|
| 3484 |
+
53780,
|
| 3485 |
+
53781,
|
| 3486 |
+
53808,
|
| 3487 |
+
53810,
|
| 3488 |
+
53815,
|
| 3489 |
+
53848,
|
| 3490 |
+
53895,
|
| 3491 |
+
53897,
|
| 3492 |
+
53916,
|
| 3493 |
+
53922,
|
| 3494 |
+
53923,
|
| 3495 |
+
53949,
|
| 3496 |
+
53973,
|
| 3497 |
+
53982,
|
| 3498 |
+
53989,
|
| 3499 |
+
53995,
|
| 3500 |
+
54027,
|
| 3501 |
+
54028,
|
| 3502 |
+
54050,
|
| 3503 |
+
54053,
|
| 3504 |
+
54060,
|
| 3505 |
+
54093,
|
| 3506 |
+
54108,
|
| 3507 |
+
54109,
|
| 3508 |
+
54128,
|
| 3509 |
+
54130,
|
| 3510 |
+
54131,
|
| 3511 |
+
54136,
|
| 3512 |
+
54145,
|
| 3513 |
+
54201,
|
| 3514 |
+
54203,
|
| 3515 |
+
54210,
|
| 3516 |
+
54229,
|
| 3517 |
+
54258,
|
| 3518 |
+
54268,
|
| 3519 |
+
54296,
|
| 3520 |
+
54323,
|
| 3521 |
+
54378,
|
| 3522 |
+
54394,
|
| 3523 |
+
54402,
|
| 3524 |
+
54403,
|
| 3525 |
+
54436,
|
| 3526 |
+
54443,
|
| 3527 |
+
54453,
|
| 3528 |
+
54457,
|
| 3529 |
+
54463,
|
| 3530 |
+
54470,
|
| 3531 |
+
54471,
|
| 3532 |
+
54489,
|
| 3533 |
+
54491,
|
| 3534 |
+
54492,
|
| 3535 |
+
54494,
|
| 3536 |
+
54496,
|
| 3537 |
+
54538,
|
| 3538 |
+
54597,
|
| 3539 |
+
54599,
|
| 3540 |
+
54616,
|
| 3541 |
+
54622,
|
| 3542 |
+
54630,
|
| 3543 |
+
54642,
|
| 3544 |
+
54646,
|
| 3545 |
+
54672,
|
| 3546 |
+
54680,
|
| 3547 |
+
54693,
|
| 3548 |
+
54697,
|
| 3549 |
+
54699,
|
| 3550 |
+
54702,
|
| 3551 |
+
54716,
|
| 3552 |
+
54728,
|
| 3553 |
+
54769,
|
| 3554 |
+
54784,
|
| 3555 |
+
54786,
|
| 3556 |
+
54837,
|
| 3557 |
+
54869,
|
| 3558 |
+
54878,
|
| 3559 |
+
54880,
|
| 3560 |
+
54907,
|
| 3561 |
+
54914,
|
| 3562 |
+
54924,
|
| 3563 |
+
54934,
|
| 3564 |
+
54941,
|
| 3565 |
+
54955,
|
| 3566 |
+
54978,
|
| 3567 |
+
54980,
|
| 3568 |
+
54995,
|
| 3569 |
+
55008,
|
| 3570 |
+
55030,
|
| 3571 |
+
55051,
|
| 3572 |
+
55059,
|
| 3573 |
+
55064,
|
| 3574 |
+
55065,
|
| 3575 |
+
55073,
|
| 3576 |
+
55079,
|
| 3577 |
+
55087,
|
| 3578 |
+
55098,
|
| 3579 |
+
55120,
|
| 3580 |
+
55122,
|
| 3581 |
+
55144,
|
| 3582 |
+
55157,
|
| 3583 |
+
55168,
|
| 3584 |
+
55178,
|
| 3585 |
+
55186,
|
| 3586 |
+
55187,
|
| 3587 |
+
55188,
|
| 3588 |
+
55212,
|
| 3589 |
+
55223,
|
| 3590 |
+
55224,
|
| 3591 |
+
55226,
|
| 3592 |
+
55237,
|
| 3593 |
+
55243,
|
| 3594 |
+
55266,
|
| 3595 |
+
55280,
|
| 3596 |
+
55283,
|
| 3597 |
+
55342,
|
| 3598 |
+
55350,
|
| 3599 |
+
55414,
|
| 3600 |
+
55421,
|
| 3601 |
+
55430,
|
| 3602 |
+
55435,
|
| 3603 |
+
55447,
|
| 3604 |
+
55458,
|
| 3605 |
+
55479,
|
| 3606 |
+
55483,
|
| 3607 |
+
55493,
|
| 3608 |
+
55504,
|
| 3609 |
+
55531,
|
| 3610 |
+
55546,
|
| 3611 |
+
55556,
|
| 3612 |
+
55561,
|
| 3613 |
+
55574,
|
| 3614 |
+
55589,
|
| 3615 |
+
55681,
|
| 3616 |
+
55685,
|
| 3617 |
+
55691,
|
| 3618 |
+
55698,
|
| 3619 |
+
55702,
|
| 3620 |
+
55705,
|
| 3621 |
+
55722,
|
| 3622 |
+
55729,
|
| 3623 |
+
55750,
|
| 3624 |
+
55764,
|
| 3625 |
+
55783,
|
| 3626 |
+
55809,
|
| 3627 |
+
55819,
|
| 3628 |
+
55823,
|
| 3629 |
+
55828,
|
| 3630 |
+
55838,
|
| 3631 |
+
55855,
|
| 3632 |
+
55890,
|
| 3633 |
+
55919,
|
| 3634 |
+
55924,
|
| 3635 |
+
55927,
|
| 3636 |
+
55957,
|
| 3637 |
+
55961,
|
| 3638 |
+
55962,
|
| 3639 |
+
55969,
|
| 3640 |
+
55979,
|
| 3641 |
+
55983,
|
| 3642 |
+
56002,
|
| 3643 |
+
56024,
|
| 3644 |
+
56040,
|
| 3645 |
+
56041,
|
| 3646 |
+
56045,
|
| 3647 |
+
56058,
|
| 3648 |
+
56062,
|
| 3649 |
+
56065,
|
| 3650 |
+
56073,
|
| 3651 |
+
56085,
|
| 3652 |
+
56114,
|
| 3653 |
+
56121,
|
| 3654 |
+
56132,
|
| 3655 |
+
56141,
|
| 3656 |
+
56144,
|
| 3657 |
+
56154,
|
| 3658 |
+
56162,
|
| 3659 |
+
56177,
|
| 3660 |
+
56247,
|
| 3661 |
+
56252,
|
| 3662 |
+
56268,
|
| 3663 |
+
56286,
|
| 3664 |
+
56291,
|
| 3665 |
+
56293,
|
| 3666 |
+
56337,
|
| 3667 |
+
56339,
|
| 3668 |
+
56351,
|
| 3669 |
+
56361,
|
| 3670 |
+
56391,
|
| 3671 |
+
56422,
|
| 3672 |
+
56504,
|
| 3673 |
+
56596,
|
| 3674 |
+
56606,
|
| 3675 |
+
56701,
|
| 3676 |
+
56703,
|
| 3677 |
+
56718,
|
| 3678 |
+
56761,
|
| 3679 |
+
56779,
|
| 3680 |
+
56782,
|
| 3681 |
+
56823,
|
| 3682 |
+
56831,
|
| 3683 |
+
56842,
|
| 3684 |
+
56870,
|
| 3685 |
+
56877,
|
| 3686 |
+
56882,
|
| 3687 |
+
56890,
|
| 3688 |
+
56912,
|
| 3689 |
+
56948,
|
| 3690 |
+
56962,
|
| 3691 |
+
56964,
|
| 3692 |
+
56980,
|
| 3693 |
+
56993,
|
| 3694 |
+
57018,
|
| 3695 |
+
57036,
|
| 3696 |
+
57048,
|
| 3697 |
+
57050,
|
| 3698 |
+
57068,
|
| 3699 |
+
57069,
|
| 3700 |
+
57073,
|
| 3701 |
+
57130,
|
| 3702 |
+
57133,
|
| 3703 |
+
57150,
|
| 3704 |
+
57154,
|
| 3705 |
+
57160,
|
| 3706 |
+
57178,
|
| 3707 |
+
57183,
|
| 3708 |
+
57213,
|
| 3709 |
+
57214,
|
| 3710 |
+
57223,
|
| 3711 |
+
57224,
|
| 3712 |
+
57249,
|
| 3713 |
+
57252,
|
| 3714 |
+
57307,
|
| 3715 |
+
57320,
|
| 3716 |
+
57332,
|
| 3717 |
+
57350,
|
| 3718 |
+
57351,
|
| 3719 |
+
57352,
|
| 3720 |
+
57362,
|
| 3721 |
+
57374,
|
| 3722 |
+
57384,
|
| 3723 |
+
57395,
|
| 3724 |
+
57425,
|
| 3725 |
+
57439,
|
| 3726 |
+
57475,
|
| 3727 |
+
57495,
|
| 3728 |
+
57524,
|
| 3729 |
+
57535,
|
| 3730 |
+
57545,
|
| 3731 |
+
57551,
|
| 3732 |
+
57564,
|
| 3733 |
+
57565,
|
| 3734 |
+
57570,
|
| 3735 |
+
57574,
|
| 3736 |
+
57576,
|
| 3737 |
+
57590,
|
| 3738 |
+
57599,
|
| 3739 |
+
57619,
|
| 3740 |
+
57709,
|
| 3741 |
+
57723,
|
| 3742 |
+
57737,
|
| 3743 |
+
57743,
|
| 3744 |
+
57758,
|
| 3745 |
+
57777,
|
| 3746 |
+
57786,
|
| 3747 |
+
57788,
|
| 3748 |
+
57798,
|
| 3749 |
+
57802,
|
| 3750 |
+
57807,
|
| 3751 |
+
57836,
|
| 3752 |
+
57862,
|
| 3753 |
+
57865,
|
| 3754 |
+
57867,
|
| 3755 |
+
57879,
|
| 3756 |
+
57887,
|
| 3757 |
+
57891,
|
| 3758 |
+
57911,
|
| 3759 |
+
57944,
|
| 3760 |
+
57956,
|
| 3761 |
+
57968,
|
| 3762 |
+
57979,
|
| 3763 |
+
58007,
|
| 3764 |
+
58018,
|
| 3765 |
+
58053,
|
| 3766 |
+
58062,
|
| 3767 |
+
58071,
|
| 3768 |
+
58081,
|
| 3769 |
+
58087,
|
| 3770 |
+
58092,
|
| 3771 |
+
58098,
|
| 3772 |
+
58128,
|
| 3773 |
+
58136,
|
| 3774 |
+
58157,
|
| 3775 |
+
58169,
|
| 3776 |
+
58172,
|
| 3777 |
+
58174,
|
| 3778 |
+
58177,
|
| 3779 |
+
58201,
|
| 3780 |
+
58208,
|
| 3781 |
+
58219,
|
| 3782 |
+
58230,
|
| 3783 |
+
58265,
|
| 3784 |
+
58290,
|
| 3785 |
+
58291,
|
| 3786 |
+
58299,
|
| 3787 |
+
58336,
|
| 3788 |
+
58337,
|
| 3789 |
+
58338,
|
| 3790 |
+
58359,
|
| 3791 |
+
58375,
|
| 3792 |
+
58384,
|
| 3793 |
+
58389,
|
| 3794 |
+
58406,
|
| 3795 |
+
58407,
|
| 3796 |
+
58410,
|
| 3797 |
+
58418,
|
| 3798 |
+
58423,
|
| 3799 |
+
58433,
|
| 3800 |
+
58445,
|
| 3801 |
+
58464,
|
| 3802 |
+
58501,
|
| 3803 |
+
58522,
|
| 3804 |
+
58557,
|
| 3805 |
+
58559,
|
| 3806 |
+
58590,
|
| 3807 |
+
58602,
|
| 3808 |
+
58606,
|
| 3809 |
+
58627,
|
| 3810 |
+
58629,
|
| 3811 |
+
58645,
|
| 3812 |
+
58658,
|
| 3813 |
+
58715,
|
| 3814 |
+
58724,
|
| 3815 |
+
58789,
|
| 3816 |
+
58831,
|
| 3817 |
+
58869,
|
| 3818 |
+
58872,
|
| 3819 |
+
58899,
|
| 3820 |
+
58902,
|
| 3821 |
+
58934,
|
| 3822 |
+
58935,
|
| 3823 |
+
58942,
|
| 3824 |
+
58949,
|
| 3825 |
+
58957,
|
| 3826 |
+
58958,
|
| 3827 |
+
58994,
|
| 3828 |
+
59000,
|
| 3829 |
+
59024,
|
| 3830 |
+
59026,
|
| 3831 |
+
59036,
|
| 3832 |
+
59043,
|
| 3833 |
+
59049,
|
| 3834 |
+
59053,
|
| 3835 |
+
59093,
|
| 3836 |
+
59101,
|
| 3837 |
+
59125,
|
| 3838 |
+
59139,
|
| 3839 |
+
59154,
|
| 3840 |
+
59204,
|
| 3841 |
+
59208,
|
| 3842 |
+
59209,
|
| 3843 |
+
59210,
|
| 3844 |
+
59216,
|
| 3845 |
+
59217,
|
| 3846 |
+
59312,
|
| 3847 |
+
59336,
|
| 3848 |
+
59360,
|
| 3849 |
+
59403,
|
| 3850 |
+
59404,
|
| 3851 |
+
59423,
|
| 3852 |
+
59454,
|
| 3853 |
+
59461,
|
| 3854 |
+
59475,
|
| 3855 |
+
59478,
|
| 3856 |
+
59479,
|
| 3857 |
+
59480,
|
| 3858 |
+
59482,
|
| 3859 |
+
59483,
|
| 3860 |
+
59484,
|
| 3861 |
+
59509,
|
| 3862 |
+
59544,
|
| 3863 |
+
59564,
|
| 3864 |
+
59581,
|
| 3865 |
+
59591,
|
| 3866 |
+
59603,
|
| 3867 |
+
59610,
|
| 3868 |
+
59611,
|
| 3869 |
+
59630,
|
| 3870 |
+
59649,
|
| 3871 |
+
59712,
|
| 3872 |
+
59757,
|
| 3873 |
+
59792,
|
| 3874 |
+
59841,
|
| 3875 |
+
59845,
|
| 3876 |
+
59865,
|
| 3877 |
+
59867,
|
| 3878 |
+
59876,
|
| 3879 |
+
59877,
|
| 3880 |
+
59911,
|
| 3881 |
+
59928,
|
| 3882 |
+
59979,
|
| 3883 |
+
59999,
|
| 3884 |
+
60016,
|
| 3885 |
+
60038,
|
| 3886 |
+
60083,
|
| 3887 |
+
60131,
|
| 3888 |
+
60156,
|
| 3889 |
+
60170,
|
| 3890 |
+
60171,
|
| 3891 |
+
60198,
|
| 3892 |
+
60207,
|
| 3893 |
+
60235,
|
| 3894 |
+
60240,
|
| 3895 |
+
60266,
|
| 3896 |
+
60288,
|
| 3897 |
+
60338,
|
| 3898 |
+
60344,
|
| 3899 |
+
60460,
|
| 3900 |
+
60488,
|
| 3901 |
+
60525,
|
| 3902 |
+
60543,
|
| 3903 |
+
60559,
|
| 3904 |
+
60581,
|
| 3905 |
+
60586,
|
| 3906 |
+
60596,
|
| 3907 |
+
60604,
|
| 3908 |
+
60627,
|
| 3909 |
+
60628,
|
| 3910 |
+
60671,
|
| 3911 |
+
60672,
|
| 3912 |
+
60674,
|
| 3913 |
+
60681,
|
| 3914 |
+
60704,
|
| 3915 |
+
60715,
|
| 3916 |
+
60740,
|
| 3917 |
+
60757,
|
| 3918 |
+
60765,
|
| 3919 |
+
60803,
|
| 3920 |
+
60808,
|
| 3921 |
+
60856,
|
| 3922 |
+
60864,
|
| 3923 |
+
60872,
|
| 3924 |
+
60873,
|
| 3925 |
+
60880,
|
| 3926 |
+
60895,
|
| 3927 |
+
60907,
|
| 3928 |
+
60971,
|
| 3929 |
+
60985,
|
| 3930 |
+
60994,
|
| 3931 |
+
60998,
|
| 3932 |
+
61002,
|
| 3933 |
+
61004,
|
| 3934 |
+
61021,
|
| 3935 |
+
61022,
|
| 3936 |
+
61032,
|
| 3937 |
+
61064,
|
| 3938 |
+
61081,
|
| 3939 |
+
61083,
|
| 3940 |
+
61118,
|
| 3941 |
+
61163,
|
| 3942 |
+
61166,
|
| 3943 |
+
61173,
|
| 3944 |
+
61200,
|
| 3945 |
+
61210,
|
| 3946 |
+
61211,
|
| 3947 |
+
61241,
|
| 3948 |
+
61250,
|
| 3949 |
+
61261,
|
| 3950 |
+
61265,
|
| 3951 |
+
61277,
|
| 3952 |
+
61281,
|
| 3953 |
+
61290,
|
| 3954 |
+
61301,
|
| 3955 |
+
61318,
|
| 3956 |
+
61320,
|
| 3957 |
+
61338,
|
| 3958 |
+
61340,
|
| 3959 |
+
61355,
|
| 3960 |
+
61381,
|
| 3961 |
+
61413,
|
| 3962 |
+
61414,
|
| 3963 |
+
61439,
|
| 3964 |
+
61513,
|
| 3965 |
+
61519,
|
| 3966 |
+
61525,
|
| 3967 |
+
61528,
|
| 3968 |
+
61556,
|
| 3969 |
+
61557,
|
| 3970 |
+
61563,
|
| 3971 |
+
61580,
|
| 3972 |
+
61581,
|
| 3973 |
+
61586,
|
| 3974 |
+
61617,
|
| 3975 |
+
61629,
|
| 3976 |
+
61633,
|
| 3977 |
+
61657,
|
| 3978 |
+
61680,
|
| 3979 |
+
61710,
|
| 3980 |
+
61765,
|
| 3981 |
+
61772,
|
| 3982 |
+
61780,
|
| 3983 |
+
61804,
|
| 3984 |
+
61827,
|
| 3985 |
+
61843,
|
| 3986 |
+
61861,
|
| 3987 |
+
61870,
|
| 3988 |
+
61910,
|
| 3989 |
+
61968,
|
| 3990 |
+
61969,
|
| 3991 |
+
61983,
|
| 3992 |
+
61993,
|
| 3993 |
+
61996,
|
| 3994 |
+
61997,
|
| 3995 |
+
62005,
|
| 3996 |
+
62021,
|
| 3997 |
+
62033,
|
| 3998 |
+
62036,
|
| 3999 |
+
62067,
|
| 4000 |
+
62099,
|
| 4001 |
+
62100,
|
| 4002 |
+
62108,
|
| 4003 |
+
62117,
|
| 4004 |
+
62123,
|
| 4005 |
+
62143,
|
| 4006 |
+
62191,
|
| 4007 |
+
62198,
|
| 4008 |
+
62217,
|
| 4009 |
+
62218,
|
| 4010 |
+
62256,
|
| 4011 |
+
62274,
|
| 4012 |
+
62296,
|
| 4013 |
+
62302,
|
| 4014 |
+
62307,
|
| 4015 |
+
62318,
|
| 4016 |
+
62319,
|
| 4017 |
+
62338,
|
| 4018 |
+
62344,
|
| 4019 |
+
62349,
|
| 4020 |
+
62382,
|
| 4021 |
+
62403,
|
| 4022 |
+
62416,
|
| 4023 |
+
62422,
|
| 4024 |
+
62447,
|
| 4025 |
+
62479,
|
| 4026 |
+
62480,
|
| 4027 |
+
62497,
|
| 4028 |
+
62512,
|
| 4029 |
+
62537,
|
| 4030 |
+
62540,
|
| 4031 |
+
62544,
|
| 4032 |
+
62555,
|
| 4033 |
+
62564,
|
| 4034 |
+
62591,
|
| 4035 |
+
62601,
|
| 4036 |
+
62610,
|
| 4037 |
+
62618,
|
| 4038 |
+
62631,
|
| 4039 |
+
62674,
|
| 4040 |
+
62697,
|
| 4041 |
+
62705,
|
| 4042 |
+
62720,
|
| 4043 |
+
62732,
|
| 4044 |
+
62740,
|
| 4045 |
+
62770,
|
| 4046 |
+
62789,
|
| 4047 |
+
62799,
|
| 4048 |
+
62817,
|
| 4049 |
+
62823,
|
| 4050 |
+
62863,
|
| 4051 |
+
62877,
|
| 4052 |
+
62879,
|
| 4053 |
+
62887,
|
| 4054 |
+
62896,
|
| 4055 |
+
62918,
|
| 4056 |
+
62965,
|
| 4057 |
+
62982,
|
| 4058 |
+
62986,
|
| 4059 |
+
62991,
|
| 4060 |
+
62994,
|
| 4061 |
+
63039,
|
| 4062 |
+
63040,
|
| 4063 |
+
63046,
|
| 4064 |
+
63106,
|
| 4065 |
+
63119,
|
| 4066 |
+
63156,
|
| 4067 |
+
63159,
|
| 4068 |
+
63173,
|
| 4069 |
+
63191,
|
| 4070 |
+
63194,
|
| 4071 |
+
63211,
|
| 4072 |
+
63219,
|
| 4073 |
+
63230,
|
| 4074 |
+
63242,
|
| 4075 |
+
63276,
|
| 4076 |
+
63295,
|
| 4077 |
+
63296,
|
| 4078 |
+
63324,
|
| 4079 |
+
63348,
|
| 4080 |
+
63351,
|
| 4081 |
+
63371,
|
| 4082 |
+
63372,
|
| 4083 |
+
63393,
|
| 4084 |
+
63400,
|
| 4085 |
+
63407,
|
| 4086 |
+
63453,
|
| 4087 |
+
63458,
|
| 4088 |
+
63477,
|
| 4089 |
+
63481,
|
| 4090 |
+
63485,
|
| 4091 |
+
63538,
|
| 4092 |
+
63545,
|
| 4093 |
+
63553,
|
| 4094 |
+
63554,
|
| 4095 |
+
63624,
|
| 4096 |
+
63632,
|
| 4097 |
+
63636,
|
| 4098 |
+
63660,
|
| 4099 |
+
63698,
|
| 4100 |
+
63720,
|
| 4101 |
+
63740,
|
| 4102 |
+
63741,
|
| 4103 |
+
63749,
|
| 4104 |
+
63761,
|
| 4105 |
+
63779,
|
| 4106 |
+
63802,
|
| 4107 |
+
63853,
|
| 4108 |
+
63861,
|
| 4109 |
+
63887,
|
| 4110 |
+
63926,
|
| 4111 |
+
63941,
|
| 4112 |
+
63963,
|
| 4113 |
+
63966,
|
| 4114 |
+
63985,
|
| 4115 |
+
63993,
|
| 4116 |
+
64044,
|
| 4117 |
+
64054,
|
| 4118 |
+
64058,
|
| 4119 |
+
64059,
|
| 4120 |
+
64071,
|
| 4121 |
+
64077,
|
| 4122 |
+
64092,
|
| 4123 |
+
64113,
|
| 4124 |
+
64114,
|
| 4125 |
+
64137,
|
| 4126 |
+
64139,
|
| 4127 |
+
64166,
|
| 4128 |
+
64175,
|
| 4129 |
+
64190,
|
| 4130 |
+
64212,
|
| 4131 |
+
64217,
|
| 4132 |
+
64247,
|
| 4133 |
+
64253,
|
| 4134 |
+
64266,
|
| 4135 |
+
64277,
|
| 4136 |
+
64322,
|
| 4137 |
+
64329,
|
| 4138 |
+
64359,
|
| 4139 |
+
64360,
|
| 4140 |
+
64388,
|
| 4141 |
+
64395,
|
| 4142 |
+
64399,
|
| 4143 |
+
64417,
|
| 4144 |
+
64419,
|
| 4145 |
+
64433,
|
| 4146 |
+
64434,
|
| 4147 |
+
64457,
|
| 4148 |
+
64497,
|
| 4149 |
+
64501,
|
| 4150 |
+
64518,
|
| 4151 |
+
64520,
|
| 4152 |
+
64540,
|
| 4153 |
+
64545,
|
| 4154 |
+
64568,
|
| 4155 |
+
64589,
|
| 4156 |
+
64600,
|
| 4157 |
+
64607,
|
| 4158 |
+
64627,
|
| 4159 |
+
64631,
|
| 4160 |
+
64632,
|
| 4161 |
+
64660,
|
| 4162 |
+
64772,
|
| 4163 |
+
64783,
|
| 4164 |
+
64805,
|
| 4165 |
+
64822,
|
| 4166 |
+
64827,
|
| 4167 |
+
64881,
|
| 4168 |
+
64884,
|
| 4169 |
+
64888,
|
| 4170 |
+
64893,
|
| 4171 |
+
64897,
|
| 4172 |
+
64976,
|
| 4173 |
+
65011,
|
| 4174 |
+
65016,
|
| 4175 |
+
65018,
|
| 4176 |
+
65019,
|
| 4177 |
+
65033,
|
| 4178 |
+
65037,
|
| 4179 |
+
65038,
|
| 4180 |
+
65052,
|
| 4181 |
+
65069,
|
| 4182 |
+
65079,
|
| 4183 |
+
65097,
|
| 4184 |
+
65131,
|
| 4185 |
+
65138,
|
| 4186 |
+
65159,
|
| 4187 |
+
65173,
|
| 4188 |
+
65185,
|
| 4189 |
+
65197,
|
| 4190 |
+
65225,
|
| 4191 |
+
65259,
|
| 4192 |
+
65260,
|
| 4193 |
+
65261,
|
| 4194 |
+
65264,
|
| 4195 |
+
65267,
|
| 4196 |
+
65287,
|
| 4197 |
+
65291,
|
| 4198 |
+
65297,
|
| 4199 |
+
65300,
|
| 4200 |
+
65305,
|
| 4201 |
+
65317,
|
| 4202 |
+
65360,
|
| 4203 |
+
65383,
|
| 4204 |
+
65387,
|
| 4205 |
+
65405,
|
| 4206 |
+
65416,
|
| 4207 |
+
65434,
|
| 4208 |
+
65440,
|
| 4209 |
+
65451,
|
| 4210 |
+
65456,
|
| 4211 |
+
65473,
|
| 4212 |
+
65490,
|
| 4213 |
+
65506,
|
| 4214 |
+
65519,
|
| 4215 |
+
65543,
|
| 4216 |
+
65553,
|
| 4217 |
+
65570,
|
| 4218 |
+
65579,
|
| 4219 |
+
65589,
|
| 4220 |
+
65611,
|
| 4221 |
+
65612,
|
| 4222 |
+
65622,
|
| 4223 |
+
65664,
|
| 4224 |
+
65667,
|
| 4225 |
+
65668,
|
| 4226 |
+
65672,
|
| 4227 |
+
65678,
|
| 4228 |
+
65683,
|
| 4229 |
+
65699,
|
| 4230 |
+
65722,
|
| 4231 |
+
65727,
|
| 4232 |
+
65737,
|
| 4233 |
+
65745,
|
| 4234 |
+
65759,
|
| 4235 |
+
65763,
|
| 4236 |
+
65766,
|
| 4237 |
+
65777,
|
| 4238 |
+
65787,
|
| 4239 |
+
65792,
|
| 4240 |
+
65810,
|
| 4241 |
+
65815,
|
| 4242 |
+
65830,
|
| 4243 |
+
65831,
|
| 4244 |
+
65846,
|
| 4245 |
+
65850,
|
| 4246 |
+
65861,
|
| 4247 |
+
65870,
|
| 4248 |
+
65871,
|
| 4249 |
+
65877,
|
| 4250 |
+
65887,
|
| 4251 |
+
65901,
|
| 4252 |
+
65912,
|
| 4253 |
+
65974,
|
| 4254 |
+
65980,
|
| 4255 |
+
66014,
|
| 4256 |
+
66092,
|
| 4257 |
+
66112,
|
| 4258 |
+
66118,
|
| 4259 |
+
66120,
|
| 4260 |
+
66153,
|
| 4261 |
+
66163,
|
| 4262 |
+
66203,
|
| 4263 |
+
66233,
|
| 4264 |
+
66261,
|
| 4265 |
+
66285,
|
| 4266 |
+
66297,
|
| 4267 |
+
66323,
|
| 4268 |
+
66371,
|
| 4269 |
+
66376,
|
| 4270 |
+
66377,
|
| 4271 |
+
66401,
|
| 4272 |
+
66405,
|
| 4273 |
+
66426,
|
| 4274 |
+
66436,
|
| 4275 |
+
66444,
|
| 4276 |
+
66474,
|
| 4277 |
+
66475,
|
| 4278 |
+
66498,
|
| 4279 |
+
66506,
|
| 4280 |
+
66519,
|
| 4281 |
+
66521,
|
| 4282 |
+
66535,
|
| 4283 |
+
66595,
|
| 4284 |
+
66600,
|
| 4285 |
+
66606,
|
| 4286 |
+
66627,
|
| 4287 |
+
66629,
|
| 4288 |
+
66635,
|
| 4289 |
+
66647,
|
| 4290 |
+
66656,
|
| 4291 |
+
66686,
|
| 4292 |
+
66689,
|
| 4293 |
+
66731,
|
| 4294 |
+
66778,
|
| 4295 |
+
66781,
|
| 4296 |
+
66783,
|
| 4297 |
+
66786,
|
| 4298 |
+
66816,
|
| 4299 |
+
66824,
|
| 4300 |
+
66834,
|
| 4301 |
+
66844,
|
| 4302 |
+
66853,
|
| 4303 |
+
66870,
|
| 4304 |
+
66882,
|
| 4305 |
+
66905,
|
| 4306 |
+
66910,
|
| 4307 |
+
66929,
|
| 4308 |
+
66941,
|
| 4309 |
+
66960,
|
| 4310 |
+
66995,
|
| 4311 |
+
67018,
|
| 4312 |
+
67022,
|
| 4313 |
+
67036,
|
| 4314 |
+
67054,
|
| 4315 |
+
67066,
|
| 4316 |
+
67073,
|
| 4317 |
+
67083,
|
| 4318 |
+
67097,
|
| 4319 |
+
67108,
|
| 4320 |
+
67150,
|
| 4321 |
+
67172,
|
| 4322 |
+
67183,
|
| 4323 |
+
67219,
|
| 4324 |
+
67225,
|
| 4325 |
+
67240,
|
| 4326 |
+
67293,
|
| 4327 |
+
67314,
|
| 4328 |
+
67334,
|
| 4329 |
+
67372,
|
| 4330 |
+
67392,
|
| 4331 |
+
67393,
|
| 4332 |
+
67411,
|
| 4333 |
+
67432,
|
| 4334 |
+
67436,
|
| 4335 |
+
67451,
|
| 4336 |
+
67455,
|
| 4337 |
+
67471,
|
| 4338 |
+
67476,
|
| 4339 |
+
67506,
|
| 4340 |
+
67564,
|
| 4341 |
+
67591,
|
| 4342 |
+
67615,
|
| 4343 |
+
67618,
|
| 4344 |
+
67625,
|
| 4345 |
+
67638,
|
| 4346 |
+
67650,
|
| 4347 |
+
67664,
|
| 4348 |
+
67680,
|
| 4349 |
+
67701,
|
| 4350 |
+
67708,
|
| 4351 |
+
67709,
|
| 4352 |
+
67750,
|
| 4353 |
+
67758,
|
| 4354 |
+
67773,
|
| 4355 |
+
67774,
|
| 4356 |
+
67796,
|
| 4357 |
+
67807,
|
| 4358 |
+
67811,
|
| 4359 |
+
67838,
|
| 4360 |
+
67848,
|
| 4361 |
+
67864,
|
| 4362 |
+
67886,
|
| 4363 |
+
67895,
|
| 4364 |
+
67914,
|
| 4365 |
+
67922,
|
| 4366 |
+
67924,
|
| 4367 |
+
67938,
|
| 4368 |
+
67940,
|
| 4369 |
+
67977,
|
| 4370 |
+
68006,
|
| 4371 |
+
68013,
|
| 4372 |
+
68046,
|
| 4373 |
+
68048,
|
| 4374 |
+
68081,
|
| 4375 |
+
68101,
|
| 4376 |
+
68132,
|
| 4377 |
+
68133,
|
| 4378 |
+
68165,
|
| 4379 |
+
68166,
|
| 4380 |
+
68169,
|
| 4381 |
+
68172,
|
| 4382 |
+
68174,
|
| 4383 |
+
68187,
|
| 4384 |
+
68191,
|
| 4385 |
+
68221,
|
| 4386 |
+
68241,
|
| 4387 |
+
68250,
|
| 4388 |
+
68258,
|
| 4389 |
+
68271,
|
| 4390 |
+
68294,
|
| 4391 |
+
68298,
|
| 4392 |
+
68300,
|
| 4393 |
+
68303,
|
| 4394 |
+
68327,
|
| 4395 |
+
68329,
|
| 4396 |
+
68346,
|
| 4397 |
+
68379,
|
| 4398 |
+
68405,
|
| 4399 |
+
68417,
|
| 4400 |
+
68420,
|
| 4401 |
+
68433,
|
| 4402 |
+
68434,
|
| 4403 |
+
68454,
|
| 4404 |
+
68464,
|
| 4405 |
+
68485,
|
| 4406 |
+
68492,
|
| 4407 |
+
68502,
|
| 4408 |
+
68547,
|
| 4409 |
+
68562,
|
| 4410 |
+
68590,
|
| 4411 |
+
68597,
|
| 4412 |
+
68601,
|
| 4413 |
+
68603,
|
| 4414 |
+
68612,
|
| 4415 |
+
68615,
|
| 4416 |
+
68638,
|
| 4417 |
+
68650,
|
| 4418 |
+
68653,
|
| 4419 |
+
68739,
|
| 4420 |
+
68746,
|
| 4421 |
+
68751,
|
| 4422 |
+
68752,
|
| 4423 |
+
68760,
|
| 4424 |
+
68762,
|
| 4425 |
+
68776,
|
| 4426 |
+
68780,
|
| 4427 |
+
68786,
|
| 4428 |
+
68804,
|
| 4429 |
+
68806,
|
| 4430 |
+
68827,
|
| 4431 |
+
68843,
|
| 4432 |
+
68888,
|
| 4433 |
+
68890,
|
| 4434 |
+
68901,
|
| 4435 |
+
68907,
|
| 4436 |
+
68908,
|
| 4437 |
+
68915,
|
| 4438 |
+
68916,
|
| 4439 |
+
68920,
|
| 4440 |
+
68941,
|
| 4441 |
+
69016,
|
| 4442 |
+
69033,
|
| 4443 |
+
69043,
|
| 4444 |
+
69068,
|
| 4445 |
+
69099,
|
| 4446 |
+
69127,
|
| 4447 |
+
69161,
|
| 4448 |
+
69163,
|
| 4449 |
+
69165,
|
| 4450 |
+
69169,
|
| 4451 |
+
69178,
|
| 4452 |
+
69192,
|
| 4453 |
+
69196,
|
| 4454 |
+
69210,
|
| 4455 |
+
69222,
|
| 4456 |
+
69227,
|
| 4457 |
+
69237,
|
| 4458 |
+
69248,
|
| 4459 |
+
69266,
|
| 4460 |
+
69272,
|
| 4461 |
+
69276,
|
| 4462 |
+
69366,
|
| 4463 |
+
69382,
|
| 4464 |
+
69383,
|
| 4465 |
+
69441,
|
| 4466 |
+
69457,
|
| 4467 |
+
69493,
|
| 4468 |
+
69494,
|
| 4469 |
+
69515,
|
| 4470 |
+
69540,
|
| 4471 |
+
69550,
|
| 4472 |
+
69567,
|
| 4473 |
+
69570,
|
| 4474 |
+
69597,
|
| 4475 |
+
69646,
|
| 4476 |
+
69687,
|
| 4477 |
+
69757,
|
| 4478 |
+
69761,
|
| 4479 |
+
69768,
|
| 4480 |
+
69782,
|
| 4481 |
+
69792,
|
| 4482 |
+
69826,
|
| 4483 |
+
69877,
|
| 4484 |
+
69903,
|
| 4485 |
+
69909,
|
| 4486 |
+
69911,
|
| 4487 |
+
69937,
|
| 4488 |
+
69938,
|
| 4489 |
+
69943,
|
| 4490 |
+
69950,
|
| 4491 |
+
69963,
|
| 4492 |
+
70031,
|
| 4493 |
+
70044,
|
| 4494 |
+
70053,
|
| 4495 |
+
70073,
|
| 4496 |
+
70079,
|
| 4497 |
+
70113,
|
| 4498 |
+
70119,
|
| 4499 |
+
70130,
|
| 4500 |
+
70143,
|
| 4501 |
+
70170,
|
| 4502 |
+
70177,
|
| 4503 |
+
70179,
|
| 4504 |
+
70180,
|
| 4505 |
+
70191,
|
| 4506 |
+
70193,
|
| 4507 |
+
70234,
|
| 4508 |
+
70235,
|
| 4509 |
+
70247,
|
| 4510 |
+
70249,
|
| 4511 |
+
70269,
|
| 4512 |
+
70285,
|
| 4513 |
+
70297,
|
| 4514 |
+
70305,
|
| 4515 |
+
70329,
|
| 4516 |
+
70339,
|
| 4517 |
+
70340,
|
| 4518 |
+
70342,
|
| 4519 |
+
70377,
|
| 4520 |
+
70457,
|
| 4521 |
+
70463,
|
| 4522 |
+
70467,
|
| 4523 |
+
70491,
|
| 4524 |
+
70542,
|
| 4525 |
+
70549,
|
| 4526 |
+
70557,
|
| 4527 |
+
70571,
|
| 4528 |
+
70576,
|
| 4529 |
+
70585,
|
| 4530 |
+
70597,
|
| 4531 |
+
70606,
|
| 4532 |
+
70634,
|
| 4533 |
+
70639,
|
| 4534 |
+
70641,
|
| 4535 |
+
70652,
|
| 4536 |
+
70674,
|
| 4537 |
+
70676,
|
| 4538 |
+
70685,
|
| 4539 |
+
70701,
|
| 4540 |
+
70731,
|
| 4541 |
+
70755,
|
| 4542 |
+
70759,
|
| 4543 |
+
70787,
|
| 4544 |
+
70818,
|
| 4545 |
+
70828,
|
| 4546 |
+
70844,
|
| 4547 |
+
70846,
|
| 4548 |
+
70851,
|
| 4549 |
+
70873,
|
| 4550 |
+
70876,
|
| 4551 |
+
70878,
|
| 4552 |
+
70881,
|
| 4553 |
+
70918,
|
| 4554 |
+
70931,
|
| 4555 |
+
70955,
|
| 4556 |
+
70956,
|
| 4557 |
+
70963,
|
| 4558 |
+
70975,
|
| 4559 |
+
70988,
|
| 4560 |
+
71013,
|
| 4561 |
+
71035,
|
| 4562 |
+
71064,
|
| 4563 |
+
71077,
|
| 4564 |
+
71100,
|
| 4565 |
+
71113,
|
| 4566 |
+
71119,
|
| 4567 |
+
71146,
|
| 4568 |
+
71153,
|
| 4569 |
+
71163,
|
| 4570 |
+
71167,
|
| 4571 |
+
71189,
|
| 4572 |
+
71192,
|
| 4573 |
+
71210,
|
| 4574 |
+
71220,
|
| 4575 |
+
71230,
|
| 4576 |
+
71239,
|
| 4577 |
+
71248,
|
| 4578 |
+
71265,
|
| 4579 |
+
71306,
|
| 4580 |
+
71362,
|
| 4581 |
+
71373,
|
| 4582 |
+
71379,
|
| 4583 |
+
71389,
|
| 4584 |
+
71412,
|
| 4585 |
+
71430,
|
| 4586 |
+
71443,
|
| 4587 |
+
71469,
|
| 4588 |
+
71471,
|
| 4589 |
+
71472,
|
| 4590 |
+
71473,
|
| 4591 |
+
71476,
|
| 4592 |
+
71481,
|
| 4593 |
+
71496,
|
| 4594 |
+
71515,
|
| 4595 |
+
71522,
|
| 4596 |
+
71524,
|
| 4597 |
+
71537,
|
| 4598 |
+
71544,
|
| 4599 |
+
71566,
|
| 4600 |
+
71568,
|
| 4601 |
+
71588,
|
| 4602 |
+
71610,
|
| 4603 |
+
71612,
|
| 4604 |
+
71630,
|
| 4605 |
+
71634,
|
| 4606 |
+
71646,
|
| 4607 |
+
71649,
|
| 4608 |
+
71650,
|
| 4609 |
+
71664,
|
| 4610 |
+
71698,
|
| 4611 |
+
71714,
|
| 4612 |
+
71721,
|
| 4613 |
+
71779,
|
| 4614 |
+
71812,
|
| 4615 |
+
71820,
|
| 4616 |
+
71847,
|
| 4617 |
+
71868,
|
| 4618 |
+
71880,
|
| 4619 |
+
71928,
|
| 4620 |
+
71930,
|
| 4621 |
+
71933,
|
| 4622 |
+
71943,
|
| 4623 |
+
71956,
|
| 4624 |
+
71970,
|
| 4625 |
+
72002,
|
| 4626 |
+
72009,
|
| 4627 |
+
72011,
|
| 4628 |
+
72016,
|
| 4629 |
+
72021,
|
| 4630 |
+
72025,
|
| 4631 |
+
72036,
|
| 4632 |
+
72080,
|
| 4633 |
+
72086,
|
| 4634 |
+
72103,
|
| 4635 |
+
72208,
|
| 4636 |
+
72219,
|
| 4637 |
+
72229,
|
| 4638 |
+
72253,
|
| 4639 |
+
72280,
|
| 4640 |
+
72333,
|
| 4641 |
+
72341,
|
| 4642 |
+
72344,
|
| 4643 |
+
72353,
|
| 4644 |
+
72383,
|
| 4645 |
+
72389,
|
| 4646 |
+
72430,
|
| 4647 |
+
72446,
|
| 4648 |
+
72458,
|
| 4649 |
+
72473,
|
| 4650 |
+
72476,
|
| 4651 |
+
72496,
|
| 4652 |
+
72497,
|
| 4653 |
+
72504,
|
| 4654 |
+
72507,
|
| 4655 |
+
72509,
|
| 4656 |
+
72534,
|
| 4657 |
+
72573,
|
| 4658 |
+
72589,
|
| 4659 |
+
72615,
|
| 4660 |
+
72636,
|
| 4661 |
+
72646,
|
| 4662 |
+
72648,
|
| 4663 |
+
72712,
|
| 4664 |
+
72727,
|
| 4665 |
+
72745,
|
| 4666 |
+
72764,
|
| 4667 |
+
72782,
|
| 4668 |
+
72785,
|
| 4669 |
+
72791,
|
| 4670 |
+
72793,
|
| 4671 |
+
72795,
|
| 4672 |
+
72796,
|
| 4673 |
+
72821,
|
| 4674 |
+
72825,
|
| 4675 |
+
72839,
|
| 4676 |
+
72855,
|
| 4677 |
+
72877,
|
| 4678 |
+
72882,
|
| 4679 |
+
72903,
|
| 4680 |
+
72905,
|
| 4681 |
+
72916,
|
| 4682 |
+
72929,
|
| 4683 |
+
72931,
|
| 4684 |
+
72946,
|
| 4685 |
+
73002,
|
| 4686 |
+
73024,
|
| 4687 |
+
73031,
|
| 4688 |
+
73066,
|
| 4689 |
+
73104,
|
| 4690 |
+
73131,
|
| 4691 |
+
73133,
|
| 4692 |
+
73140,
|
| 4693 |
+
73147,
|
| 4694 |
+
73162,
|
| 4695 |
+
73173,
|
| 4696 |
+
73196,
|
| 4697 |
+
73209,
|
| 4698 |
+
73242,
|
| 4699 |
+
73259,
|
| 4700 |
+
73269,
|
| 4701 |
+
73303,
|
| 4702 |
+
73330,
|
| 4703 |
+
73357,
|
| 4704 |
+
73363,
|
| 4705 |
+
73375,
|
| 4706 |
+
73392,
|
| 4707 |
+
73407,
|
| 4708 |
+
73416,
|
| 4709 |
+
73427,
|
| 4710 |
+
73443,
|
| 4711 |
+
73462,
|
| 4712 |
+
73472,
|
| 4713 |
+
73480,
|
| 4714 |
+
73510,
|
| 4715 |
+
73524,
|
| 4716 |
+
73530,
|
| 4717 |
+
73534,
|
| 4718 |
+
73554,
|
| 4719 |
+
73582,
|
| 4720 |
+
73592,
|
| 4721 |
+
73593,
|
| 4722 |
+
73611,
|
| 4723 |
+
73650,
|
| 4724 |
+
73663,
|
| 4725 |
+
73691,
|
| 4726 |
+
73699,
|
| 4727 |
+
73727,
|
| 4728 |
+
73744,
|
| 4729 |
+
73745,
|
| 4730 |
+
73748,
|
| 4731 |
+
73751,
|
| 4732 |
+
73798,
|
| 4733 |
+
73822,
|
| 4734 |
+
73834,
|
| 4735 |
+
73845,
|
| 4736 |
+
73863,
|
| 4737 |
+
73867,
|
| 4738 |
+
73900,
|
| 4739 |
+
73903,
|
| 4740 |
+
73925,
|
| 4741 |
+
73927,
|
| 4742 |
+
73952,
|
| 4743 |
+
73953,
|
| 4744 |
+
73963,
|
| 4745 |
+
73964,
|
| 4746 |
+
73971,
|
| 4747 |
+
73983,
|
| 4748 |
+
74029,
|
| 4749 |
+
74034,
|
| 4750 |
+
74071,
|
| 4751 |
+
74083,
|
| 4752 |
+
74092,
|
| 4753 |
+
74123,
|
| 4754 |
+
74128,
|
| 4755 |
+
74138,
|
| 4756 |
+
74165,
|
| 4757 |
+
74186,
|
| 4758 |
+
74196,
|
| 4759 |
+
74199,
|
| 4760 |
+
74203,
|
| 4761 |
+
74209,
|
| 4762 |
+
74228,
|
| 4763 |
+
74276,
|
| 4764 |
+
74282,
|
| 4765 |
+
74285,
|
| 4766 |
+
74312,
|
| 4767 |
+
74324,
|
| 4768 |
+
74326,
|
| 4769 |
+
74342,
|
| 4770 |
+
74361,
|
| 4771 |
+
74376,
|
| 4772 |
+
74384,
|
| 4773 |
+
74385,
|
| 4774 |
+
74446,
|
| 4775 |
+
74491,
|
| 4776 |
+
74494,
|
| 4777 |
+
74499,
|
| 4778 |
+
74525,
|
| 4779 |
+
74526,
|
| 4780 |
+
74545,
|
| 4781 |
+
74577,
|
| 4782 |
+
74588,
|
| 4783 |
+
74637,
|
| 4784 |
+
74678,
|
| 4785 |
+
74687,
|
| 4786 |
+
74699,
|
| 4787 |
+
74706,
|
| 4788 |
+
74727,
|
| 4789 |
+
74734,
|
| 4790 |
+
74740,
|
| 4791 |
+
74782,
|
| 4792 |
+
74792,
|
| 4793 |
+
74794,
|
| 4794 |
+
74824,
|
| 4795 |
+
74834,
|
| 4796 |
+
74843,
|
| 4797 |
+
74866,
|
| 4798 |
+
74869,
|
| 4799 |
+
74884,
|
| 4800 |
+
74897,
|
| 4801 |
+
75014,
|
| 4802 |
+
75026,
|
| 4803 |
+
75033,
|
| 4804 |
+
75042,
|
| 4805 |
+
75048,
|
| 4806 |
+
75053,
|
| 4807 |
+
75089,
|
| 4808 |
+
75097,
|
| 4809 |
+
75107,
|
| 4810 |
+
75132,
|
| 4811 |
+
75142,
|
| 4812 |
+
75145,
|
| 4813 |
+
75156,
|
| 4814 |
+
75191,
|
| 4815 |
+
75192,
|
| 4816 |
+
75218,
|
| 4817 |
+
75228,
|
| 4818 |
+
75241,
|
| 4819 |
+
75244,
|
| 4820 |
+
75246,
|
| 4821 |
+
75258,
|
| 4822 |
+
75276,
|
| 4823 |
+
75279,
|
| 4824 |
+
75282,
|
| 4825 |
+
75295,
|
| 4826 |
+
75346,
|
| 4827 |
+
75352,
|
| 4828 |
+
75360,
|
| 4829 |
+
75375,
|
| 4830 |
+
75387,
|
| 4831 |
+
75396,
|
| 4832 |
+
75453,
|
| 4833 |
+
75456,
|
| 4834 |
+
75467,
|
| 4835 |
+
75487,
|
| 4836 |
+
75499,
|
| 4837 |
+
75531,
|
| 4838 |
+
75558,
|
| 4839 |
+
75583,
|
| 4840 |
+
75598,
|
| 4841 |
+
75600,
|
| 4842 |
+
75616,
|
| 4843 |
+
75624,
|
| 4844 |
+
75640,
|
| 4845 |
+
75665,
|
| 4846 |
+
75671,
|
| 4847 |
+
75679,
|
| 4848 |
+
75693,
|
| 4849 |
+
75694,
|
| 4850 |
+
75699,
|
| 4851 |
+
75707,
|
| 4852 |
+
75719,
|
| 4853 |
+
75723,
|
| 4854 |
+
75743,
|
| 4855 |
+
75781,
|
| 4856 |
+
75800,
|
| 4857 |
+
75846,
|
| 4858 |
+
75862,
|
| 4859 |
+
75869,
|
| 4860 |
+
75883,
|
| 4861 |
+
75884,
|
| 4862 |
+
75890,
|
| 4863 |
+
75908,
|
| 4864 |
+
75910,
|
| 4865 |
+
75912,
|
| 4866 |
+
75947,
|
| 4867 |
+
75960,
|
| 4868 |
+
75962,
|
| 4869 |
+
75976,
|
| 4870 |
+
76014,
|
| 4871 |
+
76058,
|
| 4872 |
+
76080,
|
| 4873 |
+
76101,
|
| 4874 |
+
76122,
|
| 4875 |
+
76125,
|
| 4876 |
+
76172,
|
| 4877 |
+
76199,
|
| 4878 |
+
76206,
|
| 4879 |
+
76216,
|
| 4880 |
+
76222,
|
| 4881 |
+
76263,
|
| 4882 |
+
76270,
|
| 4883 |
+
76299,
|
| 4884 |
+
76300,
|
| 4885 |
+
76325,
|
| 4886 |
+
76327,
|
| 4887 |
+
76350,
|
| 4888 |
+
76352,
|
| 4889 |
+
76379,
|
| 4890 |
+
76423,
|
| 4891 |
+
76432,
|
| 4892 |
+
76440,
|
| 4893 |
+
76459,
|
| 4894 |
+
76496,
|
| 4895 |
+
76497,
|
| 4896 |
+
76540,
|
| 4897 |
+
76564,
|
| 4898 |
+
76590,
|
| 4899 |
+
76591,
|
| 4900 |
+
76606,
|
| 4901 |
+
76627,
|
| 4902 |
+
76643,
|
| 4903 |
+
76659,
|
| 4904 |
+
76669,
|
| 4905 |
+
76670,
|
| 4906 |
+
76675,
|
| 4907 |
+
76689,
|
| 4908 |
+
76690,
|
| 4909 |
+
76694,
|
| 4910 |
+
76723,
|
| 4911 |
+
76737,
|
| 4912 |
+
76741,
|
| 4913 |
+
76768,
|
| 4914 |
+
76777,
|
| 4915 |
+
76788,
|
| 4916 |
+
76840,
|
| 4917 |
+
76847,
|
| 4918 |
+
76850,
|
| 4919 |
+
76852,
|
| 4920 |
+
76860,
|
| 4921 |
+
76878,
|
| 4922 |
+
76888,
|
| 4923 |
+
76895,
|
| 4924 |
+
76910,
|
| 4925 |
+
76918,
|
| 4926 |
+
76919,
|
| 4927 |
+
76941,
|
| 4928 |
+
76952,
|
| 4929 |
+
76977,
|
| 4930 |
+
76986,
|
| 4931 |
+
77007,
|
| 4932 |
+
77009,
|
| 4933 |
+
77047,
|
| 4934 |
+
77051,
|
| 4935 |
+
77057,
|
| 4936 |
+
77127,
|
| 4937 |
+
77130,
|
| 4938 |
+
77136,
|
| 4939 |
+
77137,
|
| 4940 |
+
77156,
|
| 4941 |
+
77168,
|
| 4942 |
+
77204,
|
| 4943 |
+
77219,
|
| 4944 |
+
77305,
|
| 4945 |
+
77311,
|
| 4946 |
+
77314,
|
| 4947 |
+
77344,
|
| 4948 |
+
77358,
|
| 4949 |
+
77376,
|
| 4950 |
+
77383,
|
| 4951 |
+
77390,
|
| 4952 |
+
77407,
|
| 4953 |
+
77410,
|
| 4954 |
+
77417,
|
| 4955 |
+
77423,
|
| 4956 |
+
77434,
|
| 4957 |
+
77479,
|
| 4958 |
+
77496,
|
| 4959 |
+
77503,
|
| 4960 |
+
77515,
|
| 4961 |
+
77535,
|
| 4962 |
+
77544,
|
| 4963 |
+
77561,
|
| 4964 |
+
77565,
|
| 4965 |
+
77596,
|
| 4966 |
+
77620,
|
| 4967 |
+
77623,
|
| 4968 |
+
77684,
|
| 4969 |
+
77706,
|
| 4970 |
+
77721,
|
| 4971 |
+
77754,
|
| 4972 |
+
77767,
|
| 4973 |
+
77787,
|
| 4974 |
+
77804,
|
| 4975 |
+
77828,
|
| 4976 |
+
77833,
|
| 4977 |
+
77862,
|
| 4978 |
+
77863,
|
| 4979 |
+
77872,
|
| 4980 |
+
77908,
|
| 4981 |
+
77927,
|
| 4982 |
+
77933,
|
| 4983 |
+
77943,
|
| 4984 |
+
77955,
|
| 4985 |
+
77962,
|
| 4986 |
+
77978,
|
| 4987 |
+
77980,
|
| 4988 |
+
77993,
|
| 4989 |
+
78003,
|
| 4990 |
+
78009,
|
| 4991 |
+
78016,
|
| 4992 |
+
78039,
|
| 4993 |
+
78042,
|
| 4994 |
+
78082,
|
| 4995 |
+
78105,
|
| 4996 |
+
78108,
|
| 4997 |
+
78110,
|
| 4998 |
+
78126,
|
| 4999 |
+
78135,
|
| 5000 |
+
78137,
|
| 5001 |
+
78182,
|
| 5002 |
+
78239,
|
| 5003 |
+
78241,
|
| 5004 |
+
78254,
|
| 5005 |
+
78264,
|
| 5006 |
+
78286,
|
| 5007 |
+
78300,
|
| 5008 |
+
78314,
|
| 5009 |
+
78336,
|
| 5010 |
+
78337,
|
| 5011 |
+
78346,
|
| 5012 |
+
78355,
|
| 5013 |
+
78378,
|
| 5014 |
+
78384,
|
| 5015 |
+
78401,
|
| 5016 |
+
78402,
|
| 5017 |
+
78435,
|
| 5018 |
+
78458,
|
| 5019 |
+
78515,
|
| 5020 |
+
78552,
|
| 5021 |
+
78602,
|
| 5022 |
+
78608,
|
| 5023 |
+
78637,
|
| 5024 |
+
78642,
|
| 5025 |
+
78657,
|
| 5026 |
+
78664,
|
| 5027 |
+
78672,
|
| 5028 |
+
78757,
|
| 5029 |
+
78772,
|
| 5030 |
+
78774,
|
| 5031 |
+
78777,
|
| 5032 |
+
78778,
|
| 5033 |
+
78837,
|
| 5034 |
+
78838,
|
| 5035 |
+
78878,
|
| 5036 |
+
78900,
|
| 5037 |
+
78903,
|
| 5038 |
+
78910,
|
| 5039 |
+
78916,
|
| 5040 |
+
78921,
|
| 5041 |
+
78923,
|
| 5042 |
+
78928,
|
| 5043 |
+
78929,
|
| 5044 |
+
78938,
|
| 5045 |
+
78955,
|
| 5046 |
+
78971,
|
| 5047 |
+
78988,
|
| 5048 |
+
79000,
|
| 5049 |
+
79018,
|
| 5050 |
+
79076,
|
| 5051 |
+
79083,
|
| 5052 |
+
79089,
|
| 5053 |
+
79091,
|
| 5054 |
+
79098,
|
| 5055 |
+
79133,
|
| 5056 |
+
79141,
|
| 5057 |
+
79142,
|
| 5058 |
+
79146,
|
| 5059 |
+
79189,
|
| 5060 |
+
79201,
|
| 5061 |
+
79203,
|
| 5062 |
+
79207,
|
| 5063 |
+
79208,
|
| 5064 |
+
79218,
|
| 5065 |
+
79226,
|
| 5066 |
+
79245,
|
| 5067 |
+
79255,
|
| 5068 |
+
79268,
|
| 5069 |
+
79279,
|
| 5070 |
+
79295,
|
| 5071 |
+
79302,
|
| 5072 |
+
79304,
|
| 5073 |
+
79318,
|
| 5074 |
+
79319,
|
| 5075 |
+
79321,
|
| 5076 |
+
79322,
|
| 5077 |
+
79325,
|
| 5078 |
+
79326,
|
| 5079 |
+
79342,
|
| 5080 |
+
79352,
|
| 5081 |
+
79364,
|
| 5082 |
+
79371,
|
| 5083 |
+
79375,
|
| 5084 |
+
79389,
|
| 5085 |
+
79390,
|
| 5086 |
+
79394,
|
| 5087 |
+
79403,
|
| 5088 |
+
79413,
|
| 5089 |
+
79427,
|
| 5090 |
+
79448,
|
| 5091 |
+
79453,
|
| 5092 |
+
79480,
|
| 5093 |
+
79483,
|
| 5094 |
+
79515,
|
| 5095 |
+
79531,
|
| 5096 |
+
79535,
|
| 5097 |
+
79553,
|
| 5098 |
+
79558,
|
| 5099 |
+
79567,
|
| 5100 |
+
79583,
|
| 5101 |
+
79590,
|
| 5102 |
+
79597,
|
| 5103 |
+
79621,
|
| 5104 |
+
79625,
|
| 5105 |
+
79654,
|
| 5106 |
+
79714,
|
| 5107 |
+
79739,
|
| 5108 |
+
79775,
|
| 5109 |
+
79787,
|
| 5110 |
+
79795,
|
| 5111 |
+
79846,
|
| 5112 |
+
79858,
|
| 5113 |
+
79865,
|
| 5114 |
+
79868,
|
| 5115 |
+
79878,
|
| 5116 |
+
79879,
|
| 5117 |
+
79883,
|
| 5118 |
+
79888,
|
| 5119 |
+
79895,
|
| 5120 |
+
79910,
|
| 5121 |
+
79920,
|
| 5122 |
+
79931,
|
| 5123 |
+
79939,
|
| 5124 |
+
79949,
|
| 5125 |
+
79965,
|
| 5126 |
+
79993,
|
| 5127 |
+
80007,
|
| 5128 |
+
80078,
|
| 5129 |
+
80080,
|
| 5130 |
+
80090,
|
| 5131 |
+
80142,
|
| 5132 |
+
80154,
|
| 5133 |
+
80156,
|
| 5134 |
+
80169,
|
| 5135 |
+
80171,
|
| 5136 |
+
80178,
|
| 5137 |
+
80181,
|
| 5138 |
+
80212,
|
| 5139 |
+
80220,
|
| 5140 |
+
80244,
|
| 5141 |
+
80281,
|
| 5142 |
+
80306,
|
| 5143 |
+
80316,
|
| 5144 |
+
80330,
|
| 5145 |
+
80345,
|
| 5146 |
+
80357,
|
| 5147 |
+
80361,
|
| 5148 |
+
80368,
|
| 5149 |
+
80391,
|
| 5150 |
+
80394,
|
| 5151 |
+
80403,
|
| 5152 |
+
80410,
|
| 5153 |
+
80434,
|
| 5154 |
+
80436,
|
| 5155 |
+
80445,
|
| 5156 |
+
80455,
|
| 5157 |
+
80488,
|
| 5158 |
+
80505,
|
| 5159 |
+
80540,
|
| 5160 |
+
80550,
|
| 5161 |
+
80592,
|
| 5162 |
+
80608,
|
| 5163 |
+
80613,
|
| 5164 |
+
80634,
|
| 5165 |
+
80640,
|
| 5166 |
+
80644,
|
| 5167 |
+
80653,
|
| 5168 |
+
80687,
|
| 5169 |
+
80698,
|
| 5170 |
+
80702,
|
| 5171 |
+
80719,
|
| 5172 |
+
80741,
|
| 5173 |
+
80749,
|
| 5174 |
+
80803,
|
| 5175 |
+
80818,
|
| 5176 |
+
80821,
|
| 5177 |
+
80823,
|
| 5178 |
+
80874,
|
| 5179 |
+
80961,
|
| 5180 |
+
80984,
|
| 5181 |
+
81011,
|
| 5182 |
+
81012,
|
| 5183 |
+
81014,
|
| 5184 |
+
81036,
|
| 5185 |
+
81042,
|
| 5186 |
+
81048,
|
| 5187 |
+
81057,
|
| 5188 |
+
81065,
|
| 5189 |
+
81081,
|
| 5190 |
+
81091,
|
| 5191 |
+
81131,
|
| 5192 |
+
81141,
|
| 5193 |
+
81142,
|
| 5194 |
+
81161,
|
| 5195 |
+
81174,
|
| 5196 |
+
81243,
|
| 5197 |
+
81246,
|
| 5198 |
+
81250,
|
| 5199 |
+
81254,
|
| 5200 |
+
81281,
|
| 5201 |
+
81285,
|
| 5202 |
+
81349,
|
| 5203 |
+
81392,
|
| 5204 |
+
81394,
|
| 5205 |
+
81436,
|
| 5206 |
+
81439,
|
| 5207 |
+
81452,
|
| 5208 |
+
81483,
|
| 5209 |
+
81499,
|
| 5210 |
+
81511,
|
| 5211 |
+
81554,
|
| 5212 |
+
81556,
|
| 5213 |
+
81572,
|
| 5214 |
+
81580,
|
| 5215 |
+
81599,
|
| 5216 |
+
81608,
|
| 5217 |
+
81609,
|
| 5218 |
+
81618,
|
| 5219 |
+
81645,
|
| 5220 |
+
81654,
|
| 5221 |
+
81661,
|
| 5222 |
+
81662,
|
| 5223 |
+
81723,
|
| 5224 |
+
81740,
|
| 5225 |
+
81767,
|
| 5226 |
+
81779,
|
| 5227 |
+
81802,
|
| 5228 |
+
81840,
|
| 5229 |
+
81866,
|
| 5230 |
+
81868,
|
| 5231 |
+
81892,
|
| 5232 |
+
81942,
|
| 5233 |
+
81949,
|
| 5234 |
+
81956,
|
| 5235 |
+
81962,
|
| 5236 |
+
81970,
|
| 5237 |
+
82003,
|
| 5238 |
+
82006,
|
| 5239 |
+
82024,
|
| 5240 |
+
82034,
|
| 5241 |
+
82042,
|
| 5242 |
+
82049,
|
| 5243 |
+
82074,
|
| 5244 |
+
82120,
|
| 5245 |
+
82172,
|
| 5246 |
+
82206,
|
| 5247 |
+
82208,
|
| 5248 |
+
82216,
|
| 5249 |
+
82266,
|
| 5250 |
+
82273,
|
| 5251 |
+
82294,
|
| 5252 |
+
82361,
|
| 5253 |
+
82368,
|
| 5254 |
+
82379,
|
| 5255 |
+
82382,
|
| 5256 |
+
82432,
|
| 5257 |
+
82446,
|
| 5258 |
+
82466,
|
| 5259 |
+
82475,
|
| 5260 |
+
82484,
|
| 5261 |
+
82501,
|
| 5262 |
+
82506,
|
| 5263 |
+
82511,
|
| 5264 |
+
82530,
|
| 5265 |
+
82596,
|
| 5266 |
+
82598,
|
| 5267 |
+
82606,
|
| 5268 |
+
82630,
|
| 5269 |
+
82669,
|
| 5270 |
+
82671,
|
| 5271 |
+
82672,
|
| 5272 |
+
82693,
|
| 5273 |
+
82706,
|
| 5274 |
+
82712,
|
| 5275 |
+
82776,
|
| 5276 |
+
82777,
|
| 5277 |
+
82794,
|
| 5278 |
+
82798,
|
| 5279 |
+
82815,
|
| 5280 |
+
82819,
|
| 5281 |
+
82822,
|
| 5282 |
+
82848,
|
| 5283 |
+
82866,
|
| 5284 |
+
82868,
|
| 5285 |
+
82888,
|
| 5286 |
+
82893,
|
| 5287 |
+
82912,
|
| 5288 |
+
82921,
|
| 5289 |
+
82923,
|
| 5290 |
+
82934,
|
| 5291 |
+
82961,
|
| 5292 |
+
82971,
|
| 5293 |
+
82979,
|
| 5294 |
+
82994,
|
| 5295 |
+
82997,
|
| 5296 |
+
83002,
|
| 5297 |
+
83006,
|
| 5298 |
+
83007,
|
| 5299 |
+
83050,
|
| 5300 |
+
83051,
|
| 5301 |
+
83058,
|
| 5302 |
+
83065,
|
| 5303 |
+
83073,
|
| 5304 |
+
83097,
|
| 5305 |
+
83107,
|
| 5306 |
+
83137,
|
| 5307 |
+
83164,
|
| 5308 |
+
83191,
|
| 5309 |
+
83197,
|
| 5310 |
+
83207,
|
| 5311 |
+
83222,
|
| 5312 |
+
83226,
|
| 5313 |
+
83237,
|
| 5314 |
+
83244,
|
| 5315 |
+
83271,
|
| 5316 |
+
83280,
|
| 5317 |
+
83312,
|
| 5318 |
+
83320,
|
| 5319 |
+
83327,
|
| 5320 |
+
83332,
|
| 5321 |
+
83347,
|
| 5322 |
+
83359,
|
| 5323 |
+
83383,
|
| 5324 |
+
83394,
|
| 5325 |
+
83434,
|
| 5326 |
+
83447,
|
| 5327 |
+
83457,
|
| 5328 |
+
83460,
|
| 5329 |
+
83485,
|
| 5330 |
+
83503,
|
| 5331 |
+
83515,
|
| 5332 |
+
83519,
|
| 5333 |
+
83522,
|
| 5334 |
+
83532,
|
| 5335 |
+
83554,
|
| 5336 |
+
83555,
|
| 5337 |
+
83570,
|
| 5338 |
+
83591,
|
| 5339 |
+
83626,
|
| 5340 |
+
83643,
|
| 5341 |
+
83663,
|
| 5342 |
+
83670,
|
| 5343 |
+
83671,
|
| 5344 |
+
83710,
|
| 5345 |
+
83711,
|
| 5346 |
+
83724,
|
| 5347 |
+
83725,
|
| 5348 |
+
83738,
|
| 5349 |
+
83748,
|
| 5350 |
+
83775,
|
| 5351 |
+
83790,
|
| 5352 |
+
83809,
|
| 5353 |
+
83844,
|
| 5354 |
+
83849,
|
| 5355 |
+
83852,
|
| 5356 |
+
83853,
|
| 5357 |
+
83877,
|
| 5358 |
+
83886,
|
| 5359 |
+
83894,
|
| 5360 |
+
83900,
|
| 5361 |
+
83913,
|
| 5362 |
+
83917,
|
| 5363 |
+
83921,
|
| 5364 |
+
83933,
|
| 5365 |
+
83941,
|
| 5366 |
+
83950,
|
| 5367 |
+
83970,
|
| 5368 |
+
84008,
|
| 5369 |
+
84019,
|
| 5370 |
+
84025,
|
| 5371 |
+
84044,
|
| 5372 |
+
84086,
|
| 5373 |
+
84109,
|
| 5374 |
+
84119,
|
| 5375 |
+
84133,
|
| 5376 |
+
84141,
|
| 5377 |
+
84164,
|
| 5378 |
+
84200,
|
| 5379 |
+
84201,
|
| 5380 |
+
84212,
|
| 5381 |
+
84215,
|
| 5382 |
+
84238,
|
| 5383 |
+
84271,
|
| 5384 |
+
84274,
|
| 5385 |
+
84325,
|
| 5386 |
+
84330,
|
| 5387 |
+
84333,
|
| 5388 |
+
84345,
|
| 5389 |
+
84356,
|
| 5390 |
+
84413,
|
| 5391 |
+
84462,
|
| 5392 |
+
84472,
|
| 5393 |
+
84496,
|
| 5394 |
+
84500,
|
| 5395 |
+
84517,
|
| 5396 |
+
84539,
|
| 5397 |
+
84554,
|
| 5398 |
+
84558,
|
| 5399 |
+
84559,
|
| 5400 |
+
84562,
|
| 5401 |
+
84563,
|
| 5402 |
+
84565,
|
| 5403 |
+
84567,
|
| 5404 |
+
84572,
|
| 5405 |
+
84576,
|
| 5406 |
+
84580,
|
| 5407 |
+
84588,
|
| 5408 |
+
84595,
|
| 5409 |
+
84613,
|
| 5410 |
+
84631,
|
| 5411 |
+
84636,
|
| 5412 |
+
84645,
|
| 5413 |
+
84685,
|
| 5414 |
+
84693,
|
| 5415 |
+
84702,
|
| 5416 |
+
84705,
|
| 5417 |
+
84716,
|
| 5418 |
+
84738,
|
| 5419 |
+
84741,
|
| 5420 |
+
84852,
|
| 5421 |
+
84863,
|
| 5422 |
+
84875,
|
| 5423 |
+
84877,
|
| 5424 |
+
84898,
|
| 5425 |
+
84929,
|
| 5426 |
+
84940,
|
| 5427 |
+
84944,
|
| 5428 |
+
84960,
|
| 5429 |
+
84999,
|
| 5430 |
+
85004,
|
| 5431 |
+
85054,
|
| 5432 |
+
85103,
|
| 5433 |
+
85120,
|
| 5434 |
+
85161,
|
| 5435 |
+
85166,
|
| 5436 |
+
85167,
|
| 5437 |
+
85177,
|
| 5438 |
+
85180,
|
| 5439 |
+
85203,
|
| 5440 |
+
85243,
|
| 5441 |
+
85268,
|
| 5442 |
+
85277,
|
| 5443 |
+
85288,
|
| 5444 |
+
85312,
|
| 5445 |
+
85318,
|
| 5446 |
+
85321,
|
| 5447 |
+
85334,
|
| 5448 |
+
85342,
|
| 5449 |
+
85368,
|
| 5450 |
+
85392,
|
| 5451 |
+
85398,
|
| 5452 |
+
85407,
|
| 5453 |
+
85413,
|
| 5454 |
+
85422,
|
| 5455 |
+
85434,
|
| 5456 |
+
85447,
|
| 5457 |
+
85465,
|
| 5458 |
+
85490,
|
| 5459 |
+
85494,
|
| 5460 |
+
85500,
|
| 5461 |
+
85503,
|
| 5462 |
+
85540,
|
| 5463 |
+
85545,
|
| 5464 |
+
85568,
|
| 5465 |
+
85595,
|
| 5466 |
+
85602,
|
| 5467 |
+
85604,
|
| 5468 |
+
85617,
|
| 5469 |
+
85629,
|
| 5470 |
+
85677,
|
| 5471 |
+
85714,
|
| 5472 |
+
85718,
|
| 5473 |
+
85764,
|
| 5474 |
+
85785,
|
| 5475 |
+
85794,
|
| 5476 |
+
85795,
|
| 5477 |
+
85846,
|
| 5478 |
+
85847,
|
| 5479 |
+
85863,
|
| 5480 |
+
85867,
|
| 5481 |
+
85886,
|
| 5482 |
+
85892,
|
| 5483 |
+
85912,
|
| 5484 |
+
85925,
|
| 5485 |
+
85931,
|
| 5486 |
+
85990,
|
| 5487 |
+
85991,
|
| 5488 |
+
85994,
|
| 5489 |
+
86021,
|
| 5490 |
+
86025,
|
| 5491 |
+
86062,
|
| 5492 |
+
86076,
|
| 5493 |
+
86085,
|
| 5494 |
+
86089,
|
| 5495 |
+
86114,
|
| 5496 |
+
86146,
|
| 5497 |
+
86154,
|
| 5498 |
+
86197,
|
| 5499 |
+
86214,
|
| 5500 |
+
86215,
|
| 5501 |
+
86232,
|
| 5502 |
+
86234,
|
| 5503 |
+
86237,
|
| 5504 |
+
86256,
|
| 5505 |
+
86265,
|
| 5506 |
+
86303,
|
| 5507 |
+
86316,
|
| 5508 |
+
86321,
|
| 5509 |
+
86344,
|
| 5510 |
+
86378,
|
| 5511 |
+
86427,
|
| 5512 |
+
86435,
|
| 5513 |
+
86436,
|
| 5514 |
+
86459,
|
| 5515 |
+
86465,
|
| 5516 |
+
86474,
|
| 5517 |
+
86503,
|
| 5518 |
+
86558,
|
| 5519 |
+
86620,
|
| 5520 |
+
86637,
|
| 5521 |
+
86643,
|
| 5522 |
+
86661,
|
| 5523 |
+
86709,
|
| 5524 |
+
86724,
|
| 5525 |
+
86726,
|
| 5526 |
+
86729,
|
| 5527 |
+
86770,
|
| 5528 |
+
86779,
|
| 5529 |
+
86812,
|
| 5530 |
+
86827,
|
| 5531 |
+
86828,
|
| 5532 |
+
86843,
|
| 5533 |
+
86853,
|
| 5534 |
+
86856,
|
| 5535 |
+
86881,
|
| 5536 |
+
86922,
|
| 5537 |
+
86967,
|
| 5538 |
+
87008,
|
| 5539 |
+
87036,
|
| 5540 |
+
87037,
|
| 5541 |
+
87059,
|
| 5542 |
+
87079,
|
| 5543 |
+
87094,
|
| 5544 |
+
87109,
|
| 5545 |
+
87110,
|
| 5546 |
+
87114,
|
| 5547 |
+
87120,
|
| 5548 |
+
87136,
|
| 5549 |
+
87141,
|
| 5550 |
+
87248,
|
| 5551 |
+
87260,
|
| 5552 |
+
87282,
|
| 5553 |
+
87299,
|
| 5554 |
+
87346,
|
| 5555 |
+
87358,
|
| 5556 |
+
87411,
|
| 5557 |
+
87414,
|
| 5558 |
+
87433,
|
| 5559 |
+
87441,
|
| 5560 |
+
87442,
|
| 5561 |
+
87489,
|
| 5562 |
+
87553,
|
| 5563 |
+
87565,
|
| 5564 |
+
87567,
|
| 5565 |
+
87569,
|
| 5566 |
+
87573,
|
| 5567 |
+
87574,
|
| 5568 |
+
87586,
|
| 5569 |
+
87597,
|
| 5570 |
+
87619,
|
| 5571 |
+
87628,
|
| 5572 |
+
87647,
|
| 5573 |
+
87653,
|
| 5574 |
+
87676,
|
| 5575 |
+
87685,
|
| 5576 |
+
87712,
|
| 5577 |
+
87736,
|
| 5578 |
+
87741,
|
| 5579 |
+
87754,
|
| 5580 |
+
87771,
|
| 5581 |
+
87775,
|
| 5582 |
+
87793,
|
| 5583 |
+
87861,
|
| 5584 |
+
87874,
|
| 5585 |
+
87894,
|
| 5586 |
+
87903,
|
| 5587 |
+
87942,
|
| 5588 |
+
87951,
|
| 5589 |
+
87959,
|
| 5590 |
+
87964,
|
| 5591 |
+
88002,
|
| 5592 |
+
88033,
|
| 5593 |
+
88090,
|
| 5594 |
+
88099,
|
| 5595 |
+
88102,
|
| 5596 |
+
88117,
|
| 5597 |
+
88120,
|
| 5598 |
+
88125,
|
| 5599 |
+
88141,
|
| 5600 |
+
88143,
|
| 5601 |
+
88176,
|
| 5602 |
+
88252,
|
| 5603 |
+
88281,
|
| 5604 |
+
88349,
|
| 5605 |
+
88363,
|
| 5606 |
+
88380,
|
| 5607 |
+
88385,
|
| 5608 |
+
88433,
|
| 5609 |
+
88463,
|
| 5610 |
+
88480,
|
| 5611 |
+
88490,
|
| 5612 |
+
88512,
|
| 5613 |
+
88523,
|
| 5614 |
+
88529,
|
| 5615 |
+
88541,
|
| 5616 |
+
88573,
|
| 5617 |
+
88622,
|
| 5618 |
+
88657,
|
| 5619 |
+
88691,
|
| 5620 |
+
88726,
|
| 5621 |
+
88744,
|
| 5622 |
+
88763,
|
| 5623 |
+
88774,
|
| 5624 |
+
88783,
|
| 5625 |
+
88804,
|
| 5626 |
+
88830,
|
| 5627 |
+
88853,
|
| 5628 |
+
88866,
|
| 5629 |
+
88889,
|
| 5630 |
+
88901,
|
| 5631 |
+
88903,
|
| 5632 |
+
88928,
|
| 5633 |
+
88940,
|
| 5634 |
+
88945,
|
| 5635 |
+
88946,
|
| 5636 |
+
88988,
|
| 5637 |
+
88998,
|
| 5638 |
+
89004,
|
| 5639 |
+
89036,
|
| 5640 |
+
89039,
|
| 5641 |
+
89049,
|
| 5642 |
+
89061,
|
| 5643 |
+
89063,
|
| 5644 |
+
89071,
|
| 5645 |
+
89083,
|
| 5646 |
+
89092,
|
| 5647 |
+
89093,
|
| 5648 |
+
89095,
|
| 5649 |
+
89101,
|
| 5650 |
+
89138,
|
| 5651 |
+
89148,
|
| 5652 |
+
89157,
|
| 5653 |
+
89160,
|
| 5654 |
+
89166,
|
| 5655 |
+
89179,
|
| 5656 |
+
89212,
|
| 5657 |
+
89213,
|
| 5658 |
+
89238,
|
| 5659 |
+
89253,
|
| 5660 |
+
89262,
|
| 5661 |
+
89291,
|
| 5662 |
+
89297,
|
| 5663 |
+
89318,
|
| 5664 |
+
89329,
|
| 5665 |
+
89363,
|
| 5666 |
+
89382,
|
| 5667 |
+
89383,
|
| 5668 |
+
89384,
|
| 5669 |
+
89385,
|
| 5670 |
+
89389,
|
| 5671 |
+
89401,
|
| 5672 |
+
89411,
|
| 5673 |
+
89419,
|
| 5674 |
+
89427,
|
| 5675 |
+
89434,
|
| 5676 |
+
89443,
|
| 5677 |
+
89459,
|
| 5678 |
+
89460,
|
| 5679 |
+
89467,
|
| 5680 |
+
89478,
|
| 5681 |
+
89486,
|
| 5682 |
+
89488,
|
| 5683 |
+
89494,
|
| 5684 |
+
89516,
|
| 5685 |
+
89526,
|
| 5686 |
+
89528,
|
| 5687 |
+
89543,
|
| 5688 |
+
89598,
|
| 5689 |
+
89650,
|
| 5690 |
+
89683,
|
| 5691 |
+
89684,
|
| 5692 |
+
89694,
|
| 5693 |
+
89720,
|
| 5694 |
+
89727,
|
| 5695 |
+
89744,
|
| 5696 |
+
89746,
|
| 5697 |
+
89764,
|
| 5698 |
+
89788,
|
| 5699 |
+
89835,
|
| 5700 |
+
89855,
|
| 5701 |
+
89856,
|
| 5702 |
+
89869,
|
| 5703 |
+
89882,
|
| 5704 |
+
89903,
|
| 5705 |
+
89921,
|
| 5706 |
+
89929,
|
| 5707 |
+
89950,
|
| 5708 |
+
89958,
|
| 5709 |
+
89966,
|
| 5710 |
+
89998,
|
| 5711 |
+
90001,
|
| 5712 |
+
90094,
|
| 5713 |
+
90096,
|
| 5714 |
+
90118,
|
| 5715 |
+
90147,
|
| 5716 |
+
90148,
|
| 5717 |
+
90155,
|
| 5718 |
+
90177,
|
| 5719 |
+
90178,
|
| 5720 |
+
90192,
|
| 5721 |
+
90204,
|
| 5722 |
+
90208,
|
| 5723 |
+
90218,
|
| 5724 |
+
90220,
|
| 5725 |
+
90222,
|
| 5726 |
+
90230,
|
| 5727 |
+
90237,
|
| 5728 |
+
90241,
|
| 5729 |
+
90265,
|
| 5730 |
+
90276,
|
| 5731 |
+
90277,
|
| 5732 |
+
90278,
|
| 5733 |
+
90306,
|
| 5734 |
+
90308,
|
| 5735 |
+
90314,
|
| 5736 |
+
90332,
|
| 5737 |
+
90335,
|
| 5738 |
+
90358,
|
| 5739 |
+
90380,
|
| 5740 |
+
90387,
|
| 5741 |
+
90395,
|
| 5742 |
+
90408,
|
| 5743 |
+
90410,
|
| 5744 |
+
90415,
|
| 5745 |
+
90430,
|
| 5746 |
+
90440,
|
| 5747 |
+
90475,
|
| 5748 |
+
90476,
|
| 5749 |
+
90496,
|
| 5750 |
+
90515,
|
| 5751 |
+
90522,
|
| 5752 |
+
90546,
|
| 5753 |
+
90557,
|
| 5754 |
+
90621,
|
| 5755 |
+
90639,
|
| 5756 |
+
90643,
|
| 5757 |
+
90651,
|
| 5758 |
+
90675,
|
| 5759 |
+
90677,
|
| 5760 |
+
90679,
|
| 5761 |
+
90688,
|
| 5762 |
+
90698,
|
| 5763 |
+
90711,
|
| 5764 |
+
90717,
|
| 5765 |
+
90730,
|
| 5766 |
+
90734,
|
| 5767 |
+
90759,
|
| 5768 |
+
90768,
|
| 5769 |
+
90798,
|
| 5770 |
+
90827,
|
| 5771 |
+
90832,
|
| 5772 |
+
90839,
|
| 5773 |
+
90894,
|
| 5774 |
+
90906,
|
| 5775 |
+
90911,
|
| 5776 |
+
90959,
|
| 5777 |
+
90983,
|
| 5778 |
+
91008,
|
| 5779 |
+
91018,
|
| 5780 |
+
91035,
|
| 5781 |
+
91048,
|
| 5782 |
+
91050,
|
| 5783 |
+
91088,
|
| 5784 |
+
91093,
|
| 5785 |
+
91099,
|
| 5786 |
+
91117,
|
| 5787 |
+
91182,
|
| 5788 |
+
91196,
|
| 5789 |
+
91205,
|
| 5790 |
+
91211,
|
| 5791 |
+
91223,
|
| 5792 |
+
91249,
|
| 5793 |
+
91276,
|
| 5794 |
+
91278,
|
| 5795 |
+
91283,
|
| 5796 |
+
91287,
|
| 5797 |
+
91301,
|
| 5798 |
+
91347,
|
| 5799 |
+
91349,
|
| 5800 |
+
91354,
|
| 5801 |
+
91360,
|
| 5802 |
+
91383,
|
| 5803 |
+
91389,
|
| 5804 |
+
91399,
|
| 5805 |
+
91403,
|
| 5806 |
+
91413,
|
| 5807 |
+
91479,
|
| 5808 |
+
91514,
|
| 5809 |
+
91515,
|
| 5810 |
+
91517,
|
| 5811 |
+
91528,
|
| 5812 |
+
91545,
|
| 5813 |
+
91573,
|
| 5814 |
+
91581,
|
| 5815 |
+
91586,
|
| 5816 |
+
91595,
|
| 5817 |
+
91630,
|
| 5818 |
+
91637,
|
| 5819 |
+
91644,
|
| 5820 |
+
91697,
|
| 5821 |
+
91699,
|
| 5822 |
+
91737,
|
| 5823 |
+
91752,
|
| 5824 |
+
91756,
|
| 5825 |
+
91771,
|
| 5826 |
+
91779,
|
| 5827 |
+
91812,
|
| 5828 |
+
91827,
|
| 5829 |
+
91841,
|
| 5830 |
+
91867,
|
| 5831 |
+
91868,
|
| 5832 |
+
91888,
|
| 5833 |
+
91889,
|
| 5834 |
+
91898,
|
| 5835 |
+
91904,
|
| 5836 |
+
91921,
|
| 5837 |
+
91927,
|
| 5838 |
+
91935,
|
| 5839 |
+
91940,
|
| 5840 |
+
91944,
|
| 5841 |
+
91958,
|
| 5842 |
+
91976,
|
| 5843 |
+
92000,
|
| 5844 |
+
92010,
|
| 5845 |
+
92047,
|
| 5846 |
+
92056,
|
| 5847 |
+
92081,
|
| 5848 |
+
92084,
|
| 5849 |
+
92099,
|
| 5850 |
+
92102,
|
| 5851 |
+
92103,
|
| 5852 |
+
92108,
|
| 5853 |
+
92111,
|
| 5854 |
+
92120,
|
| 5855 |
+
92142,
|
| 5856 |
+
92149,
|
| 5857 |
+
92163,
|
| 5858 |
+
92165,
|
| 5859 |
+
92171,
|
| 5860 |
+
92173,
|
| 5861 |
+
92181,
|
| 5862 |
+
92187,
|
| 5863 |
+
92190,
|
| 5864 |
+
92213,
|
| 5865 |
+
92222,
|
| 5866 |
+
92228,
|
| 5867 |
+
92243,
|
| 5868 |
+
92277,
|
| 5869 |
+
92327,
|
| 5870 |
+
92346,
|
| 5871 |
+
92349,
|
| 5872 |
+
92378,
|
| 5873 |
+
92379,
|
| 5874 |
+
92456,
|
| 5875 |
+
92464,
|
| 5876 |
+
92465,
|
| 5877 |
+
92515,
|
| 5878 |
+
92562,
|
| 5879 |
+
92590,
|
| 5880 |
+
92650,
|
| 5881 |
+
92710,
|
| 5882 |
+
92723,
|
| 5883 |
+
92737,
|
| 5884 |
+
92753,
|
| 5885 |
+
92763,
|
| 5886 |
+
92782,
|
| 5887 |
+
92783,
|
| 5888 |
+
92833,
|
| 5889 |
+
92855,
|
| 5890 |
+
92880,
|
| 5891 |
+
92893,
|
| 5892 |
+
92897,
|
| 5893 |
+
92910,
|
| 5894 |
+
92978,
|
| 5895 |
+
92986,
|
| 5896 |
+
93004,
|
| 5897 |
+
93008,
|
| 5898 |
+
93029,
|
| 5899 |
+
93045,
|
| 5900 |
+
93047,
|
| 5901 |
+
93075,
|
| 5902 |
+
93083,
|
| 5903 |
+
93170,
|
| 5904 |
+
93175,
|
| 5905 |
+
93178,
|
| 5906 |
+
93188,
|
| 5907 |
+
93191,
|
| 5908 |
+
93196,
|
| 5909 |
+
93219,
|
| 5910 |
+
93227,
|
| 5911 |
+
93241,
|
| 5912 |
+
93244,
|
| 5913 |
+
93255,
|
| 5914 |
+
93271,
|
| 5915 |
+
93273,
|
| 5916 |
+
93296,
|
| 5917 |
+
93299,
|
| 5918 |
+
93314,
|
| 5919 |
+
93316,
|
| 5920 |
+
93317,
|
| 5921 |
+
93333,
|
| 5922 |
+
93342,
|
| 5923 |
+
93349,
|
| 5924 |
+
93389,
|
| 5925 |
+
93397,
|
| 5926 |
+
93437,
|
| 5927 |
+
93448,
|
| 5928 |
+
93490,
|
| 5929 |
+
93509,
|
| 5930 |
+
93538,
|
| 5931 |
+
93544,
|
| 5932 |
+
93549,
|
| 5933 |
+
93558,
|
| 5934 |
+
93596,
|
| 5935 |
+
93601,
|
| 5936 |
+
93611,
|
| 5937 |
+
93614,
|
| 5938 |
+
93618,
|
| 5939 |
+
93627,
|
| 5940 |
+
93633,
|
| 5941 |
+
93652,
|
| 5942 |
+
93670,
|
| 5943 |
+
93682,
|
| 5944 |
+
93718,
|
| 5945 |
+
93728,
|
| 5946 |
+
93744,
|
| 5947 |
+
93746,
|
| 5948 |
+
93754,
|
| 5949 |
+
93826,
|
| 5950 |
+
93844,
|
| 5951 |
+
93873,
|
| 5952 |
+
93892,
|
| 5953 |
+
93896,
|
| 5954 |
+
93897,
|
| 5955 |
+
93902,
|
| 5956 |
+
93905,
|
| 5957 |
+
93911,
|
| 5958 |
+
93920,
|
| 5959 |
+
93922,
|
| 5960 |
+
93939,
|
| 5961 |
+
93941,
|
| 5962 |
+
93977,
|
| 5963 |
+
93986,
|
| 5964 |
+
94010,
|
| 5965 |
+
94023,
|
| 5966 |
+
94028,
|
| 5967 |
+
94034,
|
| 5968 |
+
94037,
|
| 5969 |
+
94052,
|
| 5970 |
+
94081,
|
| 5971 |
+
94125,
|
| 5972 |
+
94127,
|
| 5973 |
+
94128,
|
| 5974 |
+
94136,
|
| 5975 |
+
94141,
|
| 5976 |
+
94169,
|
| 5977 |
+
94208,
|
| 5978 |
+
94227,
|
| 5979 |
+
94239,
|
| 5980 |
+
94244,
|
| 5981 |
+
94305,
|
| 5982 |
+
94326,
|
| 5983 |
+
94334,
|
| 5984 |
+
94335,
|
| 5985 |
+
94345,
|
| 5986 |
+
94367,
|
| 5987 |
+
94374,
|
| 5988 |
+
94391,
|
| 5989 |
+
94432,
|
| 5990 |
+
94439,
|
| 5991 |
+
94448,
|
| 5992 |
+
94456,
|
| 5993 |
+
94464,
|
| 5994 |
+
94491,
|
| 5995 |
+
94502,
|
| 5996 |
+
94511,
|
| 5997 |
+
94521,
|
| 5998 |
+
94538,
|
| 5999 |
+
94601,
|
| 6000 |
+
94614,
|
| 6001 |
+
94620,
|
| 6002 |
+
94679,
|
| 6003 |
+
94691,
|
| 6004 |
+
94702,
|
| 6005 |
+
94714,
|
| 6006 |
+
94722,
|
| 6007 |
+
94730,
|
| 6008 |
+
94745,
|
| 6009 |
+
94753,
|
| 6010 |
+
94767,
|
| 6011 |
+
94773,
|
| 6012 |
+
94786,
|
| 6013 |
+
94792,
|
| 6014 |
+
94799,
|
| 6015 |
+
94807,
|
| 6016 |
+
94821,
|
| 6017 |
+
94825,
|
| 6018 |
+
94883,
|
| 6019 |
+
94884,
|
| 6020 |
+
94886,
|
| 6021 |
+
94888,
|
| 6022 |
+
94892,
|
| 6023 |
+
94925,
|
| 6024 |
+
94931,
|
| 6025 |
+
94945,
|
| 6026 |
+
94947,
|
| 6027 |
+
94951,
|
| 6028 |
+
94964,
|
| 6029 |
+
94976,
|
| 6030 |
+
94985,
|
| 6031 |
+
95024,
|
| 6032 |
+
95032,
|
| 6033 |
+
95036,
|
| 6034 |
+
95053,
|
| 6035 |
+
95063,
|
| 6036 |
+
95069,
|
| 6037 |
+
95071,
|
| 6038 |
+
95087,
|
| 6039 |
+
95105,
|
| 6040 |
+
95106,
|
| 6041 |
+
95113,
|
| 6042 |
+
95114,
|
| 6043 |
+
95140,
|
| 6044 |
+
95159,
|
| 6045 |
+
95173,
|
| 6046 |
+
95211,
|
| 6047 |
+
95248,
|
| 6048 |
+
95255,
|
| 6049 |
+
95267,
|
| 6050 |
+
95270,
|
| 6051 |
+
95274,
|
| 6052 |
+
95278,
|
| 6053 |
+
95288,
|
| 6054 |
+
95299,
|
| 6055 |
+
95323,
|
| 6056 |
+
95349,
|
| 6057 |
+
95350,
|
| 6058 |
+
95357,
|
| 6059 |
+
95363,
|
| 6060 |
+
95368,
|
| 6061 |
+
95377,
|
| 6062 |
+
95423,
|
| 6063 |
+
95426,
|
| 6064 |
+
95438,
|
| 6065 |
+
95482,
|
| 6066 |
+
95492,
|
| 6067 |
+
95519,
|
| 6068 |
+
95522,
|
| 6069 |
+
95525,
|
| 6070 |
+
95527,
|
| 6071 |
+
95528,
|
| 6072 |
+
95546,
|
| 6073 |
+
95586,
|
| 6074 |
+
95605,
|
| 6075 |
+
95620,
|
| 6076 |
+
95642,
|
| 6077 |
+
95654,
|
| 6078 |
+
95670,
|
| 6079 |
+
95674,
|
| 6080 |
+
95685,
|
| 6081 |
+
95687,
|
| 6082 |
+
95707,
|
| 6083 |
+
95741,
|
| 6084 |
+
95753,
|
| 6085 |
+
95754,
|
| 6086 |
+
95804,
|
| 6087 |
+
95810,
|
| 6088 |
+
95815,
|
| 6089 |
+
95843,
|
| 6090 |
+
95849,
|
| 6091 |
+
95851,
|
| 6092 |
+
95884,
|
| 6093 |
+
95892,
|
| 6094 |
+
95901,
|
| 6095 |
+
95906,
|
| 6096 |
+
95916,
|
| 6097 |
+
95970,
|
| 6098 |
+
95974,
|
| 6099 |
+
95989,
|
| 6100 |
+
96017,
|
| 6101 |
+
96021,
|
| 6102 |
+
96025,
|
| 6103 |
+
96033,
|
| 6104 |
+
96050,
|
| 6105 |
+
96055,
|
| 6106 |
+
96060,
|
| 6107 |
+
96065,
|
| 6108 |
+
96069,
|
| 6109 |
+
96075,
|
| 6110 |
+
96090,
|
| 6111 |
+
96116,
|
| 6112 |
+
96117,
|
| 6113 |
+
96121,
|
| 6114 |
+
96133,
|
| 6115 |
+
96138,
|
| 6116 |
+
96155,
|
| 6117 |
+
96200,
|
| 6118 |
+
96279,
|
| 6119 |
+
96332,
|
| 6120 |
+
96335,
|
| 6121 |
+
96351,
|
| 6122 |
+
96360,
|
| 6123 |
+
96366,
|
| 6124 |
+
96390,
|
| 6125 |
+
96426,
|
| 6126 |
+
96461,
|
| 6127 |
+
96466,
|
| 6128 |
+
96494,
|
| 6129 |
+
96501,
|
| 6130 |
+
96503,
|
| 6131 |
+
96530,
|
| 6132 |
+
96561,
|
| 6133 |
+
96580,
|
| 6134 |
+
96581,
|
| 6135 |
+
96595,
|
| 6136 |
+
96604,
|
| 6137 |
+
96683,
|
| 6138 |
+
96705,
|
| 6139 |
+
96711,
|
| 6140 |
+
96721,
|
| 6141 |
+
96731,
|
| 6142 |
+
96756,
|
| 6143 |
+
96759,
|
| 6144 |
+
96771,
|
| 6145 |
+
96773,
|
| 6146 |
+
96786,
|
| 6147 |
+
96806,
|
| 6148 |
+
96808,
|
| 6149 |
+
96859,
|
| 6150 |
+
96866,
|
| 6151 |
+
96887,
|
| 6152 |
+
96899,
|
| 6153 |
+
96911,
|
| 6154 |
+
96912,
|
| 6155 |
+
96938,
|
| 6156 |
+
96950,
|
| 6157 |
+
96996,
|
| 6158 |
+
97000,
|
| 6159 |
+
97015,
|
| 6160 |
+
97061,
|
| 6161 |
+
97064,
|
| 6162 |
+
97085,
|
| 6163 |
+
97086,
|
| 6164 |
+
97095,
|
| 6165 |
+
97112,
|
| 6166 |
+
97149,
|
| 6167 |
+
97172,
|
| 6168 |
+
97207,
|
| 6169 |
+
97216,
|
| 6170 |
+
97233,
|
| 6171 |
+
97236,
|
| 6172 |
+
97238,
|
| 6173 |
+
97256,
|
| 6174 |
+
97259,
|
| 6175 |
+
97309,
|
| 6176 |
+
97314,
|
| 6177 |
+
97332,
|
| 6178 |
+
97342,
|
| 6179 |
+
97347,
|
| 6180 |
+
97348,
|
| 6181 |
+
97403,
|
| 6182 |
+
97433,
|
| 6183 |
+
97469,
|
| 6184 |
+
97470,
|
| 6185 |
+
97478,
|
| 6186 |
+
97494,
|
| 6187 |
+
97514,
|
| 6188 |
+
97529,
|
| 6189 |
+
97539,
|
| 6190 |
+
97556,
|
| 6191 |
+
97568,
|
| 6192 |
+
97591,
|
| 6193 |
+
97605,
|
| 6194 |
+
97606,
|
| 6195 |
+
97619,
|
| 6196 |
+
97644,
|
| 6197 |
+
97676,
|
| 6198 |
+
97739,
|
| 6199 |
+
97741,
|
| 6200 |
+
97833,
|
| 6201 |
+
97856,
|
| 6202 |
+
97887,
|
| 6203 |
+
97918,
|
| 6204 |
+
97923,
|
| 6205 |
+
97937,
|
| 6206 |
+
97942,
|
| 6207 |
+
97946,
|
| 6208 |
+
97962,
|
| 6209 |
+
97999,
|
| 6210 |
+
98008,
|
| 6211 |
+
98029,
|
| 6212 |
+
98045,
|
| 6213 |
+
98048,
|
| 6214 |
+
98079,
|
| 6215 |
+
98102,
|
| 6216 |
+
98140,
|
| 6217 |
+
98155,
|
| 6218 |
+
98164,
|
| 6219 |
+
98174,
|
| 6220 |
+
98187,
|
| 6221 |
+
98203,
|
| 6222 |
+
98207,
|
| 6223 |
+
98208,
|
| 6224 |
+
98220,
|
| 6225 |
+
98249,
|
| 6226 |
+
98251,
|
| 6227 |
+
98274,
|
| 6228 |
+
98276,
|
| 6229 |
+
98282,
|
| 6230 |
+
98291,
|
| 6231 |
+
98302,
|
| 6232 |
+
98313,
|
| 6233 |
+
98319,
|
| 6234 |
+
98320,
|
| 6235 |
+
98329,
|
| 6236 |
+
98330,
|
| 6237 |
+
98347,
|
| 6238 |
+
98364,
|
| 6239 |
+
98366,
|
| 6240 |
+
98372,
|
| 6241 |
+
98379,
|
| 6242 |
+
98385,
|
| 6243 |
+
98398,
|
| 6244 |
+
98401,
|
| 6245 |
+
98406,
|
| 6246 |
+
98422,
|
| 6247 |
+
98434,
|
| 6248 |
+
98436,
|
| 6249 |
+
98443,
|
| 6250 |
+
98517,
|
| 6251 |
+
98524,
|
| 6252 |
+
98527,
|
| 6253 |
+
98610,
|
| 6254 |
+
98642,
|
| 6255 |
+
98655,
|
| 6256 |
+
98665,
|
| 6257 |
+
98670,
|
| 6258 |
+
98685,
|
| 6259 |
+
98688,
|
| 6260 |
+
98708,
|
| 6261 |
+
98722,
|
| 6262 |
+
98729,
|
| 6263 |
+
98734,
|
| 6264 |
+
98737,
|
| 6265 |
+
98747,
|
| 6266 |
+
98759,
|
| 6267 |
+
98769,
|
| 6268 |
+
98788,
|
| 6269 |
+
98821,
|
| 6270 |
+
98837,
|
| 6271 |
+
98853,
|
| 6272 |
+
98873,
|
| 6273 |
+
98880,
|
| 6274 |
+
98891,
|
| 6275 |
+
98893,
|
| 6276 |
+
98899,
|
| 6277 |
+
98903,
|
| 6278 |
+
98906,
|
| 6279 |
+
98926,
|
| 6280 |
+
98952,
|
| 6281 |
+
98964,
|
| 6282 |
+
98965,
|
| 6283 |
+
98973,
|
| 6284 |
+
98976,
|
| 6285 |
+
98986,
|
| 6286 |
+
98997,
|
| 6287 |
+
99007,
|
| 6288 |
+
99010,
|
| 6289 |
+
99012,
|
| 6290 |
+
99057,
|
| 6291 |
+
99058,
|
| 6292 |
+
99076,
|
| 6293 |
+
99082,
|
| 6294 |
+
99095,
|
| 6295 |
+
99102,
|
| 6296 |
+
99141,
|
| 6297 |
+
99144,
|
| 6298 |
+
99151,
|
| 6299 |
+
99156,
|
| 6300 |
+
99157,
|
| 6301 |
+
99158,
|
| 6302 |
+
99159,
|
| 6303 |
+
99160,
|
| 6304 |
+
99161,
|
| 6305 |
+
99162,
|
| 6306 |
+
99163,
|
| 6307 |
+
99166,
|
| 6308 |
+
99167,
|
| 6309 |
+
99168,
|
| 6310 |
+
99169,
|
| 6311 |
+
99170,
|
| 6312 |
+
99171,
|
| 6313 |
+
99173,
|
| 6314 |
+
99174,
|
| 6315 |
+
99175,
|
| 6316 |
+
99176,
|
| 6317 |
+
99177,
|
| 6318 |
+
99179,
|
| 6319 |
+
99181,
|
| 6320 |
+
99183,
|
| 6321 |
+
99184,
|
| 6322 |
+
99187,
|
| 6323 |
+
99188,
|
| 6324 |
+
99189,
|
| 6325 |
+
99192,
|
| 6326 |
+
99196,
|
| 6327 |
+
99197,
|
| 6328 |
+
99198,
|
| 6329 |
+
99201,
|
| 6330 |
+
99202,
|
| 6331 |
+
99203,
|
| 6332 |
+
99206,
|
| 6333 |
+
99207,
|
| 6334 |
+
99211,
|
| 6335 |
+
99214,
|
| 6336 |
+
99215,
|
| 6337 |
+
99220,
|
| 6338 |
+
99221,
|
| 6339 |
+
99224,
|
| 6340 |
+
99228,
|
| 6341 |
+
99229,
|
| 6342 |
+
99230,
|
| 6343 |
+
99231,
|
| 6344 |
+
99238,
|
| 6345 |
+
99239,
|
| 6346 |
+
99240,
|
| 6347 |
+
99247,
|
| 6348 |
+
99248,
|
| 6349 |
+
99249,
|
| 6350 |
+
99254,
|
| 6351 |
+
99255,
|
| 6352 |
+
99256,
|
| 6353 |
+
99264,
|
| 6354 |
+
99265,
|
| 6355 |
+
99266,
|
| 6356 |
+
99267,
|
| 6357 |
+
99268,
|
| 6358 |
+
99269,
|
| 6359 |
+
99274,
|
| 6360 |
+
99275,
|
| 6361 |
+
99276,
|
| 6362 |
+
99281,
|
| 6363 |
+
99282,
|
| 6364 |
+
99289,
|
| 6365 |
+
99290,
|
| 6366 |
+
99291,
|
| 6367 |
+
99300,
|
| 6368 |
+
99301,
|
| 6369 |
+
99302,
|
| 6370 |
+
99303,
|
| 6371 |
+
99311,
|
| 6372 |
+
99323,
|
| 6373 |
+
99324,
|
| 6374 |
+
99325,
|
| 6375 |
+
99341,
|
| 6376 |
+
99342,
|
| 6377 |
+
99343,
|
| 6378 |
+
99344,
|
| 6379 |
+
99359,
|
| 6380 |
+
99374,
|
| 6381 |
+
99380,
|
| 6382 |
+
99381,
|
| 6383 |
+
99382,
|
| 6384 |
+
99401,
|
| 6385 |
+
99402,
|
| 6386 |
+
99423,
|
| 6387 |
+
99439,
|
| 6388 |
+
99442,
|
| 6389 |
+
99443,
|
| 6390 |
+
99444,
|
| 6391 |
+
99456,
|
| 6392 |
+
99484,
|
| 6393 |
+
99485,
|
| 6394 |
+
99516,
|
| 6395 |
+
99551,
|
| 6396 |
+
99552,
|
| 6397 |
+
99597,
|
| 6398 |
+
99598,
|
| 6399 |
+
99643,
|
| 6400 |
+
99648,
|
| 6401 |
+
99649,
|
| 6402 |
+
99714,
|
| 6403 |
+
99715,
|
| 6404 |
+
99777,
|
| 6405 |
+
99785,
|
| 6406 |
+
99874,
|
| 6407 |
+
99875,
|
| 6408 |
+
99977,
|
| 6409 |
+
99997,
|
| 6410 |
+
100024,
|
| 6411 |
+
100120,
|
| 6412 |
+
100121,
|
| 6413 |
+
100127,
|
| 6414 |
+
100128,
|
| 6415 |
+
100129,
|
| 6416 |
+
100130,
|
| 6417 |
+
100337,
|
| 6418 |
+
100459,
|
| 6419 |
+
100563,
|
| 6420 |
+
100620,
|
| 6421 |
+
100621,
|
| 6422 |
+
100988,
|
| 6423 |
+
101018,
|
| 6424 |
+
101022,
|
| 6425 |
+
101024,
|
| 6426 |
+
101025,
|
| 6427 |
+
101026,
|
| 6428 |
+
101027,
|
| 6429 |
+
101028,
|
| 6430 |
+
101029,
|
| 6431 |
+
101030,
|
| 6432 |
+
101031,
|
| 6433 |
+
101032,
|
| 6434 |
+
101033,
|
| 6435 |
+
101759,
|
| 6436 |
+
101838,
|
| 6437 |
+
101851,
|
| 6438 |
+
101860,
|
| 6439 |
+
101861,
|
| 6440 |
+
101862,
|
| 6441 |
+
101863,
|
| 6442 |
+
101864,
|
| 6443 |
+
101865,
|
| 6444 |
+
101866,
|
| 6445 |
+
101867,
|
| 6446 |
+
101868,
|
| 6447 |
+
101869,
|
| 6448 |
+
101870,
|
| 6449 |
+
101871,
|
| 6450 |
+
101872,
|
| 6451 |
+
101873,
|
| 6452 |
+
101874,
|
| 6453 |
+
101875,
|
| 6454 |
+
101876,
|
| 6455 |
+
101877,
|
| 6456 |
+
101878,
|
| 6457 |
+
101879,
|
| 6458 |
+
101880,
|
| 6459 |
+
101881,
|
| 6460 |
+
103893,
|
| 6461 |
+
103894,
|
| 6462 |
+
103895,
|
| 6463 |
+
103896,
|
| 6464 |
+
103897,
|
| 6465 |
+
103898,
|
| 6466 |
+
103899,
|
| 6467 |
+
103900,
|
| 6468 |
+
103901,
|
| 6469 |
+
103902,
|
| 6470 |
+
103903,
|
| 6471 |
+
103904,
|
| 6472 |
+
103905,
|
| 6473 |
+
103906,
|
| 6474 |
+
103907,
|
| 6475 |
+
103908,
|
| 6476 |
+
103909,
|
| 6477 |
+
103910,
|
| 6478 |
+
103911,
|
| 6479 |
+
103912,
|
| 6480 |
+
103913,
|
| 6481 |
+
103914,
|
| 6482 |
+
103915,
|
| 6483 |
+
103916,
|
| 6484 |
+
103917,
|
| 6485 |
+
104905,
|
| 6486 |
+
105619,
|
| 6487 |
+
109241,
|
| 6488 |
+
109992,
|
| 6489 |
+
110713,
|
| 6490 |
+
111747,
|
| 6491 |
+
112951,
|
| 6492 |
+
113943,
|
| 6493 |
+
114235,
|
| 6494 |
+
114397,
|
| 6495 |
+
115646,
|
| 6496 |
+
117035,
|
| 6497 |
+
119158,
|
| 6498 |
+
119346,
|
| 6499 |
+
119347,
|
| 6500 |
+
119348,
|
| 6501 |
+
119941,
|
| 6502 |
+
119964,
|
| 6503 |
+
120100,
|
| 6504 |
+
120409,
|
| 6505 |
+
121404,
|
| 6506 |
+
121667,
|
| 6507 |
+
121773,
|
| 6508 |
+
122154,
|
| 6509 |
+
122174,
|
| 6510 |
+
122202,
|
| 6511 |
+
122219,
|
| 6512 |
+
122222,
|
| 6513 |
+
122259,
|
| 6514 |
+
122267,
|
| 6515 |
+
122289,
|
| 6516 |
+
122317,
|
| 6517 |
+
122375,
|
| 6518 |
+
122382,
|
| 6519 |
+
122427,
|
| 6520 |
+
122448,
|
| 6521 |
+
122455,
|
| 6522 |
+
122514,
|
| 6523 |
+
122568,
|
| 6524 |
+
122596,
|
| 6525 |
+
122614,
|
| 6526 |
+
122618,
|
| 6527 |
+
122634,
|
| 6528 |
+
122659,
|
| 6529 |
+
122725,
|
| 6530 |
+
122740,
|
| 6531 |
+
122757,
|
| 6532 |
+
122802,
|
| 6533 |
+
122843,
|
| 6534 |
+
122855,
|
| 6535 |
+
122866,
|
| 6536 |
+
122890,
|
| 6537 |
+
122948,
|
| 6538 |
+
122950,
|
| 6539 |
+
122976,
|
| 6540 |
+
122985,
|
| 6541 |
+
122999,
|
| 6542 |
+
123007,
|
| 6543 |
+
123008,
|
| 6544 |
+
123016,
|
| 6545 |
+
123057,
|
| 6546 |
+
123064,
|
| 6547 |
+
123067,
|
| 6548 |
+
123072,
|
| 6549 |
+
123084,
|
| 6550 |
+
123091,
|
| 6551 |
+
123131,
|
| 6552 |
+
123164,
|
| 6553 |
+
123178,
|
| 6554 |
+
123182,
|
| 6555 |
+
123204,
|
| 6556 |
+
123206,
|
| 6557 |
+
123287,
|
| 6558 |
+
123301,
|
| 6559 |
+
123305,
|
| 6560 |
+
123318,
|
| 6561 |
+
123332,
|
| 6562 |
+
123378,
|
| 6563 |
+
123400,
|
| 6564 |
+
123408,
|
| 6565 |
+
123420,
|
| 6566 |
+
123489,
|
| 6567 |
+
123516,
|
| 6568 |
+
123527,
|
| 6569 |
+
123547,
|
| 6570 |
+
123614,
|
| 6571 |
+
123658,
|
| 6572 |
+
123676,
|
| 6573 |
+
123701,
|
| 6574 |
+
123714,
|
| 6575 |
+
123740,
|
| 6576 |
+
123747,
|
| 6577 |
+
123760,
|
| 6578 |
+
123806,
|
| 6579 |
+
123807,
|
| 6580 |
+
123808,
|
| 6581 |
+
123810,
|
| 6582 |
+
123811,
|
| 6583 |
+
123812,
|
| 6584 |
+
123813,
|
| 6585 |
+
123814,
|
| 6586 |
+
123815,
|
| 6587 |
+
123816,
|
| 6588 |
+
123817,
|
| 6589 |
+
123819,
|
| 6590 |
+
123821,
|
| 6591 |
+
123825,
|
| 6592 |
+
123827,
|
| 6593 |
+
123830,
|
| 6594 |
+
123836,
|
| 6595 |
+
123837,
|
| 6596 |
+
123839,
|
| 6597 |
+
123840,
|
| 6598 |
+
123841,
|
| 6599 |
+
123842,
|
| 6600 |
+
123844,
|
| 6601 |
+
123846,
|
| 6602 |
+
123847,
|
| 6603 |
+
123848,
|
| 6604 |
+
123851,
|
| 6605 |
+
123853,
|
| 6606 |
+
123854,
|
| 6607 |
+
123859,
|
| 6608 |
+
123867,
|
| 6609 |
+
123870,
|
| 6610 |
+
123871,
|
| 6611 |
+
123875,
|
| 6612 |
+
123876,
|
| 6613 |
+
123882,
|
| 6614 |
+
123889,
|
| 6615 |
+
123892,
|
| 6616 |
+
123893,
|
| 6617 |
+
123907,
|
| 6618 |
+
123908,
|
| 6619 |
+
123911,
|
| 6620 |
+
123912,
|
| 6621 |
+
123916,
|
| 6622 |
+
123917,
|
| 6623 |
+
123918,
|
| 6624 |
+
123927,
|
| 6625 |
+
123928,
|
| 6626 |
+
123929,
|
| 6627 |
+
123930,
|
| 6628 |
+
123931,
|
| 6629 |
+
123932,
|
| 6630 |
+
123933,
|
| 6631 |
+
123934,
|
| 6632 |
+
123946,
|
| 6633 |
+
123947,
|
| 6634 |
+
123950,
|
| 6635 |
+
123951,
|
| 6636 |
+
123953,
|
| 6637 |
+
123954,
|
| 6638 |
+
123955,
|
| 6639 |
+
123956,
|
| 6640 |
+
123957,
|
| 6641 |
+
123965,
|
| 6642 |
+
123967,
|
| 6643 |
+
123968,
|
| 6644 |
+
123969,
|
| 6645 |
+
123970,
|
| 6646 |
+
123986,
|
| 6647 |
+
123989,
|
| 6648 |
+
123996,
|
| 6649 |
+
124000,
|
| 6650 |
+
124001,
|
| 6651 |
+
124002,
|
| 6652 |
+
124003,
|
| 6653 |
+
124004,
|
| 6654 |
+
124005,
|
| 6655 |
+
124018,
|
| 6656 |
+
124019,
|
| 6657 |
+
124020,
|
| 6658 |
+
124021,
|
| 6659 |
+
124022,
|
| 6660 |
+
124023,
|
| 6661 |
+
124024,
|
| 6662 |
+
124025,
|
| 6663 |
+
124026,
|
| 6664 |
+
124049,
|
| 6665 |
+
124050,
|
| 6666 |
+
124051,
|
| 6667 |
+
124052,
|
| 6668 |
+
124053,
|
| 6669 |
+
124054,
|
| 6670 |
+
124063,
|
| 6671 |
+
124064,
|
| 6672 |
+
124066,
|
| 6673 |
+
124067,
|
| 6674 |
+
124068,
|
| 6675 |
+
124069,
|
| 6676 |
+
124070,
|
| 6677 |
+
124071,
|
| 6678 |
+
124092,
|
| 6679 |
+
124096,
|
| 6680 |
+
124098,
|
| 6681 |
+
124099,
|
| 6682 |
+
124100,
|
| 6683 |
+
124101,
|
| 6684 |
+
124102,
|
| 6685 |
+
124118,
|
| 6686 |
+
124134,
|
| 6687 |
+
124135,
|
| 6688 |
+
124136,
|
| 6689 |
+
124137,
|
| 6690 |
+
124149,
|
| 6691 |
+
124154,
|
| 6692 |
+
124159,
|
| 6693 |
+
124160,
|
| 6694 |
+
124161,
|
| 6695 |
+
124162,
|
| 6696 |
+
124163,
|
| 6697 |
+
124164,
|
| 6698 |
+
124165,
|
| 6699 |
+
124166,
|
| 6700 |
+
124167,
|
| 6701 |
+
124168,
|
| 6702 |
+
124193,
|
| 6703 |
+
124194,
|
| 6704 |
+
124195,
|
| 6705 |
+
124196,
|
| 6706 |
+
124197,
|
| 6707 |
+
124198,
|
| 6708 |
+
124199,
|
| 6709 |
+
124200,
|
| 6710 |
+
124201,
|
| 6711 |
+
124222,
|
| 6712 |
+
124235,
|
| 6713 |
+
124236,
|
| 6714 |
+
124240,
|
| 6715 |
+
124241,
|
| 6716 |
+
124243,
|
| 6717 |
+
124244,
|
| 6718 |
+
124245,
|
| 6719 |
+
124246,
|
| 6720 |
+
124247,
|
| 6721 |
+
124248,
|
| 6722 |
+
124249,
|
| 6723 |
+
124250,
|
| 6724 |
+
124251,
|
| 6725 |
+
124252,
|
| 6726 |
+
124287,
|
| 6727 |
+
124296,
|
| 6728 |
+
124297,
|
| 6729 |
+
124298,
|
| 6730 |
+
124299,
|
| 6731 |
+
124300,
|
| 6732 |
+
124301,
|
| 6733 |
+
124302,
|
| 6734 |
+
124303,
|
| 6735 |
+
124304,
|
| 6736 |
+
124305,
|
| 6737 |
+
124306,
|
| 6738 |
+
124307,
|
| 6739 |
+
124308,
|
| 6740 |
+
124309,
|
| 6741 |
+
124334,
|
| 6742 |
+
124349,
|
| 6743 |
+
124364,
|
| 6744 |
+
124365,
|
| 6745 |
+
124366,
|
| 6746 |
+
124367,
|
| 6747 |
+
124368,
|
| 6748 |
+
124369,
|
| 6749 |
+
124370,
|
| 6750 |
+
124371,
|
| 6751 |
+
124372,
|
| 6752 |
+
124373,
|
| 6753 |
+
124374,
|
| 6754 |
+
124433,
|
| 6755 |
+
124459,
|
| 6756 |
+
124470,
|
| 6757 |
+
124471,
|
| 6758 |
+
124472,
|
| 6759 |
+
124473,
|
| 6760 |
+
124474,
|
| 6761 |
+
124475,
|
| 6762 |
+
124570,
|
| 6763 |
+
124577,
|
| 6764 |
+
124578,
|
| 6765 |
+
124579,
|
| 6766 |
+
124580,
|
| 6767 |
+
124581,
|
| 6768 |
+
124582,
|
| 6769 |
+
124583,
|
| 6770 |
+
124584,
|
| 6771 |
+
124585,
|
| 6772 |
+
124586,
|
| 6773 |
+
124587,
|
| 6774 |
+
124588,
|
| 6775 |
+
124589,
|
| 6776 |
+
124590,
|
| 6777 |
+
124591,
|
| 6778 |
+
124592,
|
| 6779 |
+
124593,
|
| 6780 |
+
124594,
|
| 6781 |
+
124595,
|
| 6782 |
+
124596,
|
| 6783 |
+
124693,
|
| 6784 |
+
124727,
|
| 6785 |
+
124744,
|
| 6786 |
+
124745,
|
| 6787 |
+
124746,
|
| 6788 |
+
124747,
|
| 6789 |
+
124748,
|
| 6790 |
+
124749,
|
| 6791 |
+
124750,
|
| 6792 |
+
124751,
|
| 6793 |
+
124752,
|
| 6794 |
+
124753,
|
| 6795 |
+
124754,
|
| 6796 |
+
124755,
|
| 6797 |
+
124995,
|
| 6798 |
+
124996,
|
| 6799 |
+
124997,
|
| 6800 |
+
124998,
|
| 6801 |
+
124999,
|
| 6802 |
+
125000,
|
| 6803 |
+
125001,
|
| 6804 |
+
125002,
|
| 6805 |
+
125003,
|
| 6806 |
+
125004,
|
| 6807 |
+
125388,
|
| 6808 |
+
125413,
|
| 6809 |
+
125414,
|
| 6810 |
+
125415,
|
| 6811 |
+
125416,
|
| 6812 |
+
125417,
|
| 6813 |
+
125418,
|
| 6814 |
+
125419,
|
| 6815 |
+
125420,
|
| 6816 |
+
125421,
|
| 6817 |
+
125422,
|
| 6818 |
+
125423,
|
| 6819 |
+
125424,
|
| 6820 |
+
125425,
|
| 6821 |
+
125426,
|
| 6822 |
+
125427,
|
| 6823 |
+
125428,
|
| 6824 |
+
125429,
|
| 6825 |
+
125430,
|
| 6826 |
+
125639,
|
| 6827 |
+
125713,
|
| 6828 |
+
126159,
|
| 6829 |
+
126177,
|
| 6830 |
+
126178,
|
| 6831 |
+
126179,
|
| 6832 |
+
126180,
|
| 6833 |
+
126181,
|
| 6834 |
+
126182,
|
| 6835 |
+
126183,
|
| 6836 |
+
126184,
|
| 6837 |
+
126185,
|
| 6838 |
+
126186,
|
| 6839 |
+
126187,
|
| 6840 |
+
126188,
|
| 6841 |
+
126189,
|
| 6842 |
+
126190,
|
| 6843 |
+
126191,
|
| 6844 |
+
126192,
|
| 6845 |
+
126193,
|
| 6846 |
+
126365,
|
| 6847 |
+
126537,
|
| 6848 |
+
127041,
|
| 6849 |
+
127124,
|
| 6850 |
+
127165,
|
| 6851 |
+
127369,
|
| 6852 |
+
127572,
|
| 6853 |
+
127708,
|
| 6854 |
+
127819,
|
| 6855 |
+
127964,
|
| 6856 |
+
128222,
|
| 6857 |
+
128223,
|
| 6858 |
+
128224,
|
| 6859 |
+
128225,
|
| 6860 |
+
128226,
|
| 6861 |
+
128227,
|
| 6862 |
+
128228,
|
| 6863 |
+
128229,
|
| 6864 |
+
128230,
|
| 6865 |
+
128231,
|
| 6866 |
+
128232,
|
| 6867 |
+
128233,
|
| 6868 |
+
128234,
|
| 6869 |
+
128235,
|
| 6870 |
+
128236,
|
| 6871 |
+
128237,
|
| 6872 |
+
128238,
|
| 6873 |
+
128239,
|
| 6874 |
+
128240,
|
| 6875 |
+
128241,
|
| 6876 |
+
128242,
|
| 6877 |
+
129176,
|
| 6878 |
+
130507,
|
| 6879 |
+
130654,
|
| 6880 |
+
132202,
|
| 6881 |
+
132376,
|
| 6882 |
+
133552,
|
| 6883 |
+
134312,
|
| 6884 |
+
134380,
|
| 6885 |
+
134582,
|
| 6886 |
+
135078,
|
| 6887 |
+
136279,
|
| 6888 |
+
136516,
|
| 6889 |
+
136661,
|
| 6890 |
+
136940,
|
| 6891 |
+
136946,
|
| 6892 |
+
137345,
|
| 6893 |
+
137661,
|
| 6894 |
+
137767,
|
| 6895 |
+
137973,
|
| 6896 |
+
138658,
|
| 6897 |
+
138673,
|
| 6898 |
+
139793,
|
| 6899 |
+
139816,
|
| 6900 |
+
140356,
|
| 6901 |
+
141539,
|
| 6902 |
+
142067,
|
| 6903 |
+
142209,
|
| 6904 |
+
142210,
|
| 6905 |
+
142258,
|
| 6906 |
+
142274,
|
| 6907 |
+
142299,
|
| 6908 |
+
142451,
|
| 6909 |
+
142509,
|
| 6910 |
+
142731,
|
| 6911 |
+
142834,
|
| 6912 |
+
143241,
|
| 6913 |
+
143455,
|
| 6914 |
+
143861,
|
| 6915 |
+
143887,
|
| 6916 |
+
145773,
|
| 6917 |
+
146047,
|
| 6918 |
+
146632,
|
| 6919 |
+
148462,
|
| 6920 |
+
148774,
|
| 6921 |
+
148827,
|
| 6922 |
+
148860,
|
| 6923 |
+
148864,
|
| 6924 |
+
148880,
|
| 6925 |
+
149175,
|
| 6926 |
+
149178,
|
| 6927 |
+
149287,
|
| 6928 |
+
149352,
|
| 6929 |
+
149392,
|
| 6930 |
+
149394,
|
| 6931 |
+
149589,
|
| 6932 |
+
149591,
|
| 6933 |
+
149593,
|
| 6934 |
+
149714,
|
| 6935 |
+
149716,
|
| 6936 |
+
149718,
|
| 6937 |
+
149721,
|
| 6938 |
+
149946,
|
| 6939 |
+
149961,
|
| 6940 |
+
149963,
|
| 6941 |
+
149990,
|
| 6942 |
+
150114,
|
| 6943 |
+
150151,
|
| 6944 |
+
150168,
|
| 6945 |
+
150195,
|
| 6946 |
+
150223,
|
| 6947 |
+
150270,
|
| 6948 |
+
150479,
|
| 6949 |
+
150579,
|
| 6950 |
+
150792,
|
| 6951 |
+
150794,
|
| 6952 |
+
150800,
|
| 6953 |
+
150802,
|
| 6954 |
+
150807,
|
| 6955 |
+
150809,
|
| 6956 |
+
150814,
|
| 6957 |
+
150817,
|
| 6958 |
+
150819,
|
| 6959 |
+
150821,
|
| 6960 |
+
150824,
|
| 6961 |
+
150829,
|
| 6962 |
+
150836,
|
| 6963 |
+
151233,
|
| 6964 |
+
151254,
|
| 6965 |
+
151264,
|
| 6966 |
+
151266,
|
| 6967 |
+
151268,
|
| 6968 |
+
151270,
|
| 6969 |
+
151272,
|
| 6970 |
+
151274,
|
| 6971 |
+
151276,
|
| 6972 |
+
151278,
|
| 6973 |
+
151282,
|
| 6974 |
+
151366,
|
| 6975 |
+
151560
|
| 6976 |
+
],
|
| 6977 |
+
"bos_token": "<|im_start|>",
|
| 6978 |
+
"clean_up_tokenization_spaces": false,
|
| 6979 |
+
"eos_token": "<|im_end|>",
|
| 6980 |
+
"errors": "replace",
|
| 6981 |
+
"is_local": true,
|
| 6982 |
+
"model_max_length": 131072,
|
| 6983 |
+
"pad_token": "<|endoftext|>",
|
| 6984 |
+
"processor_class": "MiniCPMOProcessor",
|
| 6985 |
+
"split_special_tokens": false,
|
| 6986 |
+
"tokenizer_class": "Qwen2Tokenizer",
|
| 6987 |
+
"unk_token": "<unk>",
|
| 6988 |
+
"use_fast": true
|
| 6989 |
+
}
|
utils.py
ADDED
|
@@ -0,0 +1,2417 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
# -*- coding: utf-8 -*-
|
| 3 |
+
# Copyright 2026 The OpenBMB Team. All rights reserved.
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
|
| 17 |
+
import logging
|
| 18 |
+
from dataclasses import dataclass
|
| 19 |
+
from typing import Any
|
| 20 |
+
from typing import Dict
|
| 21 |
+
from typing import List
|
| 22 |
+
from typing import Literal
|
| 23 |
+
from typing import Optional
|
| 24 |
+
from typing import Tuple
|
| 25 |
+
from typing import Union
|
| 26 |
+
|
| 27 |
+
import torch
|
| 28 |
+
import torch.nn.functional as F
|
| 29 |
+
import torch.nn.utils.parametrize as P
|
| 30 |
+
from transformers.cache_utils import DynamicCache
|
| 31 |
+
|
| 32 |
+
logger = logging.getLogger(__name__)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
# text
|
| 36 |
+
@dataclass
class GenerateChunkOutput:
    """Output of a single chunk-sized decoding step (see chunk_generate)."""

    # Ids of the tokens generated in this chunk; built by concatenating (1, 1)
    # samples along dim=1, so the shape is (1, n_generated).  May be an empty
    # (1, 0) tensor when the chunk's very first prediction is a terminator.
    chunk_token_ids: torch.Tensor
    # The incoming inputs_embeds extended with the embeddings of every newly
    # generated token (concatenated along the sequence dimension).
    current_inputs_embeds: torch.Tensor
    input_last_hidden_states: Optional[torch.Tensor]  # for tts use_speaker_embedding
    last_hidden_states: Optional[torch.Tensor]  # for tts input feature (projector_semantic)
    # NOTE(review): despite the annotation, this carries the KV-cache object
    # returned by the model's forward pass, not a plain tensor — confirm.
    past_key_values: Optional[torch.Tensor]
    # True when a terminator token ended generation inside this chunk.
    finished: bool
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class ChunkPrefillChunkGenerate:
    """Chunk-wise streaming decoder on top of a causal LM.

    The prompt is prefilled once (as embeddings) and afterwards at most
    ``chunk_size`` tokens are decoded per call, while the LLM KV cache is
    threaded through successive calls.  Last-layer hidden states can be
    collected for a downstream TTS module.
    """

    def __init__(self, model, tokenizer, terminators):
        """
        Args:
            model: Causal LM exposing ``get_input_embeddings()`` and a
                HuggingFace-style forward (``inputs_embeds``, ``past_key_values``).
            tokenizer: Tokenizer used to resolve terminator / forbidden tokens to ids.
            terminators: Iterable of token strings whose ids end generation.
        """
        self.tokenizer = tokenizer
        self.model = model
        self.terminators = terminators
        self.terminators_ids = [tokenizer.convert_tokens_to_ids(i) for i in self.terminators]
        self.embedding_layer = self.model.get_input_embeddings()

        # Punctuation / markup tokens that must never be sampled; suppressed at
        # decode time (equivalent to `suppress_tokens` in `model.generate`).
        self.forbidden_tokens = [
            ":",
            ":",
            ";",
            "#",
            "“",
            "”",
            "‘",
            "’",
            "@",
            "*",
            "【",
            "】",
            "「",
            "」",
            "(",
            ")",
            "(",
            ")",
            "[",
            "]",
            "&",
            "/",
            "$",
        ]

        self.forbidden_token_ids = [tokenizer.convert_tokens_to_ids(i) for i in self.forbidden_tokens]
        # The tokenizer may contribute additional suppressed ids.
        bad_token_ids = getattr(tokenizer, "bad_token_ids", [])
        if bad_token_ids:
            self.forbidden_token_ids.extend(bad_token_ids)

    @staticmethod
    def prepare_generation_config(do_sample, max_new_tokens=50, min_new_tokens=0, **kwargs):
        """Build a generation-config dict for one decoding pass.

        Args:
            do_sample: Use stochastic sampling (True); otherwise beam search
                when ``num_beams > 1``, else greedy decoding.
            max_new_tokens: Upper bound on generated tokens.
            min_new_tokens: Lower bound on generated tokens.
            **kwargs: Overrides for keys already present in the config
                (e.g. ``temperature``, ``top_p``, ``num_beams``); unknown keys
                are ignored.

        Returns:
            dict with the merged generation settings.
        """
        num_beams = kwargs.get("num_beams", 3)
        generation_config = {
            "num_beams": num_beams,
            "top_p": 0.8,
            "top_k": 100,
            "temperature": 0.7,
            "do_sample": True,
            "repetition_penalty": 1.05,
        }

        if do_sample:
            generation_config.update(
                {
                    "top_p": 0.8,
                    "top_k": 100,
                    "temperature": 0.7,
                    "do_sample": True,
                    "repetition_penalty": 1.05,
                }
            )
        elif num_beams > 1:
            generation_config.update({"num_beams": num_beams, "repetition_penalty": 1.2, "do_sample": False})
        else:
            generation_config.update({"do_sample": False, "repetition_penalty": 1.05})

        # Only keys already present in the config may be overridden by kwargs.
        generation_config.update((k, kwargs[k]) for k in generation_config.keys() & kwargs.keys())
        generation_config["min_new_tokens"] = min_new_tokens
        generation_config["max_new_tokens"] = max_new_tokens

        return generation_config

    def chunk_generate(
        self,
        inputs_embeds: torch.Tensor,
        past_key_values,
        is_first_generate_chunk: bool,
        chunk_size: int,
        return_hidden_states: bool,
        do_sample: bool,
        temperature: float,
        top_p: float,
        top_k: int,
        repetition_penalty: float = 1.05,
        length_penalty: float = 1.0,
        all_input_ids: Optional[torch.Tensor] = None,
    ) -> "GenerateChunkOutput":
        """Decode at most ``chunk_size`` tokens, threading the KV cache.

        Args:
            inputs_embeds: [1, seq_len, hidden_dim], input embeddings of the current chunk.
            past_key_values: KV cache for the LLM, carried across calls.
            is_first_generate_chunk: Whether this is the first generate chunk
                (the whole ``inputs_embeds`` is prefilled then).
            chunk_size: Number of tokens to decode in this chunk (fixed to 10 in training).
            return_hidden_states: Whether to collect last-layer hidden states.
            do_sample: Whether to sample; otherwise greedy argmax.
            temperature: Softmax temperature, default 0.7.
            top_p: Nucleus-sampling threshold, default 0.8.
            top_k: Top-k cutoff, default 100.
            repetition_penalty: Repetition penalty, default 1.05.
            length_penalty: EOS-logit penalty, default 1.0; higher values delay
                EOS and yield longer generations.
            all_input_ids: Optional global token ids (prompt + generated) used
                as the repetition-penalty context window.

        Returns:
            GenerateChunkOutput.  ``last_hidden_states`` / ``input_last_hidden_states``
            are ``None`` when not collected.
        """

        finished = False
        current_inputs_embeds = inputs_embeds.clone()
        input_last_hidden_states = []
        last_hidden_states = []
        generated_tokens = []

        for token_idx in range(chunk_size):
            if is_first_generate_chunk and token_idx == 0:
                # first generate chunk, prefill the whole inputs_embeds
                model_inputs = {
                    "inputs_embeds": current_inputs_embeds,
                    "past_key_values": past_key_values,
                    "use_cache": True,
                    "output_hidden_states": return_hidden_states,
                }
            else:  # for all other cases: prefill only the latest generated token
                model_inputs = {
                    "inputs_embeds": current_inputs_embeds[:, -1:, :],
                    "past_key_values": past_key_values,
                    "use_cache": True,
                    "output_hidden_states": return_hidden_states,
                }

            with torch.no_grad():
                outputs = self.model(**model_inputs)

            # last token's logits (detached copy in fp32 for stable sampling math)
            logits = outputs.logits[:, -1, :].to(copy=True, dtype=torch.float32, device=inputs_embeds.device)

            # forbid specific tokens from decoding, like model.generate@suppress_tokens
            if self.forbidden_token_ids:
                logits[:, self.forbidden_token_ids] = float("-inf")

            past_key_values = outputs.past_key_values

            PENALTY_WINDOW_SIZE = 128

            # apply repetition penalty
            if repetition_penalty != 1.0:
                # collect the token ids the penalty should apply to
                if all_input_ids is not None:
                    # use global input ids (original input plus generated part)
                    if len(generated_tokens) > 0:
                        generated_token_ids = torch.cat(generated_tokens, dim=1)
                        current_sequence = torch.cat(
                            [
                                all_input_ids[:, -PENALTY_WINDOW_SIZE:],
                                generated_token_ids,
                            ],
                            dim=1,
                        )
                    else:
                        current_sequence = all_input_ids[:, -PENALTY_WINDOW_SIZE:]
                    unique_token_ids = torch.unique(current_sequence.squeeze(0))
                elif len(generated_tokens) > 0:
                    # fall back to penalizing only tokens generated in this call
                    generated_token_ids = torch.cat(generated_tokens, dim=1).squeeze(0)
                    unique_token_ids = torch.unique(generated_token_ids)
                else:
                    unique_token_ids = torch.tensor([], dtype=torch.long, device=logits.device)

                # divide positive logits / multiply negative ones (HF convention)
                for token_id in unique_token_ids:
                    if logits[0, token_id] > 0:
                        logits[0, token_id] = logits[0, token_id] / repetition_penalty
                    else:
                        logits[0, token_id] = logits[0, token_id] * repetition_penalty

            # apply length penalty: higher value suppresses EOS -> longer outputs
            if length_penalty != 1.0:
                for eos_token_id in self.terminators_ids:
                    if logits[0, eos_token_id] > 0:
                        logits[0, eos_token_id] = logits[0, eos_token_id] / length_penalty
                    else:
                        logits[0, eos_token_id] = logits[0, eos_token_id] * length_penalty

            # apply temperature
            if temperature != 1.0:
                logits = logits / temperature

            if do_sample:
                # Top-k filtering
                if top_k > 0:
                    top_k_logits, top_k_indices = torch.topk(logits, min(top_k, logits.size(-1)))
                    logits_filtered = torch.full_like(logits, float("-inf"))
                    logits_filtered.scatter_(1, top_k_indices, top_k_logits)
                    logits = logits_filtered

                # Top-p (nucleus) filtering
                if top_p < 1.0:
                    sorted_logits, sorted_indices = torch.sort(logits, descending=True)
                    cumulative_probs = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1)

                    # remove tokens with cumulative probability greater than top_p,
                    # but always keep the single most likely token
                    sorted_indices_to_remove = cumulative_probs > top_p
                    sorted_indices_to_remove[..., 1:] = sorted_indices_to_remove[..., :-1].clone()
                    sorted_indices_to_remove[..., 0] = 0

                    indices_to_remove = sorted_indices_to_remove.scatter(1, sorted_indices, sorted_indices_to_remove)
                    logits[indices_to_remove] = float("-inf")

                # sampling
                probs = F.softmax(logits, dim=-1)
                next_token = torch.multinomial(probs, num_samples=1)
            else:
                next_token = torch.argmax(logits, dim=-1, keepdim=True)

            if return_hidden_states:
                if is_first_generate_chunk and token_idx == 0:
                    input_last_hidden_states.append(outputs.hidden_states[-1])
                else:
                    last_hidden_states.append(outputs.hidden_states[-1])

            # if terminator token, stop generating (terminator itself is not kept)
            if next_token.item() in self.terminators_ids:
                finished = True
                break

            generated_tokens.append(next_token)

            # convert the new token to embeddings and append it for the next step
            next_token_embed = self.embedding_layer(next_token)
            current_inputs_embeds = torch.cat([current_inputs_embeds, next_token_embed], dim=1)

        if len(generated_tokens) > 0:
            chunk_token_ids = torch.cat(generated_tokens, dim=1)
        else:
            # special case: the chunk's very first prediction was a terminator;
            # return an empty (1, 0) tensor so downstream code can handle it.
            if finished:
                chunk_token_ids = torch.zeros((1, 0), dtype=torch.long, device=current_inputs_embeds.device)
            else:
                raise Exception("this should not happen")

        if len(last_hidden_states) > 0:
            last_hidden_states = torch.cat(last_hidden_states, dim=1)
        elif finished or not return_hidden_states:
            # BUGFIX: the previous code called torch.cat on the empty list here,
            # which raises RuntimeError.  No decode-step hidden states exist when
            # the first prediction was a terminator, or when hidden states were
            # not requested at all; the dataclass field is Optional, return None.
            last_hidden_states = None
        else:
            raise Exception("this should not happen")

        if len(input_last_hidden_states) > 0:
            input_last_hidden_states = torch.cat(input_last_hidden_states, dim=1)
        else:
            input_last_hidden_states = None

        return GenerateChunkOutput(
            chunk_token_ids=chunk_token_ids,
            current_inputs_embeds=current_inputs_embeds,
            input_last_hidden_states=input_last_hidden_states,
            last_hidden_states=last_hidden_states,
            past_key_values=past_key_values,
            finished=finished,
        )
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
def streaming_token_decoder(token_iterator, tokenizer, skip_special_tokens=False):
    """Incrementally decode streamed token ids into text.

    Multi-byte characters (e.g. Chinese) can be split across token boundaries;
    decoding such a partial prefix yields U+FFFD replacement characters.  This
    generator therefore re-decodes the full accumulated id list on every step
    and only emits the stable prefix, holding back any trailing U+FFFD until
    the missing bytes arrive.

    Args:
        token_iterator: Iterator of ``(token_ids, is_finished)`` pairs, where
            ``token_ids`` is a torch.Tensor, an iterable of ints, or a single int.
        tokenizer: Tokenizer providing ``decode``.
        skip_special_tokens: Forwarded to ``tokenizer.decode``.

    Yields:
        ``(new_text, is_finished)`` pairs; ``new_text`` is the text added since
        the previous yield.
    """
    ids_so_far = []
    emitted_len = 0

    for token_ids, is_finished in token_iterator:
        # Normalize this step's ids and append them to the running list.
        if torch.is_tensor(token_ids):
            ids_so_far.extend(token_ids.reshape(-1).tolist())
        elif hasattr(token_ids, "__iter__"):
            ids_so_far.extend(list(token_ids))
        else:
            ids_so_far.append(token_ids)

        decoded = tokenizer.decode(ids_so_far, skip_special_tokens=skip_special_tokens)
        pending = decoded[emitted_len:]

        if is_finished:
            # Final step: flush everything that has not been emitted yet.
            yield pending, is_finished
        else:
            # Hold back trailing replacement characters (incomplete UTF-8 tail).
            stable = pending.rstrip("\ufffd")
            emitted_len += len(stable)
            yield stable, is_finished
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
def torch_clone_recursive(obj):
    """Recursively clone nested containers of torch.Tensors.

    Supported container types: dict, list, tuple.  Non-container non-Tensor
    objects (ints, floats, strings, None, ...) are returned as-is.

    BUGFIX: the previous implementation raised ``ValueError`` for plain
    scalars, contradicting its own docstring and breaking snapshots whose
    state dicts mix tensors with Python scalars (e.g. buffer lengths, flags).
    """
    if torch.is_tensor(obj):
        return obj.clone()
    if isinstance(obj, dict):
        return {k: torch_clone_recursive(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return [torch_clone_recursive(v) for v in obj]
    if isinstance(obj, tuple):
        return tuple(torch_clone_recursive(v) for v in obj)
    # Immutable leaf (or an object we do not know how to clone): pass through,
    # as documented.
    return obj
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
def rotate_half(x: torch.Tensor) -> torch.Tensor:
    """Rotate half the hidden dims of the input (RoPE helper).

    Splits the last dimension into two halves (a, b) and returns (-b, a)
    concatenated along the last dimension.
    """
    half = x.shape[-1] // 2
    first_half = x[..., :half]
    second_half = x[..., half:]
    return torch.cat((-second_half, first_half), dim=-1)
|
| 381 |
+
|
| 382 |
+
|
| 383 |
+
@dataclass
class SpeculativeSnapshot:
    """State snapshot enabling VAD speculative rollback.

    Taken after streaming_prefill and before streaming_generate; if the
    speculation fails (the user keeps speaking), the saved state lets the
    session resume streaming_prefill as if generation never started.

    Implementation notes:
    - LLM KV cache: only its length is recorded; restore = truncate (no extra VRAM).
    - Audio KV cache: cloned, because generate sets it to None.
    - Mel processor: full state snapshot, including its buffer.
    """

    # KV cache lengths, restored by truncation.
    llm_cache_length: int
    audio_cache_length: int

    # Session flags.
    new_user_msg: bool
    llm_generated: bool
    llm_generate_completed: bool

    # Round bookkeeping.
    next_round_id: int
    pending_round_id: Optional[int]
    omni_chunk_history_length: int

    # TTS state (cloned; usually small).
    tts_last_turn_tokens: Optional[torch.Tensor]

    # Streaming-processor position.
    audio_chunk_idx: int

    # Mel processor state (including its buffer).
    mel_processor_snapshot: Optional[dict] = None

    # Audio encoder KV cache (cloned so recovery is deterministic).
    audio_past_key_values: Optional[tuple] = None

    # Wall-clock time of the snapshot, for debugging.
    timestamp: float = 0.0

    # Debug checksums used to verify that recovery restored the right state.
    llm_cache_checksum: Optional[float] = None  # LLM KV cache, first layer K sum
    audio_cache_checksum: Optional[float] = None  # audio KV cache, first layer K sum
    mel_buffer_checksum: Optional[float] = None  # mel buffer sum

    # RNG state, so post-recovery randomness (e.g. dithering) is reproducible.
    rng_state_cpu: Optional[torch.Tensor] = None  # torch CPU RNG state
    rng_state_cuda: Optional[torch.Tensor] = None  # torch CUDA RNG state, if on GPU

    def summary(self) -> str:
        """Return a one-line human-readable description of this snapshot."""
        snap = self.mel_processor_snapshot or {}
        mel_buffer = snap.get("buffer")
        mel_buffer_len = len(mel_buffer) if mel_buffer is not None else 0
        fields = [
            f"llm_cache={self.llm_cache_length}",
            f"audio_cache={self.audio_cache_length}",
            f"audio_chunk_idx={self.audio_chunk_idx}",
            f"mel_buffer={mel_buffer_len}",
            f"history_len={self.omni_chunk_history_length}",
            f"new_user_msg={self.new_user_msg}",
            f"llm_generated={self.llm_generated}",
        ]
        return ", ".join(fields)
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
# tts
|
| 453 |
+
@dataclass
class TTSSamplingParams:
    """Sampling hyper-parameters for TTS audio-token generation."""

    top_p: float = 0.85  # nucleus sampling threshold
    min_p: float = 0.01  # minimum-probability cutoff relative to the top token
    top_k: int = 25  # top-k sampling cutoff
    repetition_penalty: float = 1.05  # penalty factor for previously generated tokens
    temperature: float = 0.8  # softmax temperature
    win_size: int = 16  # NOTE(review): presumably the look-back window for repetition handling — confirm against usage
    tau_r: float = 0.1  # NOTE(review): presumably a repetition threshold paired with win_size — confirm against usage
|
| 462 |
+
|
| 463 |
+
|
| 464 |
+
class TTSStreamingGenerator:
    """
    Streaming generator for TTS that processes chunks and yields audio tokens in real-time.

    Supported attention types:
    - full_attention: Full attention, all tokens can attend to each other
    - sliding_window: Sliding window attention, KV cache is truncated to fixed size (token_window_size)
    - sliding_recompute: Sliding recompute, only keep previous chunk and recompute with current chunk
    - reindex: Keep first chunk as sink, reindex sliding window positions via RoPE rotation
    """

    def __init__(
        self,
        model,
        temperature: float,
        eos_token: Union[int, torch.Tensor],
        chunk_size: int = 25,  # s3tokenizer 1s = 25token
        tts_last_turn_tokens: torch.Tensor = None,
        logits_processors=None,
        logits_warpers=None,
    ):
        """Capture the TTS model handles and initialize per-session streaming state.

        Args:
            model: TTS model exposing `.model` (the transformer), `.emb_code`,
                `.head_code`, `.emb_text`, attention/window configuration and
                special-token ids used below.
            temperature: sampling temperature (applied to logits each step).
            eos_token: audio end-of-stream token id (int or tensor).
            chunk_size: number of audio tokens per yielded chunk.
            tts_last_turn_tokens: optional tokens carried over from the previous turn.
            logits_processors: HF-style processors applied before warpers.
            logits_warpers: HF-style warpers (TopP/TopK etc.).
        """
        self.tts = model
        self.device = model.device
        self.temperature = torch.tensor([temperature], dtype=torch.float, device=self.device)
        # Normalize eos_token to a tensor on the generation device.
        self.eos_token = (
            torch.tensor(eos_token, device=self.device) if isinstance(eos_token, int) else eos_token.to(self.device)
        )

        self.num_vq = model.num_vq
        self.num_audio_tokens = model.num_audio_tokens
        self.recomputed_chunks = model.recomputed_chunks
        self.emb_code = model.emb_code
        self.head_code = model.head_code

        # Attention type and window sizes
        self.attention_type = model.attention_type  # "full_attention", "sliding_window", "sliding_recompute", "reindex"
        self.chunk_window_size = model.chunk_window_size  # chunk-level window for sliding_recompute (default 2)
        self.token_window_size = model.token_window_size  # token-level window for sliding_window/reindex (default 300)

        # RoPE config (for reindex mode)
        self.rope_theta = model.model.config.rope_theta
        self.head_dim = model.model.config.hidden_size // model.model.config.num_attention_heads

        # Logits processors
        self.logits_processors = logits_processors if logits_processors is not None else []
        # Logits warpers (like TopP/TopK), separate from processors
        self.logits_warpers = logits_warpers if logits_warpers is not None else []

        # initialize state
        self.past_key_values = None
        self.text_start_pos = 0  # RoPE position where the next condition chunk starts
        self.idx = -1  # start from -1, become 0 when first called
        self.all_conditions = []
        self.all_generated_tokens = []  # every sampled audio token across all chunks of the turn
        self.tts_last_turn_tokens = tts_last_turn_tokens
        self.spk_emb = None

        audio_bos = [self.tts.audio_bos_token_id]
        audio_bos = torch.Tensor(audio_bos).to(self.tts.emb_text.weight.device, dtype=torch.long)

        # Pre-computed embeddings for the audio-BOS and text-EOS markers,
        # concatenated onto every / the final condition chunk respectively.
        self.audio_bos_embeds = self.tts.emb_text(audio_bos).unsqueeze(0)
        self.text_eos_embed = self.tts.emb_text(
            torch.tensor(
                [self.tts.config.text_eos_token_id],
                device=self.tts.emb_text.weight.device,
                dtype=torch.long,
            )
        ).unsqueeze(0)

        # buffer related, used to fill up chunk_size and yield to outside
        self.chunk_size = chunk_size
        self._token_buffer: List[torch.Tensor] = []

        # Chunk info tracking for sliding_recompute and reindex
        self._chunk_info: List[dict] = []
        self._total_seq_len = 0

        # Reindex mode: track sink (first chunk) length
        self._sink_kv_len = 0

    def _build_recompute_inputs(self, current_condition: torch.Tensor) -> torch.Tensor:
        """Build recompute inputs for sliding_recompute mode.

        Re-prefixes the current condition with the previous chunk's condition
        and its generated audio-token embeddings so the model can be re-run
        from scratch over a two-chunk window.
        """
        if len(self._chunk_info) == 0:
            return current_condition

        prev_chunk = self._chunk_info[-1]
        prev_condition = prev_chunk["condition"]
        prev_audio_tokens = prev_chunk["audio_tokens"]

        recompute_list = [prev_condition]
        if len(prev_audio_tokens) > 0:
            # Embed each stored [1, 1] token and concatenate along the sequence axis.
            prev_audio_embeds = torch.cat([self.emb_code[0](tok) for tok in prev_audio_tokens], dim=1)
            recompute_list.append(prev_audio_embeds)

        recompute_list.append(current_condition)
        return torch.cat(recompute_list, dim=1)

    def _truncate_kv_cache_sliding_window(self):
        """Truncate KV cache for sliding_window mode.

        Keeps only the most recent `token_window_size` positions of every
        layer, rebuilding the cache as a fresh DynamicCache.
        """
        if self.past_key_values is None:
            return

        # Cache length query works for both DynamicCache and legacy tuples.
        if hasattr(self.past_key_values, "get_seq_length"):
            current_kv_len = self.past_key_values.get_seq_length()
        else:
            current_kv_len = self.past_key_values[0][0].shape[2]

        if current_kv_len <= self.token_window_size:
            return

        new_cache = DynamicCache()
        num_layers = (
            len(self.past_key_values.key_cache)
            if hasattr(self.past_key_values, "key_cache")
            else len(self.past_key_values)
        )

        for layer_idx in range(num_layers):
            if hasattr(self.past_key_values, "key_cache"):
                key = self.past_key_values.key_cache[layer_idx][:, :, -self.token_window_size :, :]
                value = self.past_key_values.value_cache[layer_idx][:, :, -self.token_window_size :, :]
            else:
                key = self.past_key_values[layer_idx][0][:, :, -self.token_window_size :, :]
                value = self.past_key_values[layer_idx][1][:, :, -self.token_window_size :, :]
            new_cache.update(key, value, layer_idx)

        self.past_key_values = new_cache

    @staticmethod
    def _apply_rope_rotation(x: torch.Tensor, cos: torch.Tensor, sin: torch.Tensor) -> torch.Tensor:
        """Apply RoPE rotation to tensor."""
        return x * cos + rotate_half(x) * sin

    def _compute_rope_cos_sin(self, positions: torch.Tensor, device: torch.device, dtype: torch.dtype):
        """Compute RoPE cos and sin for given positions.

        Returns tensors of shape [seq_len, head_dim]; frequencies follow the
        standard RoPE layout (angles repeated via cat, not interleaved).
        """
        dim_half = self.head_dim // 2
        freq_seq = torch.arange(0, dim_half, dtype=torch.float32, device=device)
        inv_freq = 1.0 / (self.rope_theta ** (freq_seq / dim_half))

        # positions: [seq_len]
        angles = positions.float().unsqueeze(-1) * inv_freq.unsqueeze(0)  # [seq_len, dim_half]
        angles = torch.cat([angles, angles], dim=-1)  # [seq_len, head_dim]

        cos = angles.cos().to(dtype)
        sin = angles.sin().to(dtype)
        return cos, sin

    def _reindex_kv_cache(self):
        """
        Reindex KV cache for reindex mode:
        1. Keep first chunk as attention sink
        2. Keep last chunk
        3. Discard middle chunks
        4. Reindex the last chunk's key positions to be right after sink via RoPE rotation
        """
        if self.past_key_values is None or len(self._chunk_info) < 2:
            return

        # Get current KV cache length
        if hasattr(self.past_key_values, "get_seq_length"):
            current_kv_len = self.past_key_values.get_seq_length()
        else:
            current_kv_len = self.past_key_values[0][0].shape[2]

        # Calculate sink length (first chunk)
        sink_len = self._chunk_info[0]["condition_len"] + self._chunk_info[0]["audio_token_count"]

        # Last chunk length
        last_chunk = self._chunk_info[-1]
        last_chunk_len = last_chunk["condition_len"] + last_chunk["audio_token_count"]

        keep_len = sink_len + last_chunk_len

        # Get device and dtype
        device = self.past_key_values.key_cache[0].device
        dtype = self.past_key_values.key_cache[0].dtype

        if current_kv_len <= keep_len:
            # Cache already fits in sink + last chunk: nothing to evict,
            # just keep positions continuous with the current cache tail.
            last_chunk_kv_len = current_kv_len - sink_len
            if last_chunk_kv_len <= 0:
                return
            self.text_start_pos = current_kv_len
            return

        # Step 1: Truncate KV cache - keep sink and last chunk
        new_cache = DynamicCache()
        num_layers = len(self.past_key_values.key_cache)

        # The last chunk slides from original_start_pos to directly after the
        # sink; delta is therefore the same for every position in the chunk.
        original_start_pos = current_kv_len - last_chunk_len
        new_start_pos = sink_len
        delta = new_start_pos - original_start_pos  # This is a scalar constant
        delta_positions = torch.full((last_chunk_len,), delta, dtype=torch.float32, device=device)

        # Compute rotation cos/sin
        cos, sin = self._compute_rope_cos_sin(delta_positions, device, dtype)
        cos = cos.unsqueeze(0).unsqueeze(0)  # [1, 1, seq_len, head_dim]
        sin = sin.unsqueeze(0).unsqueeze(0)

        for layer_idx in range(num_layers):
            key_full = self.past_key_values.key_cache[layer_idx]
            value_full = self.past_key_values.value_cache[layer_idx]

            # Extract sink and last chunk
            key_sink = key_full[:, :, :sink_len, :]
            value_sink = value_full[:, :, :sink_len, :]
            key_last = key_full[:, :, -last_chunk_len:, :]
            value_last = value_full[:, :, -last_chunk_len:, :]

            # Apply RoPE rotation to reindex key positions (values carry no
            # positional encoding, so only keys are rotated).
            key_last_reindexed = self._apply_rope_rotation(key_last, cos, sin)

            # Concatenate sink and reindexed last chunk
            key = torch.cat([key_sink, key_last_reindexed], dim=2)
            value = torch.cat([value_sink, value_last], dim=2)

            new_cache.update(key, value, layer_idx)

        self.past_key_values = new_cache

        # Update text_start_pos to reflect new positions
        self.text_start_pos = sink_len + last_chunk_len

    @torch.inference_mode()
    def generate_with_buffer(
        self,
        condition: torch.Tensor,
        text_finished: bool = False,
        max_new_token: int = 500,
    ):
        """input a condition embedding chunk, generate audio token each time,
        and accumulate to buffer, only yield when buffer satisfies chunk_size.

        Args:
            condition: [1, T, hidden] text-condition embeddings for this chunk.
            text_finished: True when this is the final text chunk of the turn;
                a text-EOS embedding is appended and trailing tokens are flushed.
            max_new_token: hard cap on audio tokens sampled for this call.

        Yields:
            (tokens, is_final) where tokens is [1, chunk_size] (or [1, <=chunk_size]
            / [1, 0] on the final flush) and is_final marks the end of the turn.
        """
        self.idx += 1
        self.device = self.tts.device

        # if text finished, first concatenate Text EOS
        if text_finished:
            condition = torch.cat([condition, self.text_eos_embed], dim=1)

        # always concatenate Audio BOS
        condition = torch.cat([condition, self.audio_bos_embeds], dim=1).to(self.device)

        self.all_conditions.append(condition)

        # Initialize current chunk info
        current_chunk_info = {
            "condition_len": condition.shape[1],
            "audio_token_count": 0,
            "condition": condition.clone(),
            "audio_tokens": [],
        }

        # Handle different attention types
        if self.attention_type == "sliding_recompute" and self.idx >= 1:
            # sliding_recompute: discard KV cache, recompute with previous + current chunk
            self.past_key_values = None
            current_condition = self._build_recompute_inputs(condition)
            self.text_start_pos = 0
        elif self.attention_type == "reindex" and self.idx >= 1:
            # reindex: truncate KV cache keeping sink + last chunk, reindex positions via RoPE
            self._reindex_kv_cache()
            current_condition = condition
            # Always update text_start_pos based on actual KV cache length (like reference code)
            if self.past_key_values is not None:
                if hasattr(self.past_key_values, "get_seq_length"):
                    kv_len = self.past_key_values.get_seq_length()
                else:
                    kv_len = self.past_key_values[0][0].shape[2]
                self.text_start_pos = kv_len
        else:
            current_condition = condition

        condition_length = current_condition.shape[1]
        prefill_len = condition_length
        finished = torch.zeros(1, dtype=torch.bool, device=self.device)
        chunk_generated_tokens = []

        for t in range(max_new_token):
            if t == 0:
                # Prefill step: feed the whole condition chunk at once.
                inputs_embeds = current_condition
                pos_ids = torch.arange(
                    self.text_start_pos,
                    self.text_start_pos + condition_length,
                    dtype=torch.long,
                    device=self.device,
                ).unsqueeze(0)
            else:
                # Decode step: feed only the previously sampled token.
                last = self.all_generated_tokens[-1]
                # last: [1,1], directly as code id
                inputs_embeds = self.emb_code[0](last)
                pos_ids = torch.tensor(
                    [self.text_start_pos + prefill_len + t - 1],
                    dtype=torch.long,
                    device=self.device,
                ).unsqueeze(0)

            outputs = self.tts.model(
                position_ids=pos_ids,
                past_key_values=self.past_key_values,
                inputs_embeds=inputs_embeds,
                use_cache=True,
            )
            hidden_states = outputs.last_hidden_state

            # Handle KV cache based on attention type
            if self.attention_type == "sliding_window":
                self.past_key_values = outputs.past_key_values
                self._truncate_kv_cache_sliding_window()
            else:
                self.past_key_values = outputs.past_key_values

            # NOTE(review): P is defined elsewhere in this module; presumably a
            # parametrization/caching context for the code heads — confirm.
            with P.cached():
                logits = torch.empty(
                    hidden_states.size(0),
                    hidden_states.size(1),
                    self.num_audio_tokens,
                    self.num_vq,
                    dtype=torch.float,
                    device=self.device,
                )
                for num_vq_iter in range(self.num_vq):
                    x: torch.Tensor = self.head_code[num_vq_iter](hidden_states)
                    logits[..., num_vq_iter] = x
                    del x

            del hidden_states

            # Keep only the last position's logits.
            logits = logits[:, -1].float()

            # [B, num_audio_tokens, num_vq] -> [B*num_vq, num_audio_tokens]
            logits = logits.permute(0, 2, 1)
            logits = logits.reshape(-1, logits.size(2))

            logits /= self.temperature

            # True only for the very first sampling step of the turn, when
            # there is no history for processors/warpers to condition on.
            audio_bos = len(self.all_generated_tokens) == 0 and t == 0

            if not audio_bos:
                # use generated tokens (current chunk) as input for processor/warper (align with modeling_minicpmo)
                all_generated_tokens = torch.cat(self.all_generated_tokens, dim=1).to(self.device)  # [1, T]
                for processor in self.logits_processors:
                    logits = processor(all_generated_tokens, logits)

                for warper in self.logits_warpers:
                    logits = warper(all_generated_tokens, logits)
                del all_generated_tokens

            # sample next token (only use first codebook, same as generate)
            scores = F.softmax(logits, dim=-1)
            idx_next = torch.multinomial(scores, num_samples=1)  # [(B*num_vq), 1]
            next_id = idx_next.view(-1, self.num_vq)[:, 0:1]  # only take first codebook → [B, 1]
            del scores

            if next_id.eq(
                self.eos_token
            ).any():  # generated audio eos token, means this chunk is finished, no longer generate new tokens
                finished[:] = True
            else:  # eos tokens are never buffered — they carry no audio.
                # convert next_id to correct shape [1, 1], no num_vq dimension
                if next_id.dim() == 0:  # if scalar
                    next_tok = next_id.unsqueeze(0).unsqueeze(0)  # [1, 1]
                elif next_id.dim() == 1:  # if 1D [1]
                    next_tok = next_id.unsqueeze(0)  # [1, 1]
                else:
                    next_tok = next_id

                self.all_generated_tokens.append(next_tok)
                chunk_generated_tokens.append(next_tok)

                # Update chunk info for sliding_recompute
                current_chunk_info["audio_tokens"].append(next_tok.clone())
                current_chunk_info["audio_token_count"] += 1

                self._token_buffer.append(next_tok)

            if len(self._token_buffer) == 0:
                # case 1: if last text chunk, yield an empty final chunk
                if text_finished:
                    yield torch.empty(1, 0, dtype=torch.long, device=self.device), True
                    break
                # case 2: if not last text chunk, break directly
                else:
                    break
            else:  # buffer has something
                # case 1: if buffer is larger/equal to chunk_size, yield out
                if len(self._token_buffer) >= self.chunk_size:
                    batch = torch.cat(self._token_buffer[: self.chunk_size], dim=1)  # [1, chunk_size]
                    yield batch, False  # → [1, chunk_size]
                    # discard yielded part
                    self._token_buffer = self._token_buffer[self.chunk_size :]

                # case 2: if buffer is smaller than chunk_size
                else:
                    # if generation finished, and is the last text chunk, yield all remaining tokens, then break
                    if finished.all():
                        if text_finished:
                            batch = torch.cat(self._token_buffer, dim=1)  # [1, <=chunk_size]
                            yield batch, True
                            self._token_buffer = []
                            break
                        else:
                            # not the last text chunk, need to wait for next text chunk to fill up buffer, then this call ends
                            break
                    else:  # generation of this audio chunk is not finished, continue generating
                        continue

        # Save current chunk info for sliding_recompute and reindex
        self._chunk_info.append(current_chunk_info)
        self._total_seq_len += condition.shape[1] + len(chunk_generated_tokens)

        # Update text_start_pos based on attention type
        if self.attention_type == "sliding_recompute":
            # sliding_recompute: will be reset at next chunk start, update normally here
            self.text_start_pos += prefill_len + len(chunk_generated_tokens)
        elif self.attention_type == "reindex":
            # reindex: position based on actual KV cache length (positions have been reindexed to be continuous)
            if self.past_key_values is not None:
                if hasattr(self.past_key_values, "get_seq_length"):
                    self.text_start_pos = self.past_key_values.get_seq_length()
                else:
                    self.text_start_pos = self.past_key_values[0][0].shape[2]
            else:
                self.text_start_pos += condition.shape[1] + len(chunk_generated_tokens)
        else:
            self.text_start_pos += condition.shape[1] + len(chunk_generated_tokens)
        # note: remaining tokens in buffer will be kept, and accumulated next time
|
| 892 |
+
|
| 893 |
+
|
| 894 |
+
# sliding window
|
| 895 |
+
@dataclass
class StreamingWindowConfig:
    """Token watermarks for the streaming text sliding window.

    Mirrors the basic-window semantics of DuplexWindowConfig: sliding is
    presumably triggered when the cache exceeds the high watermark and trimmed
    back to the low watermark — confirm against the call sites.
    """

    text_window_high_tokens: int = 8000  # high watermark (trigger)
    text_window_low_tokens: int = 6000  # low watermark (target after sliding)
|
| 899 |
+
|
| 900 |
+
|
| 901 |
+
@dataclass
class DuplexWindowConfig:
    """Duplex sliding-window configuration.

    Sliding window modes:
    - "off": disable the sliding window
    - "basic": basic sliding window (triggered by cache length)
    - "context": sliding window with context (triggered by unit count; generated
      text is preserved into a "previous" context region)
    """

    # sliding window mode
    sliding_window_mode: str = "off"  # "off" / "basic" / "context"

    # basic sliding window parameters
    basic_window_high_tokens: int = 8000  # high watermark: trigger sliding window when exceeded
    basic_window_low_tokens: int = 6000  # low watermark: keep to this value after sliding window

    # context sliding window parameters
    context_previous_max_tokens: int = 500  # maximum tokens kept in the "previous" context region
    context_max_units: int = 24  # maximum unit number (trigger sliding window when exceeded)

    # verification mode (for comparison test)
    verify_mode: bool = False  # whether to enable verification log
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
def as_dynamic_cache(past_key_values):
    """Return *past_key_values* as a DynamicCache.

    Legacy tuple-format caches are converted via
    ``DynamicCache.from_legacy_cache``; anything else — including an existing
    DynamicCache or ``None`` — is passed through unchanged.
    """
    if isinstance(past_key_values, tuple):
        return DynamicCache.from_legacy_cache(past_key_values)
    return past_key_values
|
| 935 |
+
|
| 936 |
+
|
| 937 |
+
def get_kv_cache_length(cache) -> int:
    """Get the sequence length of a KV cache.

    Args:
        cache: DynamicCache, legacy tuple-based cache, or None.

    Returns:
        The number of tokens held in the cache; 0 for None, empty, or
        unrecognized cache objects.
    """
    if cache is None:
        return 0

    if isinstance(cache, DynamicCache):
        layer_keys = cache.key_cache
        if not layer_keys:
            return 0
        first_key = layer_keys[0]
        return first_key.shape[-2] if first_key.numel() else 0

    if isinstance(cache, tuple):
        # Legacy layout: cache[layer][0] is the key tensor [B, H, S, D].
        return cache[0][0].shape[2]

    return 0
|
| 958 |
+
|
| 959 |
+
|
| 960 |
+
def get_rotary_cos_sin(
    head_dim: int,
    positions: torch.Tensor,
    device: torch.device,
    dtype: torch.dtype,
    rope_theta: float = 10000.0,
    inv_freq_cache: Optional[Dict[Tuple, torch.Tensor]] = None,
) -> Tuple[torch.Tensor, torch.Tensor]:
    """Compute RoPE cos and sin components for given positions.

    Args:
        head_dim: Dimension of each attention head
        positions: Position indices tensor
        device: Target device
        dtype: Target dtype
        rope_theta: RoPE base frequency (default 10000.0)
        inv_freq_cache: Optional cache dict for inverse frequencies

    Returns:
        Tuple of (cos, sin) tensors with shape [1, 1, seq_len, head_dim]
    """
    # BUG FIX: the cache key previously omitted rope_theta, so two calls with
    # the same head_dim/device but different theta shared one inv_freq table
    # and silently returned wrong frequencies. theta is now part of the key.
    cache_key = (head_dim, device, rope_theta)

    inv_freq = inv_freq_cache.get(cache_key) if inv_freq_cache is not None else None
    # Revalidate a cached entry in case the stored tensor was moved or resized.
    if inv_freq is None or inv_freq.device != device or inv_freq.shape[0] != head_dim // 2:
        exponent = torch.arange(0, head_dim, 2, device=device, dtype=torch.float32) / head_dim
        inv_freq = 1.0 / (rope_theta**exponent)
        if inv_freq_cache is not None:
            inv_freq_cache[cache_key] = inv_freq

    positions = positions.to(device=device, dtype=torch.float32)
    angles = torch.einsum("i,j->ij", positions, inv_freq)  # [seq_len, head_dim // 2]
    cos = torch.cos(angles)
    sin = torch.sin(angles)

    # Use cat instead of repeat_interleave, consistent with model's original RotaryEmbedding
    # Original: emb = torch.cat((freqs, freqs), dim=-1) -> [f0, f1, ..., f_{d/2}, f0, f1, ..., f_{d/2}]
    cos_full = torch.cat([cos, cos], dim=-1).to(dtype=dtype)
    sin_full = torch.cat([sin, sin], dim=-1).to(dtype=dtype)
    cos_full = cos_full.unsqueeze(0).unsqueeze(0)  # [1, 1, seq_len, head_dim]
    sin_full = sin_full.unsqueeze(0).unsqueeze(0)
    return cos_full, sin_full
|
| 1002 |
+
|
| 1003 |
+
|
| 1004 |
+
def realign_rotary_suffix(
    suffix_keys: torch.Tensor,
    old_positions: torch.Tensor,
    new_positions: torch.Tensor,
    rope_theta: float = 10000.0,
    inv_freq_cache: Optional[Dict[Tuple, torch.Tensor]] = None,
) -> torch.Tensor:
    """Realign RoPE position encoding after cache eviction.

    When tokens are dropped from the middle of a cache, the suffix tokens
    need their RoPE embeddings recalculated with new position indices.

    Args:
        suffix_keys: Key tensor to realign, shape [batch, heads, seq_len, head_dim]
        old_positions: Original position indices
        new_positions: New position indices after eviction
        rope_theta: RoPE base frequency
        inv_freq_cache: Optional cache dict for inverse frequencies

    Returns:
        Realigned key tensor with same shape as input
    """
    if suffix_keys.numel() == 0:
        return suffix_keys

    head_dim = suffix_keys.shape[-1]
    device, dtype = suffix_keys.device, suffix_keys.dtype

    # Undo the rotation that was applied at the old positions
    # (inverse of k' = k*cos + rotate_half(k)*sin, using rotate_half∘rotate_half = -id).
    cos_old, sin_old = get_rotary_cos_sin(head_dim, old_positions, device, dtype, rope_theta, inv_freq_cache)
    unrotated = cos_old * suffix_keys - sin_old * rotate_half(suffix_keys)

    # ...then rotate the bare keys to their new positions.
    cos_new, sin_new = get_rotary_cos_sin(head_dim, new_positions, device, dtype, rope_theta, inv_freq_cache)
    return cos_new * unrotated + sin_new * rotate_half(unrotated)
|
| 1044 |
+
|
| 1045 |
+
|
| 1046 |
+
def drop_tokens_from_cache(
    cache: Optional[DynamicCache | Tuple],
    length: int,
    preserve: int,
    position_offset: int,
    rope_theta: float = 10000.0,
    inv_freq_cache: Optional[Dict[Tuple, torch.Tensor]] = None,
) -> Tuple[Optional[DynamicCache], int, bool]:
    """Drop tokens from a KV cache while preserving system prompt.

    Removes tokens in the range [preserve, preserve + length) from the cache,
    realigning RoPE embeddings for the suffix so the remaining tokens read as
    a contiguous sequence.

    Args:
        cache: DynamicCache or tuple-based cache (will be converted to DynamicCache)
        length: Number of tokens to drop
        preserve: Number of tokens to preserve at the start (system prompt)
        position_offset: Current position offset for RoPE calculation
        rope_theta: RoPE base frequency
        inv_freq_cache: Optional cache dict for inverse frequencies

    Returns:
        Tuple of (cache, new_position_offset, success)
        Note: Tuple cache will be converted to DynamicCache. Modification is in-place.
    """
    # Nothing to do for a missing cache or a non-positive drop request.
    if cache is None or length <= 0:
        return cache, position_offset, False

    cache = as_dynamic_cache(cache)

    total_len = get_kv_cache_length(cache)
    if total_len <= 0:
        return cache, position_offset, False

    preserve = min(preserve, total_len)
    available = total_len - preserve

    # Refuse (rather than partially drop) when fewer tokens are droppable
    # than requested; caller is told via the False success flag.
    if available < length:
        logger.warning(
            "Cannot drop %d tokens: only %d available (total=%d, preserve=%d)",
            length,
            available,
            total_len,
            preserve,
        )
        return cache, position_offset, False

    suffix_len = total_len - preserve - length
    # note: after RoPE reindex, the position of cache has been compressed (from preserve start)
    # so here should not add position_offset, but use the actual layout of current cache
    suffix_offset = preserve + length  # suffix current position in cache
    prefix_offset = preserve  # suffix new position (follow preserve)

    # Prepare position tensors for RoPE realignment
    old_positions = None
    new_positions = None
    if suffix_len > 0:
        device = cache.key_cache[0].device
        old_positions = torch.arange(
            suffix_offset,
            suffix_offset + suffix_len,
            device=device,
            dtype=torch.long,
        )
        new_positions = torch.arange(
            prefix_offset,
            prefix_offset + suffix_len,
            device=device,
            dtype=torch.long,
        )

    keep_len = total_len - length

    # Process each layer (in-place modification)
    for layer_idx in range(len(cache.key_cache)):
        key_tensor = cache.key_cache[layer_idx]
        value_tensor = cache.value_cache[layer_idx]

        # Skip layers whose cache has not been populated yet.
        if not key_tensor.numel():
            continue

        # Preserve prefix (system prompt)
        prefix_keys = key_tensor[:, :, :preserve, :]
        prefix_values = value_tensor[:, :, :preserve, :]

        if suffix_len > 0:
            # Keep and realign suffix
            suffix_keys = key_tensor[:, :, preserve + length :, :]
            suffix_values = value_tensor[:, :, preserve + length :, :]

            # Only keys carry RoPE, so values are concatenated untouched.
            if old_positions is not None and new_positions is not None and suffix_keys.numel():
                suffix_keys = realign_rotary_suffix(
                    suffix_keys,
                    old_positions,
                    new_positions,
                    rope_theta,
                    inv_freq_cache,
                )

            cache.key_cache[layer_idx] = torch.cat([prefix_keys, suffix_keys], dim=-2).contiguous()
            cache.value_cache[layer_idx] = torch.cat([prefix_values, suffix_values], dim=-2).contiguous()
        else:
            cache.key_cache[layer_idx] = prefix_keys.contiguous()
            cache.value_cache[layer_idx] = prefix_values.contiguous()

    # Keep the cache's own bookkeeping consistent with the new length.
    # NOTE(review): `_seen_tokens` is a private transformers attribute;
    # this may break on future transformers versions — confirm on upgrade.
    cache.crop(keep_len)
    cache._seen_tokens = max(keep_len, 0)

    new_offset = position_offset + length
    logger.debug("Dropped %d tokens from cache, new length=%d", length, keep_len)

    return cache, new_offset, True
|
| 1158 |
+
|
| 1159 |
+
|
| 1160 |
+
# stream decoder
|
| 1161 |
+
def top_k_top_p_filtering(logits, top_k=0, top_p=0.0, filter_value=-float("inf")):
    """Filter a logits distribution with top-k and/or nucleus (top-p) sampling.

    Args:
        logits: tensor of shape (batch, vocab_size).
        top_k: keep only the ``top_k`` highest logits (0 disables).
        top_p: keep the smallest prefix of the sorted distribution whose
            cumulative probability exceeds ``top_p`` (0.0 disables).
        filter_value: value written into removed positions.

    Returns:
        A new tensor (input is cloned, not modified) with filtered entries
        set to ``filter_value``.
    """
    logits = logits.clone()

    # Top-k filtering: everything strictly below the k-th largest logit goes.
    if top_k > 0:
        top_k = min(top_k, logits.size(-1))
        indices_to_remove = logits < torch.topk(logits, top_k)[0][..., -1, None]
        logits[indices_to_remove] = filter_value

    # Top-p (nucleus) filtering.
    if top_p > 0.0:
        sorted_logits, sorted_indices = torch.sort(logits, descending=True)
        cumulative_probs = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1)

        sorted_indices_to_remove = cumulative_probs > top_p
        # Shift right so the first token that crosses top_p is kept.
        sorted_indices_to_remove[..., 1:] = sorted_indices_to_remove[..., :-1].clone()
        sorted_indices_to_remove[..., 0] = 0

        # Scatter the removal mask back to vocab order.  Bug fix: the old
        # code did ``logits[0, sorted_indices[mask]]`` which only handled
        # batch size 1 (and mixed indices across rows for larger batches);
        # scatter is correct for any batch size and identical for batch 1.
        indices_to_remove = sorted_indices_to_remove.scatter(-1, sorted_indices, sorted_indices_to_remove)
        logits[indices_to_remove] = filter_value

    return logits
class StreamDecoder:
|
| 1188 |
+
def __init__(self, llm, tokenizer, special_token_ids=None, forbidden_token_ids=None):
|
| 1189 |
+
self.m = llm
|
| 1190 |
+
self.tokenizer = tokenizer
|
| 1191 |
+
self.listen_id = self.tokenizer.eos_token_id
|
| 1192 |
+
|
| 1193 |
+
self.chunk_eos_id = self.tokenizer.convert_tokens_to_ids("<|chunk_eos|>")
|
| 1194 |
+
self.chunk_tts_eos_id = self.tokenizer.convert_tokens_to_ids("<|chunk_tts_eos|>")
|
| 1195 |
+
self.turn_eos_id = self.tokenizer.convert_tokens_to_ids("<|turn_eos|>")
|
| 1196 |
+
self.speak_id = self.tokenizer.convert_tokens_to_ids("<|speak|>")
|
| 1197 |
+
|
| 1198 |
+
self.special_token_ids = special_token_ids if special_token_ids is not None else []
|
| 1199 |
+
|
| 1200 |
+
# cache special tokens (used for context sliding window filtering)
|
| 1201 |
+
self._all_special_ids = set()
|
| 1202 |
+
self._all_special_tokens_text = set()
|
| 1203 |
+
if self.tokenizer:
|
| 1204 |
+
if hasattr(self.tokenizer, "all_special_ids"):
|
| 1205 |
+
self._all_special_ids = set(self.tokenizer.all_special_ids)
|
| 1206 |
+
if hasattr(self.tokenizer, "all_special_tokens"):
|
| 1207 |
+
self._all_special_tokens_text = set(self.tokenizer.all_special_tokens)
|
| 1208 |
+
|
| 1209 |
+
custom_special_tokens = [
|
| 1210 |
+
"<unit>",
|
| 1211 |
+
"</unit>",
|
| 1212 |
+
"<image>",
|
| 1213 |
+
"</image>",
|
| 1214 |
+
"<slice>",
|
| 1215 |
+
"</slice>",
|
| 1216 |
+
"<|listen|>",
|
| 1217 |
+
"<|speak|>",
|
| 1218 |
+
"<|tts_bos|>",
|
| 1219 |
+
"<|tts_eos|>",
|
| 1220 |
+
"<|audio_start|>",
|
| 1221 |
+
"<|audio_end|>",
|
| 1222 |
+
"<|chunk_eos|>",
|
| 1223 |
+
"<|chunk_tts_eos|>",
|
| 1224 |
+
"<|turn_eos|>",
|
| 1225 |
+
"<|audio_start|>",
|
| 1226 |
+
"<|audio_end|>",
|
| 1227 |
+
]
|
| 1228 |
+
self._all_special_tokens_text.update(custom_special_tokens)
|
| 1229 |
+
for token in custom_special_tokens:
|
| 1230 |
+
token_id = self.tokenizer.convert_tokens_to_ids(token)
|
| 1231 |
+
if token_id is not None and token_id != self.tokenizer.unk_token_id:
|
| 1232 |
+
self._all_special_ids.add(token_id)
|
| 1233 |
+
|
| 1234 |
+
if forbidden_token_ids is None:
|
| 1235 |
+
self.forbidden_token_ids = []
|
| 1236 |
+
elif isinstance(forbidden_token_ids, int):
|
| 1237 |
+
self.forbidden_token_ids = [self.forbidden_token_ids]
|
| 1238 |
+
else:
|
| 1239 |
+
self.forbidden_token_ids = forbidden_token_ids
|
| 1240 |
+
self.forbidden_token_ids.append(self.chunk_eos_id)
|
| 1241 |
+
|
| 1242 |
+
assert isinstance(self.forbidden_token_ids, list)
|
| 1243 |
+
|
| 1244 |
+
self.cache = None
|
| 1245 |
+
self.context = ""
|
| 1246 |
+
self.generated_tokens = [] # track generated tokens
|
| 1247 |
+
self.generated_special_tokens = [] # track generated special tokens
|
| 1248 |
+
self.reset()
|
| 1249 |
+
self.embeds = None
|
| 1250 |
+
self.system_embeds = None
|
| 1251 |
+
|
| 1252 |
+
# sliding window related states
|
| 1253 |
+
self._unit_history: List[Dict[str, Any]] = []
|
| 1254 |
+
self._next_unit_id: int = 0
|
| 1255 |
+
self._pending_unit_id: Optional[int] = None
|
| 1256 |
+
self._pending_unit_start_cache_len: int = 0
|
| 1257 |
+
self._system_preserve_length: int = 0
|
| 1258 |
+
self._position_offset: int = 0
|
| 1259 |
+
self._window_config = DuplexWindowConfig()
|
| 1260 |
+
self._window_enabled: bool = True
|
| 1261 |
+
self._rope_inv_freq_cache: Dict[Tuple, torch.Tensor] = {}
|
| 1262 |
+
|
| 1263 |
+
# context preserving sliding window states
|
| 1264 |
+
# initial cache layout: [prefix] [suffix] [units...]
|
| 1265 |
+
# after first sliding window: [prefix] [previous_marker + content] [suffix] [units...]
|
| 1266 |
+
# fixed dynamic sliding region fixed
|
| 1267 |
+
self._preserve_prefix_length: int = 0 # original prefix length (fixed)
|
| 1268 |
+
self._previous_content_length: int = 0 # previous content length (dynamic, including marker)
|
| 1269 |
+
self._suffix_token_ids: List[int] = [] # suffix token ids (e.g. <|im_end|>)
|
| 1270 |
+
|
| 1271 |
+
# previous marker (added dynamically after first sliding window)
|
| 1272 |
+
self._previous_marker: str = "\n\nprevious: " # fixed prefix marker
|
| 1273 |
+
self._previous_marker_token_ids: List[int] = [] # marker token ids (initialized)
|
| 1274 |
+
self._has_previous: bool = False # whether previous marker has been added
|
| 1275 |
+
|
| 1276 |
+
# previous content
|
| 1277 |
+
self._previous_text: str = "" # accumulated generated text (without marker)
|
| 1278 |
+
self._previous_token_ids: List[int] = [] # previous full token ids (including marker)
|
| 1279 |
+
|
| 1280 |
+
# validation statistics
|
| 1281 |
+
self._sliding_event_count: int = 0 # sliding window trigger count
|
| 1282 |
+
self._total_dropped_tokens: int = 0 # total dropped token count
|
| 1283 |
+
self._total_dropped_units: int = 0 # total dropped unit count
|
| 1284 |
+
|
| 1285 |
+
    def sliding_embeds(self):
        """Placeholder for embedding-level sliding (not implemented).

        Sketch from the original notes: snapshot system_embeds, extend it
        with the embeds accumulated after ~5 s, then reset and re-feed.
        """
        # tmp = system_embeds
        # tmp += embeds accumulated after ~5s
        # reset
        # feed
        pass
def reset(self):
|
| 1293 |
+
self.context = ""
|
| 1294 |
+
self.cache = None
|
| 1295 |
+
self.generated_tokens = []
|
| 1296 |
+
self.generated_special_tokens = []
|
| 1297 |
+
self.embeds = None
|
| 1298 |
+
self.system_embeds = None
|
| 1299 |
+
|
| 1300 |
+
# sliding window state reset
|
| 1301 |
+
old_unit_count = len(self._unit_history) if hasattr(self, "_unit_history") else 0
|
| 1302 |
+
self._unit_history = []
|
| 1303 |
+
self._next_unit_id = 0
|
| 1304 |
+
self._pending_unit_id = None
|
| 1305 |
+
self._pending_unit_start_cache_len = 0
|
| 1306 |
+
self._system_preserve_length = 0
|
| 1307 |
+
self._position_offset = 0
|
| 1308 |
+
self._rope_inv_freq_cache = {}
|
| 1309 |
+
|
| 1310 |
+
# context preserving sliding window state reset
|
| 1311 |
+
self._preserve_prefix_length = 0
|
| 1312 |
+
self._previous_content_length = 0
|
| 1313 |
+
self._suffix_token_ids = []
|
| 1314 |
+
self._previous_marker = "\n\nprevious: "
|
| 1315 |
+
self._previous_marker_token_ids = []
|
| 1316 |
+
self._has_previous = False
|
| 1317 |
+
self._previous_text = ""
|
| 1318 |
+
self._previous_token_ids = []
|
| 1319 |
+
|
| 1320 |
+
# validation statistics
|
| 1321 |
+
self._sliding_event_count = 0 # sliding window trigger count
|
| 1322 |
+
self._total_dropped_tokens = 0 # total dropped token count
|
| 1323 |
+
self._total_dropped_units = 0 # total dropped unit count
|
| 1324 |
+
|
| 1325 |
+
def get_cache_length(self) -> int:
|
| 1326 |
+
if self.cache is None:
|
| 1327 |
+
return 0
|
| 1328 |
+
if isinstance(self.cache, DynamicCache):
|
| 1329 |
+
if len(self.cache.key_cache) > 0 and self.cache.key_cache[0].numel() > 0:
|
| 1330 |
+
return self.cache.key_cache[0].shape[2]
|
| 1331 |
+
return 0
|
| 1332 |
+
# Tuple cache format
|
| 1333 |
+
return self.cache[0][0].shape[2]
|
| 1334 |
+
|
| 1335 |
+
def get_total_generated_tokens(self) -> int:
|
| 1336 |
+
return sum(len(u.get("generated_tokens", [])) for u in self._unit_history)
|
| 1337 |
+
|
| 1338 |
+
def register_unit_start(self) -> int:
|
| 1339 |
+
self._pending_unit_id = self._next_unit_id
|
| 1340 |
+
self._pending_unit_start_cache_len = self.get_cache_length()
|
| 1341 |
+
return self._pending_unit_id
|
| 1342 |
+
|
| 1343 |
+
def register_unit_end(
|
| 1344 |
+
self,
|
| 1345 |
+
input_type: str,
|
| 1346 |
+
generated_tokens: Optional[List[int]] = None,
|
| 1347 |
+
is_listen: bool = False,
|
| 1348 |
+
generated_text: Optional[str] = None,
|
| 1349 |
+
):
|
| 1350 |
+
"""Call when unit ends, record unit information
|
| 1351 |
+
|
| 1352 |
+
Should be called after feeding </unit> token
|
| 1353 |
+
|
| 1354 |
+
Args:
|
| 1355 |
+
input_type: "audio" / "video" / "omni" / "system"
|
| 1356 |
+
generated_tokens: tokens generated by the unit (token ids)
|
| 1357 |
+
is_listen: whether the unit is in listen state
|
| 1358 |
+
generated_text: text generated by the unit (used for context preserving mode)
|
| 1359 |
+
"""
|
| 1360 |
+
if self._pending_unit_id is None:
|
| 1361 |
+
logger.warning("register_unit_end called without register_unit_start")
|
| 1362 |
+
return
|
| 1363 |
+
|
| 1364 |
+
# calculate the length of the unit
|
| 1365 |
+
current_cache_len = self.get_cache_length()
|
| 1366 |
+
unit_len = current_cache_len - self._pending_unit_start_cache_len
|
| 1367 |
+
|
| 1368 |
+
if unit_len > 0:
|
| 1369 |
+
entry = {
|
| 1370 |
+
"unit_id": self._pending_unit_id,
|
| 1371 |
+
"length": unit_len,
|
| 1372 |
+
"type": input_type,
|
| 1373 |
+
"generated_tokens": generated_tokens or [],
|
| 1374 |
+
"generated_text": generated_text or "", # used for context preserving mode
|
| 1375 |
+
"is_listen": is_listen,
|
| 1376 |
+
}
|
| 1377 |
+
self._unit_history.append(entry)
|
| 1378 |
+
|
| 1379 |
+
self._pending_unit_id = None
|
| 1380 |
+
self._pending_unit_start_cache_len = 0
|
| 1381 |
+
self._next_unit_id += 1
|
| 1382 |
+
|
| 1383 |
+
def register_system_prompt(self):
|
| 1384 |
+
"""Call after system prompt prefill, record preserve length"""
|
| 1385 |
+
self._system_preserve_length = self.get_cache_length()
|
| 1386 |
+
|
| 1387 |
+
# sliding window core methods
|
| 1388 |
+
|
| 1389 |
+
def _get_rope_theta(self) -> float:
|
| 1390 |
+
"""get model rope_theta configuration"""
|
| 1391 |
+
return float(getattr(self.m.config, "rope_theta", 10000.0))
|
| 1392 |
+
|
| 1393 |
+
def _drop_tokens_from_cache(self, length: int) -> bool:
|
| 1394 |
+
"""remove specified number of tokens from cache (protect system prompt)
|
| 1395 |
+
|
| 1396 |
+
remove tokens in the range [preserve, preserve + length)
|
| 1397 |
+
supports DynamicCache and tuple cache formats
|
| 1398 |
+
"""
|
| 1399 |
+
if self.cache is None or length <= 0:
|
| 1400 |
+
return False
|
| 1401 |
+
|
| 1402 |
+
cache_type = "DynamicCache" if isinstance(self.cache, DynamicCache) else "TupleCache"
|
| 1403 |
+
cache_len_before = self.get_cache_length()
|
| 1404 |
+
offset_before = self._position_offset
|
| 1405 |
+
|
| 1406 |
+
new_cache, new_offset, success = drop_tokens_from_cache(
|
| 1407 |
+
cache=self.cache,
|
| 1408 |
+
length=length,
|
| 1409 |
+
preserve=self._system_preserve_length,
|
| 1410 |
+
position_offset=self._position_offset,
|
| 1411 |
+
rope_theta=self._get_rope_theta(),
|
| 1412 |
+
inv_freq_cache=self._rope_inv_freq_cache,
|
| 1413 |
+
)
|
| 1414 |
+
if success:
|
| 1415 |
+
self.cache = new_cache # For DynamicCache this is the same object (in-place)
|
| 1416 |
+
self._position_offset = new_offset
|
| 1417 |
+
|
| 1418 |
+
return success
|
| 1419 |
+
|
| 1420 |
+
def _drop_unit(self, unit_id: int) -> bool:
|
| 1421 |
+
"""remove specified unit"""
|
| 1422 |
+
entries = [u for u in self._unit_history if u["unit_id"] == unit_id]
|
| 1423 |
+
if not entries:
|
| 1424 |
+
return False
|
| 1425 |
+
|
| 1426 |
+
total_len = sum(e["length"] for e in entries)
|
| 1427 |
+
if total_len <= 0:
|
| 1428 |
+
for e in entries:
|
| 1429 |
+
self._unit_history.remove(e)
|
| 1430 |
+
return False
|
| 1431 |
+
|
| 1432 |
+
if not self._drop_tokens_from_cache(total_len):
|
| 1433 |
+
return False
|
| 1434 |
+
|
| 1435 |
+
for e in entries:
|
| 1436 |
+
self._unit_history.remove(e)
|
| 1437 |
+
|
| 1438 |
+
return True
|
| 1439 |
+
|
| 1440 |
+
def _drop_next_unit(self) -> bool:
|
| 1441 |
+
"""remove the earliest non-system unit"""
|
| 1442 |
+
for entry in self._unit_history:
|
| 1443 |
+
unit_id = entry.get("unit_id")
|
| 1444 |
+
if unit_id is None:
|
| 1445 |
+
continue
|
| 1446 |
+
# skip system type
|
| 1447 |
+
if entry.get("type") == "system":
|
| 1448 |
+
continue
|
| 1449 |
+
if self._drop_unit(unit_id):
|
| 1450 |
+
return True
|
| 1451 |
+
return False
|
| 1452 |
+
|
| 1453 |
+
def enforce_window(self) -> bool:
|
| 1454 |
+
"""enforce sliding window strategy (same as single-mode, only look at cache length)
|
| 1455 |
+
|
| 1456 |
+
when cache length exceeds high water line, loop to remove the earliest unit,
|
| 1457 |
+
until cache length drops below the low water line.
|
| 1458 |
+
"""
|
| 1459 |
+
if not self._window_enabled:
|
| 1460 |
+
return False
|
| 1461 |
+
|
| 1462 |
+
cfg = self._window_config
|
| 1463 |
+
cache_len_before = self.get_cache_length()
|
| 1464 |
+
|
| 1465 |
+
if cache_len_before <= cfg.basic_window_high_tokens:
|
| 1466 |
+
return False # not above high water line, no trigger
|
| 1467 |
+
|
| 1468 |
+
dropped_count = 0
|
| 1469 |
+
cache_len = cache_len_before
|
| 1470 |
+
while cache_len > cfg.basic_window_low_tokens:
|
| 1471 |
+
if not self._drop_next_unit():
|
| 1472 |
+
break
|
| 1473 |
+
dropped_count += 1
|
| 1474 |
+
cache_len = self.get_cache_length()
|
| 1475 |
+
|
| 1476 |
+
if dropped_count > 0:
|
| 1477 |
+
# update statistics counters
|
| 1478 |
+
self._sliding_event_count += 1
|
| 1479 |
+
self._total_dropped_tokens += cache_len_before - cache_len
|
| 1480 |
+
self._total_dropped_units += dropped_count
|
| 1481 |
+
|
| 1482 |
+
# consistency check
|
| 1483 |
+
expected = self._system_preserve_length + sum(u["length"] for u in self._unit_history)
|
| 1484 |
+
is_consistent = expected == cache_len
|
| 1485 |
+
if not is_consistent:
|
| 1486 |
+
logger.error(
|
| 1487 |
+
"CONSISTENCY ERROR! preserve=%d + sum(units)=%d != cache=%d, offset=%d",
|
| 1488 |
+
self._system_preserve_length,
|
| 1489 |
+
sum(u["length"] for u in self._unit_history),
|
| 1490 |
+
cache_len,
|
| 1491 |
+
self._position_offset,
|
| 1492 |
+
)
|
| 1493 |
+
|
| 1494 |
+
return dropped_count > 0
|
| 1495 |
+
|
| 1496 |
+
# context preserving sliding window methods
|
| 1497 |
+
|
| 1498 |
+
def register_system_prompt_with_context(
|
| 1499 |
+
self,
|
| 1500 |
+
suffix_token_ids: Optional[List[int]] = None,
|
| 1501 |
+
context_previous_marker: str = "\n\nprevious: ",
|
| 1502 |
+
):
|
| 1503 |
+
"""register system prompt (with context preserving mode)
|
| 1504 |
+
|
| 1505 |
+
initial cache layout: [prefix] [suffix] [units...]
|
| 1506 |
+
after first sliding window: [prefix] [context_previous_marker + content] [suffix] [units...]
|
| 1507 |
+
|
| 1508 |
+
when calling this method, cache should only have prefix (without previous marker)
|
| 1509 |
+
suffix will be fed in later
|
| 1510 |
+
|
| 1511 |
+
Args:
|
| 1512 |
+
suffix_token_ids: suffix token ids (e.g. id of <|im_end|>)
|
| 1513 |
+
context_previous_marker: previous marker prefix, e.g. "\\n\\nprevious: "
|
| 1514 |
+
"""
|
| 1515 |
+
# prefix = current cache content (fixed, without previous marker)
|
| 1516 |
+
self._preserve_prefix_length = self.get_cache_length()
|
| 1517 |
+
self._previous_content_length = 0 # initially no previous content
|
| 1518 |
+
self._suffix_token_ids = suffix_token_ids or []
|
| 1519 |
+
# total preserve length = prefix + suffix (initially no previous)
|
| 1520 |
+
self._system_preserve_length = self._preserve_prefix_length + len(self._suffix_token_ids)
|
| 1521 |
+
|
| 1522 |
+
# initialize previous related states
|
| 1523 |
+
self._previous_marker = context_previous_marker
|
| 1524 |
+
self._previous_marker_token_ids = (
|
| 1525 |
+
self.tokenizer.encode(context_previous_marker, add_special_tokens=False) if self.tokenizer else []
|
| 1526 |
+
)
|
| 1527 |
+
self._has_previous = False
|
| 1528 |
+
self._previous_text = ""
|
| 1529 |
+
self._previous_token_ids = []
|
| 1530 |
+
|
| 1531 |
+
def _extract_generated_text(self, units: List[Dict[str, Any]]) -> Tuple[str, List[int]]:
|
| 1532 |
+
"""extract generated text and token ids from units
|
| 1533 |
+
|
| 1534 |
+
Args:
|
| 1535 |
+
units: list of units to extract
|
| 1536 |
+
|
| 1537 |
+
Returns:
|
| 1538 |
+
(text, token_ids): concatenated text and token ids (filtered out special tokens)
|
| 1539 |
+
"""
|
| 1540 |
+
text_parts = []
|
| 1541 |
+
token_ids = []
|
| 1542 |
+
|
| 1543 |
+
for u in units:
|
| 1544 |
+
# only keep generated content of non-listen units
|
| 1545 |
+
if u.get("is_listen", False):
|
| 1546 |
+
continue
|
| 1547 |
+
gen_text = u.get("generated_text", "")
|
| 1548 |
+
gen_tokens = u.get("generated_tokens", [])
|
| 1549 |
+
|
| 1550 |
+
# filter out special tokens from text
|
| 1551 |
+
if gen_text:
|
| 1552 |
+
clean_text = gen_text
|
| 1553 |
+
for st in self._all_special_tokens_text:
|
| 1554 |
+
clean_text = clean_text.replace(st, "")
|
| 1555 |
+
if clean_text.strip():
|
| 1556 |
+
text_parts.append(clean_text)
|
| 1557 |
+
|
| 1558 |
+
# filter out special tokens
|
| 1559 |
+
if gen_tokens:
|
| 1560 |
+
filtered_tokens = [t for t in gen_tokens if t not in self._all_special_ids]
|
| 1561 |
+
token_ids.extend(filtered_tokens)
|
| 1562 |
+
|
| 1563 |
+
return "".join(text_parts), token_ids
|
| 1564 |
+
|
| 1565 |
+
    def _rebuild_cache_with_previous(
        self,
        new_previous_tokens: List[int],
        units_to_keep_len: Optional[int] = None,
    ) -> bool:
        """Rebuild the cache, inserting new previous content between prefix and suffix.

        Cache layout change:
        [prefix] [old_prev] [suffix] [old_units] -> [prefix] [new_prev] [suffix] [remaining_units]

        The prefix KV is reused as-is; previous+suffix tokens are re-embedded
        and pushed through the model to produce fresh KV; kept units have
        their rotary positions realigned to their new offsets.

        Args:
            new_previous_tokens: token ids of the new previous block
                (marker included).
            units_to_keep_len: token length of units to keep, counted from
                the cache end; None means derive it from unit_history.

        Returns:
            True when the cache was rebuilt, False when there is no cache.
        """
        if self.cache is None:
            return False

        old_previous_len = self._previous_content_length
        new_previous_len = len(new_previous_tokens)
        suffix_len = len(self._suffix_token_ids)
        total_cache_len = self.get_cache_length()

        # length of the unit region to keep (default: everything recorded)
        if units_to_keep_len is None:
            units_to_keep_len = sum(u["length"] for u in self._unit_history)

        # Fast path: previous is empty both before and after, so the
        # prefix+suffix KV is already correct; only the kept units need a
        # RoPE reindex (their absolute positions shift left by the number
        # of dropped tokens).
        if new_previous_len == 0 and old_previous_len == 0:
            # cache layout here: [prefix] [suffix] [units...]
            preserve_len = self._preserve_prefix_length + suffix_len

            if units_to_keep_len > 0:
                # keep [0:preserve_len] plus the last units_to_keep_len tokens
                prefix_suffix_cache = self._slice_cache(0, preserve_len)
                units_cache = self._slice_cache(total_cache_len - units_to_keep_len, None)

                # tokens removed from the middle of the cache
                dropped_tokens = total_cache_len - preserve_len - units_to_keep_len

                # Reindex unit RoPE from (preserve_len + dropped_tokens) down
                # to preserve_len.  No position_offset here: positions are
                # counted from 0 in the compacted cache.
                if dropped_tokens > 0:
                    old_start = preserve_len + dropped_tokens
                    new_start = preserve_len
                    units_cache = self._reindex_rope_for_cache(units_cache, old_start, new_start, units_to_keep_len)

                self.cache = self._concat_caches(prefix_suffix_cache, units_cache)
            else:
                self.cache = self._slice_cache(0, preserve_len)

            return True

        # 1. fixed prefix KV, reused verbatim
        prefix_end = self._preserve_prefix_length
        prefix_cache = self._slice_cache(0, prefix_end)

        # 2. unit KV to keep (taken from the cache tail)
        units_start_in_old_cache = total_cache_len - units_to_keep_len
        units_cache = None
        if units_to_keep_len > 0:
            units_cache = self._slice_cache(units_start_in_old_cache, None)

        # 3. fresh KV for [new previous + suffix] via a model forward pass
        prev_suffix_tokens = new_previous_tokens + self._suffix_token_ids
        prev_suffix_len = len(prev_suffix_tokens)

        new_prefix_prev_suffix_cache = prefix_cache
        if prev_suffix_len > 0:
            # embed the tokens to feed (project helper; shape assumed
            # (seq, dim) or (1, seq, dim) — see unsqueeze below)
            prev_suffix_embeds = self.embed_tokens(prev_suffix_tokens)
            # positions start right after the prefix, shifted by the running
            # RoPE offset accumulated by earlier basic-window drops
            start_pos = self._preserve_prefix_length + self._position_offset

            with torch.no_grad():
                device = prev_suffix_embeds.device
                position_ids = torch.arange(
                    start_pos,
                    start_pos + prev_suffix_len,
                    device=device,
                ).unsqueeze(0)

                # run with the prefix cache as past_key_values so the new KV
                # is appended after the prefix
                outputs = self.m(
                    inputs_embeds=(
                        prev_suffix_embeds.unsqueeze(0) if prev_suffix_embeds.dim() == 2 else prev_suffix_embeds
                    ),
                    position_ids=position_ids,
                    past_key_values=prefix_cache,
                    use_cache=True,
                    return_dict=True,
                )
                # resulting cache holds prefix + new_previous + suffix
                new_prefix_prev_suffix_cache = outputs.past_key_values

        # 4. realign unit RoPE to the new layout
        #    [prefix] [new_prev] [suffix] [units]
        #    (no position_offset: positions are compacted from 0)
        new_system_total = prefix_end + new_previous_len + suffix_len
        if units_cache is not None and self._get_cache_len(units_cache) > 0:
            old_start = units_start_in_old_cache
            new_start = new_system_total

            if old_start != new_start:
                units_cache = self._reindex_rope_for_cache(units_cache, old_start, new_start, units_to_keep_len)

        # 5. stitch the final cache together
        if units_cache is not None and self._get_cache_len(units_cache) > 0:
            self.cache = self._concat_caches(new_prefix_prev_suffix_cache, units_cache)
        else:
            self.cache = new_prefix_prev_suffix_cache

        # 6. update bookkeeping lengths
        self._previous_content_length = new_previous_len
        # protected region = prefix + previous + suffix
        self._system_preserve_length = prefix_end + new_previous_len + suffix_len

        # NOTE(review): these previews are never used — they look like
        # leftovers from a removed debug log; confirm before deleting.
        prev_text_preview = self._previous_text[:50] + "..." if len(self._previous_text) > 50 else self._previous_text
        suffix_preview = self.tokenizer.decode(self._suffix_token_ids) if self._suffix_token_ids else ""
        return True
def _slice_cache(self, start: int, end: Optional[int], clone: bool = True):
|
| 1697 |
+
"""slice cache
|
| 1698 |
+
|
| 1699 |
+
Args:
|
| 1700 |
+
start: start position
|
| 1701 |
+
end: end position (None means to end)
|
| 1702 |
+
clone: whether to clone (default True, to prevent shared memory issues)
|
| 1703 |
+
"""
|
| 1704 |
+
if self.cache is None:
|
| 1705 |
+
return None
|
| 1706 |
+
if isinstance(self.cache, DynamicCache):
|
| 1707 |
+
# DynamicCache
|
| 1708 |
+
new_key_cache = [
|
| 1709 |
+
k[:, :, start:end, :].clone() if clone else k[:, :, start:end, :] for k in self.cache.key_cache
|
| 1710 |
+
]
|
| 1711 |
+
new_value_cache = [
|
| 1712 |
+
v[:, :, start:end, :].clone() if clone else v[:, :, start:end, :] for v in self.cache.value_cache
|
| 1713 |
+
]
|
| 1714 |
+
new_cache = DynamicCache()
|
| 1715 |
+
new_cache.key_cache = new_key_cache
|
| 1716 |
+
new_cache.value_cache = new_value_cache
|
| 1717 |
+
return new_cache
|
| 1718 |
+
else:
|
| 1719 |
+
# Tuple cache
|
| 1720 |
+
if clone:
|
| 1721 |
+
return tuple(
|
| 1722 |
+
(layer[0][:, :, start:end, :].clone(), layer[1][:, :, start:end, :].clone()) for layer in self.cache
|
| 1723 |
+
)
|
| 1724 |
+
else:
|
| 1725 |
+
return tuple((layer[0][:, :, start:end, :], layer[1][:, :, start:end, :]) for layer in self.cache)
|
| 1726 |
+
|
| 1727 |
+
@staticmethod
|
| 1728 |
+
def _get_cache_len(cache) -> int:
|
| 1729 |
+
if cache is None:
|
| 1730 |
+
return 0
|
| 1731 |
+
if isinstance(cache, DynamicCache):
|
| 1732 |
+
if len(cache.key_cache) > 0 and cache.key_cache[0].numel() > 0:
|
| 1733 |
+
return cache.key_cache[0].shape[2]
|
| 1734 |
+
return 0
|
| 1735 |
+
|
| 1736 |
+
if cache and cache[0] and cache[0][0] is not None:
|
| 1737 |
+
return cache[0][0].shape[2]
|
| 1738 |
+
return 0
|
| 1739 |
+
|
| 1740 |
+
@staticmethod
|
| 1741 |
+
def _concat_caches(cache1, cache2):
|
| 1742 |
+
if cache1 is None:
|
| 1743 |
+
return cache2
|
| 1744 |
+
if cache2 is None:
|
| 1745 |
+
return cache1
|
| 1746 |
+
|
| 1747 |
+
if isinstance(cache1, DynamicCache):
|
| 1748 |
+
new_cache = DynamicCache()
|
| 1749 |
+
new_cache.key_cache = [torch.cat([k1, k2], dim=2) for k1, k2 in zip(cache1.key_cache, cache2.key_cache)]
|
| 1750 |
+
new_cache.value_cache = [
|
| 1751 |
+
torch.cat([v1, v2], dim=2) for v1, v2 in zip(cache1.value_cache, cache2.value_cache)
|
| 1752 |
+
]
|
| 1753 |
+
return new_cache
|
| 1754 |
+
else:
|
| 1755 |
+
return tuple(
|
| 1756 |
+
(
|
| 1757 |
+
torch.cat([layer1[0], layer2[0]], dim=2),
|
| 1758 |
+
torch.cat([layer1[1], layer2[1]], dim=2),
|
| 1759 |
+
)
|
| 1760 |
+
for layer1, layer2 in zip(cache1, cache2)
|
| 1761 |
+
)
|
| 1762 |
+
|
| 1763 |
+
def _reindex_rope_for_cache(self, cache, old_start: int, new_start: int, length: int):
|
| 1764 |
+
"""reindex RoPE position for cache"""
|
| 1765 |
+
if cache is None or length <= 0:
|
| 1766 |
+
return cache
|
| 1767 |
+
|
| 1768 |
+
if isinstance(cache, DynamicCache):
|
| 1769 |
+
device = cache.key_cache[0].device if cache.key_cache else None
|
| 1770 |
+
else:
|
| 1771 |
+
device = cache[0][0].device if cache and cache[0] else None
|
| 1772 |
+
|
| 1773 |
+
if device is None:
|
| 1774 |
+
return cache
|
| 1775 |
+
|
| 1776 |
+
old_positions = torch.arange(old_start, old_start + length, device=device, dtype=torch.long)
|
| 1777 |
+
new_positions = torch.arange(new_start, new_start + length, device=device, dtype=torch.long)
|
| 1778 |
+
|
| 1779 |
+
rope_theta = self._get_rope_theta()
|
| 1780 |
+
|
| 1781 |
+
if isinstance(cache, DynamicCache):
|
| 1782 |
+
new_key_cache = []
|
| 1783 |
+
for k in cache.key_cache:
|
| 1784 |
+
new_k = realign_rotary_suffix(k, old_positions, new_positions, rope_theta, self._rope_inv_freq_cache)
|
| 1785 |
+
new_key_cache.append(new_k)
|
| 1786 |
+
cache.key_cache = new_key_cache
|
| 1787 |
+
return cache
|
| 1788 |
+
else:
|
| 1789 |
+
new_cache = []
|
| 1790 |
+
for layer in cache:
|
| 1791 |
+
new_k = realign_rotary_suffix(
|
| 1792 |
+
layer[0], old_positions, new_positions, rope_theta, self._rope_inv_freq_cache
|
| 1793 |
+
)
|
| 1794 |
+
new_cache.append((new_k, layer[1]))
|
| 1795 |
+
return tuple(new_cache)
|
| 1796 |
+
|
| 1797 |
+
def _update_previous(
|
| 1798 |
+
self,
|
| 1799 |
+
new_text: str,
|
| 1800 |
+
new_tokens: List[int],
|
| 1801 |
+
max_tokens: int,
|
| 1802 |
+
) -> None:
|
| 1803 |
+
"""update previous context (also update cache)
|
| 1804 |
+
|
| 1805 |
+
when first sliding window, dynamically add marker + text, subsequent sliding window append text
|
| 1806 |
+
when content exceeds max_tokens, truncate content (keep marker)
|
| 1807 |
+
rebuild cache to maintain consistency
|
| 1808 |
+
|
| 1809 |
+
Args:
|
| 1810 |
+
new_text: new text
|
| 1811 |
+
new_tokens: new token ids
|
| 1812 |
+
max_tokens: previous content maximum token count (without marker)
|
| 1813 |
+
"""
|
| 1814 |
+
marker_len = len(self._previous_marker_token_ids)
|
| 1815 |
+
tokens_to_drop = 0
|
| 1816 |
+
|
| 1817 |
+
# if no new content, do not add marker, but still need to rebuild cache
|
| 1818 |
+
if not new_tokens and not new_text:
|
| 1819 |
+
# still need to rebuild cache (because a unit was deleted)
|
| 1820 |
+
self._rebuild_cache_with_previous(self._previous_token_ids)
|
| 1821 |
+
return
|
| 1822 |
+
|
| 1823 |
+
if not self._has_previous:
|
| 1824 |
+
# when first has actual content: add marker + text
|
| 1825 |
+
self._previous_text = new_text
|
| 1826 |
+
self._previous_token_ids = self._previous_marker_token_ids.copy() + new_tokens
|
| 1827 |
+
self._has_previous = True
|
| 1828 |
+
else:
|
| 1829 |
+
# subsequent sliding window: append text to previous
|
| 1830 |
+
self._previous_text += new_text
|
| 1831 |
+
self._previous_token_ids.extend(new_tokens)
|
| 1832 |
+
|
| 1833 |
+
# calculate token count of content (without marker)
|
| 1834 |
+
content_token_count = len(self._previous_token_ids) - marker_len
|
| 1835 |
+
|
| 1836 |
+
# check if need to truncate content (keep marker)
|
| 1837 |
+
if content_token_count > max_tokens:
|
| 1838 |
+
# truncate left content, keep marker + latest max_tokens content
|
| 1839 |
+
tokens_to_drop = content_token_count - max_tokens
|
| 1840 |
+
old_text = self._previous_text
|
| 1841 |
+
# keep marker + truncated content
|
| 1842 |
+
content_tokens = self._previous_token_ids[marker_len + tokens_to_drop :]
|
| 1843 |
+
self._previous_token_ids = self._previous_marker_token_ids.copy() + content_tokens
|
| 1844 |
+
# redecode text (only decode content part)
|
| 1845 |
+
try:
|
| 1846 |
+
self._previous_text = self.tokenizer.decode(
|
| 1847 |
+
content_tokens,
|
| 1848 |
+
skip_special_tokens=True,
|
| 1849 |
+
)
|
| 1850 |
+
except Exception as e:
|
| 1851 |
+
logger.warning("_update_previous: decode failed: %s", e)
|
| 1852 |
+
|
| 1853 |
+
# rebuild cache
|
| 1854 |
+
self._rebuild_cache_with_previous(self._previous_token_ids)
|
| 1855 |
+
|
| 1856 |
+
def _drop_unit_with_context(
|
| 1857 |
+
self,
|
| 1858 |
+
unit_id: int,
|
| 1859 |
+
max_previous_tokens: int,
|
| 1860 |
+
) -> Tuple[bool, str, List[int]]:
|
| 1861 |
+
"""remove specified unit and return its generated content (for context preserving)
|
| 1862 |
+
|
| 1863 |
+
process:
|
| 1864 |
+
1. extract generated content of unit
|
| 1865 |
+
2. remove unit from cache (without prefix+previous)
|
| 1866 |
+
3. append generated content to previous
|
| 1867 |
+
4. rebuild cache (in _update_previous)
|
| 1868 |
+
|
| 1869 |
+
Args:
|
| 1870 |
+
unit_id: unit ID to remove
|
| 1871 |
+
max_previous_tokens: previous maximum token count
|
| 1872 |
+
|
| 1873 |
+
Returns:
|
| 1874 |
+
(success, extracted_text, extracted_tokens): whether successful, extracted text and tokens
|
| 1875 |
+
"""
|
| 1876 |
+
entries = [u for u in self._unit_history if u["unit_id"] == unit_id]
|
| 1877 |
+
if not entries:
|
| 1878 |
+
return False, "", []
|
| 1879 |
+
|
| 1880 |
+
# extract generated content
|
| 1881 |
+
extracted_text, extracted_tokens = self._extract_generated_text(entries)
|
| 1882 |
+
|
| 1883 |
+
# calculate total length
|
| 1884 |
+
total_len = sum(e["length"] for e in entries)
|
| 1885 |
+
if total_len <= 0:
|
| 1886 |
+
for e in entries:
|
| 1887 |
+
self._unit_history.remove(e)
|
| 1888 |
+
return False, extracted_text, extracted_tokens
|
| 1889 |
+
|
| 1890 |
+
cache_before = self.get_cache_length()
|
| 1891 |
+
|
| 1892 |
+
# remove from unit_history (record for later processing)
|
| 1893 |
+
for e in entries:
|
| 1894 |
+
self._unit_history.remove(e)
|
| 1895 |
+
|
| 1896 |
+
# note: here no longer call _drop_tokens_from_cache
|
| 1897 |
+
# because _update_previous will rebuild the entire cache
|
| 1898 |
+
|
| 1899 |
+
# update previous (also rebuild cache)
|
| 1900 |
+
self._update_previous(extracted_text, extracted_tokens, max_previous_tokens)
|
| 1901 |
+
|
| 1902 |
+
return True, extracted_text, extracted_tokens
|
| 1903 |
+
|
| 1904 |
+
def _drop_next_unit_with_context(self, max_previous_tokens: int) -> bool:
|
| 1905 |
+
"""remove the earliest non-system unit (with context preserving)"""
|
| 1906 |
+
for entry in self._unit_history:
|
| 1907 |
+
unit_id = entry.get("unit_id")
|
| 1908 |
+
if unit_id is None:
|
| 1909 |
+
continue
|
| 1910 |
+
if entry.get("type") == "system":
|
| 1911 |
+
continue
|
| 1912 |
+
success, _, _ = self._drop_unit_with_context(unit_id, max_previous_tokens)
|
| 1913 |
+
if success:
|
| 1914 |
+
return True
|
| 1915 |
+
return False
|
| 1916 |
+
|
| 1917 |
+
def enforce_window_with_context(self) -> bool:
    """Context-preserving sliding-window enforcement.

    When the unit count exceeds ``context_max_units``, the earliest units
    are dropped and their generated content is folded into the accumulated
    previous context.  The cache is rebuilt inside ``_update_previous``.

    Note: the original version computed an ``expected`` length under a
    "consistency check" comment but never compared or used it; that dead
    computation was removed (the real check lives in ``_verify_consistency``).

    Returns:
        True if at least one unit was dropped, else False.
    """
    if not self._window_enabled:
        return False

    cfg = self._window_config

    # non-context modes fall back to the basic sliding window
    if cfg.sliding_window_mode != "context":
        return self.enforce_window()

    cache_len_before = self.get_cache_length()
    units_before = len(self._unit_history)

    # context-preserving mode only checks the unit count; an over-long
    # previous context is left-truncated inside _update_previous
    if units_before <= cfg.context_max_units:
        return False

    # drop units until the count is back within the limit
    dropped_count = 0
    while len(self._unit_history) > cfg.context_max_units:
        if not self._drop_next_unit_with_context(cfg.context_previous_max_tokens):
            break
        dropped_count += 1

    cache_len_after = self.get_cache_length()

    if dropped_count > 0:
        # update statistics counters
        self._sliding_event_count += 1
        self._total_dropped_tokens += cache_len_before - cache_len_after
        self._total_dropped_units += dropped_count

    return dropped_count > 0
|
| 1964 |
+
|
| 1965 |
+
def get_previous_context(self) -> Tuple[str, List[int]]:
    """Return the currently accumulated previous context.

    Returns:
        (previous_text, previous_token_ids); the token list is a copy, so
        callers may mutate it without affecting internal state.
    """
    tokens_snapshot = list(self._previous_token_ids)
    return self._previous_text, tokens_snapshot
|
| 1972 |
+
|
| 1973 |
+
def get_window_stats(self) -> Dict[str, Any]:
    """Collect a snapshot of sliding-window state for logging/debugging."""
    lengths = [entry["length"] for entry in self._unit_history]
    cfg = self._window_config
    stats = {
        "cache_length": self.get_cache_length(),
        "unit_count": len(self._unit_history),
        "unit_lengths": lengths,
        "unit_total_length": sum(lengths),
        "system_preserve_length": self._system_preserve_length,
        "position_offset": self._position_offset,
        "window_enabled": self._window_enabled,
        "total_generated_tokens": self.get_total_generated_tokens(),
        "pending_unit_id": self._pending_unit_id,
        "next_unit_id": self._next_unit_id,
    }
    stats["config"] = {
        "sliding_window_mode": cfg.sliding_window_mode,
        "basic_window_high_tokens": cfg.basic_window_high_tokens,
        "basic_window_low_tokens": cfg.basic_window_low_tokens,
        "context_previous_max_tokens": cfg.context_previous_max_tokens,
        "context_max_units": cfg.context_max_units,
    }
    # context-preserving bookkeeping
    stats.update(
        preserve_prefix_length=self._preserve_prefix_length,
        previous_content_length=self._previous_content_length,
        suffix_token_count=len(self._suffix_token_ids),
        previous_text_length=len(self._previous_text),
        previous_token_count=len(self._previous_token_ids),
        has_system_template=self._system_prompt_template is not None,
    )
    return stats
|
| 2002 |
+
|
| 2003 |
+
def _verify_consistency(self) -> bool:
|
| 2004 |
+
"""verify unit history and cache length consistency"""
|
| 2005 |
+
expected = self._system_preserve_length + sum(u["length"] for u in self._unit_history)
|
| 2006 |
+
actual = self.get_cache_length()
|
| 2007 |
+
return expected == actual
|
| 2008 |
+
|
| 2009 |
+
def print_verification_summary(self) -> Dict[str, Any]:
    """Build a verification summary (for comparing off/basic/context modes).

    Returns:
        Dict with the key verification data: mode, final cache/unit state,
        sliding-window counters, and the concatenated generated text.
    """
    cfg = self._window_config

    # gather generated text/tokens from every non-listen unit
    speak_units = [u for u in self._unit_history if not u.get("is_listen", False)]
    text_pieces = [u.get("generated_text", "") for u in speak_units if u.get("generated_text", "")]
    token_ids = []
    for u in speak_units:
        token_ids.extend(u.get("generated_tokens", []) or [])

    return {
        "mode": cfg.sliding_window_mode,
        "final_cache_length": self.get_cache_length(),
        "final_unit_count": len(self._unit_history),
        "sliding_event_count": self._sliding_event_count,
        "total_dropped_tokens": self._total_dropped_tokens,
        "total_dropped_units": self._total_dropped_units,
        "total_generated_tokens": len(token_ids),
        "generated_text": "".join(text_pieces),
        "previous_text": self._previous_text,
        "previous_token_count": len(self._previous_token_ids),
        "position_offset": self._position_offset,
        "system_preserve_length": self._system_preserve_length,
    }
|
| 2047 |
+
|
| 2048 |
+
def set_window_config(self, config: DuplexWindowConfig) -> None:
    """Set the sliding window configuration.

    The new config is read by enforce_window_with_context on its next call.
    """
    self._window_config = config
|
| 2051 |
+
|
| 2052 |
+
def set_window_enabled(self, enabled: bool) -> None:
    """Enable or disable the sliding window.

    Args:
        enabled: new state; checked by enforce_window_with_context.
    """
    # The previous version captured old_enabled but never used it; removed.
    self._window_enabled = enabled
|
| 2056 |
+
|
| 2057 |
+
def get_context(self):
    """Return the decoded text context accumulated during decode()."""
    return self.context
|
| 2059 |
+
|
| 2060 |
+
def embed_token(self, tid):
    """Embed a single token id (or an existing id tensor) via the LM embedding table."""
    # promote a bare int to a 1-element tensor on the model's device
    token_tensor = torch.tensor([tid], device=self.m.device) if isinstance(tid, int) else tid
    return self.m.model.embed_tokens(token_tensor)
|
| 2064 |
+
|
| 2065 |
+
def embed_tokens(self, token_ids: List[int]) -> torch.Tensor:
    """Batch-embed a list of token ids.

    Args:
        token_ids: token ids to embed

    Returns:
        Embedding tensor of shape [L, H]; an empty [0, H] tensor for an empty list.
    """
    if token_ids:
        id_tensor = torch.tensor(token_ids, device=self.m.device)
        return self.m.model.embed_tokens(id_tensor)
    return torch.empty(0, self.m.config.hidden_size, device=self.m.device)
|
| 2078 |
+
|
| 2079 |
+
@torch.no_grad()
def feed(self, embeds: torch.Tensor, return_logits: bool = False):
    """Feed a new embedding sequence through the model, extending the KV cache.

    Args:
        embeds: [L, H] embedding sequence appended in one forward pass
        return_logits: if True, also return last-position logits

    Returns:
        None, or (logits [1, vocab], last hidden states) when return_logits is True.
    """
    seq_len = embeds.size(0)
    start = self.get_cache_length()
    # absolute positions continue from the current cache length
    position_ids = torch.arange(start, start + seq_len, device=embeds.device).unsqueeze(0)  # [1, L]

    out = self.m(
        inputs_embeds=embeds.unsqueeze(0),  # [1, L, H]
        position_ids=position_ids,
        past_key_values=self.cache,
        return_dict=True,
        output_hidden_states=True,
    )
    self.cache = out.past_key_values

    if not return_logits:
        return None
    last_hidden = out.hidden_states[-1]
    logits = self.m.lm_head(last_hidden)[:, -1]  # [1, vocab]
    return logits, last_hidden
|
| 2104 |
+
|
| 2105 |
+
@torch.no_grad()
def decode(
    self,
    logits,
    mode: Literal["sampling", "greedy"] = "sampling",
    temperature=0.7,
    top_k=20,
    top_p=0.8,
    listen_top_k=None,
    listen_prob_scale=1.0,
    text_repetition_penalty=1.05,
    text_repetition_window_size=512,
):
    """Decode the next token id from last-position logits.

    Args:
        logits: [1, vocab] logits for the next position
        mode: "sampling" or "greedy"
        temperature: sampling temperature
        top_k: top-k filtering threshold
        top_p: nucleus filtering threshold
        listen_top_k: if listen_id ranks within this top-k, emit it directly
        listen_prob_scale: weight applied to the listen_id logit (<1 lowers, >1 raises)
        text_repetition_penalty: >1.0 discourages repetition, <1.0 encourages it
        text_repetition_window_size: how many recent tokens the penalty considers

    Sampling strategy:
        1. sample once from the raw logits (with temperature for sampling mode)
        2. if chunk_eos was sampled, return it (keep the model's stop decision)
        3. otherwise mask forbidden ids (-inf) and continue with text tokens
        4. apply repetition penalty, listen handling, then top-k/top-p sampling

    Returns:
        Tensor holding the chosen token id (shape [1]).
    """
    logits = logits.clone()

    # 0. independently check chunk_eos before any filtering
    eos_id = self.chunk_eos_id

    with torch.no_grad():
        if mode == "greedy":
            sampled_token = torch.argmax(logits[0]).item()
        else:
            original_probs = F.softmax(logits[0], dim=-1)
            sampled_token = torch.multinomial(original_probs, num_samples=1).item()

    # chunk_eos sampled from the raw distribution: stop here
    # (the previous version also decoded the token to an unused string; removed)
    if sampled_token == eos_id:
        return torch.tensor([eos_id], device=logits.device)

    # not eos: ban forbidden ids (including chunk_eos) for the rest of this step
    if self.forbidden_token_ids:
        logits[:, self.forbidden_token_ids] = float("-inf")

    # 1. repetition penalty over the recent generation window.
    # Sign-aware form (CTRL / HF RepetitionPenaltyLogitsProcessor): positive
    # logits are divided, negative logits are multiplied, so a penalty > 1
    # always LOWERS the score of a repeated token.  The previous version
    # divided unconditionally, which rewarded repeats with negative logits
    # (and its else-branch `*= 1.0/p` was identical to `/= p`, i.e. dead).
    if text_repetition_penalty != 1.0 and self.generated_tokens:
        recent_tokens = set(self.generated_tokens[-text_repetition_window_size:])
        vocab_size = logits.size(-1)
        for token_id in recent_tokens:
            if token_id < vocab_size:  # guard against out-of-vocab ids
                score = logits[0, token_id]
                if score < 0:
                    logits[0, token_id] = score * text_repetition_penalty
                else:
                    logits[0, token_id] = score / text_repetition_penalty

    if listen_prob_scale != 1.0:
        # NOTE(review): scaling the raw logit only lowers the listen
        # probability when the logit is positive; for a negative logit the
        # effect inverts. Kept as-is for behavioral compatibility — confirm
        # whether log-space scaling was intended.
        logits[0, self.listen_id] *= listen_prob_scale

    # rank of listen_id = number of tokens with strictly higher logits
    listen_rank = (logits[0] > logits[0, self.listen_id]).sum().item()

    if listen_top_k is not None and listen_rank < listen_top_k:
        # listen_id is inside the top-k: emit it directly
        next_token_id = torch.tensor([self.listen_id], device=logits.device)
        next_token_str = self.tokenizer.decode(next_token_id)

        # keep the visible context readable: a listen turn becomes a space
        if next_token_str == "<|listen|>":
            self.context += " "
        else:
            self.context += next_token_str

        return next_token_id

    if mode == "greedy":
        next_token_id = torch.argmax(logits, dim=-1)
    elif mode == "sampling":
        logits = logits / temperature
        logits = top_k_top_p_filtering(logits, top_k=top_k, top_p=top_p)
        probs = F.softmax(logits, dim=-1)
        next_token_id = torch.multinomial(probs, num_samples=1).squeeze(1)
    else:
        raise ValueError(f"Unsupported decode mode: {mode}")

    # bookkeeping: track plain text tokens and special tokens separately
    if next_token_id.item() not in self.special_token_ids:
        self.generated_tokens.append(next_token_id.item())
    else:
        self.generated_special_tokens.append(next_token_id.item())

    return next_token_id
|
| 2210 |
+
|
| 2211 |
+
|
| 2212 |
+
def _download_url_to_tempfile(url: str, suffix: str = "", timeout: int = 60) -> str:
    """
    Download a URL into a temporary file and return its path.

    Args:
        url: HTTP/HTTPS URL to download
        suffix: file suffix for the temp file (e.g., ".jpg", ".wav", ".mp4")
        timeout: download timeout in seconds

    Returns:
        Path of the downloaded temporary file (caller is responsible for deleting it)
    """
    import tempfile

    import requests

    resp = requests.get(url, timeout=timeout)
    resp.raise_for_status()

    tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    with tmp as handle:
        handle.write(resp.content)
    return tmp.name
|
| 2234 |
+
|
| 2235 |
+
|
| 2236 |
+
def _is_url(path: str) -> bool:
|
| 2237 |
+
return path.startswith(("http://", "https://"))
|
| 2238 |
+
|
| 2239 |
+
|
| 2240 |
+
def normalize_content_item(item) -> Union[str, Any, List[Any]]:
    """Normalize a structured content item to native format.

    Supports:
        - Native format: str, PIL.Image, np.ndarray (pass through)
        - OpenAI structured format:
            - {"type": "text", "text": "..."} -> str
            - {"type": "image_url", "image_url": {"url": "..."}} -> PIL.Image
            - {"type": "audio_url", "audio_url": {"url": "..."}} -> np.ndarray
            - {"type": "video_url", "video_url": {"url": "...", ...}} -> List[Image, ndarray, ...]

    URL formats supported:
        - Local file path: "/path/to/file.jpg"
        - HTTP/HTTPS URL: "https://example.com/image.jpg"

    Args:
        item: Content item to normalize

    Returns:
        Normalized item. For video_url, returns a tuple ("__video_contents__", list)
        that will be flattened by normalize_content().

    Raises:
        ValueError: If content type is unknown or unsupported
    """
    import os

    import numpy as np
    from PIL import Image

    # native items pass straight through
    if isinstance(item, (str, Image.Image, np.ndarray)):
        return item

    if isinstance(item, dict):
        item_type = item.get("type")

        if item_type == "text":
            return item.get("text", "")

        elif item_type == "image_url":
            image_url_obj = item.get("image_url", {})
            url = image_url_obj.get("url", "") if isinstance(image_url_obj, dict) else image_url_obj

            if _is_url(url):
                temp_path = _download_url_to_tempfile(url, suffix=".jpg", timeout=30)
                try:
                    img = Image.open(temp_path)
                    # PIL opens lazily: force the pixel data into memory BEFORE
                    # the backing temp file is deleted; the previous version
                    # unlinked first, which could break later img accesses.
                    img.load()
                finally:
                    # finally: don't leak the temp file if opening fails
                    os.unlink(temp_path)
                return img
            else:
                return Image.open(url)

        elif item_type == "audio_url":
            import librosa

            audio_url_obj = item.get("audio_url", {})
            url = audio_url_obj.get("url", "") if isinstance(audio_url_obj, dict) else audio_url_obj

            if _is_url(url):
                temp_path = _download_url_to_tempfile(url, suffix=".wav", timeout=60)
                try:
                    # librosa reads the file eagerly, so deleting afterwards is safe
                    audio_np, _ = librosa.load(temp_path, sr=16000, mono=True)
                finally:
                    os.unlink(temp_path)
                return audio_np
            else:
                audio_np, _ = librosa.load(url, sr=16000, mono=True)
                return audio_np

        elif item_type == "video_url":
            # Video processing - returns a LIST of items (frames + audio segments).
            # Note: unlike image_url/audio_url which return single items,
            # video_url returns a list that will be flattened into the content.
            from minicpmo.utils import get_video_frame_audio_segments

            video_url_obj = item.get("video_url", {})
            if isinstance(video_url_obj, dict):
                video_url = video_url_obj.get("url", "")
                # optional parameters on the video_url object (OpenAI style)
                stack_frames = video_url_obj.get("stack_frames", 1)
                use_ffmpeg = video_url_obj.get("use_ffmpeg", False)
                use_audio = video_url_obj.get("use_audio", True)
            else:
                video_url = video_url_obj
                stack_frames = 1
                use_ffmpeg = False
                use_audio = True

            # HTTP/HTTPS URL: download to a temp file first
            temp_video_path = None
            if _is_url(video_url):
                temp_video_path = _download_url_to_tempfile(video_url, suffix=".mp4", timeout=120)
                video_path = temp_video_path
            else:
                video_path = video_url

            try:
                # extract frames and audio segments
                video_frames, audio_segments, stacked_frames = get_video_frame_audio_segments(
                    video_path,
                    stack_frames=stack_frames,
                    use_ffmpeg=use_ffmpeg,
                    use_audio=use_audio,
                )
            finally:
                # clean up the downloaded temp file even if extraction raised
                if temp_video_path is not None:
                    os.unlink(temp_video_path)

            # interleave frames with their audio / stacked frames when present
            omni_contents = []
            for i in range(len(video_frames)):
                omni_contents.append(video_frames[i])
                if use_audio and audio_segments is not None:
                    omni_contents.append(audio_segments[i])
                if stacked_frames is not None and i < len(stacked_frames) and stacked_frames[i] is not None:
                    omni_contents.append(stacked_frames[i])

            # special marker so normalize_content() flattens the list
            return "__video_contents__", omni_contents
        else:
            raise ValueError(f"Unknown content type: {item_type}")

    raise ValueError(f"Cannot normalize content item of type: {type(item)}")
|
| 2364 |
+
|
| 2365 |
+
|
| 2366 |
+
def normalize_content(content) -> list:
    """Normalize message content to a list of native items.

    Input formats:
        - str: "hello" -> ["hello"]
        - list of native items: [str, Image, np.ndarray] -> pass through with normalization
        - list of structured items: [{"type": "text", ...}] -> normalize each
        - video_url items are automatically expanded (flattened) into omni contents
        - mixed lists of the above work too

    Args:
        content: Message content in any supported format

    Returns:
        List of native items (str, PIL.Image, np.ndarray)
    """
    import numpy as np
    from PIL import Image

    def _is_video_bundle(value):
        # video items come back as ("__video_contents__", [...]) and are flattened
        return (
            isinstance(value, tuple)
            and len(value) == 2
            and value[0] == "__video_contents__"
        )

    if isinstance(content, str):
        return [content]

    if isinstance(content, list):
        flattened = []
        for element in content:
            normalized = normalize_content_item(element)
            if _is_video_bundle(normalized):
                flattened.extend(normalized[1])
            else:
                flattened.append(normalized)
        return flattened

    # single non-list native item
    if isinstance(content, (Image.Image, np.ndarray)):
        return [content]

    normalized = normalize_content_item(content)
    return normalized[1] if _is_video_bundle(normalized) else [normalized]
|
vocab.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|